Antonio Scandurra 2023-10-23 16:23:38 +02:00
parent cc445f7cef
commit efbf0c828d
8 changed files with 336 additions and 289 deletions

Cargo.lock

@@ -5964,7 +5964,7 @@ dependencies = [
 "client2",
 "clock",
 "collections",
-"copilot",
+"copilot2",
 "ctor",
 "db2",
 "env_logger 0.9.3",


@@ -4,7 +4,7 @@ use derive_more::{Deref, DerefMut};
 use parking_lot::{RwLock, RwLockUpgradableReadGuard};
 use slotmap::{SecondaryMap, SlotMap};
 use std::{
-    any::{Any, TypeId},
+    any::{type_name, Any, TypeId},
     fmt::{self, Display},
     hash::{Hash, Hasher},
     marker::PhantomData,
@@ -17,6 +17,12 @@ use std::{
 slotmap::new_key_type! { pub struct EntityId; }

+impl EntityId {
+    pub fn as_u64(self) -> u64 {
+        self.0.as_ffi()
+    }
+}
+
 impl Display for EntityId {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self)
@@ -234,6 +240,20 @@ where
     }
 }

+impl Hash for AnyHandle {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.entity_id.hash(state);
+    }
+}
+
+impl PartialEq for AnyHandle {
+    fn eq(&self, other: &Self) -> bool {
+        self.entity_id == other.entity_id
+    }
+}
+
+impl Eq for AnyHandle {}
+
 #[derive(Deref, DerefMut)]
 pub struct Handle<T: Send + Sync> {
     #[deref]
@@ -284,6 +304,31 @@ impl<T: Send + Sync> Clone for Handle<T> {
     }
 }

+impl<T: 'static + Send + Sync> std::fmt::Debug for Handle<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "Handle {{ entity_id: {:?}, entity_type: {:?} }}",
+            self.any_handle.entity_id,
+            type_name::<T>()
+        )
+    }
+}
+
+impl<T: Send + Sync + 'static> Hash for Handle<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.any_handle.hash(state);
+    }
+}
+
+impl<T: Send + Sync + 'static> PartialEq for Handle<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.any_handle == other.any_handle
+    }
+}
+
+impl<T: Send + Sync + 'static> Eq for Handle<T> {}
+
 #[derive(Clone)]
 pub struct AnyWeakHandle {
     pub(crate) entity_id: EntityId,
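The new `Hash`, `PartialEq`, and `Eq` impls above key handles purely by their `EntityId`, so any two handles to the same entity compare equal and collapse to one entry in hashed collections, without requiring the wrapped `T` to be comparable. A minimal standalone sketch of that pattern in plain Rust (toy types for illustration, not the actual gpui2 `Handle`):

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct EntityId(u64);

#[derive(Debug, Clone)]
struct Handle {
    entity_id: EntityId,
}

// Identity lives in the entity id, not in the entity's contents.
impl Hash for Handle {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.entity_id.0.hash(state);
    }
}

impl PartialEq for Handle {
    fn eq(&self, other: &Self) -> bool {
        self.entity_id == other.entity_id
    }
}

impl Eq for Handle {}

fn main() {
    let a = Handle { entity_id: EntityId(1) };
    let b = a.clone(); // a second handle to the same entity
    let mut set = HashSet::new();
    set.insert(a);
    set.insert(b);
    assert_eq!(set.len(), 1); // both handles collapse to one entry
}
```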


@@ -24,6 +24,10 @@ impl<'a, T: Send + Sync + 'static> ModelContext<'a, T> {
        }
    }

+    pub fn entity_id(&self) -> EntityId {
+        self.entity_id
+    }
+
    pub fn handle(&self) -> WeakHandle<T> {
        self.app.entities.weak_handle(self.entity_id)
    }


@@ -20,7 +20,7 @@ test-support = [
 [dependencies]
 text = { path = "../text" }
-copilot = { path = "../copilot" }
+copilot2 = { path = "../copilot2" }
 client2 = { path = "../client2" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }


@@ -185,7 +185,7 @@ impl LspCommand for PrepareRename {
        _: LanguageServerId,
        cx: AsyncAppContext,
    ) -> Result<Option<Range<Anchor>>> {
-        buffer.read_with(&cx, |buffer, _| {
+        buffer.update(&mut cx, |buffer, _| {
            if let Some(
                lsp2::PrepareRenameResponse::Range(range)
                | lsp2::PrepareRenameResponse::RangeWithPlaceholder { range, .. },
@@ -199,7 +199,7 @@ impl LspCommand for PrepareRename {
                }
            }
            Ok(None)
-        })
+        })?
    }

    fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {
@@ -226,11 +226,11 @@ impl LspCommand for PrepareRename {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;

        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -264,7 +264,7 @@ impl LspCommand for PrepareRename {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        let start = message.start.and_then(deserialize_anchor);
        let end = message.end.and_then(deserialize_anchor);
@@ -354,10 +354,10 @@ impl LspCommand for PerformRename {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
            new_name: message.new_name,
            push_to_history: false,
        })
@@ -389,7 +389,7 @@ impl LspCommand for PerformRename {
        project
            .update(&mut cx, |project, cx| {
                project.deserialize_project_transaction(message, self.push_to_history, cx)
-            })
+            })?
            .await
    }
@@ -458,10 +458,10 @@ impl LspCommand for GetDefinition {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -559,10 +559,10 @@ impl LspCommand for GetTypeDefinition {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -599,11 +599,11 @@ fn language_server_for_buffer(
    cx: &mut AsyncAppContext,
) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
    project
-        .read_with(cx, |project, cx| {
+        .update(cx, |project, cx| {
            project
                .language_server_for_buffer(buffer.read(cx), server_id, cx)
                .map(|(adapter, server)| (adapter.clone(), server.clone()))
-        })
+        })?
        .ok_or_else(|| anyhow!("no language server found for buffer"))
}
@@ -620,7 +620,7 @@ async fn location_links_from_proto(
        let buffer = project
            .update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(origin.buffer_id, cx)
-            })
+            })?
            .await?;
        let start = origin
            .start
@@ -631,7 +631,7 @@ async fn location_links_from_proto(
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("missing origin end"))?;
        buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
            .await?;
        Some(Location {
            buffer,
@@ -645,7 +645,7 @@ async fn location_links_from_proto(
        let buffer = project
            .update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(target.buffer_id, cx)
-            })
+            })?
            .await?;
        let start = target
            .start
@@ -656,7 +656,7 @@ async fn location_links_from_proto(
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("missing target end"))?;
        buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
            .await?;
        let target = Location {
            buffer,
@@ -714,12 +714,11 @@ async fn location_links_from_lsp(
                    lsp_adapter.name.clone(),
                    cx,
                )
-            })
+            })?
            .await?;

-        cx.read(|cx| {
-            let origin_buffer = buffer.read(cx);
+        buffer.update(&mut cx, |origin_buffer, cx| {
            let origin_location = origin_range.map(|origin_range| {
                let origin_start =
                    origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
                let origin_end =
@@ -746,7 +745,7 @@ async fn location_links_from_lsp(
                origin: origin_location,
                target: target_location,
            })
-        });
+        })?;
    }
    Ok(definitions)
}
@@ -834,11 +833,10 @@ impl LspCommand for GetReferences {
                        lsp_adapter.name.clone(),
                        cx,
                    )
-                })
+                })?
                .await?;

-            cx.read(|cx| {
-                let target_buffer = target_buffer_handle.read(cx);
+            target_buffer_handle.update(&mut cx, |target_buffer, cx| {
                let target_start = target_buffer
                    .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
                let target_end = target_buffer
@@ -848,7 +846,7 @@ impl LspCommand for GetReferences {
                    range: target_buffer.anchor_after(target_start)
                        ..target_buffer.anchor_before(target_end),
                });
-            });
+            })?;
        }
    }
@@ -879,10 +877,10 @@ impl LspCommand for GetReferences {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -919,7 +917,7 @@ impl LspCommand for GetReferences {
            let target_buffer = project
                .update(&mut cx, |this, cx| {
                    this.wait_for_remote_buffer(location.buffer_id, cx)
-                })
+                })?
                .await?;
            let start = location
                .start
@@ -930,7 +928,7 @@ impl LspCommand for GetReferences {
                .and_then(deserialize_anchor)
                .ok_or_else(|| anyhow!("missing target end"))?;
            target_buffer
-                .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+                .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
                .await?;
            locations.push(Location {
                buffer: target_buffer,
@@ -982,10 +980,10 @@ impl LspCommand for GetDocumentHighlights {
        _: LanguageServerId,
        cx: AsyncAppContext,
    ) -> Result<Vec<DocumentHighlight>> {
-        buffer.read_with(&cx, |buffer, _| {
+        buffer.update(&mut cx, |buffer, _| {
            let mut lsp_highlights = lsp_highlights.unwrap_or_default();
            lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end)));
-            Ok(lsp_highlights
+            lsp_highlights
                .into_iter()
                .map(|lsp_highlight| {
                    let start = buffer
@@ -999,7 +997,7 @@ impl LspCommand for GetDocumentHighlights {
                        .unwrap_or(lsp2::DocumentHighlightKind::READ),
                    }
                })
-                .collect())
+                .collect()
        })
    }
@@ -1027,10 +1025,10 @@ impl LspCommand for GetDocumentHighlights {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -1075,7 +1073,7 @@ impl LspCommand for GetDocumentHighlights {
                .and_then(deserialize_anchor)
                .ok_or_else(|| anyhow!("missing target end"))?;
            buffer
-                .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+                .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
                .await?;
            let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
                Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
@@ -1126,71 +1124,70 @@ impl LspCommand for GetHover {
        _: Handle<Project>,
        buffer: Handle<Buffer>,
        _: LanguageServerId,
-        cx: AsyncAppContext,
+        mut cx: AsyncAppContext,
    ) -> Result<Self::Response> {
-        Ok(message.and_then(|hover| {
-            let (language, range) = cx.read(|cx| {
-                let buffer = buffer.read(cx);
-                (
-                    buffer.language().cloned(),
-                    hover.range.map(|range| {
-                        let token_start =
-                            buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
-                        let token_end =
-                            buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
-                        buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
-                    }),
-                )
-            });
+        let Some(hover) = message else {
+            return Ok(None);
+        };
+
+        let (language, range) = buffer.update(&mut cx, |buffer, cx| {
+            (
+                buffer.language().cloned(),
+                hover.range.map(|range| {
+                    let token_start =
+                        buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
+                    let token_end = buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
+                    buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
+                }),
+            )
+        })?;

        fn hover_blocks_from_marked_string(
            marked_string: lsp2::MarkedString,
        ) -> Option<HoverBlock> {
            let block = match marked_string {
                lsp2::MarkedString::String(content) => HoverBlock {
                    text: content,
                    kind: HoverBlockKind::Markdown,
                },
-                lsp2::MarkedString::LanguageString(lsp2::LanguageString {
-                    language,
-                    value,
-                }) => HoverBlock {
-                    text: value,
-                    kind: HoverBlockKind::Code { language },
-                },
+                lsp2::MarkedString::LanguageString(lsp2::LanguageString { language, value }) => {
+                    HoverBlock {
+                        text: value,
+                        kind: HoverBlockKind::Code { language },
+                    }
+                }
            };
            if block.text.is_empty() {
                None
            } else {
                Some(block)
            }
        }

-        let contents = cx.read(|_| match hover.contents {
+        let contents = match hover.contents {
            lsp2::HoverContents::Scalar(marked_string) => {
                hover_blocks_from_marked_string(marked_string)
                    .into_iter()
                    .collect()
            }
            lsp2::HoverContents::Array(marked_strings) => marked_strings
                .into_iter()
                .filter_map(hover_blocks_from_marked_string)
                .collect(),
            lsp2::HoverContents::Markup(markup_content) => vec![HoverBlock {
                text: markup_content.value,
                kind: if markup_content.kind == lsp2::MarkupKind::Markdown {
                    HoverBlockKind::Markdown
                } else {
                    HoverBlockKind::PlainText
                },
            }],
-        });
+        };

-        Some(Hover {
+        Ok(Some(Hover {
            contents,
            range,
            language,
-        })
-    }))
+        }))
    }
@@ -1218,10 +1215,10 @@ impl LspCommand for GetHover {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }
@@ -1295,7 +1292,7 @@ impl LspCommand for GetHover {
            return Ok(None);
        }

-        let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
+        let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
        let range = if let (Some(start), Some(end)) = (message.start, message.end) {
            language2::proto::deserialize_anchor(start)
                .and_then(|start| language2::proto::deserialize_anchor(end).map(|end| start..end))
@@ -1362,7 +1359,7 @@ impl LspCommand for GetCompletions {
            Default::default()
        };

-        let completions = buffer.read_with(&cx, |buffer, _| {
+        let completions = buffer.update(&mut cx, |buffer, _| {
            let language = buffer.language().cloned();
            let snapshot = buffer.snapshot();
            let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
@@ -1468,7 +1465,7 @@ impl LspCommand for GetCompletions {
                    }
                })
            })
-        });
+        })?;

        Ok(future::join_all(completions).await)
    }
@@ -1491,17 +1488,17 @@ impl LspCommand for GetCompletions {
    ) -> Result<Self> {
        let version = deserialize_version(&message.version);
        buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
+            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))?
            .await?;
        let position = message
            .position
            .and_then(language2::proto::deserialize_anchor)
            .map(|p| {
-                buffer.read_with(&cx, |buffer, _| {
+                buffer.update(&mut cx, |buffer, _| {
                    buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
                })
            })
-            .ok_or_else(|| anyhow!("invalid position"))?;
+            .ok_or_else(|| anyhow!("invalid position"))??;
        Ok(Self { position })
    }
@@ -1531,10 +1528,10 @@ impl LspCommand for GetCompletions {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
-        let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
+        let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
        let completions = message.completions.into_iter().map(|completion| {
            language2::proto::deserialize_completion(completion, language.clone())
        });
@@ -1639,7 +1636,7 @@ impl LspCommand for GetCodeActions {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;

        Ok(Self { range: start..end })
@@ -1671,7 +1668,7 @@ impl LspCommand for GetCodeActions {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        message
            .actions
@@ -1775,15 +1772,15 @@ impl LspCommand for OnTypeFormatting {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;

-        let tab_size = buffer.read_with(&cx, |buffer, cx| {
+        let tab_size = buffer.update(&mut cx, |buffer, cx| {
            language_settings(buffer.language(), buffer.file(), cx).tab_size
-        });
+        })?;

        Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
            trigger: message.trigger.clone(),
            options: lsp_formatting_options(tab_size.get()).into(),
            push_to_history: false,
@@ -1824,7 +1821,7 @@ impl LspCommand for OnTypeFormatting {
    }

 impl InlayHints {
-    pub async fn lsp2_to_project_hint(
+    pub async fn lsp_to_project_hint(
        lsp_hint: lsp2::InlayHint,
        buffer_handle: &Handle<Buffer>,
        server_id: LanguageServerId,
@@ -1838,15 +1835,14 @@ impl InlayHints {
            _ => None,
        });

-        let position = cx.update(|cx| {
-            let buffer = buffer_handle.read(cx);
+        let position = buffer_handle.update(cx, |buffer, _| {
            let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
            if kind == Some(InlayHintKind::Parameter) {
                buffer.anchor_before(position)
            } else {
                buffer.anchor_after(position)
            }
-        });
+        })?;
        let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
            .await
            .context("lsp to project inlay hint conversion")?;
@@ -1878,7 +1874,7 @@ impl InlayHints {
        })
    }

-    async fn lsp2_inlay_label_to_project(
+    async fn lsp_inlay_label_to_project(
        lsp_label: lsp2::InlayHintLabel,
        server_id: LanguageServerId,
    ) -> anyhow::Result<InlayHintLabel> {
@@ -2109,7 +2105,7 @@ impl InlayHints {
        })
    }

-    pub fn project_to_lsp2_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp2::InlayHint {
+    pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp2::InlayHint {
        lsp2::InlayHint {
            position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
            kind: hint.kind.map(|kind| match kind {
@@ -2303,7 +2299,7 @@ impl LspCommand for InlayHints {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;

        Ok(Self { range: start..end })
@@ -2335,7 +2331,7 @@ impl LspCommand for InlayHints {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
            .await?;
        let mut hints = Vec::new();
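Most of the churn in this file is one mechanical migration: infallible `read_with(&cx, ...)` calls become `update(&mut cx, ...)`, which returns a `Result` because the entity behind the handle may already have been released, so every call site grows a trailing `?` (or `??` where the closure itself returns a `Result`). A small self-contained sketch of the shape of that change, using the `anyhow` crate and toy types rather than the real gpui2 `Handle`/`AsyncAppContext`:

```rust
use anyhow::{anyhow, Result};

// Toy handle: `None` models an entity that has already been released.
struct Handle<T> {
    state: Option<T>,
}

impl<T> Handle<T> {
    // The fallible accessor: the closure only runs if the entity is alive.
    fn update<R>(&mut self, f: impl FnOnce(&mut T) -> R) -> Result<R> {
        let state = self.state.as_mut().ok_or_else(|| anyhow!("entity released"))?;
        Ok(f(state))
    }
}

fn double(buffer: &mut Handle<Vec<u32>>) -> Result<usize> {
    // The trailing `?` mirrors the `})?` endings added throughout the diff.
    let len = buffer.update(|buf| {
        buf.iter_mut().for_each(|n| *n *= 2);
        buf.len()
    })?;
    Ok(len)
}

fn main() -> Result<()> {
    let mut buffer = Handle { state: Some(vec![1, 2, 3]) };
    assert_eq!(double(&mut buffer)?, 3);
    Ok(())
}
```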


@@ -10,11 +10,11 @@ mod project_tests;
 #[cfg(test)]
 mod worktree_tests;

-use anyhow::{anyhow, Context, Result};
+use anyhow::{anyhow, Context as _, Result};
 use client2::{proto, Client, Collaborator, TypedEnvelope, UserStore};
 use clock::ReplicaId;
 use collections::{hash_map, BTreeMap, HashMap, HashSet};
-use copilot::Copilot;
+use copilot2::Copilot;
 use futures::{
     channel::{
         mpsc::{self, UnboundedReceiver},
@@ -26,7 +26,8 @@ use futures::{
 };
 use globset::{Glob, GlobSet, GlobSetBuilder};
 use gpui2::{
-    AnyHandle, AppContext, AsyncAppContext, EventEmitter, Handle, ModelContext, Task, WeakHandle,
+    AnyHandle, AppContext, AsyncAppContext, EventEmitter, Executor, Handle, ModelContext, Task,
+    WeakHandle,
 };
 use itertools::Itertools;
 use language2::{
@@ -195,8 +196,8 @@ impl DelayedDebounced {
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
-        self.task = Some(cx.spawn(|workspace, mut cx| async move {
-            let mut timer = cx.background().timer(delay).fuse();
+        self.task = Some(cx.executor().spawn(|workspace, mut cx| async move {
+            let mut timer = cx.executor().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }
@@ -206,9 +207,9 @@ impl DelayedDebounced {
                _ = timer => {}
            }

-            workspace
-                .update(&mut cx, |workspace, cx| (func)(workspace, cx))
-                .await;
+            if let Ok(task) = workspace.update(&mut cx, |workspace, cx| (func)(workspace, cx)) {
+                task.await;
+            }
        }));
    }
}
@@ -646,7 +647,7 @@ impl Project {
            opened_buffer: watch::channel(),
            client_subscriptions: Vec::new(),
            _subscriptions: vec![
-                cx.observe_global::<SettingsStore, _>(Self::on_settings_changed),
+                cx.observe_global::<SettingsStore>(Self::on_settings_changed),
                cx.on_release(Self::release),
                cx.on_app_quit(Self::shutdown_language_servers),
            ],
@@ -673,7 +674,7 @@ impl Project {
            },
            copilot_lsp_subscription,
            copilot_log_subscription: None,
-            current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
+            current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
            node: Some(node),
            prettier_instances: HashMap::default(),
        }
@@ -696,7 +697,7 @@ impl Project {
                project_id: remote_id,
            })
            .await?;
-        let this = cx.add_model(|cx| {
+        let this = cx.entity(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
@@ -775,7 +776,7 @@ impl Project {
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
-                current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
+                current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
                node: None,
                prettier_instances: HashMap::default(),
            };
@@ -793,7 +794,7 @@ impl Project {
            .map(|peer| peer.user_id)
            .collect();
        user_store
-            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
+            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
            .await?;

        this.update(&mut cx, |this, cx| {
@@ -914,7 +915,7 @@ impl Project {
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

-        let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp2.clone();
+        let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            let language = languages.iter().find_map(|l| {
@@ -1153,7 +1154,7 @@ impl Project {
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
-            Some(cx.spawn_weak(|_, mut cx| async move {
+            Some(cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
@@ -1197,7 +1198,7 @@ impl Project {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

-            Some(cx.spawn_weak(|_, mut cx| async move {
+            Some(cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
@@ -1240,7 +1241,7 @@ impl Project {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

-            Some(cx.spawn_weak(|_, mut cx| async move {
+            Some(cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
@@ -1258,7 +1259,7 @@ impl Project {
                        response.worktree_scan_id as usize,
                        cx,
                    )
-                })
+                })?
                .await
            }))
        }
@@ -1280,7 +1281,7 @@ impl Project {
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
-            Some(cx.spawn_weak(|_, mut cx| async move {
+            Some(cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
@@ -1317,7 +1318,7 @@ impl Project {
            project_id: self.remote_id().unwrap(),
            entry_id: entry_id.to_proto(),
        });
-        Some(cx.spawn_weak(|_, mut cx| async move {
+        Some(cx.spawn(|_, mut cx| async move {
            let response = request.await?;
            if let Some(worktree) = worktree.upgrade() {
                worktree
@@ -1341,7 +1342,7 @@ impl Project {
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
-                .set_model(&cx.handle(), &mut cx.to_async()),
+                .set_model(&cx.handle().upgrade(), &mut cx.to_async()),
        );

        for open_buffer in self.opened_buffers.values_mut() {
@@ -1382,7 +1383,7 @@ impl Project {
        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees(cx) {
            let worktree_id = worktree.read(cx).id().to_proto();
-            for (path, content) in store.local_settings(worktree.id()) {
+            for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
@@ -1506,7 +1507,7 @@ impl Project {
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
-        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
@@ -2432,7 +2433,7 @@ impl Project {
            Duration::from_secs(1);

        let task = cx.spawn_weak(|this, mut cx| async move {
-            cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
+            cx.executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
            if let Some(this) = this.upgrade(&cx) {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_finished(
@@ -2786,7 +2787,7 @@ impl Project {
        };

        let project_settings = settings2::get::<ProjectSettings>(cx);
-        let lsp = project_settings.lsp2.get(&adapter.name.0);
+        let lsp = project_settings.lsp.get(&adapter.name.0);
        let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();

        let mut initialization_options = adapter.initialization_options.clone();
@@ -3429,7 +3430,7 @@ impl Project {
        });

        const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
-        let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
+        let mut timeout = cx.executor().timer(PROCESS_TIMEOUT).fuse();

        let mut errored = false;
        if let Some(mut process) = process {
@@ -4013,7 +4014,7 @@ impl Project {
                    project_id,
                    buffer_ids: remote_buffers
                        .iter()
-                        .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
+                        .map(|buffer| buffer.update(&mut cx, |buffer, _| buffer.remote_id()))
                        .collect(),
                })
                .await?
@@ -4022,13 +4023,13 @@ impl Project {
            project_transaction = this
                .update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
-                })
+                })?
                .await?;
        }

        for buffer in local_buffers {
            let transaction = buffer
-                .update(&mut cx, |buffer, cx| buffer.reload(cx))
+                .update(&mut cx, |buffer, cx| buffer.reload(cx))?
                .await?;

            buffer.update(&mut cx, |buffer, cx| {
                if let Some(transaction) = transaction {
@@ -4091,9 +4092,9 @@ impl Project {
        let mut project_transaction = ProjectTransaction::default();
        for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
-            let settings = buffer.read_with(&cx, |buffer, cx| {
+            let settings = buffer.update(&mut cx, |buffer, cx| {
                language_settings(buffer.language(), buffer.file(), cx).clone()
-            });
+            })?;

            let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
            let ensure_final_newline = settings.ensure_final_newline_on_save;
@@ -4105,7 +4106,7 @@ impl Project {
            let trailing_whitespace_diff = if remove_trailing_whitespace {
                Some(
                    buffer
-                        .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
+                        .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
                        .await,
                )
            } else {
@@ -4182,13 +4183,13 @@ impl Project {
                    if let Some(prettier_task) = this
                        .update(&mut cx, |project, cx| {
                            project.prettier_instance_for_buffer(buffer, cx)
-                        }).await {
+                        })?.await {
                        match prettier_task.await
                        {
                            Ok(prettier) => {
-                                let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+                                let buffer_path = buffer.update(&mut cx, |buffer, cx| {
                                    File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
-                                });
+                                })?;
                                format_operation = Some(FormatOperation::Prettier(
                                    prettier
                                        .format(buffer, buffer_path, &cx)
@@ -4225,7 +4226,7 @@ impl Project {
                    match prettier_task.await
                    {
                        Ok(prettier) => {
-                            let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+                            let buffer_path = buffer.update(&mut cx, |buffer, cx| {
                                File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
                            });
                            format_operation = Some(FormatOperation::Prettier(
@@ -4298,8 +4299,10 @@ impl Project {
                    trigger: trigger as i32,
                    buffer_ids: buffers
                        .iter()
-                        .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
-                        .collect(),
+                        .map(|buffer| {
+                            buffer.update(&mut cx, |buffer, _| buffer.remote_id())
+                        })
+                        .collect::<Result<_>>()?,
                })
                .await?
                .transaction
@@ -4307,7 +4310,7 @@ impl Project {
            project_transaction = this
                .update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
-                })
+                })?
                .await?;
        }
        Ok(project_transaction)
@@ -4316,7 +4319,7 @@ impl Project {
    }

    async fn format_via_lsp(
-        this: &Handle<Self>,
+        this: &WeakHandle<Self>,
        buffer: &Handle<Buffer>,
        abs_path: &Path,
        language_server: &Arc<LanguageServer>,
@@ -4341,7 +4344,7 @@ impl Project {
                .await?
        } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
            let buffer_start = lsp2::Position::new(0, 0);
-            let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
+            let buffer_end = buffer.update(&mut cx, |b, _| point_to_lsp(b.max_point_utf16()))?;

            language_server
                .request::<lsp2::request::RangeFormatting>(lsp2::DocumentRangeFormattingParams {
@@ -4358,7 +4361,7 @@ impl Project {
        if let Some(lsp_edits) = lsp_edits {
            this.update(cx, |this, cx| {
                this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
-            })
+            })?
            .await
        } else {
            Ok(Vec::new())
@@ -4372,7 +4375,7 @@ impl Project {
        arguments: &[String],
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Diff>> {
-        let working_dir_path = buffer.read_with(cx, |buffer, cx| {
+        let working_dir_path = buffer.update(&mut cx, |buffer, cx| {
            let file = File::from_dyn(buffer.file())?;
            let worktree = file.worktree.read(cx).as_local()?;
            let mut worktree_path = worktree.abs_path().to_path_buf();
@@ -4397,7 +4400,7 @@ impl Project {
            .stdin
            .as_mut()
            .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
-        let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
+        let text = buffer.update(&mut cx, |buffer, _| buffer.as_rope().clone());
        for chunk in text.chunks() {
            stdin.write_all(chunk.as_bytes()).await?;
        }
@@ -4797,7 +4800,7 @@ impl Project {
            .unwrap_or(false);
        let additional_text_edits = if can_resolve {
            lang_server
-                .request::<lsp2::request::ResolveCompletionItem>(completion.lsp2_completion)
+                .request::<lsp2::request::ResolveCompletionItem>(completion.lsp_completion)
                .await?
                .additional_text_edits
        } else {
@@ -4925,8 +4928,8 @@ impl Project {
            .and_then(|d| d.get_mut("range"))
        {
            *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
-            action.lsp2_action = lang_server
-                .request::<lsp2::request::CodeActionResolveRequest>(action.lsp2_action)
+            action.lsp_action = lang_server
+                .request::<lsp2::request::CodeActionResolveRequest>(action.lsp_action)
                .await?;
        } else {
            let actions = this
@@ -5537,7 +5540,7 @@ impl Project {
            })
            .collect::<Vec<_>>();

-        let background = cx.background().clone();
+        let background = cx.executor().clone();
        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
        if path_count == 0 {
            let (_, rx) = smol::channel::bounded(1024);
@@ -5560,11 +5563,11 @@ impl Project {
                }
            })
            .collect();
-        cx.background()
+        cx.executor()
            .spawn(Self::background_search(
                unnamed_files,
                opened_buffers,
-                cx.background().clone(),
+                cx.executor().clone(),
                self.fs.clone(),
                workers,
                query.clone(),
@@ -5575,9 +5578,9 @@ impl Project {
            .detach();

        let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
-        let background = cx.background().clone();
+        let background = cx.executor().clone();
        let (result_tx, result_rx) = smol::channel::bounded(1024);
-        cx.background()
+        cx.executor()
            .spawn(async move {
                let Ok(buffers) = buffers.await else {
                    return;
@@ -5685,7 +5688,7 @@ impl Project {
    async fn background_search(
        unnamed_buffers: Vec<Handle<Buffer>>,
        opened_buffers: HashMap<Arc<Path>, (Handle<Buffer>, BufferSnapshot)>,
-        executor: Arc<Background>,
+        executor: Executor,
        fs: Arc<dyn Fs>,
        workers: usize,
        query: SearchQuery,
@@ -6459,7 +6462,7 @@ impl Project {
            })
            .collect::<Vec<_>>();

-        cx.background()
+        cx.executor()
            .spawn(async move {
                for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
                    async move {
@@ -6599,7 +6602,7 @@ impl Project {
                this.disconnected_from_host(cx);
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_add_collaborator(
@@ -6673,7 +6676,7 @@ impl Project {
            });
            cx.notify();
            Ok(())
-        })
+        })?
    }

    async fn handle_remove_collaborator(
@@ -6702,7 +6705,7 @@ impl Project {
            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
-        })
+        })?
    }

    async fn handle_update_project(
@@ -6717,7 +6720,7 @@ impl Project {
                this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_update_worktree(
@@ -6735,7 +6738,7 @@ impl Project {
                });
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_update_worktree_settings(
@@ -6759,7 +6762,7 @@ impl Project {
                });
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_create_project_entry(
@@ -6773,13 +6776,13 @@ impl Project {
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
-        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                let path = PathBuf::from(envelope.payload.path);
                worktree.create_entry(path, envelope.payload.is_directory, cx)
-            })
+            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
@@ -6794,11 +6797,11 @@ impl Project {
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
-        let worktree = this.read_with(&cx, |this, cx| {
+        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
-        })?;
-        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+        })??;
+        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(envelope.payload.new_path);
@@ -6807,7 +6810,7 @@ impl Project {
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
-            })?
+            })??
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
@@ -6822,11 +6825,11 @@ impl Project {
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
-        let worktree = this.read_with(&cx, |this, cx| {
+        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
-        })?;
-        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+        })??;
+        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(envelope.payload.new_path);
@@ -6835,7 +6838,7 @@ impl Project {
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
-            })?
+            })??
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
@@ -6853,10 +6856,10 @@ impl Project {
        this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));

-        let worktree = this.read_with(&cx, |this, cx| {
+        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
-        })?;
+        })??;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
@@ -6881,7 +6884,7 @@ impl Project {
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
-            .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
+            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        worktree
            .update(&mut cx, |worktree, cx| {
@@ -6890,7 +6893,7 @@ impl Project {
                    .unwrap()
                    .expand_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
-            })?
+            })??
            .await?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
        Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
@@ -6923,7 +6926,7 @@ impl Project {
                }
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_start_language_server(
@@ -7005,7 +7008,7 @@ impl Project {
            }

            Ok(())
-        })
+        })?
    }

    async fn handle_update_buffer(
@@ -7041,7 +7044,7 @@ impl Project {
                }
            }
            Ok(proto::Ack {})
-        })
+        })?
    }

    async fn handle_create_buffer_for_peer(
@@ -7101,7 +7104,7 @@ impl Project {
            }

            Ok(())
-        })
+        })?
    }

    async fn handle_update_diff_base(
@@ -7127,7 +7130,7 @@ impl Project {
                buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_update_buffer_file(
@@ -7162,7 +7165,7 @@ impl Project {
                this.detect_language_for_buffer(&buffer, cx);
            }
            Ok(())
-        })
+        })?
    }

    async fn handle_save_buffer(
@@ -7180,17 +7183,17 @@ impl Project {
                .and_then(|buffer| buffer.upgrade())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            anyhow::Ok((project_id, buffer))
-        })?;
+        })??;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
-            })
+            })?
            .await?;
-        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
+        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

-        this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
+        this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
            .await?;
-        Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
+        Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(buffer.saved_version()),
@@ -7198,7 +7201,7 @@ impl Project {
            fingerprint: language2::proto::serialize_fingerprint(
                buffer.saved_version_fingerprint(),
            ),
-        }))
+        })?)
    }

    async fn handle_reload_buffers(
@@ -7219,12 +7222,12 @@ impl Project {
                );
            }
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
-        })?;
+        })??;
        let project_transaction = reload.await?;

        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
-        });
+        })?;
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
@@ -7298,7 +7301,7 @@ impl Project {
            })
            .log_err();

-        cx.background()
+        cx.executor()
            .spawn(
                async move {
                    let operations = operations.await;
@@ -7917,7 +7920,7 @@ impl Project {
        // Any incomplete buffers have open requests waiting. Request that the host sends
        // creates these buffers for us again to unblock any waiting futures.
        for id in incomplete_buffer_ids {
-            cx.background()
+            cx.executor()
                .spawn(client.request(proto::OpenBufferById { project_id, id }))
                .detach();
        }
@@ -8082,7 +8085,7 @@ impl Project {
                });
            }
            Ok(())
-        })
+        })?
    }
async fn handle_buffer_reloaded( async fn handle_buffer_reloaded(
@ -8116,7 +8119,7 @@ impl Project {
if let Some(buffer) = buffer { if let Some(buffer) = buffer {
buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
buffer.did_reload(version, fingerprint, line_ending, mtime, cx); buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
}); })?;
} }
Ok(()) Ok(())
}) })
@ -8132,7 +8135,7 @@ impl Project {
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<(Range<Anchor>, String)>>> { ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx); let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
cx.background().spawn(async move { cx.executor().spawn(async move {
let snapshot = snapshot?; let snapshot = snapshot?;
let mut lsp_edits = lsp_edits let mut lsp_edits = lsp_edits
.into_iter() .into_iter()
@ -8375,9 +8378,9 @@ impl Project {
return Task::ready(None); return Task::ready(None);
}; };
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs)); let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs))?;
let prettier_dir = match cx let prettier_dir = match cx
.background() .executor()
.spawn(Prettier::locate( .spawn(Prettier::locate(
worktree_path.zip(buffer_path).map( worktree_path.zip(buffer_path).map(
|(worktree_root_path, starting_path)| LocateStart { |(worktree_root_path, starting_path)| LocateStart {
@@ -8535,7 +8538,7 @@ impl Project {
.cloned(); .cloned();
let fs = Arc::clone(&self.fs); let fs = Arc::clone(&self.fs);
cx.background() cx.executor()
.spawn(async move { .spawn(async move {
let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE); let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist // method creates parent directory if it doesn't exist
@@ -8583,15 +8586,15 @@ fn subscribe_for_copilot_events(
cx.subscribe( cx.subscribe(
copilot, copilot,
|project, copilot, copilot_event, cx| match copilot_event { |project, copilot, copilot_event, cx| match copilot_event {
copilot::Event::CopilotLanguageServerStarted => { copilot2::Event::CopilotLanguageServerStarted => {
match copilot.read(cx).language_server() { match copilot.read(cx).language_server() {
Some((name, copilot_server)) => { Some((name, copilot_server)) => {
// Another event wants to re-add the server that was already added and subscribed to, avoid doing it again. // Another event wants to re-add the server that was already added and subscribed to, avoid doing it again.
if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() { if !copilot_server.has_notification_handler::<copilot2::request::LogMessage>() {
let new_server_id = copilot_server.server_id(); let new_server_id = copilot_server.server_id();
let weak_project = cx.weak_handle(); let weak_project = cx.weak_handle();
let copilot_log_subscription = copilot_server let copilot_log_subscription = copilot_server
.on_notification::<copilot::request::LogMessage, _>( .on_notification::<copilot2::request::LogMessage, _>(
move |params, mut cx| { move |params, mut cx| {
if let Some(project) = weak_project.upgrade(&mut cx) { if let Some(project) = weak_project.upgrade(&mut cx) {
project.update(&mut cx, |_, cx| { project.update(&mut cx, |_, cx| {
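Note on the recurring pattern in the hunks above: entity accessors that used to be infallible (`read_with`, `update` on a handle) now return a `Result`, which is why call sites gain a trailing `?`, and `??` where the closure itself already returns a `Result`; `cx.background()` is likewise renamed to `cx.executor()`. A minimal, self-contained sketch of that error-propagation shape, using a hypothetical `Handle` stand-in and `anyhow` rather than the real gpui2 types:

use anyhow::{anyhow, Result};

// Hypothetical stand-in for an entity handle whose `update` fails once the
// entity has been released; only the Result-wrapping shape matters here.
struct Handle<T>(Option<T>);

impl<T> Handle<T> {
    fn update<R>(&mut self, f: impl FnOnce(&mut T) -> R) -> Result<R> {
        let entity = self.0.as_mut().ok_or_else(|| anyhow!("entity released"))?;
        Ok(f(entity))
    }
}

fn demo(handle: &mut Handle<Vec<u32>>) -> Result<usize> {
    // Plain closure: a single `?` unwraps the "entity still alive" Result.
    let len = handle.update(|v| v.len())?;
    // Closure that itself returns a Result: `update` yields Result<Result<_>>,
    // which is why several call sites in the diff end in `??`.
    let first = handle
        .update(|v| v.first().copied().ok_or_else(|| anyhow!("empty buffer list")))??;
    Ok(len + first as usize)
}

fn main() -> Result<()> {
    let mut handle = Handle(Some(vec![2, 3]));
    assert_eq!(demo(&mut handle)?, 4); // len 2 + first element 2
    Ok(())
}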
View file
@@ -2,7 +2,7 @@ use crate::{
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
}; };
use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context as _, Result};
use client2::{proto, Client}; use client2::{proto, Client};
use clock::ReplicaId; use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque}; use collections::{HashMap, HashSet, VecDeque};
@@ -21,7 +21,9 @@ use futures::{
}; };
use fuzzy2::CharBag; use fuzzy2::CharBag;
use git::{DOT_GIT, GITIGNORE}; use git::{DOT_GIT, GITIGNORE};
use gpui2::{AppContext, AsyncAppContext, EventEmitter, Executor, Handle, ModelContext, Task}; use gpui2::{
AppContext, AsyncAppContext, Context, EventEmitter, Executor, Handle, ModelContext, Task,
};
use language2::{ use language2::{
proto::{ proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -299,7 +301,7 @@ impl Worktree {
.await .await
.context("failed to stat worktree path")?; .context("failed to stat worktree path")?;
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| { cx.entity(move |cx: &mut ModelContext<Worktree>| {
let root_name = abs_path let root_name = abs_path
.file_name() .file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string()); .map_or(String::new(), |f| f.to_string_lossy().to_string());
@@ -308,7 +310,7 @@ impl Worktree {
ignores_by_parent_abs_path: Default::default(), ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(), git_repositories: Default::default(),
snapshot: Snapshot { snapshot: Snapshot {
id: WorktreeId::from_usize(cx.model_id()), id: WorktreeId::from_usize(cx.entity_id()),
abs_path: abs_path.clone(), abs_path: abs_path.clone(),
root_name: root_name.clone(), root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
@@ -336,8 +338,8 @@ impl Worktree {
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
cx.spawn_weak(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap(); let this = this.as_local_mut().unwrap();
match state { match state {
@@ -361,10 +363,10 @@ impl Worktree {
}) })
.detach(); .detach();
let background_scanner_task = cx.background().spawn({ let background_scanner_task = cx.executor().spawn({
let fs = fs.clone(); let fs = fs.clone();
let snapshot = snapshot.clone(); let snapshot = snapshot.clone();
let background = cx.background().clone(); let background = cx.executor().clone();
async move { async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await; let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new( BackgroundScanner::new(
@@ -394,10 +396,9 @@ impl Worktree {
fs, fs,
visible, visible,
}) })
})) })
} }
// abcdefghi
pub fn remote( pub fn remote(
project_remote_id: u64, project_remote_id: u64,
replica_id: ReplicaId, replica_id: ReplicaId,
@@ -426,7 +427,7 @@ impl Worktree {
let background_snapshot = Arc::new(Mutex::new(snapshot.clone())); let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel(); let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
cx.background() cx.executor()
.spawn({ .spawn({
let background_snapshot = background_snapshot.clone(); let background_snapshot = background_snapshot.clone();
async move { async move {
@@ -442,27 +443,24 @@ impl Worktree {
}) })
.detach(); .detach();
cx.spawn_weak(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
while (snapshot_updated_rx.recv().await).is_some() { while (snapshot_updated_rx.recv().await).is_some() {
if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| {
this.update(&mut cx, |this, cx| { let this = this.as_remote_mut().unwrap();
let this = this.as_remote_mut().unwrap(); this.snapshot = this.background_snapshot.lock().clone();
this.snapshot = this.background_snapshot.lock().clone(); cx.emit(Event::UpdatedEntries(Arc::from([])));
cx.emit(Event::UpdatedEntries(Arc::from([]))); cx.notify();
cx.notify(); while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() { if this.observed_snapshot(*scan_id) {
if this.observed_snapshot(*scan_id) { let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap(); let _ = tx.send(());
let _ = tx.send(()); } else {
} else { break;
break;
}
} }
}); }
} else { })?;
break;
}
} }
anyhow::Ok(())
}) })
.detach(); .detach();
@@ -598,13 +596,13 @@ impl LocalWorktree {
let path = Arc::from(path); let path = Arc::from(path);
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
let (file, contents, diff_base) = this let (file, contents, diff_base) = this
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
.await?; .await?;
let text_buffer = cx let text_buffer = cx
.background() .executor()
.spawn(async move { text::Buffer::new(0, id, contents) }) .spawn(async move { text::Buffer::new(0, id, contents) })
.await; .await;
Ok(cx.add_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))) cx.entity(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))
}) })
} }
@@ -878,18 +876,18 @@ impl LocalWorktree {
let fs = self.fs.clone(); let fs = self.fs.clone();
let entry = self.refresh_entry(path.clone(), None, cx); let entry = self.refresh_entry(path.clone(), None, cx);
cx.spawn(|this, cx| async move { cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?; let text = fs.load(&abs_path).await?;
let entry = entry.await?; let entry = entry.await?;
let mut index_task = None; let mut index_task = None;
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot()); let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) { if let Some(repo) = snapshot.repository_for_path(&path) {
let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap(); let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap();
if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) { if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let repo = repo.repo_ptr.clone(); let repo = repo.repo_ptr.clone();
index_task = Some( index_task = Some(
cx.background() cx.executor()
.spawn(async move { repo.lock().load_index_text(&repo_path) }), .spawn(async move { repo.lock().load_index_text(&repo_path) }),
); );
} }
@@ -901,10 +899,13 @@ impl LocalWorktree {
None None
}; };
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
Ok(( Ok((
File { File {
entry_id: entry.id, entry_id: entry.id,
worktree: this, worktree,
path: entry.path, path: entry.path,
mtime: entry.mtime, mtime: entry.mtime,
is_local: true, is_local: true,
@@ -923,7 +924,6 @@ impl LocalWorktree {
has_changed_file: bool, has_changed_file: bool,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let handle = cx.handle();
let buffer = buffer_handle.read(cx); let buffer = buffer_handle.read(cx);
let rpc = self.client.clone(); let rpc = self.client.clone();
@@ -935,13 +935,14 @@ impl LocalWorktree {
let version = buffer.version(); let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx); let save = self.write_file(path, text, buffer.line_ending(), cx);
cx.as_mut().spawn(|mut cx| async move { cx.spawn(|this, mut cx| async move {
let entry = save.await?; let entry = save.await?;
let this = this.upgrade().context("worktree dropped")?;
if has_changed_file { if has_changed_file {
let new_file = Arc::new(File { let new_file = Arc::new(File {
entry_id: entry.id, entry_id: entry.id,
worktree: handle, worktree: this,
path: entry.path, path: entry.path,
mtime: entry.mtime, mtime: entry.mtime,
is_local: true, is_local: true,
@@ -1005,7 +1006,7 @@ impl LocalWorktree {
let lowest_ancestor = self.lowest_ancestor(&path); let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let write = cx.background().spawn(async move { let write = cx.executor().spawn(async move {
if is_dir { if is_dir {
fs.create_dir(&abs_path).await fs.create_dir(&abs_path).await
} else { } else {
@@ -1035,7 +1036,7 @@ impl LocalWorktree {
this.as_local_mut().unwrap().refresh_entry(path, None, cx), this.as_local_mut().unwrap().refresh_entry(path, None, cx),
refreshes, refreshes,
) )
}); })?;
for refresh in refreshes { for refresh in refreshes {
refresh.await.log_err(); refresh.await.log_err();
} }
@@ -1055,14 +1056,14 @@ impl LocalWorktree {
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let write = cx let write = cx
.background() .executor()
.spawn(async move { fs.save(&abs_path, &text, line_ending).await }); .spawn(async move { fs.save(&abs_path, &text, line_ending).await });
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
write.await?; write.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut().unwrap().refresh_entry(path, None, cx) this.as_local_mut().unwrap().refresh_entry(path, None, cx)
}) })?
.await .await
}) })
} }
@@ -1076,7 +1077,7 @@ impl LocalWorktree {
let abs_path = self.absolutize(&entry.path); let abs_path = self.absolutize(&entry.path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let delete = cx.background().spawn(async move { let delete = cx.executor().spawn(async move {
if entry.is_file() { if entry.is_file() {
fs.remove_file(&abs_path, Default::default()).await?; fs.remove_file(&abs_path, Default::default()).await?;
} else { } else {
@@ -1098,7 +1099,7 @@ impl LocalWorktree {
this.as_local_mut() this.as_local_mut()
.unwrap() .unwrap()
.refresh_entries_for_paths(vec![path]) .refresh_entries_for_paths(vec![path])
}) })?
.recv() .recv()
.await; .await;
Ok(()) Ok(())
@@ -1116,7 +1117,7 @@ impl LocalWorktree {
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let rename = cx.background().spawn(async move { let rename = cx.executor().spawn(async move {
fs.rename(&abs_old_path, &abs_new_path, Default::default()) fs.rename(&abs_old_path, &abs_new_path, Default::default())
.await .await
}); });
@@ -1127,7 +1128,7 @@ impl LocalWorktree {
this.as_local_mut() this.as_local_mut()
.unwrap() .unwrap()
.refresh_entry(new_path.clone(), Some(old_path), cx) .refresh_entry(new_path.clone(), Some(old_path), cx)
}) })?
.await .await
})) }))
} }
@@ -1143,7 +1144,7 @@ impl LocalWorktree {
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let copy = cx.background().spawn(async move { let copy = cx.executor().spawn(async move {
copy_recursive( copy_recursive(
fs.as_ref(), fs.as_ref(),
&abs_old_path, &abs_old_path,
@@ -1159,7 +1160,7 @@ impl LocalWorktree {
this.as_local_mut() this.as_local_mut()
.unwrap() .unwrap()
.refresh_entry(new_path.clone(), None, cx) .refresh_entry(new_path.clone(), None, cx)
}) })?
.await .await
})) }))
} }
@@ -1171,7 +1172,7 @@ impl LocalWorktree {
) -> Option<Task<Result<()>>> { ) -> Option<Task<Result<()>>> {
let path = self.entry_for_id(entry_id)?.path.clone(); let path = self.entry_for_id(entry_id)?.path.clone();
let mut refresh = self.refresh_entries_for_paths(vec![path]); let mut refresh = self.refresh_entries_for_paths(vec![path]);
Some(cx.background().spawn(async move { Some(cx.executor().spawn(async move {
refresh.next().await; refresh.next().await;
Ok(()) Ok(())
})) }))
@@ -1204,15 +1205,13 @@ impl LocalWorktree {
vec![path.clone()] vec![path.clone()]
}; };
let mut refresh = self.refresh_entries_for_paths(paths); let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn_weak(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
refresh.recv().await; refresh.recv().await;
this.upgrade(&cx) this.update(&mut cx, |this, _| {
.ok_or_else(|| anyhow!("worktree was dropped"))? this.entry_for_path(path)
.update(&mut cx, |this, _| { .cloned()
this.entry_for_path(path) .ok_or_else(|| anyhow!("failed to read path after update"))
.cloned() })?
.ok_or_else(|| anyhow!("failed to read path after update"))
})
}) })
} }
@@ -1246,8 +1245,8 @@ impl LocalWorktree {
.unbounded_send((self.snapshot(), Arc::from([]), Arc::from([]))) .unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
.ok(); .ok();
let worktree_id = cx.model_id() as u64; let worktree_id = cx.entity_id().as_u64();
let _maintain_remote_snapshot = cx.background().spawn(async move { let _maintain_remote_snapshot = cx.executor().spawn(async move {
let mut is_first = true; let mut is_first = true;
while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await { while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
let update; let update;
@@ -1294,7 +1293,7 @@ impl LocalWorktree {
for (&server_id, summary) in summaries { for (&server_id, summary) in summaries {
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary { if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
project_id, project_id,
worktree_id: cx.model_id() as u64, worktree_id: cx.entity_id().as_u64(),
summary: Some(summary.to_proto(server_id, &path)), summary: Some(summary.to_proto(server_id, &path)),
}) { }) {
return Task::ready(Err(e)); return Task::ready(Err(e));
@@ -1305,7 +1304,7 @@ impl LocalWorktree {
let rx = self.observe_updates(project_id, cx, move |update| { let rx = self.observe_updates(project_id, cx, move |update| {
client.request(update).map(|result| result.is_ok()) client.request(update).map(|result| result.is_ok())
}); });
cx.foreground() cx.executor()
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) }) .spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
} }
@@ -1339,7 +1338,7 @@ impl RemoteWorktree {
let version = buffer.version(); let version = buffer.version();
let rpc = self.client.clone(); let rpc = self.client.clone();
let project_id = self.project_id; let project_id = self.project_id;
cx.as_mut().spawn(|mut cx| async move { cx.spawn(|_, mut cx| async move {
let response = rpc let response = rpc
.request(proto::SaveBuffer { .request(proto::SaveBuffer {
project_id, project_id,
@@ -1356,7 +1355,7 @@ impl RemoteWorktree {
buffer_handle.update(&mut cx, |buffer, cx| { buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, mtime, cx); buffer.did_save(version.clone(), fingerprint, mtime, cx);
}); })?;
Ok(()) Ok(())
}) })
@@ -1436,7 +1435,7 @@ impl RemoteWorktree {
let entry = snapshot.insert_entry(entry); let entry = snapshot.insert_entry(entry);
worktree.snapshot = snapshot.clone(); worktree.snapshot = snapshot.clone();
entry entry
}) })?
}) })
} }
@@ -2634,7 +2633,7 @@ impl language2::File for File {
} }
fn worktree_id(&self) -> usize { fn worktree_id(&self) -> usize {
self.worktree.id() self.worktree.entity_id().as_u64() as usize
} }
fn is_deleted(&self) -> bool { fn is_deleted(&self) -> bool {
@@ -2647,7 +2646,7 @@ impl language2::File for File {
fn to_proto(&self) -> rpc::proto::File { fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File { rpc::proto::File {
worktree_id: self.worktree.id() as u64, worktree_id: self.worktree.entity_id().as_u64(),
entry_id: self.entry_id.to_proto(), entry_id: self.entry_id.to_proto(),
path: self.path.to_string_lossy().into(), path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()), mtime: Some(self.mtime.into()),
@@ -2670,8 +2669,7 @@ impl language2::LocalFile for File {
let worktree = self.worktree.read(cx).as_local().unwrap(); let worktree = self.worktree.read(cx).as_local().unwrap();
let abs_path = worktree.absolutize(&self.path); let abs_path = worktree.absolutize(&self.path);
let fs = worktree.fs.clone(); let fs = worktree.fs.clone();
cx.background() cx.executor().spawn(async move { fs.load(&abs_path).await })
.spawn(async move { fs.load(&abs_path).await })
} }
fn buffer_reloaded( fn buffer_reloaded(
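The worktree.rs hunks above drop `spawn_weak` in favour of `cx.spawn` with an explicit weak-handle upgrade, so a dropped worktree now surfaces as an error (`upgrade().ok_or_else(...)` / `.context("worktree dropped")?`) instead of silently ending the task. A small sketch of that upgrade-or-error shape using std's `Arc`/`Weak` and `anyhow`, not the actual gpui2 handle types:

use anyhow::{Context as _, Result};
use std::sync::{Arc, Weak};

// Plain std Weak used in place of a weak entity handle: upgrading either
// yields the live value or becomes a reportable error for the async task.
fn root_name_len(weak: &Weak<String>) -> Result<usize> {
    let worktree = weak.upgrade().context("worktree was dropped")?;
    Ok(worktree.len())
}

fn main() -> Result<()> {
    let strong = Arc::new(String::from("root"));
    let weak = Arc::downgrade(&strong);
    assert_eq!(root_name_len(&weak)?, 4);
    drop(strong);
    // Once the strong handle is gone, the upgrade fails and propagates as an error.
    assert!(root_name_len(&weak).is_err());
    Ok(())
}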
View file
@@ -13,6 +13,7 @@ use fs::RealFs;
use futures::{channel::mpsc, SinkExt, StreamExt}; use futures::{channel::mpsc, SinkExt, StreamExt};
use gpui2::{App, AppContext, AssetSource, AsyncAppContext, SemanticVersion, Task}; use gpui2::{App, AppContext, AssetSource, AsyncAppContext, SemanticVersion, Task};
use isahc::{prelude::Configurable, Request}; use isahc::{prelude::Configurable, Request};
use language2::LanguageRegistry;
use log::LevelFilter; use log::LevelFilter;
use parking_lot::Mutex; use parking_lot::Mutex;
@@ -73,7 +74,7 @@ fn main() {
let _user_keymap_file_rx = let _user_keymap_file_rx =
watch_config_file(&app.executor(), fs.clone(), paths::KEYMAP.clone()); watch_config_file(&app.executor(), fs.clone(), paths::KEYMAP.clone());
let _login_shell_env_loaded = if stdout_is_a_pty() { let login_shell_env_loaded = if stdout_is_a_pty() {
Task::ready(()) Task::ready(())
} else { } else {
app.executor().spawn(async { app.executor().spawn(async {
@@ -114,7 +115,7 @@ fn main() {
// languages.set_executor(cx.background().clone()); // languages.set_executor(cx.background().clone());
// languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone()); // languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
// let languages = Arc::new(languages); // let languages = Arc::new(languages);
// let node_runtime = RealNodeRuntime::new(http.clone()); let node_runtime = RealNodeRuntime::new(http.clone());
// languages::init(languages.clone(), node_runtime.clone(), cx); // languages::init(languages.clone(), node_runtime.clone(), cx);
// let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx)); // let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));