Merge branch 'main' into window_context_2

Commit 33bc47dbe2 by Antonio Scandurra, 2023-04-14 12:06:37 +02:00
53 changed files with 3951 additions and 2474 deletions

Cargo.lock (generated; 9 lines changed)

@@ -1192,7 +1192,7 @@ dependencies = [
 [[package]]
 name = "collab"
-version = "0.8.2"
+version = "0.8.3"
 dependencies = [
  "anyhow",
  "async-tungstenite",
@@ -4589,14 +4589,15 @@ checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7"
 [[package]]
 name = "postage"
-version = "0.4.1"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a63d25391d04a097954b76aba742b6b5b74f213dfe3dbaeeb36e8ddc1c657f0b"
+checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1"
 dependencies = [
  "atomic",
  "crossbeam-queue",
  "futures 0.3.25",
  "log",
+ "parking_lot 0.12.1",
  "pin-project",
  "pollster",
  "static_assertions",
@@ -8515,7 +8516,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
 [[package]]
 name = "zed"
-version = "0.82.0"
+version = "0.83.0"
 dependencies = [
  "activity_indicator",
  "anyhow",


@@ -75,7 +75,7 @@ serde = { version = "1.0", features = ["derive", "rc"] }
 serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
 serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
 rand = { version = "0.8" }
-postage = { version = "0.4.1", features = ["futures-traits"] }
+postage = { version = "0.5", features = ["futures-traits"] }

 [patch.crates-io]
 tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" }


@@ -63,10 +63,10 @@ pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut AppContext
     cx.observe_global::<Settings, _>(move |updater, cx| {
         if cx.global::<Settings>().auto_update {
             if update_subscription.is_none() {
-                *(&mut update_subscription) = Some(updater.start_polling(cx))
+                update_subscription = Some(updater.start_polling(cx))
             }
         } else {
-            (&mut update_subscription).take();
+            update_subscription.take();
         }
     })
     .detach();


@@ -419,7 +419,7 @@ impl Room {
             false
         });

-        let response = self.client.request(proto::RejoinRoom {
+        let response = self.client.request_envelope(proto::RejoinRoom {
             id: self.id,
             reshared_projects,
             rejoined_projects,
@@ -427,6 +427,8 @@ impl Room {
         cx.spawn(|this, mut cx| async move {
             let response = response.await?;
+            let message_id = response.message_id;
+            let response = response.payload;
             let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
             this.update(&mut cx, |this, cx| {
                 this.status = RoomStatus::Online;
@@ -443,7 +445,7 @@ impl Room {
                 for rejoined_project in response.rejoined_projects {
                     if let Some(project) = projects.get(&rejoined_project.id) {
                         project.update(cx, |project, cx| {
-                            project.rejoined(rejoined_project, cx).log_err();
+                            project.rejoined(rejoined_project, message_id, cx).log_err();
                         });
                     }
                 }


@@ -45,3 +45,4 @@ collections = { path = "../collections", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 rpc = { path = "../rpc", features = ["test-support"] }
 settings = { path = "../settings", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }


@@ -10,7 +10,10 @@ use async_tungstenite::tungstenite::{
     error::Error as WebsocketError,
     http::{Request, StatusCode},
 };
-use futures::{future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryStreamExt};
+use futures::{
+    future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryFutureExt as _,
+    TryStreamExt,
+};
 use gpui::{
     actions,
     platform::AppVersion,
@@ -471,18 +474,22 @@ impl Client {
     pub fn subscribe_to_entity<T: Entity>(
         self: &Arc<Self>,
         remote_id: u64,
-    ) -> PendingEntitySubscription<T> {
+    ) -> Result<PendingEntitySubscription<T>> {
         let id = (TypeId::of::<T>(), remote_id);
-        self.state
-            .write()
-            .entities_by_type_and_remote_id
-            .insert(id, WeakSubscriber::Pending(Default::default()));
-        PendingEntitySubscription {
-            client: self.clone(),
-            remote_id,
-            consumed: false,
-            _entity_type: PhantomData,
+
+        let mut state = self.state.write();
+        if state.entities_by_type_and_remote_id.contains_key(&id) {
+            return Err(anyhow!("already subscribed to entity"));
+        } else {
+            state
+                .entities_by_type_and_remote_id
+                .insert(id, WeakSubscriber::Pending(Default::default()));
+            Ok(PendingEntitySubscription {
+                client: self.clone(),
+                remote_id,
+                consumed: false,
+                _entity_type: PhantomData,
+            })
         }
     }
@@ -1188,6 +1195,14 @@ impl Client {
         &self,
         request: T,
     ) -> impl Future<Output = Result<T::Response>> {
+        self.request_envelope(request)
+            .map_ok(|envelope| envelope.payload)
+    }
+
+    pub fn request_envelope<T: RequestMessage>(
+        &self,
+        request: T,
+    ) -> impl Future<Output = Result<TypedEnvelope<T::Response>>> {
         let client_id = self.id;
         log::debug!(
             "rpc request start. client_id:{}. name:{}",
@@ -1196,7 +1211,7 @@
         );
         let response = self
             .connection_id()
-            .map(|conn_id| self.peer.request(conn_id, request));
+            .map(|conn_id| self.peer.request_envelope(conn_id, request));
         async move {
             let response = response?.await;
             log::debug!(
@@ -1595,14 +1610,17 @@ mod tests {
         let _subscription1 = client
             .subscribe_to_entity(1)
+            .unwrap()
             .set_model(&model1, &mut cx.to_async());
         let _subscription2 = client
             .subscribe_to_entity(2)
+            .unwrap()
             .set_model(&model2, &mut cx.to_async());

         // Ensure dropping a subscription for the same entity type still allows receiving of
         // messages for other entity IDs of the same type.
         let subscription3 = client
             .subscribe_to_entity(3)
+            .unwrap()
             .set_model(&model3, &mut cx.to_async());
         drop(subscription3);
@@ -1631,11 +1649,13 @@
         },
     );
     drop(subscription1);
-    let _subscription2 =
-        client.add_message_handler(model, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
+    let _subscription2 = client.add_message_handler(
+        model.clone(),
+        move |_, _: TypedEnvelope<proto::Ping>, _, _| {
             done_tx2.try_send(()).unwrap();
             async { Ok(()) }
-        });
+        },
+    );
     server.send(proto::Ping {});
     done_rx2.next().await.unwrap();
 }
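
Two API shifts in this file are worth calling out: `request` is now a thin wrapper that strips the envelope, so callers that need protocol metadata (like `Room::rejoin` above, which reads `message_id`) can call `request_envelope` instead; and `subscribe_to_entity` is now fallible, returning an error on double-subscription. A minimal sketch of how call sites change (hypothetical `project` handle and `remote_id`, simplified from the test updates above):

    // Subscribing can fail if a subscription for this entity id already exists,
    // so call sites now unwrap or propagate the Result:
    let _subscription = client
        .subscribe_to_entity::<Project>(remote_id)?
        .set_model(&project, &mut cx.to_async());

    // request() is now request_envelope(...).map_ok(|envelope| envelope.payload);
    // callers that want the RPC metadata keep the whole envelope:
    let envelope = client.request_envelope(proto::RejoinRoom { /* ... */ }).await?;
    let message_id = envelope.message_id;
    let response = envelope.payload;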


@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
 default-run = "collab"
 edition = "2021"
 name = "collab"
-version = "0.8.2"
+version = "0.8.3"
 publish = false

 [[bin]]


@@ -175,25 +175,39 @@ impl Database {
             .map(|participant| participant.user_id)
             .collect::<Vec<_>>();

-        // Delete participants who failed to reconnect.
+        // Delete participants who failed to reconnect and cancel their calls.
+        let mut canceled_calls_to_user_ids = Vec::new();
         room_participant::Entity::delete_many()
             .filter(stale_participant_filter)
             .exec(&*tx)
             .await?;
-
-        let room = self.get_room(room_id, &tx).await?;
-        let mut canceled_calls_to_user_ids = Vec::new();
-        // Delete the room if it becomes empty and cancel pending calls.
-        if room.participants.is_empty() {
-            canceled_calls_to_user_ids.extend(
-                room.pending_participants
-                    .iter()
-                    .map(|pending_participant| UserId::from_proto(pending_participant.user_id)),
-            );
-            room_participant::Entity::delete_many()
-                .filter(room_participant::Column::RoomId.eq(room_id))
-                .exec(&*tx)
-                .await?;
+        let called_participants = room_participant::Entity::find()
+            .filter(
+                Condition::all()
+                    .add(
+                        room_participant::Column::CallingUserId
+                            .is_in(stale_participant_user_ids.iter().copied()),
+                    )
+                    .add(room_participant::Column::AnsweringConnectionId.is_null()),
+            )
+            .all(&*tx)
+            .await?;
+        room_participant::Entity::delete_many()
+            .filter(
+                room_participant::Column::Id
+                    .is_in(called_participants.iter().map(|participant| participant.id)),
+            )
+            .exec(&*tx)
+            .await?;
+        canceled_calls_to_user_ids.extend(
+            called_participants
+                .into_iter()
+                .map(|participant| participant.user_id),
+        );
+
+        let room = self.get_room(room_id, &tx).await?;
+        // Delete the room if it becomes empty.
+        if room.participants.is_empty() {
             project::Entity::delete_many()
                 .filter(project::Column::RoomId.eq(room_id))
                 .exec(&*tx)

@@ -228,7 +228,7 @@ impl Server {
             .add_message_handler(update_buffer_file)
             .add_message_handler(buffer_reloaded)
             .add_message_handler(buffer_saved)
-            .add_request_handler(save_buffer)
+            .add_request_handler(forward_project_request::<proto::SaveBuffer>)
             .add_request_handler(get_users)
             .add_request_handler(fuzzy_search_users)
             .add_request_handler(request_contact)
@@ -1591,51 +1591,6 @@ where
     Ok(())
 }

-async fn save_buffer(
-    request: proto::SaveBuffer,
-    response: Response<proto::SaveBuffer>,
-    session: Session,
-) -> Result<()> {
-    let project_id = ProjectId::from_proto(request.project_id);
-    let host_connection_id = {
-        let collaborators = session
-            .db()
-            .await
-            .project_collaborators(project_id, session.connection_id)
-            .await?;
-        collaborators
-            .iter()
-            .find(|collaborator| collaborator.is_host)
-            .ok_or_else(|| anyhow!("host not found"))?
-            .connection_id
-    };
-    let response_payload = session
-        .peer
-        .forward_request(session.connection_id, host_connection_id, request.clone())
-        .await?;
-
-    let mut collaborators = session
-        .db()
-        .await
-        .project_collaborators(project_id, session.connection_id)
-        .await?;
-    collaborators.retain(|collaborator| collaborator.connection_id != session.connection_id);
-    let project_connection_ids = collaborators
-        .iter()
-        .map(|collaborator| collaborator.connection_id);
-    broadcast(
-        Some(host_connection_id),
-        project_connection_ids,
-        |conn_id| {
-            session
-                .peer
-                .forward_send(host_connection_id, conn_id, response_payload.clone())
-        },
-    );
-    response.send(response_payload)?;
-    Ok(())
-}
-
 async fn create_buffer_for_peer(
     request: proto::CreateBufferForPeer,
     session: Session,
@@ -1655,23 +1610,42 @@ async fn update_buffer(
 ) -> Result<()> {
     session.executor.record_backtrace();
     let project_id = ProjectId::from_proto(request.project_id);
-    let project_connection_ids = session
-        .db()
-        .await
-        .project_connection_ids(project_id, session.connection_id)
-        .await?;
+    let mut guest_connection_ids;
+    let mut host_connection_id = None;
+    {
+        let collaborators = session
+            .db()
+            .await
+            .project_collaborators(project_id, session.connection_id)
+            .await?;
+        guest_connection_ids = Vec::with_capacity(collaborators.len() - 1);
+        for collaborator in collaborators.iter() {
+            if collaborator.is_host {
+                host_connection_id = Some(collaborator.connection_id);
+            } else {
+                guest_connection_ids.push(collaborator.connection_id);
+            }
+        }
+    }
+    let host_connection_id = host_connection_id.ok_or_else(|| anyhow!("host not found"))?;

     session.executor.record_backtrace();
     broadcast(
         Some(session.connection_id),
-        project_connection_ids.iter().copied(),
+        guest_connection_ids,
         |connection_id| {
             session
                 .peer
                 .forward_send(session.connection_id, connection_id, request.clone())
         },
     );
+    if host_connection_id != session.connection_id {
+        session
+            .peer
+            .forward_request(session.connection_id, host_connection_id, request.clone())
+            .await?;
+    }
+
     response.send(proto::Ack {})?;
     Ok(())
 }


@@ -18,9 +18,10 @@ use parking_lot::Mutex;
 use project::{Project, WorktreeId};
 use settings::Settings;
 use std::{
+    cell::{Ref, RefCell, RefMut},
     env,
-    ops::Deref,
-    path::{Path, PathBuf},
+    ops::{Deref, DerefMut},
+    path::Path,
     sync::{
         atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
         Arc,
@@ -209,13 +210,10 @@ impl TestServer {
         let client = TestClient {
             client,
             username: name.to_string(),
-            local_projects: Default::default(),
-            remote_projects: Default::default(),
-            next_root_dir_id: 0,
+            state: Default::default(),
             user_store,
             fs,
             language_registry: Arc::new(LanguageRegistry::test()),
-            buffers: Default::default(),
         };
         client.wait_for_current_user(cx).await;
         client
@@ -314,12 +312,16 @@ impl Drop for TestServer {
 struct TestClient {
     client: Arc<Client>,
     username: String,
-    local_projects: Vec<ModelHandle<Project>>,
-    remote_projects: Vec<ModelHandle<Project>>,
-    next_root_dir_id: usize,
+    state: RefCell<TestClientState>,
     pub user_store: ModelHandle<UserStore>,
     language_registry: Arc<LanguageRegistry>,
     fs: Arc<FakeFs>,
+}
+
+#[derive(Default)]
+struct TestClientState {
+    local_projects: Vec<ModelHandle<Project>>,
+    remote_projects: Vec<ModelHandle<Project>>,
     buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
 }
@@ -358,6 +360,38 @@ impl TestClient {
             .await;
     }

+    fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
+        Ref::map(self.state.borrow(), |state| &state.local_projects)
+    }
+
+    fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
+        Ref::map(self.state.borrow(), |state| &state.remote_projects)
+    }
+
+    fn local_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
+        RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
+    }
+
+    fn remote_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
+        RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
+    }
+
+    fn buffers_for_project<'a>(
+        &'a self,
+        project: &ModelHandle<Project>,
+    ) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
+        RefMut::map(self.state.borrow_mut(), |state| {
+            state.buffers.entry(project.clone()).or_default()
+        })
+    }
+
+    fn buffers<'a>(
+        &'a self,
+    ) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
+    {
+        RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
+    }
+
     fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
         self.user_store.read_with(cx, |store, _| ContactsSummary {
             current: store
@@ -431,15 +465,6 @@ impl TestClient {
         let (_, root_view) = cx.add_window(|_| EmptyView);
         cx.add_view(&root_view, |cx| Workspace::test_new(project.clone(), cx))
     }
-
-    fn create_new_root_dir(&mut self) -> PathBuf {
-        format!(
-            "/{}-root-{}",
-            self.username,
-            util::post_inc(&mut self.next_root_dir_id)
-        )
-        .into()
-    }
 }

 impl Drop for TestClient {

@@ -1629,9 +1629,7 @@ async fn test_project_reconnect(
         })
         .await
         .unwrap();
-    worktree_a2
-        .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
-        .await;
+    deterministic.run_until_parked();
     let worktree2_id = worktree_a2.read_with(cx_a, |tree, _| {
         assert!(tree.as_local().unwrap().is_shared());
         tree.id()
@@ -1692,11 +1690,9 @@
         .unwrap();

     // While client A is disconnected, add and remove worktrees from client A's project.
-    project_a1
-        .update(cx_a, |project, cx| {
-            project.remove_worktree(worktree2_id, cx)
-        })
-        .await;
+    project_a1.update(cx_a, |project, cx| {
+        project.remove_worktree(worktree2_id, cx)
+    });
     let (worktree_a3, _) = project_a1
         .update(cx_a, |p, cx| {
             p.find_or_create_local_worktree("/root-1/dir3", true, cx)
@@ -1820,18 +1816,14 @@
         })
         .await
         .unwrap();
-    worktree_a4
-        .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
-        .await;
+    deterministic.run_until_parked();
     let worktree4_id = worktree_a4.read_with(cx_a, |tree, _| {
         assert!(tree.as_local().unwrap().is_shared());
         tree.id()
     });
-    project_a1
-        .update(cx_a, |project, cx| {
-            project.remove_worktree(worktree3_id, cx)
-        })
-        .await;
+    project_a1.update(cx_a, |project, cx| {
+        project.remove_worktree(worktree3_id, cx)
+    });
     deterministic.run_until_parked();

     // While client B is disconnected, mutate a buffer on both the host and the guest.

File diff suppressed because it is too large


@@ -395,10 +395,10 @@ impl CollabTitlebarItem {
         let icon;
         let tooltip;
         if room.read(cx).is_screen_sharing() {
-            icon = "icons/disable_screen_sharing_12.svg";
+            icon = "icons/enable_screen_sharing_12.svg";
             tooltip = "Stop Sharing Screen"
         } else {
-            icon = "icons/enable_screen_sharing_12.svg";
+            icon = "icons/disable_screen_sharing_12.svg";
             tooltip = "Share Screen";
         }


@@ -15,12 +15,9 @@ pub struct BlinkManager {
 impl BlinkManager {
     pub fn new(blink_interval: Duration, cx: &mut ModelContext<Self>) -> Self {
-        let weak_handle = cx.weak_handle();
-        cx.observe_global::<Settings, _>(move |_, cx| {
-            if let Some(this) = weak_handle.upgrade(cx) {
-                // Make sure we blink the cursors if the setting is re-enabled
-                this.update(cx, |this, cx| this.blink_cursors(this.blink_epoch, cx));
-            }
+        cx.observe_global::<Settings, _>(move |this, cx| {
+            // Make sure we blink the cursors if the setting is re-enabled
+            this.blink_cursors(this.blink_epoch, cx)
         })
         .detach();


@@ -1040,7 +1040,8 @@ impl CopilotState {
         let completion = self.completions.get(self.active_completion_index)?;
         let excerpt_id = self.excerpt_id?;
         let completion_buffer = buffer.buffer_for_excerpt(excerpt_id)?;
-        if !completion.range.start.is_valid(completion_buffer)
+        if excerpt_id != cursor.excerpt_id
+            || !completion.range.start.is_valid(completion_buffer)
             || !completion.range.end.is_valid(completion_buffer)
         {
             return None;
@@ -6619,13 +6620,15 @@ impl Editor {
                 .as_singleton()
                 .and_then(|b| b.read(cx).file()),
         ) {
+            let settings = cx.global::<Settings>();
             let extension = Path::new(file.file_name(cx))
                 .extension()
                 .and_then(|e| e.to_str());
             project.read(cx).client().report_event(
                 name,
-                json!({ "File Extension": extension }),
-                cx.global::<Settings>().telemetry(),
+                json!({ "File Extension": extension, "Vim Mode": settings.vim_mode }),
+                settings.telemetry(),
             );
         }
     }


@@ -6163,6 +6163,110 @@ async fn test_copilot_completion_invalidation(
     });
 }

+#[gpui::test]
+async fn test_copilot_multibuffer(
+    deterministic: Arc<Deterministic>,
+    cx: &mut gpui::TestAppContext,
+) {
+    let (copilot, copilot_lsp) = Copilot::fake(cx);
+    cx.update(|cx| {
+        cx.set_global(Settings::test(cx));
+        cx.set_global(copilot)
+    });
+
+    let buffer_1 = cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx));
+    let buffer_2 = cx.add_model(|cx| Buffer::new(0, "c = 3\nd = 4\n", cx));
+    let multibuffer = cx.add_model(|cx| {
+        let mut multibuffer = MultiBuffer::new(0);
+        multibuffer.push_excerpts(
+            buffer_1.clone(),
+            [ExcerptRange {
+                context: Point::new(0, 0)..Point::new(2, 0),
+                primary: None,
+            }],
+            cx,
+        );
+        multibuffer.push_excerpts(
+            buffer_2.clone(),
+            [ExcerptRange {
+                context: Point::new(0, 0)..Point::new(2, 0),
+                primary: None,
+            }],
+            cx,
+        );
+        multibuffer
+    });
+    let (_, editor) = cx.add_window(|cx| build_editor(multibuffer, cx));
+
+    handle_copilot_completion_request(
+        &copilot_lsp,
+        vec![copilot::request::Completion {
+            text: "b = 2 + a".into(),
+            range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 5)),
+            ..Default::default()
+        }],
+        vec![],
+    );
+    editor.update(cx, |editor, cx| {
+        // Ensure copilot suggestions are shown for the first excerpt.
+        editor.change_selections(None, cx, |s| {
+            s.select_ranges([Point::new(1, 5)..Point::new(1, 5)])
+        });
+        editor.next_copilot_suggestion(&Default::default(), cx);
+    });
+    deterministic.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
+    editor.update(cx, |editor, cx| {
+        assert!(editor.has_active_copilot_suggestion(cx));
+        assert_eq!(
+            editor.display_text(cx),
+            "\n\na = 1\nb = 2 + a\n\n\n\nc = 3\nd = 4\n"
+        );
+        assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n");
+    });
+
+    handle_copilot_completion_request(
+        &copilot_lsp,
+        vec![copilot::request::Completion {
+            text: "d = 4 + c".into(),
+            range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 6)),
+            ..Default::default()
+        }],
+        vec![],
+    );
+    editor.update(cx, |editor, cx| {
+        // Move to another excerpt, ensuring the suggestion gets cleared.
+        editor.change_selections(None, cx, |s| {
+            s.select_ranges([Point::new(4, 5)..Point::new(4, 5)])
+        });
+        assert!(!editor.has_active_copilot_suggestion(cx));
+        assert_eq!(
+            editor.display_text(cx),
+            "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4\n"
+        );
+        assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n");
+
+        // Type a character, ensuring we don't even try to interpolate the previous suggestion.
+        editor.handle_input(" ", cx);
+        assert!(!editor.has_active_copilot_suggestion(cx));
+        assert_eq!(
+            editor.display_text(cx),
+            "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 \n"
+        );
+        assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n");
+    });
+
+    // Ensure the new suggestion is displayed when the debounce timeout expires.
+    deterministic.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
+    editor.update(cx, |editor, cx| {
+        assert!(editor.has_active_copilot_suggestion(cx));
+        assert_eq!(
+            editor.display_text(cx),
+            "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 + c\n"
+        );
+        assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n");
+    });
+}
+
 fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
     let point = DisplayPoint::new(row as u32, column as u32);
     point..point


@@ -5,7 +5,7 @@ use fsevent::EventStream;
 use futures::{future::BoxFuture, Stream, StreamExt};
 use git2::Repository as LibGitRepository;
 use lazy_static::lazy_static;
-use parking_lot::Mutex as SyncMutex;
+use parking_lot::Mutex;
 use regex::Regex;
 use repository::GitRepository;
 use rope::Rope;
@@ -27,8 +27,6 @@ use util::ResultExt;
 #[cfg(any(test, feature = "test-support"))]
 use collections::{btree_map, BTreeMap};
 #[cfg(any(test, feature = "test-support"))]
-use futures::lock::Mutex;
-#[cfg(any(test, feature = "test-support"))]
 use repository::FakeGitRepositoryState;
 #[cfg(any(test, feature = "test-support"))]
 use std::sync::Weak;
@@ -117,7 +115,7 @@ pub trait Fs: Send + Sync {
         path: &Path,
         latency: Duration,
     ) -> Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>;
-    fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>>;
+    fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<Mutex<dyn GitRepository>>>;
     fn is_fake(&self) -> bool;
     #[cfg(any(test, feature = "test-support"))]
     fn as_fake(&self) -> &FakeFs;
@@ -350,11 +348,11 @@ impl Fs for RealFs {
         })))
     }

-    fn open_repo(&self, dotgit_path: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>> {
+    fn open_repo(&self, dotgit_path: &Path) -> Option<Arc<Mutex<dyn GitRepository>>> {
         LibGitRepository::open(&dotgit_path)
             .log_err()
-            .and_then::<Arc<SyncMutex<dyn GitRepository>>, _>(|libgit_repository| {
-                Some(Arc::new(SyncMutex::new(libgit_repository)))
+            .and_then::<Arc<Mutex<dyn GitRepository>>, _>(|libgit_repository| {
+                Some(Arc::new(Mutex::new(libgit_repository)))
             })
     }
@@ -396,7 +394,7 @@ enum FakeFsEntry {
         inode: u64,
         mtime: SystemTime,
         entries: BTreeMap<String, Arc<Mutex<FakeFsEntry>>>,
-        git_repo_state: Option<Arc<SyncMutex<repository::FakeGitRepositoryState>>>,
+        git_repo_state: Option<Arc<Mutex<repository::FakeGitRepositoryState>>>,
     },
     Symlink {
         target: PathBuf,
@@ -405,18 +403,14 @@
 #[cfg(any(test, feature = "test-support"))]
 impl FakeFsState {
-    async fn read_path<'a>(&'a self, target: &Path) -> Result<Arc<Mutex<FakeFsEntry>>> {
+    fn read_path<'a>(&'a self, target: &Path) -> Result<Arc<Mutex<FakeFsEntry>>> {
         Ok(self
             .try_read_path(target)
-            .await
             .ok_or_else(|| anyhow!("path does not exist: {}", target.display()))?
             .0)
     }

-    async fn try_read_path<'a>(
-        &'a self,
-        target: &Path,
-    ) -> Option<(Arc<Mutex<FakeFsEntry>>, PathBuf)> {
+    fn try_read_path<'a>(&'a self, target: &Path) -> Option<(Arc<Mutex<FakeFsEntry>>, PathBuf)> {
         let mut path = target.to_path_buf();
         let mut real_path = PathBuf::new();
         let mut entry_stack = Vec::new();
@@ -438,10 +432,10 @@
                 }
                 Component::Normal(name) => {
                     let current_entry = entry_stack.last().cloned()?;
-                    let current_entry = current_entry.lock().await;
+                    let current_entry = current_entry.lock();
                     if let FakeFsEntry::Dir { entries, .. } = &*current_entry {
                         let entry = entries.get(name.to_str().unwrap()).cloned()?;
-                        let _entry = entry.lock().await;
+                        let _entry = entry.lock();
                         if let FakeFsEntry::Symlink { target, .. } = &*_entry {
                             let mut target = target.clone();
                             target.extend(path_components);
@@ -462,7 +456,7 @@
         entry_stack.pop().map(|entry| (entry, real_path))
     }

-    async fn write_path<Fn, T>(&self, path: &Path, callback: Fn) -> Result<T>
+    fn write_path<Fn, T>(&self, path: &Path, callback: Fn) -> Result<T>
     where
         Fn: FnOnce(btree_map::Entry<String, Arc<Mutex<FakeFsEntry>>>) -> Result<T>,
     {
@@ -472,8 +466,8 @@
             .ok_or_else(|| anyhow!("cannot overwrite the root"))?;
         let parent_path = path.parent().unwrap();

-        let parent = self.read_path(parent_path).await?;
-        let mut parent = parent.lock().await;
+        let parent = self.read_path(parent_path)?;
+        let mut parent = parent.lock();
         let new_entry = parent
             .dir_entries(parent_path)?
             .entry(filename.to_str().unwrap().into());
@@ -529,7 +523,7 @@
     }

     pub async fn insert_file(&self, path: impl AsRef<Path>, content: String) {
-        let mut state = self.state.lock().await;
+        let mut state = self.state.lock();
         let path = path.as_ref();
         let inode = state.next_inode;
         let mtime = state.next_mtime;
@@ -552,13 +546,12 @@
             }
             Ok(())
         })
-        .await
         .unwrap();
         state.emit_event(&[path]);
     }

     pub async fn insert_symlink(&self, path: impl AsRef<Path>, target: PathBuf) {
-        let mut state = self.state.lock().await;
+        let mut state = self.state.lock();
         let path = path.as_ref();
         let file = Arc::new(Mutex::new(FakeFsEntry::Symlink { target }));
         state
@@ -572,21 +565,20 @@
                 Ok(())
             }
         })
-        .await
         .unwrap();
         state.emit_event(&[path]);
     }

     pub async fn pause_events(&self) {
-        self.state.lock().await.events_paused = true;
+        self.state.lock().events_paused = true;
     }

     pub async fn buffered_event_count(&self) -> usize {
-        self.state.lock().await.buffered_events.len()
+        self.state.lock().buffered_events.len()
     }

     pub async fn flush_events(&self, count: usize) {
-        self.state.lock().await.flush_events(count);
+        self.state.lock().flush_events(count);
     }

     #[must_use]
@@ -625,9 +617,9 @@
     }

     pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
-        let mut state = self.state.lock().await;
-        let entry = state.read_path(dot_git).await.unwrap();
-        let mut entry = entry.lock().await;
+        let mut state = self.state.lock();
+        let entry = state.read_path(dot_git).unwrap();
+        let mut entry = entry.lock();

         if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
             let repo_state = git_repo_state.get_or_insert_with(Default::default);
@@ -646,12 +638,12 @@
         }
     }

-    pub async fn paths(&self) -> Vec<PathBuf> {
+    pub fn paths(&self) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();
-        queue.push_back((PathBuf::from("/"), self.state.lock().await.root.clone()));
+        queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
         while let Some((path, entry)) = queue.pop_front() {
-            if let FakeFsEntry::Dir { entries, .. } = &*entry.lock().await {
+            if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() {
                 for (name, entry) in entries {
                     queue.push_back((path.join(name), entry.clone()));
                 }
@@ -661,12 +653,12 @@
         result
     }

-    pub async fn directories(&self) -> Vec<PathBuf> {
+    pub fn directories(&self) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();
-        queue.push_back((PathBuf::from("/"), self.state.lock().await.root.clone()));
+        queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
         while let Some((path, entry)) = queue.pop_front() {
-            if let FakeFsEntry::Dir { entries, .. } = &*entry.lock().await {
+            if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() {
                 for (name, entry) in entries {
                     queue.push_back((path.join(name), entry.clone()));
                 }
@@ -676,12 +668,12 @@
         result
     }

-    pub async fn files(&self) -> Vec<PathBuf> {
+    pub fn files(&self) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();
-        queue.push_back((PathBuf::from("/"), self.state.lock().await.root.clone()));
+        queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
         while let Some((path, entry)) = queue.pop_front() {
-            let e = entry.lock().await;
+            let e = entry.lock();
             match &*e {
                 FakeFsEntry::File { .. } => result.push(path),
                 FakeFsEntry::Dir { entries, .. } => {
@@ -745,11 +737,11 @@
 impl Fs for FakeFs {
     async fn create_dir(&self, path: &Path) -> Result<()> {
         self.simulate_random_delay().await;
-        let mut state = self.state.lock().await;

         let mut created_dirs = Vec::new();
         let mut cur_path = PathBuf::new();
         for component in path.components() {
+            let mut state = self.state.lock();
             cur_path.push(component);
             if cur_path == Path::new("/") {
                 continue;
@@ -759,8 +751,7 @@
             let mtime = state.next_mtime;
             state.next_mtime += Duration::from_nanos(1);
             state.next_inode += 1;
-            state
-                .write_path(&cur_path, |entry| {
+            state.write_path(&cur_path, |entry| {
                 entry.or_insert_with(|| {
                     created_dirs.push(cur_path.clone());
                     Arc::new(Mutex::new(FakeFsEntry::Dir {
@@ -771,17 +762,16 @@
                     }))
                 });
                 Ok(())
-                })
-                .await?;
+            })?
         }

-        state.emit_event(&created_dirs);
+        self.state.lock().emit_event(&created_dirs);
         Ok(())
     }

     async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> {
         self.simulate_random_delay().await;
-        let mut state = self.state.lock().await;
+        let mut state = self.state.lock();
         let inode = state.next_inode;
         let mtime = state.next_mtime;
         state.next_mtime += Duration::from_nanos(1);
@@ -791,8 +781,7 @@
             mtime,
             content: String::new(),
         }));
-        state
-            .write_path(path, |entry| {
+        state.write_path(path, |entry| {
             match entry {
                 btree_map::Entry::Occupied(mut e) => {
                     if options.overwrite {
@@ -806,27 +795,25 @@
                 }
             }
             Ok(())
-            })
-            .await?;
+        })?;
         state.emit_event(&[path]);
         Ok(())
     }

     async fn rename(&self, old_path: &Path, new_path: &Path, options: RenameOptions) -> Result<()> {
+        self.simulate_random_delay().await;
         let old_path = normalize_path(old_path);
         let new_path = normalize_path(new_path);
-        let mut state = self.state.lock().await;
-        let moved_entry = state
-            .write_path(&old_path, |e| {
+
+        let mut state = self.state.lock();
+        let moved_entry = state.write_path(&old_path, |e| {
             if let btree_map::Entry::Occupied(e) = e {
                 Ok(e.remove())
             } else {
                 Err(anyhow!("path does not exist: {}", &old_path.display()))
             }
-        })
-        .await?;
-        state
-            .write_path(&new_path, |e| {
+        })?;
+        state.write_path(&new_path, |e| {
             match e {
                 btree_map::Entry::Occupied(mut e) => {
                     if options.overwrite {
@@ -840,23 +827,23 @@
                 }
             }
             Ok(())
-        })
-        .await?;
+        })?;
         state.emit_event(&[old_path, new_path]);
         Ok(())
     }

     async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()> {
+        self.simulate_random_delay().await;
         let source = normalize_path(source);
         let target = normalize_path(target);
-        let mut state = self.state.lock().await;
+
+        let mut state = self.state.lock();
         let mtime = state.next_mtime;
         let inode = util::post_inc(&mut state.next_inode);
         state.next_mtime += Duration::from_nanos(1);
-        let source_entry = state.read_path(&source).await?;
-        let content = source_entry.lock().await.file_content(&source)?.clone();
-        let entry = state
-            .write_path(&target, |e| match e {
+        let source_entry = state.read_path(&source)?;
+        let content = source_entry.lock().file_content(&source)?.clone();
+        let entry = state.write_path(&target, |e| match e {
             btree_map::Entry::Occupied(e) => {
                 if options.overwrite {
                     Ok(Some(e.get().clone()))
@@ -874,25 +861,26 @@
                 })))
                 .clone(),
             )),
-        })
-        .await?;
+        })?;
         if let Some(entry) = entry {
-            entry.lock().await.set_file_content(&target, content)?;
+            entry.lock().set_file_content(&target, content)?;
         }
         state.emit_event(&[target]);
         Ok(())
     }

     async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        self.simulate_random_delay().await;
         let path = normalize_path(path);
         let parent_path = path
             .parent()
             .ok_or_else(|| anyhow!("cannot remove the root"))?;
         let base_name = path.file_name().unwrap();
-        let mut state = self.state.lock().await;
-        let parent_entry = state.read_path(parent_path).await?;
-        let mut parent_entry = parent_entry.lock().await;
+
+        let mut state = self.state.lock();
+        let parent_entry = state.read_path(parent_path)?;
+        let mut parent_entry = parent_entry.lock();
         let entry = parent_entry
             .dir_entries(parent_path)?
             .entry(base_name.to_str().unwrap().into());
@@ -905,7 +893,7 @@
             }
             btree_map::Entry::Occupied(e) => {
                 {
-                    let mut entry = e.get().lock().await;
+                    let mut entry = e.get().lock();
                     let children = entry.dir_entries(&path)?;
                     if !options.recursive && !children.is_empty() {
                         return Err(anyhow!("{path:?} is not empty"));
@@ -919,14 +907,16 @@
     }

     async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        self.simulate_random_delay().await;
         let path = normalize_path(path);
         let parent_path = path
             .parent()
             .ok_or_else(|| anyhow!("cannot remove the root"))?;
         let base_name = path.file_name().unwrap();
-        let mut state = self.state.lock().await;
-        let parent_entry = state.read_path(parent_path).await?;
-        let mut parent_entry = parent_entry.lock().await;
+        let mut state = self.state.lock();
+        let parent_entry = state.read_path(parent_path)?;
+        let mut parent_entry = parent_entry.lock();
         let entry = parent_entry
             .dir_entries(parent_path)?
             .entry(base_name.to_str().unwrap().into());
@@ -937,7 +927,7 @@
                 }
             }
             btree_map::Entry::Occupied(e) => {
-                e.get().lock().await.file_content(&path)?;
+                e.get().lock().file_content(&path)?;
                 e.remove();
             }
         }
@@ -953,9 +943,9 @@
     async fn load(&self, path: &Path) -> Result<String> {
         let path = normalize_path(path);
         self.simulate_random_delay().await;
-        let state = self.state.lock().await;
-        let entry = state.read_path(&path).await?;
-        let entry = entry.lock().await;
+        let state = self.state.lock();
+        let entry = state.read_path(&path)?;
+        let entry = entry.lock();
         entry.file_content(&path).cloned()
     }
@@ -978,8 +968,8 @@
     async fn canonicalize(&self, path: &Path) -> Result<PathBuf> {
         let path = normalize_path(path);
         self.simulate_random_delay().await;
-        let state = self.state.lock().await;
-        if let Some((_, real_path)) = state.try_read_path(&path).await {
+        let state = self.state.lock();
+        if let Some((_, real_path)) = state.try_read_path(&path) {
             Ok(real_path)
         } else {
             Err(anyhow!("path does not exist: {}", path.display()))
@@ -989,9 +979,9 @@
     async fn is_file(&self, path: &Path) -> bool {
         let path = normalize_path(path);
         self.simulate_random_delay().await;
-        let state = self.state.lock().await;
-        if let Some((entry, _)) = state.try_read_path(&path).await {
-            entry.lock().await.is_file()
+        let state = self.state.lock();
+        if let Some((entry, _)) = state.try_read_path(&path) {
+            entry.lock().is_file()
         } else {
             false
         }
@@ -1000,9 +990,9 @@
     async fn metadata(&self, path: &Path) -> Result<Option<Metadata>> {
         self.simulate_random_delay().await;
         let path = normalize_path(path);
-        let state = self.state.lock().await;
-        if let Some((entry, real_path)) = state.try_read_path(&path).await {
-            let entry = entry.lock().await;
+        let state = self.state.lock();
+        if let Some((entry, real_path)) = state.try_read_path(&path) {
+            let entry = entry.lock();
             let is_symlink = real_path != path;

             Ok(Some(match &*entry {
@@ -1031,9 +1021,9 @@
     ) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
         self.simulate_random_delay().await;
         let path = normalize_path(path);
-        let state = self.state.lock().await;
-        let entry = state.read_path(&path).await?;
-        let mut entry = entry.lock().await;
+        let state = self.state.lock();
+        let entry = state.read_path(&path)?;
+        let mut entry = entry.lock();
         let children = entry.dir_entries(&path)?;
         let paths = children
             .keys()
@@ -1047,10 +1037,9 @@
         path: &Path,
         _: Duration,
     ) -> Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>> {
-        let mut state = self.state.lock().await;
         self.simulate_random_delay().await;
         let (tx, rx) = smol::channel::unbounded();
-        state.event_txs.push(tx);
+        self.state.lock().event_txs.push(tx);
         let path = path.to_path_buf();
         let executor = self.executor.clone();
         Box::pin(futures::StreamExt::filter(rx, move |events| {
@@ -1065,22 +1054,18 @@
         }))
     }

-    fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>> {
-        smol::block_on(async move {
-            let state = self.state.lock().await;
-            let entry = state.read_path(abs_dot_git).await.unwrap();
-            let mut entry = entry.lock().await;
+    fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<Mutex<dyn GitRepository>>> {
+        let state = self.state.lock();
+        let entry = state.read_path(abs_dot_git).unwrap();
+        let mut entry = entry.lock();
         if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
             let state = git_repo_state
-                .get_or_insert_with(|| {
-                    Arc::new(SyncMutex::new(FakeGitRepositoryState::default()))
-                })
+                .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default())))
                 .clone();
             Some(repository::FakeGitRepository::open(state))
         } else {
             None
         }
-        })
     }

     fn is_fake(&self) -> bool {
@@ -1213,7 +1198,7 @@ mod tests {
             .await;

         assert_eq!(
-            fs.files().await,
+            fs.files(),
             vec![
                 PathBuf::from("/root/dir1/a"),
                 PathBuf::from("/root/dir1/b"),

@@ -2086,7 +2086,7 @@ impl UpgradeModelHandle for AppContext {
         &self,
         handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
-        if self.models.contains_key(&handle.model_id) {
+        if self.ref_counts.lock().is_entity_alive(handle.model_id) {
             Some(ModelHandle::new(handle.model_id, &self.ref_counts))
         } else {
             None
@@ -2094,11 +2094,11 @@
     }

     fn model_handle_is_upgradable<T: Entity>(&self, handle: &WeakModelHandle<T>) -> bool {
-        self.models.contains_key(&handle.model_id)
+        self.ref_counts.lock().is_entity_alive(handle.model_id)
     }

     fn upgrade_any_model_handle(&self, handle: &AnyWeakModelHandle) -> Option<AnyModelHandle> {
-        if self.models.contains_key(&handle.model_id) {
+        if self.ref_counts.lock().is_entity_alive(handle.model_id) {
             Some(AnyModelHandle::new(
                 handle.model_id,
                 handle.model_type,


@@ -31,6 +31,7 @@ use super::{
     ref_counts::LeakDetector, window_input_handler::WindowInputHandler, AsyncAppContext, RefCounts,
 };

+#[derive(Clone)]
 pub struct TestAppContext {
     cx: Rc<RefCell<AppContext>>,
     foreground_platform: Rc<platform::test::ForegroundPlatform>,


@@ -829,6 +829,16 @@ impl Background {
         }
     }

+    #[cfg(any(test, feature = "test-support"))]
+    pub fn rng<'a>(&'a self) -> impl 'a + std::ops::DerefMut<Target = rand::prelude::StdRng> {
+        match self {
+            Self::Deterministic { executor, .. } => {
+                parking_lot::lock_api::MutexGuard::map(executor.state.lock(), |s| &mut s.rng)
+            }
+            _ => panic!("this method can only be called on a deterministic executor"),
+        }
+    }
+
     #[cfg(any(test, feature = "test-support"))]
     pub async fn simulate_random_delay(&self) {
         match self {
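
`Background::rng` hands out a guard that derefs straight to the deterministic executor's `StdRng` by projecting the state lock with `lock_api::MutexGuard::map`. A standalone sketch of that guard projection (assuming `parking_lot` and `rand` as dependencies):

    use parking_lot::{Mutex, MutexGuard};
    use rand::{rngs::StdRng, Rng as _, SeedableRng};

    struct ExecutorState {
        rng: StdRng,
    }

    fn main() {
        let state = Mutex::new(ExecutorState { rng: StdRng::seed_from_u64(0) });
        // Project the whole-state guard down to just the rng field, as rng() does;
        // the result is a MappedMutexGuard that derefs to StdRng.
        let mut rng = MutexGuard::map(state.lock(), |s| &mut s.rng);
        let _sample: u64 = rng.gen();
    }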


@@ -46,6 +46,7 @@ pub fn run_test(
         Arc<executor::Deterministic>,
         u64,
     )),
+    on_fail_fn: Option<fn()>,
     fn_name: String,
 ) {
     // let _profiler = dhat::Profiler::new_heap();
@@ -178,6 +179,7 @@
             if is_randomized {
                 eprintln!("failing seed: {}", atomic_seed.load(SeqCst));
             }
+            on_fail_fn.map(|f| f());
             panic::resume_unwind(error);
         }
     }


@@ -1,4 +1,5 @@
 use proc_macro::TokenStream;
+use proc_macro2::Ident;
 use quote::{format_ident, quote};
 use std::mem;
 use syn::{
@@ -15,6 +16,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
     let mut num_iterations = 1;
     let mut starting_seed = 0;
     let mut detect_nondeterminism = false;
+    let mut on_failure_fn_name = quote!(None);

     for arg in args {
         match arg {
@@ -33,6 +35,20 @@
                 Some("retries") => max_retries = parse_int(&meta.lit)?,
                 Some("iterations") => num_iterations = parse_int(&meta.lit)?,
                 Some("seed") => starting_seed = parse_int(&meta.lit)?,
+                Some("on_failure") => {
+                    if let Lit::Str(name) = meta.lit {
+                        let ident = Ident::new(&name.value(), name.span());
+                        on_failure_fn_name = quote!(Some(#ident));
+                    } else {
+                        return Err(TokenStream::from(
+                            syn::Error::new(
+                                meta.lit.span(),
+                                "on_failure argument must be a string",
+                            )
+                            .into_compile_error(),
+                        ));
+                    }
+                }
                 _ => {
                     return Err(TokenStream::from(
                         syn::Error::new(meta.path.span(), "invalid argument")
@@ -152,6 +168,7 @@
                         cx.foreground().run(#inner_fn_name(#inner_fn_args));
                         #cx_teardowns
                     },
+                    #on_failure_fn_name,
                     stringify!(#outer_fn_name).to_string(),
                 );
             }
@@ -187,6 +204,7 @@
                 #max_retries,
                 #detect_nondeterminism,
                 &mut |cx, _, _, seed| #inner_fn_name(#inner_fn_args),
+                #on_failure_fn_name,
                 stringify!(#outer_fn_name).to_string(),
             );
         }
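
With this change a test can name a function to invoke before the panic is re-thrown; the string literal is parsed into an identifier, resolved in the test's scope, and threaded through to `run_test` as `on_fail_fn`. A hypothetical usage sketch:

    fn on_test_failure() {
        // e.g. flush captured logs or dump state before the panic propagates
    }

    #[gpui::test(iterations = 10, on_failure = "on_test_failure")]
    async fn test_something_random(cx: &mut gpui::TestAppContext) {
        // ...
    }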


@@ -377,7 +377,7 @@ impl Buffer {
                 rpc::proto::LineEnding::from_i32(message.line_ending)
                     .ok_or_else(|| anyhow!("missing line_ending"))?,
             ));
-            this.saved_version = proto::deserialize_version(message.saved_version);
+            this.saved_version = proto::deserialize_version(&message.saved_version);
             this.saved_version_fingerprint =
                 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
             this.saved_mtime = message
@@ -1309,21 +1309,25 @@ impl Buffer {
     pub fn wait_for_edits(
         &mut self,
         edit_ids: impl IntoIterator<Item = clock::Local>,
-    ) -> impl Future<Output = ()> {
+    ) -> impl Future<Output = Result<()>> {
         self.text.wait_for_edits(edit_ids)
     }

     pub fn wait_for_anchors<'a>(
         &mut self,
         anchors: impl IntoIterator<Item = &'a Anchor>,
-    ) -> impl Future<Output = ()> {
+    ) -> impl Future<Output = Result<()>> {
         self.text.wait_for_anchors(anchors)
     }

-    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = ()> {
+    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
         self.text.wait_for_version(version)
     }

+    pub fn give_up_waiting(&mut self) {
+        self.text.give_up_waiting();
+    }
+
     pub fn set_active_selections(
         &mut self,
         selections: Arc<[Selection<Anchor>]>,

@ -220,7 +220,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operati
replica_id: undo.replica_id as ReplicaId, replica_id: undo.replica_id as ReplicaId,
value: undo.local_timestamp, value: undo.local_timestamp,
}, },
version: deserialize_version(undo.version), version: deserialize_version(&undo.version),
counts: undo counts: undo
.counts .counts
.into_iter() .into_iter()
@ -294,7 +294,7 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation
local: edit.local_timestamp, local: edit.local_timestamp,
lamport: edit.lamport_timestamp, lamport: edit.lamport_timestamp,
}, },
version: deserialize_version(edit.version), version: deserialize_version(&edit.version),
ranges: edit.ranges.into_iter().map(deserialize_range).collect(), ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
new_text: edit.new_text.into_iter().map(Arc::from).collect(), new_text: edit.new_text.into_iter().map(Arc::from).collect(),
} }
@ -509,7 +509,7 @@ pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transa
.into_iter() .into_iter()
.map(deserialize_local_timestamp) .map(deserialize_local_timestamp)
.collect(), .collect(),
start: deserialize_version(transaction.start), start: deserialize_version(&transaction.start),
}) })
} }
@ -538,7 +538,7 @@ pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
FullOffset(range.start as usize)..FullOffset(range.end as usize) FullOffset(range.start as usize)..FullOffset(range.end as usize)
} }
pub fn deserialize_version(message: Vec<proto::VectorClockEntry>) -> clock::Global { pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
let mut version = clock::Global::new(); let mut version = clock::Global::new();
for entry in message { for entry in message {
version.observe(clock::Local { version.observe(clock::Local {

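deserialize_version now borrows a slice of entries instead of consuming a Vec, which is why the call sites above changed from deserialize_version(message.version) to deserialize_version(&message.version): the containing protobuf message stays intact. A self-contained sketch of the same signature shape, with a stand-in entry type:

// Stand-in for proto::VectorClockEntry; field names are assumptions.
#[derive(Clone, Copy)]
struct VectorClockEntry {
    replica_id: u32,
    timestamp: u32,
}

// Borrowing lets the caller reuse the message after deserializing.
fn deserialize_version(message: &[VectorClockEntry]) -> Vec<(u32, u32)> {
    message
        .iter()
        .map(|entry| (entry.replica_id, entry.timestamp))
        .collect()
}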
View file

@ -4,11 +4,13 @@ use crate::{
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait; use async_trait::async_trait;
use client::proto::{self, PeerId}; use client::proto::{self, PeerId};
use fs::LineEnding;
use gpui::{AppContext, AsyncAppContext, ModelHandle}; use gpui::{AppContext, AsyncAppContext, ModelHandle};
use language::{ use language::{
point_from_lsp, point_to_lsp, point_from_lsp, point_to_lsp,
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
range_from_lsp, Anchor, Bias, Buffer, CachedLspAdapter, PointUtf16, ToPointUtf16, range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Unclipped,
}; };
use lsp::{DocumentHighlightKind, LanguageServer, ServerCapabilities}; use lsp::{DocumentHighlightKind, LanguageServer, ServerCapabilities};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag}; use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
@ -27,6 +29,8 @@ pub(crate) trait LspCommand: 'static + Sized {
fn to_lsp( fn to_lsp(
&self, &self,
path: &Path, path: &Path,
buffer: &Buffer,
language_server: &Arc<LanguageServer>,
cx: &AppContext, cx: &AppContext,
) -> <Self::LspRequest as lsp::request::Request>::Params; ) -> <Self::LspRequest as lsp::request::Request>::Params;
async fn response_from_lsp( async fn response_from_lsp(
@ -49,7 +53,7 @@ pub(crate) trait LspCommand: 'static + Sized {
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
buffer_version: &clock::Global, buffer_version: &clock::Global,
cx: &AppContext, cx: &mut AppContext,
) -> <Self::ProtoRequest as proto::RequestMessage>::Response; ) -> <Self::ProtoRequest as proto::RequestMessage>::Response;
async fn response_from_proto( async fn response_from_proto(
self, self,
@ -91,6 +95,14 @@ pub(crate) struct GetHover {
pub position: PointUtf16, pub position: PointUtf16,
} }
pub(crate) struct GetCompletions {
pub position: PointUtf16,
}
pub(crate) struct GetCodeActions {
pub range: Range<Anchor>,
}
#[async_trait(?Send)] #[async_trait(?Send)]
impl LspCommand for PrepareRename { impl LspCommand for PrepareRename {
type Response = Option<Range<Anchor>>; type Response = Option<Range<Anchor>>;
@ -105,7 +117,13 @@ impl LspCommand for PrepareRename {
} }
} }
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::TextDocumentPositionParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::TextDocumentPositionParams {
lsp::TextDocumentPositionParams { lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: lsp::Url::from_file_path(path).unwrap(), uri: lsp::Url::from_file_path(path).unwrap(),
@ -161,9 +179,9 @@ impl LspCommand for PrepareRename {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
@ -175,7 +193,7 @@ impl LspCommand for PrepareRename {
_: &mut Project, _: &mut Project,
_: PeerId, _: PeerId,
buffer_version: &clock::Global, buffer_version: &clock::Global,
_: &AppContext, _: &mut AppContext,
) -> proto::PrepareRenameResponse { ) -> proto::PrepareRenameResponse {
proto::PrepareRenameResponse { proto::PrepareRenameResponse {
can_rename: range.is_some(), can_rename: range.is_some(),
@ -199,9 +217,9 @@ impl LspCommand for PrepareRename {
if message.can_rename { if message.can_rename {
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
let start = message.start.and_then(deserialize_anchor); let start = message.start.and_then(deserialize_anchor);
let end = message.end.and_then(deserialize_anchor); let end = message.end.and_then(deserialize_anchor);
Ok(start.zip(end).map(|(start, end)| start..end)) Ok(start.zip(end).map(|(start, end)| start..end))
@ -221,7 +239,13 @@ impl LspCommand for PerformRename {
type LspRequest = lsp::request::Rename; type LspRequest = lsp::request::Rename;
type ProtoRequest = proto::PerformRename; type ProtoRequest = proto::PerformRename;
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::RenameParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::RenameParams {
lsp::RenameParams { lsp::RenameParams {
text_document_position: lsp::TextDocumentPositionParams { text_document_position: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -281,9 +305,9 @@ impl LspCommand for PerformRename {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
new_name: message.new_name, new_name: message.new_name,
@ -296,7 +320,7 @@ impl LspCommand for PerformRename {
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
_: &clock::Global, _: &clock::Global,
cx: &AppContext, cx: &mut AppContext,
) -> proto::PerformRenameResponse { ) -> proto::PerformRenameResponse {
let transaction = project.serialize_project_transaction_for_peer(response, peer_id, cx); let transaction = project.serialize_project_transaction_for_peer(response, peer_id, cx);
proto::PerformRenameResponse { proto::PerformRenameResponse {
@ -332,7 +356,13 @@ impl LspCommand for GetDefinition {
type LspRequest = lsp::request::GotoDefinition; type LspRequest = lsp::request::GotoDefinition;
type ProtoRequest = proto::GetDefinition; type ProtoRequest = proto::GetDefinition;
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::GotoDefinitionParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::GotoDefinitionParams {
lsp::GotoDefinitionParams { lsp::GotoDefinitionParams {
text_document_position_params: lsp::TextDocumentPositionParams { text_document_position_params: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -378,9 +408,9 @@ impl LspCommand for GetDefinition {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
}) })
@ -391,7 +421,7 @@ impl LspCommand for GetDefinition {
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
_: &clock::Global, _: &clock::Global,
cx: &AppContext, cx: &mut AppContext,
) -> proto::GetDefinitionResponse { ) -> proto::GetDefinitionResponse {
let links = location_links_to_proto(response, project, peer_id, cx); let links = location_links_to_proto(response, project, peer_id, cx);
proto::GetDefinitionResponse { links } proto::GetDefinitionResponse { links }
@ -418,7 +448,13 @@ impl LspCommand for GetTypeDefinition {
type LspRequest = lsp::request::GotoTypeDefinition; type LspRequest = lsp::request::GotoTypeDefinition;
type ProtoRequest = proto::GetTypeDefinition; type ProtoRequest = proto::GetTypeDefinition;
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::GotoTypeDefinitionParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::GotoTypeDefinitionParams {
lsp::GotoTypeDefinitionParams { lsp::GotoTypeDefinitionParams {
text_document_position_params: lsp::TextDocumentPositionParams { text_document_position_params: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -464,9 +500,9 @@ impl LspCommand for GetTypeDefinition {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
}) })
@ -477,7 +513,7 @@ impl LspCommand for GetTypeDefinition {
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
_: &clock::Global, _: &clock::Global,
cx: &AppContext, cx: &mut AppContext,
) -> proto::GetTypeDefinitionResponse { ) -> proto::GetTypeDefinitionResponse {
let links = location_links_to_proto(response, project, peer_id, cx); let links = location_links_to_proto(response, project, peer_id, cx);
proto::GetTypeDefinitionResponse { links } proto::GetTypeDefinitionResponse { links }
@ -537,7 +573,7 @@ async fn location_links_from_proto(
.ok_or_else(|| anyhow!("missing origin end"))?; .ok_or_else(|| anyhow!("missing origin end"))?;
buffer buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end])) .update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end]))
.await; .await?;
Some(Location { Some(Location {
buffer, buffer,
range: start..end, range: start..end,
@ -562,7 +598,7 @@ async fn location_links_from_proto(
.ok_or_else(|| anyhow!("missing target end"))?; .ok_or_else(|| anyhow!("missing target end"))?;
buffer buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end])) .update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end]))
.await; .await?;
let target = Location { let target = Location {
buffer, buffer,
range: start..end, range: start..end,
@ -658,7 +694,7 @@ fn location_links_to_proto(
links: Vec<LocationLink>, links: Vec<LocationLink>,
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
cx: &AppContext, cx: &mut AppContext,
) -> Vec<proto::LocationLink> { ) -> Vec<proto::LocationLink> {
links links
.into_iter() .into_iter()
@ -693,7 +729,13 @@ impl LspCommand for GetReferences {
type LspRequest = lsp::request::References; type LspRequest = lsp::request::References;
type ProtoRequest = proto::GetReferences; type ProtoRequest = proto::GetReferences;
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::ReferenceParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::ReferenceParams {
lsp::ReferenceParams { lsp::ReferenceParams {
text_document_position: lsp::TextDocumentPositionParams { text_document_position: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -774,9 +816,9 @@ impl LspCommand for GetReferences {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
}) })
@ -787,7 +829,7 @@ impl LspCommand for GetReferences {
project: &mut Project, project: &mut Project,
peer_id: PeerId, peer_id: PeerId,
_: &clock::Global, _: &clock::Global,
cx: &AppContext, cx: &mut AppContext,
) -> proto::GetReferencesResponse { ) -> proto::GetReferencesResponse {
let locations = response let locations = response
.into_iter() .into_iter()
@ -827,7 +869,7 @@ impl LspCommand for GetReferences {
.ok_or_else(|| anyhow!("missing target end"))?; .ok_or_else(|| anyhow!("missing target end"))?;
target_buffer target_buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end])) .update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end]))
.await; .await?;
locations.push(Location { locations.push(Location {
buffer: target_buffer, buffer: target_buffer,
range: start..end, range: start..end,
@ -851,7 +893,13 @@ impl LspCommand for GetDocumentHighlights {
capabilities.document_highlight_provider.is_some() capabilities.document_highlight_provider.is_some()
} }
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::DocumentHighlightParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::DocumentHighlightParams {
lsp::DocumentHighlightParams { lsp::DocumentHighlightParams {
text_document_position_params: lsp::TextDocumentPositionParams { text_document_position_params: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -915,9 +963,9 @@ impl LspCommand for GetDocumentHighlights {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
}) })
@ -928,7 +976,7 @@ impl LspCommand for GetDocumentHighlights {
_: &mut Project, _: &mut Project,
_: PeerId, _: PeerId,
_: &clock::Global, _: &clock::Global,
_: &AppContext, _: &mut AppContext,
) -> proto::GetDocumentHighlightsResponse { ) -> proto::GetDocumentHighlightsResponse {
let highlights = response let highlights = response
.into_iter() .into_iter()
@ -965,7 +1013,7 @@ impl LspCommand for GetDocumentHighlights {
.ok_or_else(|| anyhow!("missing target end"))?; .ok_or_else(|| anyhow!("missing target end"))?;
buffer buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end])) .update(&mut cx, |buffer, _| buffer.wait_for_anchors([&start, &end]))
.await; .await?;
let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) { let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT, Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
Some(proto::document_highlight::Kind::Read) => DocumentHighlightKind::READ, Some(proto::document_highlight::Kind::Read) => DocumentHighlightKind::READ,
@ -991,7 +1039,13 @@ impl LspCommand for GetHover {
type LspRequest = lsp::request::HoverRequest; type LspRequest = lsp::request::HoverRequest;
type ProtoRequest = proto::GetHover; type ProtoRequest = proto::GetHover;
fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::HoverParams { fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::HoverParams {
lsp::HoverParams { lsp::HoverParams {
text_document_position_params: lsp::TextDocumentPositionParams { text_document_position_params: lsp::TextDocumentPositionParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
@ -1117,9 +1171,9 @@ impl LspCommand for GetHover {
.ok_or_else(|| anyhow!("invalid position"))?; .ok_or_else(|| anyhow!("invalid position"))?;
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(message.version)) buffer.wait_for_version(deserialize_version(&message.version))
}) })
.await; .await?;
Ok(Self { Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
}) })
@ -1130,7 +1184,7 @@ impl LspCommand for GetHover {
_: &mut Project, _: &mut Project,
_: PeerId, _: PeerId,
_: &clock::Global, _: &clock::Global,
_: &AppContext, _: &mut AppContext,
) -> proto::GetHoverResponse { ) -> proto::GetHoverResponse {
if let Some(response) = response { if let Some(response) = response {
let (start, end) = if let Some(range) = response.range { let (start, end) = if let Some(range) = response.range {
@ -1199,3 +1253,342 @@ impl LspCommand for GetHover {
message.buffer_id message.buffer_id
} }
} }
#[async_trait(?Send)]
impl LspCommand for GetCompletions {
type Response = Vec<Completion>;
type LspRequest = lsp::request::Completion;
type ProtoRequest = proto::GetCompletions;
fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::CompletionParams {
lsp::CompletionParams {
text_document_position: lsp::TextDocumentPositionParams::new(
lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path).unwrap()),
point_to_lsp(self.position),
),
context: Default::default(),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
}
}
async fn response_from_lsp(
self,
completions: Option<lsp::CompletionResponse>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
cx: AsyncAppContext,
) -> Result<Vec<Completion>> {
let completions = if let Some(completions) = completions {
match completions {
lsp::CompletionResponse::Array(completions) => completions,
lsp::CompletionResponse::List(list) => list.items,
}
} else {
Default::default()
};
let completions = buffer.read_with(&cx, |buffer, _| {
let language = buffer.language().cloned();
let snapshot = buffer.snapshot();
let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
let mut range_for_token = None;
completions
.into_iter()
.filter_map(move |mut lsp_completion| {
// For now, we can only handle additional edits if they are returned
// when resolving the completion, not if they are present initially.
if lsp_completion
.additional_text_edits
.as_ref()
.map_or(false, |edits| !edits.is_empty())
{
return None;
}
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
// If the language server provides a range to overwrite, then
// check that the range is valid.
Some(lsp::CompletionTextEdit::Edit(edit)) => {
let range = range_from_lsp(edit.range);
let start = snapshot.clip_point_utf16(range.start, Bias::Left);
let end = snapshot.clip_point_utf16(range.end, Bias::Left);
if start != range.start.0 || end != range.end.0 {
log::info!("completion out of expected range");
return None;
}
(
snapshot.anchor_before(start)..snapshot.anchor_after(end),
edit.new_text.clone(),
)
}
// If the language server does not provide a range, then infer
// the range from the word surrounding the cursor.
None => {
if self.position != clipped_position {
log::info!("completion out of expected range");
return None;
}
let Range { start, end } = range_for_token
.get_or_insert_with(|| {
let offset = self.position.to_offset(&snapshot);
let (range, kind) = snapshot.surrounding_word(offset);
if kind == Some(CharKind::Word) {
range
} else {
offset..offset
}
})
.clone();
let text = lsp_completion
.insert_text
.as_ref()
.unwrap_or(&lsp_completion.label)
.clone();
(
snapshot.anchor_before(start)..snapshot.anchor_after(end),
text,
)
}
Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
log::info!("unsupported insert/replace completion");
return None;
}
};
let language = language.clone();
LineEnding::normalize(&mut new_text);
Some(async move {
let mut label = None;
if let Some(language) = language {
language.process_completion(&mut lsp_completion).await;
label = language.label_for_completion(&lsp_completion).await;
}
Completion {
old_range,
new_text,
label: label.unwrap_or_else(|| {
language::CodeLabel::plain(
lsp_completion.label.clone(),
lsp_completion.filter_text.as_deref(),
)
}),
lsp_completion,
}
})
})
});
Ok(futures::future::join_all(completions).await)
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
let anchor = buffer.anchor_after(self.position);
proto::GetCompletions {
project_id,
buffer_id: buffer.remote_id(),
position: Some(language::proto::serialize_anchor(&anchor)),
version: serialize_version(&buffer.version()),
}
}
async fn from_proto(
message: proto::GetCompletions,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
let version = deserialize_version(&message.version);
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))
.await?;
let position = message
.position
.and_then(language::proto::deserialize_anchor)
.map(|p| {
buffer.read_with(&cx, |buffer, _| {
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
})
})
.ok_or_else(|| anyhow!("invalid position"))?;
Ok(Self { position })
}
fn response_to_proto(
completions: Vec<Completion>,
_: &mut Project,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
) -> proto::GetCompletionsResponse {
proto::GetCompletionsResponse {
completions: completions
.iter()
.map(language::proto::serialize_completion)
.collect(),
version: serialize_version(&buffer_version),
}
}
async fn response_from_proto(
self,
message: proto::GetCompletionsResponse,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<Completion>> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
.await?;
let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
let completions = message.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone())
});
futures::future::try_join_all(completions).await
}
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
message.buffer_id
}
}
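
When GetCompletions::response_from_lsp above receives a server-provided text edit, it accepts the completion only if clipping the edit range against the current buffer leaves both endpoints unchanged; otherwise the completion is stale and dropped. A simplified sketch of that validity check, with a stand-in point type ordered lexicographically (the real code clips UTF-16 points against a buffer snapshot):

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct Point {
    row: u32,
    column: u32,
}

fn clip(point: Point, buffer_max: Point) -> Point {
    point.min(buffer_max)
}

// The edit range is stale if clipping moves either endpoint.
fn edit_range_is_valid(start: Point, end: Point, buffer_max: Point) -> bool {
    clip(start, buffer_max) == start && clip(end, buffer_max) == end
}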
#[async_trait(?Send)]
impl LspCommand for GetCodeActions {
type Response = Vec<CodeAction>;
type LspRequest = lsp::request::CodeActionRequest;
type ProtoRequest = proto::GetCodeActions;
fn check_capabilities(&self, capabilities: &ServerCapabilities) -> bool {
capabilities.code_action_provider.is_some()
}
fn to_lsp(
&self,
path: &Path,
buffer: &Buffer,
language_server: &Arc<LanguageServer>,
_: &AppContext,
) -> lsp::CodeActionParams {
let relevant_diagnostics = buffer
.snapshot()
.diagnostics_in_range::<_, usize>(self.range.clone(), false)
.map(|entry| entry.to_lsp_diagnostic_stub())
.collect();
lsp::CodeActionParams {
text_document: lsp::TextDocumentIdentifier::new(
lsp::Url::from_file_path(path).unwrap(),
),
range: range_to_lsp(self.range.to_point_utf16(buffer)),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
context: lsp::CodeActionContext {
diagnostics: relevant_diagnostics,
only: language_server.code_action_kinds(),
},
}
}
async fn response_from_lsp(
self,
actions: Option<lsp::CodeActionResponse>,
_: ModelHandle<Project>,
_: ModelHandle<Buffer>,
_: AsyncAppContext,
) -> Result<Vec<CodeAction>> {
Ok(actions
.unwrap_or_default()
.into_iter()
.filter_map(|entry| {
if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
Some(CodeAction {
range: self.range.clone(),
lsp_action,
})
} else {
None
}
})
.collect())
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCodeActions {
proto::GetCodeActions {
project_id,
buffer_id: buffer.remote_id(),
start: Some(language::proto::serialize_anchor(&self.range.start)),
end: Some(language::proto::serialize_anchor(&self.range.end)),
version: serialize_version(&buffer.version()),
}
}
async fn from_proto(
message: proto::GetCodeActions,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
let start = message
.start
.and_then(language::proto::deserialize_anchor)
.ok_or_else(|| anyhow!("invalid start"))?;
let end = message
.end
.and_then(language::proto::deserialize_anchor)
.ok_or_else(|| anyhow!("invalid end"))?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
.await?;
Ok(Self { range: start..end })
}
fn response_to_proto(
code_actions: Vec<CodeAction>,
_: &mut Project,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
) -> proto::GetCodeActionsResponse {
proto::GetCodeActionsResponse {
actions: code_actions
.iter()
.map(language::proto::serialize_code_action)
.collect(),
version: serialize_version(&buffer_version),
}
}
async fn response_from_proto(
self,
message: proto::GetCodeActionsResponse,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<CodeAction>> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
.await?;
message
.actions
.into_iter()
.map(language::proto::deserialize_code_action)
.collect()
}
fn buffer_id_from_proto(message: &proto::GetCodeActions) -> u64 {
message.buffer_id
}
}
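
GetCodeActions::to_lsp above forwards only the diagnostics that overlap the requested range, constrained to the code-action kinds the server advertises. A sketch of the overlap filter with stand-in types (the real code walks diagnostics_in_range on a buffer snapshot):

#[derive(Clone)]
struct Diagnostic {
    start: usize,
    end: usize,
    message: String,
}

// Half-open ranges overlap when each one starts before the other ends.
fn relevant_diagnostics(all: &[Diagnostic], range: std::ops::Range<usize>) -> Vec<Diagnostic> {
    all.iter()
        .filter(|d| d.start < range.end && range.start < d.end)
        .cloned()
        .collect()
}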

File diff suppressed because it is too large

View file

@ -2183,7 +2183,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
}); });
} }
#[gpui::test] #[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) { async fn test_save_file(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(

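Marking test_save_file with iterations = 10 makes the harness rerun the body several times; conceptually (an assumed shape, not the macro's actual expansion) each iteration feeds the deterministic executor a different seed:

fn run_iterations(mut body: impl FnMut(u64), iterations: u64) {
    for seed in 0..iterations {
        body(seed); // scheduling order varies with the seed
    }
}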
View file

@ -12,7 +12,9 @@ use futures::{
mpsc::{self, UnboundedSender}, mpsc::{self, UnboundedSender},
oneshot, oneshot,
}, },
select_biased, Stream, StreamExt, select_biased,
task::Poll,
Stream, StreamExt,
}; };
use fuzzy::CharBag; use fuzzy::CharBag;
use git::{DOT_GIT, GITIGNORE}; use git::{DOT_GIT, GITIGNORE};
@ -41,11 +43,11 @@ use std::{
mem, mem,
ops::{Deref, DerefMut}, ops::{Deref, DerefMut},
path::{Path, PathBuf}, path::{Path, PathBuf},
pin::Pin,
sync::{ sync::{
atomic::{AtomicUsize, Ordering::SeqCst}, atomic::{AtomicUsize, Ordering::SeqCst},
Arc, Arc,
}, },
task::Poll,
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
@ -154,20 +156,12 @@ impl DerefMut for LocalSnapshot {
} }
enum ScanState { enum ScanState {
/// The worktree is performing its initial scan of the filesystem. Started,
Initializing {
snapshot: LocalSnapshot,
barrier: Option<barrier::Sender>,
},
Initialized {
snapshot: LocalSnapshot,
},
/// The worktree is updating in response to filesystem events.
Updating,
Updated { Updated {
snapshot: LocalSnapshot, snapshot: LocalSnapshot,
changes: HashMap<Arc<Path>, PathChange>, changes: HashMap<Arc<Path>, PathChange>,
barrier: Option<barrier::Sender>, barrier: Option<barrier::Sender>,
scanning: bool,
}, },
} }
@ -221,7 +215,7 @@ impl Worktree {
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(), entries_by_path: Default::default(),
entries_by_id: Default::default(), entries_by_id: Default::default(),
scan_id: 0, scan_id: 1,
completed_scan_id: 0, completed_scan_id: 0,
}, },
}; };
@ -244,9 +238,24 @@ impl Worktree {
cx.spawn_weak(|this, mut cx| async move { cx.spawn_weak(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut() let this = this.as_local_mut().unwrap();
.unwrap() match state {
.background_scanner_updated(state, cx); ScanState::Started => {
*this.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
this.set_snapshot(snapshot, cx);
cx.emit(Event::UpdatedEntries(changes));
drop(barrier);
}
}
cx.notify();
}); });
} }
}) })
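
The worktree now handles both scan states in this single match, publishing progress through the is_scanning flag. A hedged sketch of the observing side, assuming a postage watch receiver with the futures-traits feature enabled so it can be polled as a futures Stream:

use futures::StreamExt;
use postage::watch;

// Await until the scanner publishes `false`, i.e. the worktree is idle.
async fn wait_until_idle(mut is_scanning: watch::Receiver<bool>) {
    while let Some(scanning) = is_scanning.next().await {
        if !scanning {
            break;
        }
    }
}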
@ -258,8 +267,14 @@ impl Worktree {
let background = cx.background().clone(); let background = cx.background().clone();
async move { async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await; let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(snapshot, scan_states_tx, fs, background) BackgroundScanner::new(
.run(events, path_changes_rx) snapshot,
fs,
scan_states_tx,
background,
path_changes_rx,
)
.run(events)
.await; .await;
} }
}); });
@ -298,7 +313,7 @@ impl Worktree {
.collect(), .collect(),
entries_by_path: Default::default(), entries_by_path: Default::default(),
entries_by_id: Default::default(), entries_by_id: Default::default(),
scan_id: 0, scan_id: 1,
completed_scan_id: 0, completed_scan_id: 0,
}; };
@ -533,38 +548,6 @@ impl LocalWorktree {
Ok(updated) Ok(updated)
} }
fn background_scanner_updated(
&mut self,
scan_state: ScanState,
cx: &mut ModelContext<Worktree>,
) {
match scan_state {
ScanState::Initializing { snapshot, barrier } => {
*self.is_scanning.0.borrow_mut() = true;
self.set_snapshot(snapshot, cx);
drop(barrier);
}
ScanState::Initialized { snapshot } => {
*self.is_scanning.0.borrow_mut() = false;
self.set_snapshot(snapshot, cx);
}
ScanState::Updating => {
*self.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
} => {
*self.is_scanning.0.borrow_mut() = false;
cx.emit(Event::UpdatedEntries(changes));
self.set_snapshot(snapshot, cx);
drop(barrier);
}
}
cx.notify();
}
fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) { fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
let updated_repos = Self::changed_repos( let updated_repos = Self::changed_repos(
&self.snapshot.git_repositories, &self.snapshot.git_repositories,
@ -838,8 +821,7 @@ impl LocalWorktree {
.unwrap() .unwrap()
.path_changes_tx .path_changes_tx
.try_send((vec![abs_path], tx)) .try_send((vec![abs_path], tx))
.unwrap(); })?;
});
rx.recv().await; rx.recv().await;
Ok(()) Ok(())
})) }))
@ -930,7 +912,7 @@ impl LocalWorktree {
} }
let (tx, mut rx) = barrier::channel(); let (tx, mut rx) = barrier::channel();
path_changes_tx.try_send((paths, tx)).unwrap(); path_changes_tx.try_send((paths, tx))?;
rx.recv().await; rx.recv().await;
this.upgrade(&cx) this.upgrade(&cx)
.ok_or_else(|| anyhow!("worktree was dropped"))? .ok_or_else(|| anyhow!("worktree was dropped"))?
@ -1064,7 +1046,7 @@ impl RemoteWorktree {
version: serialize_version(&version), version: serialize_version(&version),
}) })
.await?; .await?;
let version = deserialize_version(response.version); let version = deserialize_version(&response.version);
let fingerprint = deserialize_fingerprint(&response.fingerprint)?; let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
let mtime = response let mtime = response
.mtime .mtime
@ -1224,12 +1206,11 @@ impl Snapshot {
let mut entries_by_path_edits = Vec::new(); let mut entries_by_path_edits = Vec::new();
let mut entries_by_id_edits = Vec::new(); let mut entries_by_id_edits = Vec::new();
for entry_id in update.removed_entries { for entry_id in update.removed_entries {
let entry = self if let Some(entry) = self.entry_for_id(ProjectEntryId::from_proto(entry_id)) {
.entry_for_id(ProjectEntryId::from_proto(entry_id))
.ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone()))); entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
entries_by_id_edits.push(Edit::Remove(entry.id)); entries_by_id_edits.push(Edit::Remove(entry.id));
} }
}
for entry in update.updated_entries { for entry in update.updated_entries {
let entry = Entry::try_from((&self.root_char_bag, entry))?; let entry = Entry::try_from((&self.root_char_bag, entry))?;
@ -1339,14 +1320,6 @@ impl Snapshot {
&self.root_name &self.root_name
} }
pub fn scan_started(&mut self) {
self.scan_id += 1;
}
pub fn scan_completed(&mut self) {
self.completed_scan_id = self.scan_id;
}
pub fn scan_id(&self) -> usize { pub fn scan_id(&self) -> usize {
self.scan_id self.scan_id
} }
@ -1541,17 +1514,20 @@ impl LocalSnapshot {
return; return;
}; };
match parent_entry.kind {
EntryKind::PendingDir => {
parent_entry.kind = EntryKind::Dir;
}
EntryKind::Dir => {}
_ => return,
}
if let Some(ignore) = ignore { if let Some(ignore) = ignore {
self.ignores_by_parent_abs_path.insert( self.ignores_by_parent_abs_path.insert(
self.abs_path.join(&parent_path).into(), self.abs_path.join(&parent_path).into(),
(ignore, self.scan_id), (ignore, self.scan_id),
); );
} }
if matches!(parent_entry.kind, EntryKind::PendingDir) {
parent_entry.kind = EntryKind::Dir;
} else {
unreachable!();
}
if parent_path.file_name() == Some(&DOT_GIT) { if parent_path.file_name() == Some(&DOT_GIT) {
let abs_path = self.abs_path.join(&parent_path); let abs_path = self.abs_path.join(&parent_path);
@ -2137,53 +2113,47 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
} }
struct BackgroundScanner { struct BackgroundScanner {
fs: Arc<dyn Fs>,
snapshot: Mutex<LocalSnapshot>, snapshot: Mutex<LocalSnapshot>,
notify: UnboundedSender<ScanState>, fs: Arc<dyn Fs>,
status_updates_tx: UnboundedSender<ScanState>,
executor: Arc<executor::Background>, executor: Arc<executor::Background>,
refresh_requests_rx: channel::Receiver<(Vec<PathBuf>, barrier::Sender)>,
prev_state: Mutex<(Snapshot, Vec<Arc<Path>>)>,
finished_initial_scan: bool,
} }
impl BackgroundScanner { impl BackgroundScanner {
fn new( fn new(
snapshot: LocalSnapshot, snapshot: LocalSnapshot,
notify: UnboundedSender<ScanState>,
fs: Arc<dyn Fs>, fs: Arc<dyn Fs>,
status_updates_tx: UnboundedSender<ScanState>,
executor: Arc<executor::Background>, executor: Arc<executor::Background>,
refresh_requests_rx: channel::Receiver<(Vec<PathBuf>, barrier::Sender)>,
) -> Self { ) -> Self {
Self { Self {
fs, fs,
snapshot: Mutex::new(snapshot), status_updates_tx,
notify,
executor, executor,
refresh_requests_rx,
prev_state: Mutex::new((snapshot.snapshot.clone(), Vec::new())),
snapshot: Mutex::new(snapshot),
finished_initial_scan: false,
} }
} }
fn abs_path(&self) -> Arc<Path> {
self.snapshot.lock().abs_path.clone()
}
async fn run( async fn run(
self, &mut self,
events_rx: impl Stream<Item = Vec<fsevent::Event>>, mut events_rx: Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>,
mut changed_paths: channel::Receiver<(Vec<PathBuf>, barrier::Sender)>,
) { ) {
use futures::FutureExt as _; use futures::FutureExt as _;
// Retrieve the basic properties of the root node. let (root_abs_path, root_inode) = {
let root_char_bag; let snapshot = self.snapshot.lock();
let root_abs_path; (
let root_inode; snapshot.abs_path.clone(),
let root_is_dir; snapshot.root_entry().map(|e| e.inode),
let next_entry_id; )
{ };
let mut snapshot = self.snapshot.lock();
snapshot.scan_started();
root_char_bag = snapshot.root_char_bag;
root_abs_path = snapshot.abs_path.clone();
root_inode = snapshot.root_entry().map(|e| e.inode);
root_is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
next_entry_id = snapshot.next_entry_id.clone();
}
// Populate ignores above the root. // Populate ignores above the root.
let ignore_stack; let ignore_stack;
@ -2207,23 +2177,126 @@ impl BackgroundScanner {
} }
}; };
if root_is_dir { // Perform an initial scan of the directory.
let mut ancestor_inodes = TreeSet::default(); let (scan_job_tx, scan_job_rx) = channel::unbounded();
if let Some(root_inode) = root_inode { smol::block_on(scan_job_tx.send(ScanJob {
ancestor_inodes.insert(root_inode); abs_path: root_abs_path,
}
let (tx, rx) = channel::unbounded();
self.executor
.block(tx.send(ScanJob {
abs_path: root_abs_path.to_path_buf(),
path: Arc::from(Path::new("")), path: Arc::from(Path::new("")),
ignore_stack, ignore_stack,
ancestor_inodes, ancestor_inodes: TreeSet::from_ordered_entries(root_inode),
scan_queue: tx.clone(), scan_queue: scan_job_tx.clone(),
})) }))
.unwrap(); .unwrap();
drop(tx); drop(scan_job_tx);
self.scan_dirs(true, scan_job_rx).await;
self.send_status_update(false, None);
// Process any FS events that occurred while performing the initial scan.
// For these events, the reported changes cannot be as precise, because we
// didn't have the previous state loaded yet.
if let Poll::Ready(Some(events)) = futures::poll!(events_rx.next()) {
let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
paths.extend(more_events.into_iter().map(|e| e.path));
}
self.process_events(paths).await;
self.send_status_update(false, None);
}
self.finished_initial_scan = true;
// Continue processing events until the worktree is dropped.
loop {
select_biased! {
// Process any path refresh requests from the worktree. Prioritize
// these before handling changes reported by the filesystem.
request = self.refresh_requests_rx.recv().fuse() => {
let Ok((paths, barrier)) = request else { break };
self.reload_entries_for_paths(paths, None).await;
if !self.send_status_update(false, Some(barrier)) {
break;
}
}
events = events_rx.next().fuse() => {
let Some(events) = events else { break };
let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
paths.extend(more_events.into_iter().map(|e| e.path));
}
self.process_events(paths).await;
self.send_status_update(false, None);
}
}
}
}
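
The rewritten run loop relies on select_biased!, which polls its arms in source order, so explicit refresh requests are always served before batched filesystem events. A runnable sketch of the same prioritization, substituting futures mpsc channels for the real receivers:

use futures::{channel::mpsc, select_biased, StreamExt};

async fn event_loop(
    mut refresh_requests: mpsc::UnboundedReceiver<String>,
    mut fs_events: mpsc::UnboundedReceiver<String>,
) {
    loop {
        select_biased! {
            // Polled first: user-initiated refreshes take priority.
            request = refresh_requests.next() => {
                let Some(request) = request else { break };
                println!("refresh: {request}");
            }
            event = fs_events.next() => {
                let Some(event) = event else { break };
                println!("fs event: {event}");
            }
        }
    }
}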
async fn process_events(&mut self, paths: Vec<PathBuf>) {
use futures::FutureExt as _;
let (scan_job_tx, scan_job_rx) = channel::unbounded();
if let Some(mut paths) = self
.reload_entries_for_paths(paths, Some(scan_job_tx.clone()))
.await
{
paths.sort_unstable();
util::extend_sorted(&mut self.prev_state.lock().1, paths, usize::MAX, Ord::cmp);
}
drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await;
let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
let snapshot = self.update_ignore_statuses(ignore_queue_tx);
self.executor
.scoped(|scope| {
for _ in 0..self.executor.num_cpus() {
scope.spawn(async {
loop {
select_biased! {
// Process any path refresh requests before moving on to process
// the queue of ignore statuses.
request = self.refresh_requests_rx.recv().fuse() => {
let Ok((paths, barrier)) = request else { break };
self.reload_entries_for_paths(paths, None).await;
if !self.send_status_update(false, Some(barrier)) {
return;
}
}
// Recursively process directories whose ignores have changed.
job = ignore_queue_rx.recv().fuse() => {
let Ok(job) = job else { break };
self.update_ignore_status(job, &snapshot).await;
}
}
}
});
}
})
.await;
let mut snapshot = self.snapshot.lock();
let mut git_repositories = mem::take(&mut snapshot.git_repositories);
git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
snapshot.git_repositories = git_repositories;
snapshot.removed_entry_ids.clear();
snapshot.completed_scan_id = snapshot.scan_id;
}
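
process_events ends by setting completed_scan_id equal to scan_id, the invariant that distinguishes an in-progress scan from a quiescent worktree (and the reason the initial snapshots above now start at scan_id: 1 with completed_scan_id: 0). A sketch of that bookkeeping, with assumed method names:

struct ScanIds {
    scan_id: usize,           // bumped when a new scan begins
    completed_scan_id: usize, // catches up when that scan finishes
}

impl ScanIds {
    fn begin_scan(&mut self) {
        if self.completed_scan_id == self.scan_id {
            self.scan_id += 1;
        }
    }

    fn finish_scan(&mut self) {
        self.completed_scan_id = self.scan_id;
    }

    fn is_scanning(&self) -> bool {
        self.completed_scan_id != self.scan_id
    }
}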
async fn scan_dirs(
&self,
enable_progress_updates: bool,
scan_jobs_rx: channel::Receiver<ScanJob>,
) {
use futures::FutureExt as _;
if self
.status_updates_tx
.unbounded_send(ScanState::Started)
.is_err()
{
return;
}
let progress_update_count = AtomicUsize::new(0); let progress_update_count = AtomicUsize::new(0);
self.executor self.executor
@ -2231,10 +2304,21 @@ impl BackgroundScanner {
for _ in 0..self.executor.num_cpus() { for _ in 0..self.executor.num_cpus() {
scope.spawn(async { scope.spawn(async {
let mut last_progress_update_count = 0; let mut last_progress_update_count = 0;
let progress_update_timer = self.pause_between_progress_updates().fuse(); let progress_update_timer = self.progress_timer(enable_progress_updates).fuse();
futures::pin_mut!(progress_update_timer); futures::pin_mut!(progress_update_timer);
loop { loop {
select_biased! { select_biased! {
// Process any path refresh requests before moving on to process
// the scan queue, so that user operations are prioritized.
request = self.refresh_requests_rx.recv().fuse() => {
let Ok((paths, barrier)) = request else { break };
self.reload_entries_for_paths(paths, None).await;
if !self.send_status_update(false, Some(barrier)) {
return;
}
}
// Send periodic progress updates to the worktree. Use an atomic counter // Send periodic progress updates to the worktree. Use an atomic counter
// to ensure that only one of the workers sends a progress update after // to ensure that only one of the workers sends a progress update after
// the update interval elapses. // the update interval elapses.
@ -2247,158 +2331,66 @@ impl BackgroundScanner {
) { ) {
Ok(_) => { Ok(_) => {
last_progress_update_count += 1; last_progress_update_count += 1;
if self self.send_status_update(true, None);
.notify }
.unbounded_send(ScanState::Initializing { Err(count) => {
snapshot: self.snapshot.lock().clone(), last_progress_update_count = count;
barrier: None,
})
.is_err()
{
break;
} }
} }
Err(current_count) => last_progress_update_count = current_count, progress_update_timer.set(self.progress_timer(enable_progress_updates).fuse());
}
progress_update_timer.set(self.pause_between_progress_updates().fuse());
}
// Refresh any paths requested by the main thread.
job = changed_paths.recv().fuse() => {
let Ok((abs_paths, barrier)) = job else { break };
self.update_entries_for_paths(abs_paths, None).await;
if self
.notify
.unbounded_send(ScanState::Initializing {
snapshot: self.snapshot.lock().clone(),
barrier: Some(barrier),
})
.is_err()
{
break;
}
} }
// Recursively load directories from the file system. // Recursively load directories from the file system.
job = rx.recv().fuse() => { job = scan_jobs_rx.recv().fuse() => {
let Ok(job) = job else { break }; let Ok(job) = job else { break };
if let Err(err) = self if let Err(err) = self.scan_dir(&job).await {
.scan_dir(root_char_bag, next_entry_id.clone(), &job) if job.path.as_ref() != Path::new("") {
.await log::error!("error scanning directory {:?}: {}", job.abs_path, err);
{
log::error!("error scanning {:?}: {}", job.abs_path, err);
} }
} }
} }
} }
}); }
})
} }
}) })
.await; .await;
} }
self.snapshot.lock().scan_completed(); fn send_status_update(&self, scanning: bool, barrier: Option<barrier::Sender>) -> bool {
let mut prev_state = self.prev_state.lock();
if self let snapshot = self.snapshot.lock().clone();
.notify let mut old_snapshot = snapshot.snapshot.clone();
.unbounded_send(ScanState::Initialized { mem::swap(&mut old_snapshot, &mut prev_state.0);
snapshot: self.snapshot.lock().clone(), let changed_paths = mem::take(&mut prev_state.1);
}) let changes = self.build_change_set(&old_snapshot, &snapshot.snapshot, changed_paths);
.is_err() self.status_updates_tx
{
return;
}
// Process any events that occurred while performing the initial scan. These
// events can't be reported as precisely, because there is no snapshot of the
// worktree before they occurred.
futures::pin_mut!(events_rx);
if let Poll::Ready(Some(mut events)) = futures::poll!(events_rx.next()) {
while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
events.extend(additional_events);
}
let abs_paths = events.into_iter().map(|e| e.path).collect();
if self.notify.unbounded_send(ScanState::Updating).is_err() {
return;
}
if let Some(changes) = self.process_events(abs_paths, true).await {
if self
.notify
.unbounded_send(ScanState::Updated { .unbounded_send(ScanState::Updated {
snapshot: self.snapshot.lock().clone(), snapshot,
changes,
barrier: None,
})
.is_err()
{
return;
}
} else {
return;
}
}
// Continue processing events until the worktree is dropped.
loop {
let barrier;
let abs_paths;
select_biased! {
request = changed_paths.next().fuse() => {
let Some((paths, b)) = request else { break };
abs_paths = paths;
barrier = Some(b);
}
events = events_rx.next().fuse() => {
let Some(events) = events else { break };
abs_paths = events.into_iter().map(|e| e.path).collect();
barrier = None;
}
}
if self.notify.unbounded_send(ScanState::Updating).is_err() {
return;
}
if let Some(changes) = self.process_events(abs_paths, false).await {
if self
.notify
.unbounded_send(ScanState::Updated {
snapshot: self.snapshot.lock().clone(),
changes, changes,
scanning,
barrier, barrier,
}) })
.is_err() .is_ok()
{
return;
}
} else {
return;
}
}
} }
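
send_status_update swaps the previous snapshot for the current one and takes the accumulated changed paths, so every update carries exactly the delta since the last one. The core is mem::swap plus mem::take; a stripped-down sketch with stand-in types:

use std::mem;

struct PrevState {
    snapshot: Vec<String>,      // stand-in for the previous Snapshot
    changed_paths: Vec<String>, // paths touched since the last update
}

// Returns (old snapshot, changed paths); `current` becomes the new baseline.
fn take_delta(prev: &mut PrevState, current: Vec<String>) -> (Vec<String>, Vec<String>) {
    let old = mem::replace(&mut prev.snapshot, current);
    let changed = mem::take(&mut prev.changed_paths);
    (old, changed)
}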
async fn pause_between_progress_updates(&self) { async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
#[cfg(any(test, feature = "test-support"))]
if self.fs.is_fake() {
return self.executor.simulate_random_delay().await;
}
smol::Timer::after(Duration::from_millis(100)).await;
}
async fn scan_dir(
&self,
root_char_bag: CharBag,
next_entry_id: Arc<AtomicUsize>,
job: &ScanJob,
) -> Result<()> {
let mut new_entries: Vec<Entry> = Vec::new(); let mut new_entries: Vec<Entry> = Vec::new();
let mut new_jobs: Vec<Option<ScanJob>> = Vec::new(); let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
let mut ignore_stack = job.ignore_stack.clone(); let mut ignore_stack = job.ignore_stack.clone();
let mut new_ignore = None; let mut new_ignore = None;
let (root_abs_path, root_char_bag, next_entry_id) = {
let snapshot = self.snapshot.lock();
(
snapshot.abs_path().clone(),
snapshot.root_char_bag,
snapshot.next_entry_id.clone(),
)
};
let mut child_paths = self.fs.read_dir(&job.abs_path).await?; let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
while let Some(child_abs_path) = child_paths.next().await { while let Some(child_abs_path) = child_paths.next().await {
let child_abs_path = match child_abs_path { let child_abs_path: Arc<Path> = match child_abs_path {
Ok(child_abs_path) => child_abs_path, Ok(child_abs_path) => child_abs_path.into(),
Err(error) => { Err(error) => {
log::error!("error processing entry {:?}", error); log::error!("error processing entry {:?}", error);
continue; continue;
@ -2421,8 +2413,7 @@ impl BackgroundScanner {
match build_gitignore(&child_abs_path, self.fs.as_ref()).await { match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
Ok(ignore) => { Ok(ignore) => {
let ignore = Arc::new(ignore); let ignore = Arc::new(ignore);
ignore_stack = ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
new_ignore = Some(ignore); new_ignore = Some(ignore);
} }
Err(error) => { Err(error) => {
@ -2440,7 +2431,7 @@ impl BackgroundScanner {
// new jobs as well. // new jobs as well.
let mut new_jobs = new_jobs.iter_mut(); let mut new_jobs = new_jobs.iter_mut();
for entry in &mut new_entries { for entry in &mut new_entries {
let entry_abs_path = self.abs_path().join(&entry.path); let entry_abs_path = root_abs_path.join(&entry.path);
entry.is_ignored = entry.is_ignored =
ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir()); ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
@ -2509,69 +2500,18 @@ impl BackgroundScanner {
Ok(()) Ok(())
} }
async fn process_events( async fn reload_entries_for_paths(
&self,
abs_paths: Vec<PathBuf>,
received_before_initialized: bool,
) -> Option<HashMap<Arc<Path>, PathChange>> {
let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
let prev_snapshot = {
let mut snapshot = self.snapshot.lock();
snapshot.scan_started();
snapshot.clone()
};
let event_paths = self
.update_entries_for_paths(abs_paths, Some(scan_queue_tx))
.await?;
// Scan any directories that were created as part of this event batch.
self.executor
.scoped(|scope| {
for _ in 0..self.executor.num_cpus() {
scope.spawn(async {
while let Ok(job) = scan_queue_rx.recv().await {
if let Err(err) = self
.scan_dir(
prev_snapshot.root_char_bag,
prev_snapshot.next_entry_id.clone(),
&job,
)
.await
{
log::error!("error scanning {:?}: {}", job.abs_path, err);
}
}
});
}
})
.await;
// Attempt to detect renames only over a single batch of file-system events.
self.snapshot.lock().removed_entry_ids.clear();
self.update_ignore_statuses().await;
self.update_git_repositories();
let changes = self.build_change_set(
prev_snapshot.snapshot,
event_paths,
received_before_initialized,
);
self.snapshot.lock().scan_completed();
Some(changes)
}
async fn update_entries_for_paths(
&self, &self,
mut abs_paths: Vec<PathBuf>, mut abs_paths: Vec<PathBuf>,
scan_queue_tx: Option<Sender<ScanJob>>, scan_queue_tx: Option<Sender<ScanJob>>,
) -> Option<Vec<Arc<Path>>> { ) -> Option<Vec<Arc<Path>>> {
let doing_recursive_update = scan_queue_tx.is_some();
abs_paths.sort_unstable(); abs_paths.sort_unstable();
abs_paths.dedup_by(|a, b| a.starts_with(&b)); abs_paths.dedup_by(|a, b| a.starts_with(&b));
let root_abs_path = self.snapshot.lock().abs_path.clone(); let root_abs_path = self.snapshot.lock().abs_path.clone();
let root_canonical_path = self.fs.canonicalize(&root_abs_path).await.ok()?; let root_canonical_path = self.fs.canonicalize(&root_abs_path).await.log_err()?;
let metadata = futures::future::join_all( let metadata = futures::future::join_all(
abs_paths abs_paths
.iter() .iter()
@ -2581,29 +2521,35 @@ impl BackgroundScanner {
.await; .await;
let mut snapshot = self.snapshot.lock(); let mut snapshot = self.snapshot.lock();
if scan_queue_tx.is_some() {
for abs_path in &abs_paths { if snapshot.completed_scan_id == snapshot.scan_id {
if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) { snapshot.scan_id += 1;
snapshot.remove_path(path); if !doing_recursive_update {
} snapshot.completed_scan_id = snapshot.scan_id;
} }
} }
let mut event_paths = Vec::with_capacity(abs_paths.len()); // Remove any entries for paths that no longer exist or are being recursively
for (abs_path, metadata) in abs_paths.into_iter().zip(metadata.into_iter()) { // refreshed. Do this before adding any new entries, so that renames can be
let path: Arc<Path> = match abs_path.strip_prefix(&root_canonical_path) { // detected regardless of the order of the paths.
Ok(path) => Arc::from(path.to_path_buf()), let mut event_paths = Vec::<Arc<Path>>::with_capacity(abs_paths.len());
Err(_) => { for (abs_path, metadata) in abs_paths.iter().zip(metadata.iter()) {
if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
if matches!(metadata, Ok(None)) || doing_recursive_update {
snapshot.remove_path(path);
}
event_paths.push(path.into());
} else {
log::error!( log::error!(
"unexpected event {:?} for root path {:?}", "unexpected event {:?} for root path {:?}",
abs_path, abs_path,
root_canonical_path root_canonical_path
); );
continue;
} }
}; }
event_paths.push(path.clone());
let abs_path = root_abs_path.join(&path); for (path, metadata) in event_paths.iter().cloned().zip(metadata.into_iter()) {
let abs_path: Arc<Path> = root_abs_path.join(&path).into();
match metadata { match metadata {
Ok(Some(metadata)) => { Ok(Some(metadata)) => {
@ -2628,8 +2574,7 @@ impl BackgroundScanner {
let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) { if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
ancestor_inodes.insert(metadata.inode); ancestor_inodes.insert(metadata.inode);
self.executor smol::block_on(scan_queue_tx.send(ScanJob {
.block(scan_queue_tx.send(ScanJob {
abs_path, abs_path,
path, path,
ignore_stack, ignore_stack,
@ -2651,7 +2596,10 @@ impl BackgroundScanner {
Some(event_paths) Some(event_paths)
} }
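
reload_entries_for_paths sorts the incoming paths and then dedups by prefix, so any path whose ancestor is also in the batch is dropped: the ancestor's recursive rescan already covers it. The idiom in isolation:

// After sorting, a child path immediately follows its ancestors, so a
// prefix-based dedup keeps only the topmost path of each subtree.
fn dedup_by_prefix(mut paths: Vec<std::path::PathBuf>) -> Vec<std::path::PathBuf> {
    paths.sort_unstable();
    paths.dedup_by(|a, b| a.starts_with(&b));
    paths
}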
async fn update_ignore_statuses(&self) { fn update_ignore_statuses(
&self,
ignore_queue_tx: Sender<UpdateIgnoreStatusJob>,
) -> LocalSnapshot {
let mut snapshot = self.snapshot.lock().clone(); let mut snapshot = self.snapshot.lock().clone();
let mut ignores_to_update = Vec::new(); let mut ignores_to_update = Vec::new();
let mut ignores_to_delete = Vec::new(); let mut ignores_to_delete = Vec::new();
@ -2676,7 +2624,6 @@ impl BackgroundScanner {
.remove(&parent_abs_path); .remove(&parent_abs_path);
} }
let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
ignores_to_update.sort_unstable(); ignores_to_update.sort_unstable();
let mut ignores_to_update = ignores_to_update.into_iter().peekable(); let mut ignores_to_update = ignores_to_update.into_iter().peekable();
while let Some(parent_abs_path) = ignores_to_update.next() { while let Some(parent_abs_path) = ignores_to_update.next() {
@ -2688,35 +2635,15 @@ impl BackgroundScanner {
} }
let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true); let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
ignore_queue_tx smol::block_on(ignore_queue_tx.send(UpdateIgnoreStatusJob {
.send(UpdateIgnoreStatusJob {
abs_path: parent_abs_path, abs_path: parent_abs_path,
ignore_stack, ignore_stack,
ignore_queue: ignore_queue_tx.clone(), ignore_queue: ignore_queue_tx.clone(),
}) }))
.await
.unwrap(); .unwrap();
} }
drop(ignore_queue_tx);
self.executor snapshot
.scoped(|scope| {
for _ in 0..self.executor.num_cpus() {
scope.spawn(async {
while let Ok(job) = ignore_queue_rx.recv().await {
self.update_ignore_status(job, &snapshot).await;
}
});
}
})
.await;
}
fn update_git_repositories(&self) {
let mut snapshot = self.snapshot.lock();
let mut git_repositories = mem::take(&mut snapshot.git_repositories);
git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
snapshot.git_repositories = git_repositories;
} }
async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) { async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
@ -2730,7 +2657,7 @@ impl BackgroundScanner {
let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap(); let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
for mut entry in snapshot.child_entries(path).cloned() { for mut entry in snapshot.child_entries(path).cloned() {
let was_ignored = entry.is_ignored; let was_ignored = entry.is_ignored;
let abs_path = self.abs_path().join(&entry.path); let abs_path = snapshot.abs_path().join(&entry.path);
entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir()); entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
if entry.is_dir() { if entry.is_dir() {
let child_ignore_stack = if entry.is_ignored { let child_ignore_stack = if entry.is_ignored {
@ -2764,16 +2691,16 @@ impl BackgroundScanner {
fn build_change_set( fn build_change_set(
&self, &self,
old_snapshot: Snapshot, old_snapshot: &Snapshot,
new_snapshot: &Snapshot,
event_paths: Vec<Arc<Path>>, event_paths: Vec<Arc<Path>>,
received_before_initialized: bool,
) -> HashMap<Arc<Path>, PathChange> { ) -> HashMap<Arc<Path>, PathChange> {
use PathChange::{Added, AddedOrUpdated, Removed, Updated}; use PathChange::{Added, AddedOrUpdated, Removed, Updated};
let new_snapshot = self.snapshot.lock();
let mut changes = HashMap::default(); let mut changes = HashMap::default();
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(); let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(); let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
let received_before_initialized = !self.finished_initial_scan;
for path in event_paths { for path in event_paths {
let path = PathKey(path); let path = PathKey(path);
@ -2801,9 +2728,9 @@ impl BackgroundScanner {
// If the worktree was not fully initialized when this event was generated, // If the worktree was not fully initialized when this event was generated,
// we can't know whether this entry was added during the scan or whether // we can't know whether this entry was added during the scan or whether
// it was merely updated. // it was merely updated.
changes.insert(old_entry.path.clone(), AddedOrUpdated); changes.insert(new_entry.path.clone(), AddedOrUpdated);
} else if old_entry.mtime != new_entry.mtime { } else if old_entry.mtime != new_entry.mtime {
changes.insert(old_entry.path.clone(), Updated); changes.insert(new_entry.path.clone(), Updated);
} }
old_paths.next(&()); old_paths.next(&());
new_paths.next(&()); new_paths.next(&());
@ -2828,6 +2755,19 @@ impl BackgroundScanner {
} }
changes changes
} }
async fn progress_timer(&self, running: bool) {
if !running {
return futures::future::pending().await;
}
#[cfg(any(test, feature = "test-support"))]
if self.fs.is_fake() {
return self.executor.simulate_random_delay().await;
}
smol::Timer::after(Duration::from_millis(100)).await;
}
} }
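`progress_timer` pends forever when progress reporting is off, which lets a caller race it against other work without special-casing. A sketch of such a race using `futures::future::select` and stand-in futures (the fake-fs and test-executor branches are omitted; the 250ms timer merely simulates the scan):

use futures::future::{self, Either};
use std::time::Duration;

// Stand-in for the method above: never resolves unless `running` is true.
async fn progress_timer(running: bool) {
    if !running {
        return future::pending().await;
    }
    smol::Timer::after(Duration::from_millis(100)).await;
}

fn main() {
    smol::block_on(async {
        let mut work = Box::pin(smol::Timer::after(Duration::from_millis(250)));
        loop {
            match future::select(Box::pin(progress_timer(true)), work).await {
                Either::Left((_, unfinished_work)) => {
                    println!("tick: a progress update would be sent here");
                    work = unfinished_work;
                }
                Either::Right(_) => break, // the "scan" finished
            }
        }
    });
}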
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
@ -2841,7 +2781,7 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
} }
struct ScanJob { struct ScanJob {
abs_path: PathBuf, abs_path: Arc<Path>,
path: Arc<Path>, path: Arc<Path>,
ignore_stack: Arc<IgnoreStack>, ignore_stack: Arc<IgnoreStack>,
scan_queue: Sender<ScanJob>, scan_queue: Sender<ScanJob>,
@ -3526,7 +3466,7 @@ mod tests {
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/a", "/root",
json!({ json!({
"b": {}, "b": {},
"c": {}, "c": {},
@ -3537,7 +3477,7 @@ mod tests {
let tree = Worktree::local( let tree = Worktree::local(
client, client,
"/a".as_ref(), "/root".as_ref(),
true, true,
fs, fs,
Default::default(), Default::default(),
@ -3557,6 +3497,7 @@ mod tests {
assert!(entry.is_dir()); assert!(entry.is_dir());
cx.foreground().run_until_parked(); cx.foreground().run_until_parked();
tree.read_with(cx, |tree, _| { tree.read_with(cx, |tree, _| {
assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir); assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
}); });
@ -3726,7 +3667,7 @@ mod tests {
) { ) {
let mut files = Vec::new(); let mut files = Vec::new();
let mut dirs = Vec::new(); let mut dirs = Vec::new();
for path in fs.as_fake().paths().await { for path in fs.as_fake().paths() {
if path.starts_with(root_path) { if path.starts_with(root_path) {
if fs.is_file(&path).await { if fs.is_file(&path).await {
files.push(path); files.push(path);


@ -7,7 +7,7 @@ use collections::HashMap;
use futures::{ use futures::{
channel::{mpsc, oneshot}, channel::{mpsc, oneshot},
stream::BoxStream, stream::BoxStream,
FutureExt, SinkExt, StreamExt, FutureExt, SinkExt, StreamExt, TryFutureExt,
}; };
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
use serde::{ser::SerializeStruct, Serialize}; use serde::{ser::SerializeStruct, Serialize};
@ -71,6 +71,7 @@ impl<T> Clone for Receipt<T> {
impl<T> Copy for Receipt<T> {} impl<T> Copy for Receipt<T> {}
#[derive(Clone, Debug)]
pub struct TypedEnvelope<T> { pub struct TypedEnvelope<T> {
pub sender_id: ConnectionId, pub sender_id: ConnectionId,
pub original_sender_id: Option<PeerId>, pub original_sender_id: Option<PeerId>,
@ -370,6 +371,15 @@ impl Peer {
receiver_id: ConnectionId, receiver_id: ConnectionId,
request: T, request: T,
    ) -> impl Future<Output = Result<T::Response>> {
        self.request_internal(None, receiver_id, request)
+            .map_ok(|envelope| envelope.payload)
+    }
+
+    pub fn request_envelope<T: RequestMessage>(
+        &self,
+        receiver_id: ConnectionId,
+        request: T,
+    ) -> impl Future<Output = Result<TypedEnvelope<T::Response>>> {
+        self.request_internal(None, receiver_id, request)
    }
@ -380,6 +390,7 @@ impl Peer {
request: T, request: T,
) -> impl Future<Output = Result<T::Response>> { ) -> impl Future<Output = Result<T::Response>> {
self.request_internal(Some(sender_id), receiver_id, request) self.request_internal(Some(sender_id), receiver_id, request)
.map_ok(|envelope| envelope.payload)
} }
pub fn request_internal<T: RequestMessage>( pub fn request_internal<T: RequestMessage>(
@ -387,7 +398,7 @@ impl Peer {
original_sender_id: Option<ConnectionId>, original_sender_id: Option<ConnectionId>,
receiver_id: ConnectionId, receiver_id: ConnectionId,
request: T, request: T,
) -> impl Future<Output = Result<T::Response>> { ) -> impl Future<Output = Result<TypedEnvelope<T::Response>>> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
let send = self.connection_state(receiver_id).and_then(|connection| { let send = self.connection_state(receiver_id).and_then(|connection| {
let message_id = connection.next_message_id.fetch_add(1, SeqCst); let message_id = connection.next_message_id.fetch_add(1, SeqCst);
@ -410,6 +421,7 @@ impl Peer {
async move { async move {
send?; send?;
let (response, _barrier) = rx.await.map_err(|_| anyhow!("connection was closed"))?; let (response, _barrier) = rx.await.map_err(|_| anyhow!("connection was closed"))?;
if let Some(proto::envelope::Payload::Error(error)) = &response.payload { if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
Err(anyhow!( Err(anyhow!(
"RPC request {} failed - {}", "RPC request {} failed - {}",
@ -417,8 +429,13 @@ impl Peer {
error.message error.message
)) ))
} else { } else {
-                T::Response::from_envelope(response)
-                    .ok_or_else(|| anyhow!("received response of the wrong type"))
+                Ok(TypedEnvelope {
+                    message_id: response.id,
+                    sender_id: receiver_id,
+                    original_sender_id: response.original_sender_id,
+                    payload: T::Response::from_envelope(response)
+                        .ok_or_else(|| anyhow!("received response of the wrong type"))?,
+                })
} }
} }
} }
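With this change, `request` keeps its payload-only signature while the new `request_envelope` also surfaces response metadata. A hedged sketch of a call site; `proto::Ping` and its `Ack` response stand in for any message pair registered with `request_messages!`, and the `rpc` import path is assumed:

use anyhow::Result;
use rpc::{proto, ConnectionId, Peer};

// Hypothetical call site: only the envelope variant exposes the
// server-assigned message id.
async fn ping(peer: &Peer, conn_id: ConnectionId) -> Result<u32> {
    let _ack = peer.request(conn_id, proto::Ping {}).await?; // payload only
    let envelope = peer.request_envelope(conn_id, proto::Ping {}).await?;
    Ok(envelope.message_id) // `envelope.payload` carries the same `Ack`
}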


@ -233,7 +233,7 @@ messages!(
(UpdateProject, Foreground), (UpdateProject, Foreground),
(UpdateProjectCollaborator, Foreground), (UpdateProjectCollaborator, Foreground),
(UpdateWorktree, Foreground), (UpdateWorktree, Foreground),
(UpdateDiffBase, Background), (UpdateDiffBase, Foreground),
(GetPrivateUserInfo, Foreground), (GetPrivateUserInfo, Foreground),
(GetPrivateUserInfoResponse, Foreground), (GetPrivateUserInfoResponse, Foreground),
); );


@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*; pub use peer::*;
mod macros; mod macros;
pub const PROTOCOL_VERSION: u32 = 50; pub const PROTOCOL_VERSION: u32 = 51;


@ -154,6 +154,12 @@ impl<K> TreeSet<K>
where where
K: Clone + Debug + Default + Ord, K: Clone + Debug + Default + Ord,
{ {
pub fn from_ordered_entries(entries: impl IntoIterator<Item = K>) -> Self {
Self(TreeMap::from_ordered_entries(
entries.into_iter().map(|key| (key, ())),
))
}
pub fn insert(&mut self, key: K) { pub fn insert(&mut self, key: K) {
self.0.insert(key, ()); self.0.insert(key, ());
} }


@ -11,14 +11,14 @@ mod tests;
mod undo_map; mod undo_map;
pub use anchor::*; pub use anchor::*;
use anyhow::Result; use anyhow::{anyhow, Result};
use clock::ReplicaId; use clock::ReplicaId;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use fs::LineEnding; use fs::LineEnding;
use locator::Locator; use locator::Locator;
use operation_queue::OperationQueue; use operation_queue::OperationQueue;
pub use patch::Patch; pub use patch::Patch;
use postage::{barrier, oneshot, prelude::*}; use postage::{oneshot, prelude::*};
pub use rope::*; pub use rope::*;
pub use selection::*; pub use selection::*;
@ -52,7 +52,7 @@ pub struct Buffer {
pub lamport_clock: clock::Lamport, pub lamport_clock: clock::Lamport,
subscriptions: Topic, subscriptions: Topic,
edit_id_resolvers: HashMap<clock::Local, Vec<oneshot::Sender<()>>>, edit_id_resolvers: HashMap<clock::Local, Vec<oneshot::Sender<()>>>,
version_barriers: Vec<(clock::Global, barrier::Sender)>, wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
} }
#[derive(Clone)] #[derive(Clone)]
@ -522,7 +522,7 @@ impl Buffer {
lamport_clock, lamport_clock,
subscriptions: Default::default(), subscriptions: Default::default(),
edit_id_resolvers: Default::default(), edit_id_resolvers: Default::default(),
version_barriers: Default::default(), wait_for_version_txs: Default::default(),
} }
} }
@ -793,8 +793,14 @@ impl Buffer {
} }
} }
} }
-        self.version_barriers
-            .retain(|(version, _)| !self.snapshot.version().observed_all(version));
+        self.wait_for_version_txs.retain_mut(|(version, tx)| {
+            if self.snapshot.version().observed_all(version) {
+                tx.try_send(()).ok();
+                false
+            } else {
+                true
+            }
+        });
Ok(()) Ok(())
} }
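The `retain_mut` above wakes and prunes satisfied waiters in a single pass. A self-contained sketch of the same notify-and-drop pattern over plain integers, assuming postage's oneshot channel (the same `try_send(()).ok()` idiom as the diff):

use postage::{oneshot, prelude::*};

fn main() {
    // Waiters keyed by the version they are waiting for.
    let mut waiters: Vec<(u32, oneshot::Sender<()>)> = Vec::new();
    let mut receivers = Vec::new(); // kept alive so try_send can succeed
    for wanted in [1u32, 5, 9] {
        let (tx, rx) = oneshot::channel();
        waiters.push((wanted, tx));
        receivers.push(rx);
    }

    let observed = 5u32;
    // Wake satisfied waiters and drop them from the list; keep the rest.
    waiters.retain_mut(|(wanted, tx)| {
        if *wanted <= observed {
            tx.try_send(()).ok();
            false
        } else {
            true
        }
    });
    assert_eq!(waiters.len(), 1); // only the waiter for version 9 remains
}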
@ -1305,7 +1311,7 @@ impl Buffer {
pub fn wait_for_edits( pub fn wait_for_edits(
&mut self, &mut self,
edit_ids: impl IntoIterator<Item = clock::Local>, edit_ids: impl IntoIterator<Item = clock::Local>,
) -> impl 'static + Future<Output = ()> { ) -> impl 'static + Future<Output = Result<()>> {
let mut futures = Vec::new(); let mut futures = Vec::new();
for edit_id in edit_ids { for edit_id in edit_ids {
if !self.version.observed(edit_id) { if !self.version.observed(edit_id) {
@ -1317,15 +1323,18 @@ impl Buffer {
        async move {
            for mut future in futures {
-                future.recv().await;
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for edits"))?;
+                }
            }
+            Ok(())
        }
    }
pub fn wait_for_anchors<'a>( pub fn wait_for_anchors<'a>(
&mut self, &mut self,
anchors: impl IntoIterator<Item = &'a Anchor>, anchors: impl IntoIterator<Item = &'a Anchor>,
) -> impl 'static + Future<Output = ()> { ) -> impl 'static + Future<Output = Result<()>> {
let mut futures = Vec::new(); let mut futures = Vec::new();
for anchor in anchors { for anchor in anchors {
if !self.version.observed(anchor.timestamp) if !self.version.observed(anchor.timestamp)
@ -1343,20 +1352,35 @@ impl Buffer {
        async move {
            for mut future in futures {
-                future.recv().await;
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for anchors"))?;
+                }
            }
+            Ok(())
        }
    }
-    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = ()> {
-        let (tx, mut rx) = barrier::channel();
+    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
+        let mut rx = None;
        if !self.snapshot.version.observed_all(&version) {
-            self.version_barriers.push((version, tx));
+            let channel = oneshot::channel();
+            self.wait_for_version_txs.push((version, channel.0));
+            rx = Some(channel.1);
        }
        async move {
-            rx.recv().await;
+            if let Some(mut rx) = rx {
+                if rx.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for version"))?;
+                }
+            }
+            Ok(())
        }
    }
pub fn give_up_waiting(&mut self) {
self.edit_id_resolvers.clear();
self.wait_for_version_txs.clear();
}
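Dropping the stored senders is what turns a pending wait into an error: a postage oneshot receiver yields `None` once its sender is gone. A minimal sketch of both outcomes:

use postage::{oneshot, prelude::*};

fn main() {
    // A waiter that is satisfied before the sender goes away.
    let (mut tx, mut rx) = oneshot::channel::<()>();
    tx.try_send(()).ok();
    assert_eq!(smol::block_on(rx.recv()), Some(()));

    // A waiter that is abandoned, as when `give_up_waiting` clears the
    // sender list: recv() resolves to None and the caller maps that to a
    // "gave up waiting" error.
    let (tx, mut rx) = oneshot::channel::<()>();
    drop(tx);
    assert_eq!(smol::block_on(rx.recv()), None);
}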
fn resolve_edit(&mut self, edit_id: clock::Local) { fn resolve_edit(&mut self, edit_id: clock::Local) {
for mut tx in self for mut tx in self
@ -1365,7 +1389,7 @@ impl Buffer {
.into_iter() .into_iter()
.flatten() .flatten()
{ {
let _ = tx.try_send(()); tx.try_send(()).ok();
} }
} }
} }
@ -1480,12 +1504,11 @@ impl Buffer {
start..end start..end
} }
-    #[allow(clippy::type_complexity)]
-    pub fn randomly_edit<T>(
-        &mut self,
+    pub fn get_random_edits<T>(
+        &self,
        rng: &mut T,
        edit_count: usize,
-    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
+    ) -> Vec<(Range<usize>, Arc<str>)>
where where
T: rand::Rng, T: rand::Rng,
{ {
@ -1504,8 +1527,21 @@ impl Buffer {
edits.push((range, new_text.into())); edits.push((range, new_text.into()));
} }
edits
}
#[allow(clippy::type_complexity)]
pub fn randomly_edit<T>(
&mut self,
rng: &mut T,
edit_count: usize,
) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
where
T: rand::Rng,
{
let mut edits = self.get_random_edits(rng, edit_count);
log::info!("mutating buffer {} with {:?}", self.replica_id, edits); log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
let op = self.edit(edits.iter().cloned()); let op = self.edit(edits.iter().cloned());
if let Operation::Edit(edit) = &op { if let Operation::Edit(edit) = &op {
assert_eq!(edits.len(), edit.new_text.len()); assert_eq!(edits.len(), edit.new_text.len());


@ -9,13 +9,13 @@ pub struct GitHubLspBinaryVersion {
pub url: String, pub url: String,
} }
#[derive(Deserialize)] #[derive(Deserialize, Debug)]
pub struct GithubRelease { pub struct GithubRelease {
pub name: String, pub name: String,
pub assets: Vec<GithubReleaseAsset>, pub assets: Vec<GithubReleaseAsset>,
} }
#[derive(Deserialize)] #[derive(Deserialize, Debug)]
pub struct GithubReleaseAsset { pub struct GithubReleaseAsset {
pub name: String, pub name: String,
pub browser_download_url: String, pub browser_download_url: String,
@ -40,7 +40,13 @@ pub async fn latest_github_release(
.await .await
.context("error reading latest release")?; .context("error reading latest release")?;
-    let release: GithubRelease =
-        serde_json::from_slice(body.as_slice()).context("error deserializing latest release")?;
-    Ok(release)
+    let release = serde_json::from_slice::<GithubRelease>(body.as_slice());
+
+    if release.is_err() {
+        log::error!(
+            "Github API response text: {:?}",
+            String::from_utf8_lossy(body.as_slice())
+        );
+    }
+
+    release.context("error deserializing latest release")
} }
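The parse result is now held as a `Result` so the raw body can be logged before it is converted into an error. A standalone sketch of the same pattern with a hypothetical `Release` type (`env_logger` is only for the demo; any logger works):

use anyhow::Context;
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Release {
    name: String,
}

fn parse_release(body: &[u8]) -> anyhow::Result<Release> {
    let release = serde_json::from_slice::<Release>(body);
    if release.is_err() {
        // Log the raw response before discarding it; API errors are often
        // HTML or a JSON error object rather than the expected shape.
        log::error!("API response text: {:?}", String::from_utf8_lossy(body));
    }
    release.context("error deserializing release")
}

fn main() {
    env_logger::init();
    println!("{:?}", parse_release(br#"{"name":"v1.0"}"#));
    println!("{:?}", parse_release(b"<html>rate limited</html>"));
}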


@ -417,7 +417,7 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
for item_event in T::to_item_events(event).into_iter() { for item_event in T::to_item_events(event).into_iter() {
match item_event { match item_event {
ItemEvent::CloseItem => { ItemEvent::CloseItem => {
Pane::close_item(workspace, pane, item.id(), cx) Pane::close_item_by_id(workspace, pane, item.id(), cx)
.detach_and_log_err(cx); .detach_and_log_err(cx);
return; return;
} }


@ -23,8 +23,8 @@ use gpui::{
impl_actions, impl_internal_actions, impl_actions, impl_internal_actions,
keymap_matcher::KeymapContext, keymap_matcher::KeymapContext,
platform::{CursorStyle, MouseButton, NavigationDirection, PromptLevel}, platform::{CursorStyle, MouseButton, NavigationDirection, PromptLevel},
Action, AnyViewHandle, AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, EventContext, Action, AnyViewHandle, AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelHandle,
ModelHandle, MouseRegion, Quad, Task, View, ViewContext, ViewHandle, WeakViewHandle, MouseRegion, Quad, Task, View, ViewContext, ViewHandle, WeakViewHandle,
}; };
use project::{Project, ProjectEntryId, ProjectPath}; use project::{Project, ProjectEntryId, ProjectPath};
use serde::Deserialize; use serde::Deserialize;
@ -36,6 +36,24 @@ use util::ResultExt;
#[derive(Clone, Deserialize, PartialEq)] #[derive(Clone, Deserialize, PartialEq)]
pub struct ActivateItem(pub usize); pub struct ActivateItem(pub usize);
#[derive(Clone, PartialEq)]
pub struct CloseItemById {
pub item_id: usize,
pub pane: WeakViewHandle<Pane>,
}
#[derive(Clone, PartialEq)]
pub struct CloseItemsToTheLeftById {
pub item_id: usize,
pub pane: WeakViewHandle<Pane>,
}
#[derive(Clone, PartialEq)]
pub struct CloseItemsToTheRightById {
pub item_id: usize,
pub pane: WeakViewHandle<Pane>,
}
actions!( actions!(
pane, pane,
[ [
@ -56,12 +74,6 @@ actions!(
] ]
); );
#[derive(Clone, PartialEq)]
pub struct CloseItem {
pub item_id: usize,
pub pane: WeakViewHandle<Pane>,
}
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct MoveItem { pub struct MoveItem {
pub item_id: usize, pub item_id: usize,
@ -91,11 +103,21 @@ pub struct DeployDockMenu;
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct DeployNewMenu; pub struct DeployNewMenu;
#[derive(Clone, PartialEq)]
pub struct DeployTabContextMenu {
pub position: Vector2F,
pub item_id: usize,
pub pane: WeakViewHandle<Pane>,
}
impl_actions!(pane, [GoBack, GoForward, ActivateItem]); impl_actions!(pane, [GoBack, GoForward, ActivateItem]);
impl_internal_actions!( impl_internal_actions!(
pane, pane,
[ [
CloseItem, CloseItemById,
CloseItemsToTheLeftById,
CloseItemsToTheRightById,
DeployTabContextMenu,
DeploySplitMenu, DeploySplitMenu,
DeployNewMenu, DeployNewMenu,
DeployDockMenu, DeployDockMenu,
@ -126,14 +148,34 @@ pub fn init(cx: &mut AppContext) {
cx.add_async_action(Pane::close_items_to_the_left); cx.add_async_action(Pane::close_items_to_the_left);
cx.add_async_action(Pane::close_items_to_the_right); cx.add_async_action(Pane::close_items_to_the_right);
cx.add_async_action(Pane::close_all_items); cx.add_async_action(Pane::close_all_items);
cx.add_async_action(|workspace: &mut Workspace, action: &CloseItem, cx| { cx.add_async_action(|workspace: &mut Workspace, action: &CloseItemById, cx| {
let pane = action.pane.upgrade(cx)?; let pane = action.pane.upgrade(cx)?;
let task = Pane::close_item(workspace, pane, action.item_id, cx); let task = Pane::close_item_by_id(workspace, pane, action.item_id, cx);
Some(cx.foreground().spawn(async move { Some(cx.foreground().spawn(async move {
task.await?; task.await?;
Ok(()) Ok(())
})) }))
}); });
cx.add_async_action(
|workspace: &mut Workspace, action: &CloseItemsToTheLeftById, cx| {
let pane = action.pane.upgrade(cx)?;
let task = Pane::close_items_to_the_left_by_id(workspace, pane, action.item_id, cx);
Some(cx.foreground().spawn(async move {
task.await?;
Ok(())
}))
},
);
cx.add_async_action(
|workspace: &mut Workspace, action: &CloseItemsToTheRightById, cx| {
let pane = action.pane.upgrade(cx)?;
let task = Pane::close_items_to_the_right_by_id(workspace, pane, action.item_id, cx);
Some(cx.foreground().spawn(async move {
task.await?;
Ok(())
}))
},
);
cx.add_action( cx.add_action(
|workspace, |workspace,
MoveItem { MoveItem {
@ -167,6 +209,7 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(Pane::deploy_split_menu); cx.add_action(Pane::deploy_split_menu);
cx.add_action(Pane::deploy_dock_menu); cx.add_action(Pane::deploy_dock_menu);
cx.add_action(Pane::deploy_new_menu); cx.add_action(Pane::deploy_new_menu);
cx.add_action(Pane::deploy_tab_context_menu);
cx.add_action(|workspace: &mut Workspace, _: &ReopenClosedItem, cx| { cx.add_action(|workspace: &mut Workspace, _: &ReopenClosedItem, cx| {
Pane::reopen_closed_item(workspace, cx).detach(); Pane::reopen_closed_item(workspace, cx).detach();
}); });
@ -213,6 +256,7 @@ pub struct Pane {
nav_history: Rc<RefCell<NavHistory>>, nav_history: Rc<RefCell<NavHistory>>,
toolbar: ViewHandle<Toolbar>, toolbar: ViewHandle<Toolbar>,
tab_bar_context_menu: TabBarContextMenu, tab_bar_context_menu: TabBarContextMenu,
tab_context_menu: ViewHandle<ContextMenu>,
docked: Option<DockAnchor>, docked: Option<DockAnchor>,
_background_actions: BackgroundActions, _background_actions: BackgroundActions,
_workspace_id: usize, _workspace_id: usize,
@ -318,6 +362,7 @@ impl Pane {
kind: TabBarContextMenuKind::New, kind: TabBarContextMenuKind::New,
handle: context_menu, handle: context_menu,
}, },
tab_context_menu: cx.add_view(ContextMenu::new),
docked, docked,
_background_actions: background_actions, _background_actions: background_actions,
_workspace_id: workspace_id, _workspace_id: workspace_id,
@ -741,9 +786,7 @@ impl Pane {
let pane = pane_handle.read(cx); let pane = pane_handle.read(cx);
let active_item_id = pane.items[pane.active_item_index].id(); let active_item_id = pane.items[pane.active_item_index].id();
-        let task = Self::close_items(workspace, pane_handle, cx, move |item_id| {
-            item_id == active_item_id
-        });
+        let task = Self::close_item_by_id(workspace, pane_handle, active_item_id, cx);
Some(cx.foreground().spawn(async move { Some(cx.foreground().spawn(async move {
task.await?; task.await?;
@ -751,6 +794,17 @@ impl Pane {
})) }))
} }
pub fn close_item_by_id(
workspace: &mut Workspace,
pane: ViewHandle<Pane>,
item_id_to_close: usize,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
Self::close_items(workspace, pane, cx, move |view_id| {
view_id == item_id_to_close
})
}
pub fn close_inactive_items( pub fn close_inactive_items(
workspace: &mut Workspace, workspace: &mut Workspace,
_: &CloseInactiveItems, _: &CloseInactiveItems,
@ -803,15 +857,7 @@ impl Pane {
let pane = pane_handle.read(cx); let pane = pane_handle.read(cx);
let active_item_id = pane.items[pane.active_item_index].id(); let active_item_id = pane.items[pane.active_item_index].id();
-        let item_ids: Vec<_> = pane
-            .items()
-            .take_while(|item| item.id() != active_item_id)
-            .map(|item| item.id())
-            .collect();
-        let task = Self::close_items(workspace, pane_handle, cx, move |item_id| {
-            item_ids.contains(&item_id)
-        });
+        let task = Self::close_items_to_the_left_by_id(workspace, pane_handle, active_item_id, cx);
Some(cx.foreground().spawn(async move { Some(cx.foreground().spawn(async move {
task.await?; task.await?;
@ -819,6 +865,29 @@ impl Pane {
})) }))
} }
pub fn close_items_to_the_left_by_id(
workspace: &mut Workspace,
pane: ViewHandle<Pane>,
item_id: usize,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
let item_ids: Vec<_> = pane
.read(cx)
.items()
.take_while(|item| item.id() != item_id)
.map(|item| item.id())
.collect();
let task = Self::close_items(workspace, pane, cx, move |item_id| {
item_ids.contains(&item_id)
});
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
}
pub fn close_items_to_the_right( pub fn close_items_to_the_right(
workspace: &mut Workspace, workspace: &mut Workspace,
_: &CloseItemsToTheRight, _: &CloseItemsToTheRight,
@ -828,16 +897,7 @@ impl Pane {
let pane = pane_handle.read(cx); let pane = pane_handle.read(cx);
let active_item_id = pane.items[pane.active_item_index].id(); let active_item_id = pane.items[pane.active_item_index].id();
-        let item_ids: Vec<_> = pane
-            .items()
-            .rev()
-            .take_while(|item| item.id() != active_item_id)
-            .map(|item| item.id())
-            .collect();
-        let task = Self::close_items(workspace, pane_handle, cx, move |item_id| {
-            item_ids.contains(&item_id)
-        });
+        let task = Self::close_items_to_the_right_by_id(workspace, pane_handle, active_item_id, cx);
Some(cx.foreground().spawn(async move { Some(cx.foreground().spawn(async move {
task.await?; task.await?;
@ -845,6 +905,30 @@ impl Pane {
})) }))
} }
pub fn close_items_to_the_right_by_id(
workspace: &mut Workspace,
pane: ViewHandle<Pane>,
item_id: usize,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
let item_ids: Vec<_> = pane
.read(cx)
.items()
.rev()
.take_while(|item| item.id() != item_id)
.map(|item| item.id())
.collect();
let task = Self::close_items(workspace, pane, cx, move |item_id| {
item_ids.contains(&item_id)
});
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
}
pub fn close_all_items( pub fn close_all_items(
workspace: &mut Workspace, workspace: &mut Workspace,
_: &CloseAllItems, _: &CloseAllItems,
@ -860,17 +944,6 @@ impl Pane {
})) }))
} }
pub fn close_item(
workspace: &mut Workspace,
pane: ViewHandle<Pane>,
item_id_to_close: usize,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
Self::close_items(workspace, pane, cx, move |view_id| {
view_id == item_id_to_close
})
}
pub fn close_items( pub fn close_items(
workspace: &mut Workspace, workspace: &mut Workspace,
pane: ViewHandle<Pane>, pane: ViewHandle<Pane>,
@ -1206,6 +1279,65 @@ impl Pane {
self.tab_bar_context_menu.kind = TabBarContextMenuKind::New; self.tab_bar_context_menu.kind = TabBarContextMenuKind::New;
} }
fn deploy_tab_context_menu(
&mut self,
action: &DeployTabContextMenu,
cx: &mut ViewContext<Self>,
) {
let target_item_id = action.item_id;
let target_pane = action.pane.clone();
let active_item_id = self.items[self.active_item_index].id();
let is_active_item = target_item_id == active_item_id;
        // The `CloseInactiveItems` action should really be called "CloseOthers", and its behavior should depend on the tab the action is run on. Currently, this is a weird action because you can run it on a non-active tab and it will close everything but the actual active tab.
self.tab_context_menu.update(cx, |menu, cx| {
menu.show(
action.position,
AnchorCorner::TopLeft,
if is_active_item {
vec![
ContextMenuItem::item("Close Active Item", CloseActiveItem),
ContextMenuItem::item("Close Inactive Items", CloseInactiveItems),
ContextMenuItem::item("Close Clean Items", CloseCleanItems),
ContextMenuItem::item("Close Items To The Left", CloseItemsToTheLeft),
ContextMenuItem::item("Close Items To The Right", CloseItemsToTheRight),
ContextMenuItem::item("Close All Items", CloseAllItems),
]
} else {
                        // When the user right-clicks a non-active tab, some item-closing commands need the id of that tab; for the others, we can reuse the existing command.
vec![
ContextMenuItem::item(
"Close Inactive Item",
CloseItemById {
item_id: target_item_id,
pane: target_pane.clone(),
},
),
ContextMenuItem::item("Close Inactive Items", CloseInactiveItems),
ContextMenuItem::item("Close Clean Items", CloseCleanItems),
ContextMenuItem::item(
"Close Items To The Left",
CloseItemsToTheLeftById {
item_id: target_item_id,
pane: target_pane.clone(),
},
),
ContextMenuItem::item(
"Close Items To The Right",
CloseItemsToTheRightById {
item_id: target_item_id,
pane: target_pane.clone(),
},
),
ContextMenuItem::item("Close All Items", CloseAllItems),
]
},
cx,
);
});
}
pub fn toolbar(&self) -> &ViewHandle<Toolbar> { pub fn toolbar(&self) -> &ViewHandle<Toolbar> {
&self.toolbar &self.toolbar
} }
@ -1276,13 +1408,22 @@ impl Pane {
}) })
.on_click(MouseButton::Middle, { .on_click(MouseButton::Middle, {
let item = item.clone(); let item = item.clone();
-                    move |_, _, cx: &mut EventContext<Self>| {
-                        cx.dispatch_action(CloseItem {
+                    let pane = pane.clone();
+                    move |_, _, cx| {
+                        cx.dispatch_action(CloseItemById {
                            item_id: item.id(),
                            pane: pane.clone(),
                        })
                    }
}) })
.on_down(MouseButton::Right, move |e, _, cx| {
let item = item.clone();
cx.dispatch_action(DeployTabContextMenu {
position: e.position,
item_id: item.id(),
pane: pane.clone(),
});
})
.boxed() .boxed()
} }
}); });
@ -1457,7 +1598,7 @@ impl Pane {
.on_click(MouseButton::Left, { .on_click(MouseButton::Left, {
let pane = pane.clone(); let pane = pane.clone();
move |_, _, cx| { move |_, _, cx| {
cx.dispatch_action(CloseItem { cx.dispatch_action(CloseItemById {
item_id, item_id,
pane: pane.clone(), pane: pane.clone(),
}) })
@ -1532,11 +1673,7 @@ impl Pane {
.boxed() .boxed()
} }
-    fn render_blank_pane(
-        &mut self,
-        theme: &Theme,
-        _cx: &mut ViewContext<Self>,
-    ) -> Element<Self> {
+    fn render_blank_pane(&mut self, theme: &Theme, _cx: &mut ViewContext<Self>) -> Element<Self> {
let background = theme.workspace.background; let background = theme.workspace.background;
Empty::new() Empty::new()
.contained() .contained()
@ -1635,6 +1772,7 @@ impl View for Pane {
.flex(1., true) .flex(1., true)
.boxed() .boxed()
}) })
.with_child(ChildView::new(&self.tab_context_menu, cx).boxed())
.boxed() .boxed()
} else { } else {
enum EmptyPane {} enum EmptyPane {}
@ -2237,14 +2375,14 @@ mod tests {
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
add_labled_item(&workspace, &pane, "A", cx); add_labeled_item(&workspace, &pane, "A", false, cx);
add_labled_item(&workspace, &pane, "B", cx); add_labeled_item(&workspace, &pane, "B", false, cx);
add_labled_item(&workspace, &pane, "C", cx); add_labeled_item(&workspace, &pane, "C", false, cx);
add_labled_item(&workspace, &pane, "D", cx); add_labeled_item(&workspace, &pane, "D", false, cx);
assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); assert_item_labels(&pane, ["A", "B", "C", "D*"], cx);
pane.update(cx, |pane, cx| pane.activate_item(1, false, false, cx)); pane.update(cx, |pane, cx| pane.activate_item(1, false, false, cx));
add_labled_item(&workspace, &pane, "1", cx); add_labeled_item(&workspace, &pane, "1", false, cx);
assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx); assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx);
workspace.update(cx, |workspace, cx| { workspace.update(cx, |workspace, cx| {
@ -2275,14 +2413,125 @@ mod tests {
assert_item_labels(&pane, ["A*"], cx); assert_item_labels(&pane, ["A*"], cx);
} }
-    fn add_labled_item(
+    #[gpui::test]
async fn test_close_inactive_items(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, None, cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
set_labeled_items(&workspace, &pane, ["A", "B", "C*", "D", "E"], cx);
workspace.update(cx, |workspace, cx| {
Pane::close_inactive_items(workspace, &CloseInactiveItems, cx);
});
deterministic.run_until_parked();
assert_item_labels(&pane, ["C*"], cx);
}
#[gpui::test]
async fn test_close_clean_items(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, None, cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
add_labeled_item(&workspace, &pane, "A", true, cx);
add_labeled_item(&workspace, &pane, "B", false, cx);
add_labeled_item(&workspace, &pane, "C", true, cx);
add_labeled_item(&workspace, &pane, "D", false, cx);
add_labeled_item(&workspace, &pane, "E", false, cx);
assert_item_labels(&pane, ["A^", "B", "C^", "D", "E*"], cx);
workspace.update(cx, |workspace, cx| {
Pane::close_clean_items(workspace, &CloseCleanItems, cx);
});
deterministic.run_until_parked();
assert_item_labels(&pane, ["A^", "C*^"], cx);
}
#[gpui::test]
async fn test_close_items_to_the_left(
deterministic: Arc<Deterministic>,
cx: &mut TestAppContext,
) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, None, cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
set_labeled_items(&workspace, &pane, ["A", "B", "C*", "D", "E"], cx);
workspace.update(cx, |workspace, cx| {
Pane::close_items_to_the_left(workspace, &CloseItemsToTheLeft, cx);
});
deterministic.run_until_parked();
assert_item_labels(&pane, ["C*", "D", "E"], cx);
}
#[gpui::test]
async fn test_close_items_to_the_right(
deterministic: Arc<Deterministic>,
cx: &mut TestAppContext,
) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, None, cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
set_labeled_items(&workspace, &pane, ["A", "B", "C*", "D", "E"], cx);
workspace.update(cx, |workspace, cx| {
Pane::close_items_to_the_right(workspace, &CloseItemsToTheRight, cx);
});
deterministic.run_until_parked();
assert_item_labels(&pane, ["A", "B", "C*"], cx);
}
#[gpui::test]
async fn test_close_all_items(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
Settings::test_async(cx);
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, None, cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
add_labeled_item(&workspace, &pane, "A", false, cx);
add_labeled_item(&workspace, &pane, "B", false, cx);
add_labeled_item(&workspace, &pane, "C", false, cx);
assert_item_labels(&pane, ["A", "B", "C*"], cx);
workspace.update(cx, |workspace, cx| {
Pane::close_all_items(workspace, &CloseAllItems, cx);
});
deterministic.run_until_parked();
assert_item_labels(&pane, [], cx);
}
fn add_labeled_item(
workspace: &ViewHandle<Workspace>, workspace: &ViewHandle<Workspace>,
pane: &ViewHandle<Pane>, pane: &ViewHandle<Pane>,
label: &str, label: &str,
is_dirty: bool,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Box<ViewHandle<TestItem>> { ) -> Box<ViewHandle<TestItem>> {
workspace.update(cx, |workspace, cx| { workspace.update(cx, |workspace, cx| {
-            let labeled_item = Box::new(cx.add_view(|_| TestItem::new().with_label(label)));
+            let labeled_item =
+                Box::new(cx.add_view(|_| TestItem::new().with_label(label).with_dirty(is_dirty)));
Pane::add_item( Pane::add_item(
workspace, workspace,
@ -2362,6 +2611,9 @@ mod tests {
if ix == pane.active_item_index { if ix == pane.active_item_index {
state.push('*'); state.push('*');
} }
if item.is_dirty(cx) {
state.push('^');
}
state state
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();


@ -1304,10 +1304,8 @@ impl Workspace {
RemoveWorktreeFromProject(worktree_id): &RemoveWorktreeFromProject, RemoveWorktreeFromProject(worktree_id): &RemoveWorktreeFromProject,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) { ) {
-        let future = self
-            .project
-            .update(cx, |project, cx| project.remove_worktree(*worktree_id, cx));
-        cx.foreground().spawn(future).detach();
+        self.project
+            .update(cx, |project, cx| project.remove_worktree(*worktree_id, cx));
} }
fn project_path_for_path( fn project_path_for_path(
@ -3274,9 +3272,7 @@ mod tests {
); );
// Remove a project folder // Remove a project folder
-        project
-            .update(cx, |project, cx| project.remove_worktree(worktree_id, cx))
-            .await;
+        project.update(cx, |project, cx| project.remove_worktree(worktree_id, cx));
assert_eq!( assert_eq!(
cx.current_window_title(window_id).as_deref(), cx.current_window_title(window_id).as_deref(),
Some("one.txt — root2") Some("one.txt — root2")


@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor." description = "The fast, collaborative code editor."
edition = "2021" edition = "2021"
name = "zed" name = "zed"
version = "0.82.0" version = "0.83.0"
publish = false publish = false
[lib] [lib]


@ -336,7 +336,7 @@ fn init_panic_hook(app_version: String) {
let message = match info.location() { let message = match info.location() {
Some(location) => { Some(location) => {
format!( format!(
"thread '{}' panicked at '{}': {}:{}\n{:?}", "thread '{}' panicked at '{}'\n{}:{}\n{:?}",
thread, thread,
payload, payload,
location.file(), location.file(),


@ -1541,7 +1541,7 @@ mod tests {
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
let editor3_id = editor3.id(); let editor3_id = editor3.id();
drop(editor3); drop(editor3);
Pane::close_item(workspace, workspace.active_pane().clone(), editor3_id, cx) Pane::close_item_by_id(workspace, workspace.active_pane().clone(), editor3_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1574,7 +1574,7 @@ mod tests {
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
let editor2_id = editor2.id(); let editor2_id = editor2.id();
drop(editor2); drop(editor2);
Pane::close_item(workspace, workspace.active_pane().clone(), editor2_id, cx) Pane::close_item_by_id(workspace, workspace.active_pane().clone(), editor2_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1724,7 +1724,7 @@ mod tests {
// Close all the pane items in some arbitrary order. // Close all the pane items in some arbitrary order.
workspace workspace
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
Pane::close_item(workspace, pane.clone(), file1_item_id, cx) Pane::close_item_by_id(workspace, pane.clone(), file1_item_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1732,7 +1732,7 @@ mod tests {
workspace workspace
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
Pane::close_item(workspace, pane.clone(), file4_item_id, cx) Pane::close_item_by_id(workspace, pane.clone(), file4_item_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1740,7 +1740,7 @@ mod tests {
workspace workspace
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
Pane::close_item(workspace, pane.clone(), file2_item_id, cx) Pane::close_item_by_id(workspace, pane.clone(), file2_item_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1748,7 +1748,7 @@ mod tests {
workspace workspace
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
Pane::close_item(workspace, pane.clone(), file3_item_id, cx) Pane::close_item_by_id(workspace, pane.clone(), file3_item_id, cx)
}) })
.await .await
.unwrap(); .unwrap();


@ -1,2 +1,3 @@
package-lock.json package-lock.json
package.json package.json
target


@ -11,6 +11,7 @@
"dependencies": { "dependencies": {
"@types/chroma-js": "^2.4.0", "@types/chroma-js": "^2.4.0",
"@types/node": "^18.14.1", "@types/node": "^18.14.1",
"ayu": "^8.0.1",
"bezier-easing": "^2.1.0", "bezier-easing": "^2.1.0",
"case-anything": "^2.1.10", "case-anything": "^2.1.10",
"chroma-js": "^2.4.2", "chroma-js": "^2.4.2",
@ -106,6 +107,16 @@
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==" "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA=="
}, },
"node_modules/ayu": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/ayu/-/ayu-8.0.1.tgz",
"integrity": "sha512-yuPZ2kZYQoYaPRQ/78F9rXDVx1rVGCJ1neBYithBoSprD6zPdIJdAKizUXG0jtTBu7nTFyAnVFFYuLnCS3cpDw==",
"dependencies": {
"@types/chroma-js": "^2.0.0",
"chroma-js": "^2.1.0",
"nonenumerable": "^1.1.1"
}
},
"node_modules/bezier-easing": { "node_modules/bezier-easing": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/bezier-easing/-/bezier-easing-2.1.0.tgz", "resolved": "https://registry.npmjs.org/bezier-easing/-/bezier-easing-2.1.0.tgz",
@ -153,6 +164,11 @@
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw=="
}, },
"node_modules/nonenumerable": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/nonenumerable/-/nonenumerable-1.1.1.tgz",
"integrity": "sha512-ptUD9w9D8WqW6fuJJkZNCImkf+0vdbgUTbRK3i7jsy3olqtH96hYE6Q/S3Tx9NWbcB/ocAjYshXCAUP0lZ9B4Q=="
},
"node_modules/toml": { "node_modules/toml": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz",
@ -300,6 +316,16 @@
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==" "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA=="
}, },
"ayu": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/ayu/-/ayu-8.0.1.tgz",
"integrity": "sha512-yuPZ2kZYQoYaPRQ/78F9rXDVx1rVGCJ1neBYithBoSprD6zPdIJdAKizUXG0jtTBu7nTFyAnVFFYuLnCS3cpDw==",
"requires": {
"@types/chroma-js": "^2.0.0",
"chroma-js": "^2.1.0",
"nonenumerable": "^1.1.1"
}
},
"bezier-easing": { "bezier-easing": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/bezier-easing/-/bezier-easing-2.1.0.tgz", "resolved": "https://registry.npmjs.org/bezier-easing/-/bezier-easing-2.1.0.tgz",
@ -335,6 +361,11 @@
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw=="
}, },
"nonenumerable": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/nonenumerable/-/nonenumerable-1.1.1.tgz",
"integrity": "sha512-ptUD9w9D8WqW6fuJJkZNCImkf+0vdbgUTbRK3i7jsy3olqtH96hYE6Q/S3Tx9NWbcB/ocAjYshXCAUP0lZ9B4Q=="
},
"toml": { "toml": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz",


@ -12,6 +12,7 @@
"dependencies": { "dependencies": {
"@types/chroma-js": "^2.4.0", "@types/chroma-js": "^2.4.0",
"@types/node": "^18.14.1", "@types/node": "^18.14.1",
"ayu": "^8.0.1",
"bezier-easing": "^2.1.0", "bezier-easing": "^2.1.0",
"case-anything": "^2.1.10", "case-anything": "^2.1.10",
"chroma-js": "^2.4.2", "chroma-js": "^2.4.2",


@ -281,14 +281,18 @@ export function border(
} }
} }
-export function svg(color: string, asset: String, width: Number, height: Number) {
+export function svg(
+    color: string,
+    asset: String,
+    width: Number,
+    height: Number
+) {
return { return {
color, color,
asset, asset,
dimensions: { dimensions: {
width, width,
height, height,
} },
} }
} }


@ -1,13 +1,13 @@
import { ColorScheme } from "../themes/common/colorScheme" import { ColorScheme } from "../themes/common/colorScheme"
import { background, border, foreground, svg, text } from "./components"; import { background, border, foreground, svg, text } from "./components"
export default function copilot(colorScheme: ColorScheme) { export default function copilot(colorScheme: ColorScheme) {
let layer = colorScheme.middle; let layer = colorScheme.middle
let content_width = 264; let content_width = 264
-    let ctaButton = { // Copied from welcome screen. FIXME: Move this into a ZDS component
+    let ctaButton = {
+        // Copied from welcome screen. FIXME: Move this into a ZDS component
background: background(layer), background: background(layer),
border: border(layer, "default"), border: border(layer, "default"),
cornerRadius: 4, cornerRadius: 4,
@ -15,7 +15,7 @@ export default function copilot(colorScheme: ColorScheme) {
top: 4, top: 4,
bottom: 4, bottom: 4,
left: 8, left: 8,
right: 8 right: 8,
}, },
padding: { padding: {
top: 3, top: 3,
@ -29,22 +29,32 @@ export default function copilot(colorScheme: ColorScheme) {
background: background(layer, "hovered"), background: background(layer, "hovered"),
border: border(layer, "active"), border: border(layer, "active"),
}, },
}; }
return { return {
outLinkIcon: { outLinkIcon: {
-            icon: svg(foreground(layer, "variant"), "icons/link_out_12.svg", 12, 12),
+            icon: svg(
+                foreground(layer, "variant"),
+                "icons/link_out_12.svg",
+                12,
+                12
+            ),
container: { container: {
cornerRadius: 6, cornerRadius: 6,
padding: { left: 6 }, padding: { left: 6 },
}, },
hover: { hover: {
-                icon: svg(foreground(layer, "hovered"), "icons/link_out_12.svg", 12, 12)
+                icon: svg(
+                    foreground(layer, "hovered"),
+                    "icons/link_out_12.svg",
+                    12,
+                    12
+                ),
}, },
}, },
modal: { modal: {
titleText: { titleText: {
...text(layer, "sans", { size: "xs", "weight": "bold" }) ...text(layer, "sans", { size: "xs", weight: "bold" }),
}, },
titlebar: { titlebar: {
background: background(colorScheme.lowest), background: background(colorScheme.lowest),
@ -54,7 +64,7 @@ export default function copilot(colorScheme: ColorScheme) {
bottom: 4, bottom: 4,
left: 8, left: 8,
right: 8, right: 8,
} },
}, },
container: { container: {
background: background(colorScheme.lowest), background: background(colorScheme.lowest),
@ -63,10 +73,15 @@ export default function copilot(colorScheme: ColorScheme) {
left: 0, left: 0,
right: 0, right: 0,
bottom: 8, bottom: 8,
} },
}, },
closeIcon: { closeIcon: {
-            icon: svg(foreground(layer, "variant"), "icons/x_mark_8.svg", 8, 8),
+            icon: svg(
+                foreground(layer, "variant"),
+                "icons/x_mark_8.svg",
+                8,
+                8
+            ),
container: { container: {
cornerRadius: 2, cornerRadius: 2,
padding: { padding: {
@ -76,15 +91,25 @@ export default function copilot(colorScheme: ColorScheme) {
right: 4, right: 4,
}, },
margin: { margin: {
right: 0 right: 0,
} },
}, },
hover: { hover: {
-                icon: svg(foreground(layer, "on"), "icons/x_mark_8.svg", 8, 8),
+                icon: svg(
+                    foreground(layer, "on"),
+                    "icons/x_mark_8.svg",
+                    8,
+                    8
+                ),
}, },
clicked: { clicked: {
-                icon: svg(foreground(layer, "base"), "icons/x_mark_8.svg", 8, 8),
-            }
+                icon: svg(
+                    foreground(layer, "base"),
+                    "icons/x_mark_8.svg",
+                    8,
+                    8
+                ),
+            },
}, },
dimensions: { dimensions: {
width: 280, width: 280,
@ -98,14 +123,19 @@ export default function copilot(colorScheme: ColorScheme) {
ctaButton, ctaButton,
header: { header: {
-            icon: svg(foreground(layer, "default"), "icons/zed_plus_copilot_32.svg", 92, 32),
+            icon: svg(
+                foreground(layer, "default"),
+                "icons/zed_plus_copilot_32.svg",
+                92,
+                32
+            ),
container: { container: {
margin: { margin: {
top: 35, top: 35,
bottom: 5, bottom: 5,
left: 0, left: 0,
right: 0 right: 0,
} },
}, },
}, },
@ -116,21 +146,20 @@ export default function copilot(colorScheme: ColorScheme) {
top: 6, top: 6,
bottom: 12, bottom: 12,
left: 0, left: 0,
right: 0 right: 0,
} },
}, },
hint: { hint: {
...text(layer, "sans", { size: "xs", color: "#838994" }), ...text(layer, "sans", { size: "xs", color: "#838994" }),
margin: { margin: {
top: 6, top: 6,
bottom: 2 bottom: 2,
} },
}, },
deviceCode: { deviceCode: {
-            text:
-                text(layer, "mono", { size: "sm" }),
+            text: text(layer, "mono", { size: "sm" }),
cta: { cta: {
...ctaButton, ...ctaButton,
background: background(colorScheme.lowest), background: background(colorScheme.lowest),
@ -144,7 +173,7 @@ export default function copilot(colorScheme: ColorScheme) {
margin: { margin: {
left: 16, left: 16,
right: 16, right: 16,
} },
}, },
left: content_width / 2, left: content_width / 2,
leftContainer: { leftContainer: {
@ -155,9 +184,14 @@ export default function copilot(colorScheme: ColorScheme) {
right: 6, right: 6,
}, },
}, },
right: content_width * 1 / 3, right: (content_width * 1) / 3,
rightContainer: { rightContainer: {
-                border: border(colorScheme.lowest, "inverted", { bottom: false, right: false, top: false, left: true }),
+                border: border(colorScheme.lowest, "inverted", {
+                    bottom: false,
+                    right: false,
+                    top: false,
+                    left: true,
+                }),
padding: { padding: {
top: 3, top: 3,
bottom: 5, bottom: 5,
@ -165,9 +199,14 @@ export default function copilot(colorScheme: ColorScheme) {
right: 0, right: 0,
}, },
hover: { hover: {
-                    border: border(layer, "active", { bottom: false, right: false, top: false, left: true }),
+                    border: border(layer, "active", {
+                        bottom: false,
+                        right: false,
+                        top: false,
+                        left: true,
+                    }),
                },
-            }
+            },
}, },
}, },
@ -179,12 +218,15 @@ export default function copilot(colorScheme: ColorScheme) {
top: 16, top: 16,
bottom: 16, bottom: 16,
left: 0, left: 0,
right: 0 right: 0,
} },
}, },
warning: { warning: {
-            ...text(layer, "sans", { size: "xs", color: foreground(layer, "warning") }),
+            ...text(layer, "sans", {
+                size: "xs",
+                color: foreground(layer, "warning"),
+            }),
border: border(layer, "warning"), border: border(layer, "warning"),
background: background(layer, "warning"), background: background(layer, "warning"),
cornerRadius: 2, cornerRadius: 2,
@ -197,8 +239,8 @@ export default function copilot(colorScheme: ColorScheme) {
margin: { margin: {
bottom: 16, bottom: 16,
left: 8, left: 8,
right: 8 right: 8,
} },
}, },
}, },
@ -208,19 +250,18 @@ export default function copilot(colorScheme: ColorScheme) {
margin: { margin: {
top: 16, top: 16,
bottom: 16 bottom: 16,
} },
}, },
hint: { hint: {
...text(layer, "sans", { size: "xs", color: "#838994" }), ...text(layer, "sans", { size: "xs", color: "#838994" }),
margin: { margin: {
top: 24, top: 24,
bottom: 4 bottom: 4,
} },
},
}, },
}, },
} }
} }
}


@ -23,7 +23,6 @@ export default function simpleMessageNotification(
right: 7, right: 7,
}, },
margin: { left: headerPadding, top: 6, bottom: 6 }, margin: { left: headerPadding, top: 6, bottom: 6 },
hover: { hover: {
...text(layer, "sans", "default", { size: "xs" }), ...text(layer, "sans", "default", { size: "xs" }),


@ -1,6 +1,13 @@
import { ColorScheme } from "../themes/common/colorScheme" import { ColorScheme } from "../themes/common/colorScheme"
import { withOpacity } from "../utils/color" import { withOpacity } from "../utils/color"
-import { background, border, borderColor, foreground, svg, text } from "./components"
+import {
+    background,
+    border,
+    borderColor,
+    foreground,
+    svg,
+    text,
+} from "./components"
import statusBar from "./statusBar" import statusBar from "./statusBar"
import tabBar from "./tabBar" import tabBar from "./tabBar"
@ -46,14 +53,24 @@ export default function workspace(colorScheme: ColorScheme) {
width: 256, width: 256,
height: 256, height: 256,
}, },
-        logo: svg(withOpacity("#000000", colorScheme.isLight ? 0.6 : 0.8), "icons/logo_96.svg", 256, 256),
+        logo: svg(
+            withOpacity("#000000", colorScheme.isLight ? 0.6 : 0.8),
+            "icons/logo_96.svg",
+            256,
+            256
+        ),
-        logoShadow: svg(withOpacity(
-            colorScheme.isLight
-                ? "#FFFFFF"
-                : colorScheme.lowest.base.default.background,
-            colorScheme.isLight ? 1 : 0.6
-        ), "icons/logo_96.svg", 256, 256),
+        logoShadow: svg(
+            withOpacity(
+                colorScheme.isLight
+                    ? "#FFFFFF"
+                    : colorScheme.lowest.base.default.background,
+                colorScheme.isLight ? 1 : 0.6
+            ),
+            "icons/logo_96.svg",
+            256,
+            256
+        ),
keyboardHints: { keyboardHints: {
margin: { margin: {
top: 96, top: 96,
@ -273,11 +290,7 @@ export default function workspace(colorScheme: ColorScheme) {
}, },
hover: { hover: {
color: foreground(colorScheme.highest, "on", "hovered"), color: foreground(colorScheme.highest, "on", "hovered"),
-                background: background(
-                    colorScheme.highest,
-                    "on",
-                    "hovered"
-                ),
+                background: background(colorScheme.highest, "on", "hovered"),
}, },
}, },
disconnectedOverlay: { disconnectedOverlay: {


@ -0,0 +1,17 @@
import { createColorScheme } from "./common/ramps"
import { ayu, meta as themeMeta, buildTheme } from "./common/ayu-common"
export const meta = {
...themeMeta,
name: `${themeMeta.name} Dark`
}
const variant = ayu.dark
const theme = buildTheme(variant, false)
export const dark = createColorScheme(
meta.name,
false,
theme.ramps,
theme.syntax
)


@ -0,0 +1,17 @@
import { createColorScheme } from "./common/ramps"
import { ayu, meta as themeMeta, buildTheme } from "./common/ayu-common"
export const meta = {
...themeMeta,
name: `${themeMeta.name} Light`
}
const variant = ayu.light
const theme = buildTheme(variant, true)
export const light = createColorScheme(
meta.name,
true,
theme.ramps,
theme.syntax
)


@ -0,0 +1,17 @@
import { createColorScheme } from "./common/ramps"
import { ayu, meta as themeMeta, buildTheme } from "./common/ayu-common"
export const meta = {
...themeMeta,
name: `${themeMeta.name} Mirage`
}
const variant = ayu.mirage
const theme = buildTheme(variant, false)
export const dark = createColorScheme(
meta.name,
false,
theme.ramps,
theme.syntax
)


@ -0,0 +1,90 @@
import { dark, light, mirage } from "ayu"
import { ThemeSyntax } from "./syntax"
import chroma from "chroma-js"
import { colorRamp } from "./ramps"
import { Meta } from "./colorScheme"
export const ayu = {
dark,
light,
mirage,
}
export const buildTheme = (t: typeof dark, light: boolean) => {
const color = {
lightBlue: t.syntax.tag.hex(),
yellow: t.syntax.func.hex(),
blue: t.syntax.entity.hex(),
green: t.syntax.string.hex(),
teal: t.syntax.regexp.hex(),
red: t.syntax.markup.hex(),
orange: t.syntax.keyword.hex(),
lightYellow: t.syntax.special.hex(),
gray: t.syntax.comment.hex(),
purple: t.syntax.constant.hex(),
}
const syntax: ThemeSyntax = {
constant: { color: t.syntax.constant.hex() },
"string.regex": { color: t.syntax.regexp.hex() },
string: { color: t.syntax.string.hex() },
comment: { color: t.syntax.comment.hex() },
keyword: { color: t.syntax.keyword.hex() },
operator: { color: t.syntax.operator.hex() },
number: { color: t.syntax.constant.hex() },
type: { color: color.blue },
boolean: { color: color.purple },
"punctuation.special": { color: color.purple },
"string.special": { color: t.syntax.special.hex() },
function: { color: t.syntax.func.hex() },
}
return {
ramps: {
neutral: chroma.scale([
light ? t.editor.fg.hex() : t.editor.bg.hex(),
light ? t.editor.bg.hex() : t.editor.fg.hex(),
]),
red: colorRamp(chroma(color.red)),
orange: colorRamp(chroma(color.orange)),
yellow: colorRamp(chroma(color.yellow)),
green: colorRamp(chroma(color.green)),
cyan: colorRamp(chroma(color.teal)),
blue: colorRamp(chroma(color.blue)),
violet: colorRamp(chroma(color.purple)),
magenta: colorRamp(chroma(color.lightBlue)),
},
syntax,
}
}
export const buildSyntax = (t: typeof dark): ThemeSyntax => {
return {
constant: { color: t.syntax.constant.hex() },
"string.regex": { color: t.syntax.regexp.hex() },
string: { color: t.syntax.string.hex() },
comment: { color: t.syntax.comment.hex() },
keyword: { color: t.syntax.keyword.hex() },
operator: { color: t.syntax.operator.hex() },
number: { color: t.syntax.constant.hex() },
type: { color: t.syntax.regexp.hex() },
"punctuation.special": { color: t.syntax.special.hex() },
"string.special": { color: t.syntax.special.hex() },
function: { color: t.syntax.func.hex() },
}
}
export const meta: Meta = {
name: "Ayu",
author: "dempfi",
license: {
SPDX: "MIT",
license_text: {
https_url:
"https://raw.githubusercontent.com/dempfi/ayu/master/LICENSE",
license_checksum:
"e0af0e0d1754c18ca075649d42f5c6d9a60f8bdc03c20dfd97105f2253a94173",
},
},
url: "https://github.com/dempfi/ayu",
}


@ -1,31 +0,0 @@
import chroma from "chroma-js"
import { colorRamp, createColorScheme } from "../common/ramps"
const name = "Ayu"
const author = "Konstantin Pschera <me@kons.ch>"
const url = "https://github.com/ayu-theme/ayu-colors"
const license = {
type: "MIT",
url: "https://github.com/ayu-theme/ayu-colors/blob/master/license",
}
export const dark = createColorScheme(`${name} Mirage`, false, {
neutral: chroma.scale([
"#171B24",
"#1F2430",
"#242936",
"#707A8C",
"#8A9199",
"#CCCAC2",
"#D9D7CE",
"#F3F4F5",
]),
red: colorRamp(chroma("#F28779")),
orange: colorRamp(chroma("#FFAD66")),
yellow: colorRamp(chroma("#FFD173")),
green: colorRamp(chroma("#D5FF80")),
cyan: colorRamp(chroma("#95E6CB")),
blue: colorRamp(chroma("#5CCFE6")),
violet: colorRamp(chroma("#D4BFFF")),
magenta: colorRamp(chroma("#F29E74")),
})


@ -1,52 +0,0 @@
import chroma from "chroma-js"
import { colorRamp, createColorScheme } from "../common/ramps"
const name = "Ayu"
const author = "Konstantin Pschera <me@kons.ch>"
const url = "https://github.com/ayu-theme/ayu-colors"
const license = {
type: "MIT",
url: "https://github.com/ayu-theme/ayu-colors/blob/master/license",
}
export const dark = createColorScheme(`${name} Dark`, false, {
neutral: chroma.scale([
"#0F1419",
"#131721",
"#272D38",
"#3E4B59",
"#BFBDB6",
"#E6E1CF",
"#E6E1CF",
"#F3F4F5",
]),
red: colorRamp(chroma("#F07178")),
orange: colorRamp(chroma("#FF8F40")),
yellow: colorRamp(chroma("#FFB454")),
green: colorRamp(chroma("#B8CC52")),
cyan: colorRamp(chroma("#95E6CB")),
blue: colorRamp(chroma("#59C2FF")),
violet: colorRamp(chroma("#D2A6FF")),
magenta: colorRamp(chroma("#E6B673")),
})
export const light = createColorScheme(`${name} Light`, true, {
neutral: chroma.scale([
"#1A1F29",
"#242936",
"#5C6773",
"#828C99",
"#ABB0B6",
"#F8F9FA",
"#F3F4F5",
"#FAFAFA",
]),
red: colorRamp(chroma("#F07178")),
orange: colorRamp(chroma("#FA8D3E")),
yellow: colorRamp(chroma("#F2AE49")),
green: colorRamp(chroma("#86B300")),
cyan: colorRamp(chroma("#4CBF99")),
blue: colorRamp(chroma("#36A3D9")),
violet: colorRamp(chroma("#A37ACC")),
magenta: colorRamp(chroma("#E6BA7E")),
})