Merge branch 'main' into pane-toolbar-tabbar-ui

This commit is contained in:
Marshall Bowers 2023-12-05 14:15:18 -05:00
commit e0ca7f844a
97 changed files with 15492 additions and 8288 deletions

57
Cargo.lock generated
View file

@ -1222,7 +1222,6 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-broadcast", "async-broadcast",
"async-trait",
"audio2", "audio2",
"client2", "client2",
"collections", "collections",
@ -1242,9 +1241,7 @@ dependencies = [
"serde_json", "serde_json",
"settings2", "settings2",
"smallvec", "smallvec",
"ui2",
"util", "util",
"workspace2",
] ]
[[package]] [[package]]
@ -2115,7 +2112,7 @@ dependencies = [
"lsp2", "lsp2",
"node_runtime", "node_runtime",
"parking_lot 0.11.2", "parking_lot 0.11.2",
"rpc", "rpc2",
"serde", "serde",
"serde_derive", "serde_derive",
"settings2", "settings2",
@ -8246,6 +8243,57 @@ dependencies = [
"workspace", "workspace",
] ]
[[package]]
name = "semantic_index2"
version = "0.1.0"
dependencies = [
"ai2",
"anyhow",
"async-trait",
"client2",
"collections",
"ctor",
"env_logger 0.9.3",
"futures 0.3.28",
"globset",
"gpui2",
"language2",
"lazy_static",
"log",
"ndarray",
"node_runtime",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"postage",
"pretty_assertions",
"project2",
"rand 0.8.5",
"rpc2",
"rusqlite",
"rust-embed",
"schemars",
"serde",
"serde_json",
"settings2",
"sha1",
"smol",
"tempdir",
"tiktoken-rs",
"tree-sitter",
"tree-sitter-cpp",
"tree-sitter-elixir",
"tree-sitter-json 0.20.0",
"tree-sitter-lua",
"tree-sitter-php",
"tree-sitter-ruby",
"tree-sitter-rust",
"tree-sitter-toml",
"tree-sitter-typescript",
"unindent",
"util",
"workspace2",
]
[[package]] [[package]]
name = "semver" name = "semver"
version = "1.0.18" version = "1.0.18"
@ -11545,7 +11593,6 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-recursion 1.0.5", "async-recursion 1.0.5",
"async-trait",
"bincode", "bincode",
"call2", "call2",
"client2", "client2",

View file

@ -96,6 +96,8 @@ members = [
"crates/rpc2", "crates/rpc2",
"crates/search", "crates/search",
"crates/search2", "crates/search2",
"crates/semantic_index",
"crates/semantic_index2",
"crates/settings", "crates/settings",
"crates/settings2", "crates/settings2",
"crates/snippet", "crates/snippet",
@ -115,7 +117,6 @@ members = [
"crates/theme_selector2", "crates/theme_selector2",
"crates/ui2", "crates/ui2",
"crates/util", "crates/util",
"crates/semantic_index",
"crates/story", "crates/story",
"crates/vim", "crates/vim",
"crates/vcs_menu", "crates/vcs_menu",

View file

@ -7,7 +7,7 @@ pub enum ProviderCredential {
NotNeeded, NotNeeded,
} }
pub trait CredentialProvider { pub trait CredentialProvider: Send + Sync {
fn has_credentials(&self) -> bool; fn has_credentials(&self) -> bool;
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential; fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential); fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);

View file

@ -35,7 +35,7 @@ pub struct OpenAIEmbeddingProvider {
model: OpenAILanguageModel, model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>, credential: Arc<RwLock<ProviderCredential>>,
pub client: Arc<dyn HttpClient>, pub client: Arc<dyn HttpClient>,
pub executor: Arc<BackgroundExecutor>, pub executor: BackgroundExecutor,
rate_limit_count_rx: watch::Receiver<Option<Instant>>, rate_limit_count_rx: watch::Receiver<Option<Instant>>,
rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>, rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
} }
@ -66,7 +66,7 @@ struct OpenAIEmbeddingUsage {
} }
impl OpenAIEmbeddingProvider { impl OpenAIEmbeddingProvider {
pub fn new(client: Arc<dyn HttpClient>, executor: Arc<BackgroundExecutor>) -> Self { pub fn new(client: Arc<dyn HttpClient>, executor: BackgroundExecutor) -> Self {
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None); let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx)); let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));

View file

@ -31,9 +31,7 @@ media = { path = "../media" }
project = { package = "project2", path = "../project2" } project = { package = "project2", path = "../project2" }
settings = { package = "settings2", path = "../settings2" } settings = { package = "settings2", path = "../settings2" }
util = { path = "../util" } util = { path = "../util" }
ui = {package = "ui2", path = "../ui2"}
workspace = {package = "workspace2", path = "../workspace2"}
async-trait.workspace = true
anyhow.workspace = true anyhow.workspace = true
async-broadcast = "0.4" async-broadcast = "0.4"
futures.workspace = true futures.workspace = true

View file

@ -1,32 +1,25 @@
pub mod call_settings; pub mod call_settings;
pub mod participant; pub mod participant;
pub mod room; pub mod room;
mod shared_screen;
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait;
use audio::Audio; use audio::Audio;
use call_settings::CallSettings; use call_settings::CallSettings;
use client::{ use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
proto::{self, PeerId},
Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE,
};
use collections::HashSet; use collections::HashSet;
use futures::{channel::oneshot, future::Shared, Future, FutureExt}; use futures::{channel::oneshot, future::Shared, Future, FutureExt};
use gpui::{ use gpui::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, PromptLevel, AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
Subscription, Task, View, ViewContext, VisualContext, WeakModel, WindowHandle, WeakModel,
}; };
pub use participant::ParticipantLocation;
use postage::watch; use postage::watch;
use project::Project; use project::Project;
use room::Event; use room::Event;
pub use room::Room;
use settings::Settings; use settings::Settings;
use shared_screen::SharedScreen;
use std::sync::Arc; use std::sync::Arc;
use util::ResultExt;
use workspace::{item::ItemHandle, CallHandler, Pane, Workspace}; pub use participant::ParticipantLocation;
pub use room::Room;
pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) { pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
CallSettings::register(cx); CallSettings::register(cx);
@ -334,55 +327,12 @@ impl ActiveCall {
pub fn join_channel( pub fn join_channel(
&mut self, &mut self,
channel_id: u64, channel_id: u64,
requesting_window: Option<WindowHandle<Workspace>>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Option<Model<Room>>>> { ) -> Task<Result<Option<Model<Room>>>> {
if let Some(room) = self.room().cloned() { if let Some(room) = self.room().cloned() {
if room.read(cx).channel_id() == Some(channel_id) { if room.read(cx).channel_id() == Some(channel_id) {
return cx.spawn(|_, _| async move { return Task::ready(Ok(Some(room)));
todo!(); } else {
// let future = room.update(&mut cx, |room, cx| {
// room.most_active_project(cx).map(|(host, project)| {
// room.join_project(project, host, app_state.clone(), cx)
// })
// })
// if let Some(future) = future {
// future.await?;
// }
// Ok(Some(room))
});
}
let should_prompt = room.update(cx, |room, _| {
room.channel_id().is_some()
&& room.is_sharing_project()
&& room.remote_participants().len() > 0
});
if should_prompt && requesting_window.is_some() {
return cx.spawn(|this, mut cx| async move {
let answer = requesting_window.unwrap().update(&mut cx, |_, cx| {
cx.prompt(
PromptLevel::Warning,
"Leaving this call will unshare your current project.\nDo you want to switch channels?",
&["Yes, Join Channel", "Cancel"],
)
})?;
if answer.await? == 1 {
return Ok(None);
}
room.update(&mut cx, |room, cx| room.clear_state(cx))?;
this.update(&mut cx, |this, cx| {
this.join_channel(channel_id, requesting_window, cx)
})?
.await
});
}
if room.read(cx).channel_id().is_some() {
room.update(cx, |room, cx| room.clear_state(cx)); room.update(cx, |room, cx| room.clear_state(cx));
} }
} }
@ -555,197 +505,6 @@ pub fn report_call_event_for_channel(
) )
} }
pub struct Call {
active_call: Option<(Model<ActiveCall>, Vec<Subscription>)>,
}
impl Call {
pub fn new(cx: &mut ViewContext<'_, Workspace>) -> Box<dyn CallHandler> {
let mut active_call = None;
if cx.has_global::<Model<ActiveCall>>() {
let call = cx.global::<Model<ActiveCall>>().clone();
let subscriptions = vec![cx.subscribe(&call, Self::on_active_call_event)];
active_call = Some((call, subscriptions));
}
Box::new(Self { active_call })
}
fn on_active_call_event(
workspace: &mut Workspace,
_: Model<ActiveCall>,
event: &room::Event,
cx: &mut ViewContext<Workspace>,
) {
match event {
room::Event::ParticipantLocationChanged { participant_id }
| room::Event::RemoteVideoTracksChanged { participant_id } => {
workspace.leader_updated(*participant_id, cx);
}
_ => {}
}
}
}
#[async_trait(?Send)]
impl CallHandler for Call {
fn peer_state(
&mut self,
leader_id: PeerId,
project: &Model<Project>,
cx: &mut ViewContext<Workspace>,
) -> Option<(bool, bool)> {
let (call, _) = self.active_call.as_ref()?;
let room = call.read(cx).room()?.read(cx);
let participant = room.remote_participant_for_peer_id(leader_id)?;
let leader_in_this_app;
let leader_in_this_project;
match participant.location {
ParticipantLocation::SharedProject { project_id } => {
leader_in_this_app = true;
leader_in_this_project = Some(project_id) == project.read(cx).remote_id();
}
ParticipantLocation::UnsharedProject => {
leader_in_this_app = true;
leader_in_this_project = false;
}
ParticipantLocation::External => {
leader_in_this_app = false;
leader_in_this_project = false;
}
};
Some((leader_in_this_project, leader_in_this_app))
}
fn shared_screen_for_peer(
&self,
peer_id: PeerId,
pane: &View<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Option<Box<dyn ItemHandle>> {
let (call, _) = self.active_call.as_ref()?;
let room = call.read(cx).room()?.read(cx);
let participant = room.remote_participant_for_peer_id(peer_id)?;
let track = participant.video_tracks.values().next()?.clone();
let user = participant.user.clone();
for item in pane.read(cx).items_of_type::<SharedScreen>() {
if item.read(cx).peer_id == peer_id {
return Some(Box::new(item));
}
}
Some(Box::new(cx.build_view(|cx| {
SharedScreen::new(&track, peer_id, user.clone(), cx)
})))
}
fn room_id(&self, cx: &AppContext) -> Option<u64> {
Some(self.active_call.as_ref()?.0.read(cx).room()?.read(cx).id())
}
fn hang_up(&self, cx: &mut AppContext) -> Task<Result<()>> {
let Some((call, _)) = self.active_call.as_ref() else {
return Task::ready(Err(anyhow!("Cannot exit a call; not in a call")));
};
call.update(cx, |this, cx| this.hang_up(cx))
}
fn active_project(&self, cx: &AppContext) -> Option<WeakModel<Project>> {
ActiveCall::global(cx).read(cx).location().cloned()
}
fn invite(
&mut self,
called_user_id: u64,
initial_project: Option<Model<Project>>,
cx: &mut AppContext,
) -> Task<Result<()>> {
ActiveCall::global(cx).update(cx, |this, cx| {
this.invite(called_user_id, initial_project, cx)
})
}
fn remote_participants(&self, cx: &AppContext) -> Option<Vec<(Arc<User>, PeerId)>> {
self.active_call
.as_ref()
.map(|call| {
call.0.read(cx).room().map(|room| {
room.read(cx)
.remote_participants()
.iter()
.map(|participant| {
(participant.1.user.clone(), participant.1.peer_id.clone())
})
.collect()
})
})
.flatten()
}
fn is_muted(&self, cx: &AppContext) -> Option<bool> {
self.active_call
.as_ref()
.map(|call| {
call.0
.read(cx)
.room()
.map(|room| room.read(cx).is_muted(cx))
})
.flatten()
}
fn toggle_mute(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
let room = room.clone();
cx.spawn(|_, mut cx| async move {
room.update(&mut cx, |this, cx| this.toggle_mute(cx))??
.await
})
.detach_and_log_err(cx);
})
})
});
}
fn toggle_screen_share(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
room.update(cx, |this, cx| {
if this.is_screen_sharing() {
this.unshare_screen(cx).log_err();
} else {
let t = this.share_screen(cx);
cx.spawn(move |_, _| async move {
t.await.log_err();
})
.detach();
}
})
})
})
});
}
fn toggle_deafen(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
room.update(cx, |this, cx| {
this.toggle_deafen(cx).log_err();
})
})
})
});
}
fn is_deafened(&self, cx: &AppContext) -> Option<bool> {
self.active_call
.as_ref()
.map(|call| {
call.0
.read(cx)
.room()
.map(|room| room.read(cx).is_deafened())
})
.flatten()
.flatten()
}
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use gpui::TestAppContext; use gpui::TestAppContext;

View file

@ -4,7 +4,7 @@ use client::{proto, User};
use collections::HashMap; use collections::HashMap;
use gpui::WeakModel; use gpui::WeakModel;
pub use live_kit_client::Frame; pub use live_kit_client::Frame;
pub(crate) use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack}; pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
use project::Project; use project::Project;
use std::sync::Arc; use std::sync::Arc;

View file

@ -4,8 +4,10 @@ use collab_ui::notifications::project_shared_notification::ProjectSharedNotifica
use editor::{Editor, ExcerptRange, MultiBuffer}; use editor::{Editor, ExcerptRange, MultiBuffer};
use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle}; use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle};
use live_kit_client::MacOSDisplay; use live_kit_client::MacOSDisplay;
use project::project_settings::ProjectSettings;
use rpc::proto::PeerId; use rpc::proto::PeerId;
use serde_json::json; use serde_json::json;
use settings::SettingsStore;
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc};
use workspace::{ use workspace::{
dock::{test::TestPanel, DockPosition}, dock::{test::TestPanel, DockPosition},
@ -1602,6 +1604,141 @@ async fn test_following_across_workspaces(
}); });
} }
#[gpui::test]
async fn test_following_into_excluded_file(
deterministic: Arc<Deterministic>,
mut cx_a: &mut TestAppContext,
mut cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
for cx in [&mut cx_a, &mut cx_b] {
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
});
});
});
}
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a
.fs()
.insert_tree(
"/a",
json!({
".git": {
"COMMIT_EDITMSG": "write your commit message here",
},
"1.txt": "one\none\none",
"2.txt": "two\ntwo\ntwo",
"3.txt": "three\nthree\nthree",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
.unwrap();
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
.unwrap();
let window_a = client_a.build_workspace(&project_a, cx_a);
let workspace_a = window_a.root(cx_a);
let peer_id_a = client_a.peer_id().unwrap();
let window_b = client_b.build_workspace(&project_b, cx_b);
let workspace_b = window_b.root(cx_b);
// Client A opens editors for a regular file and an excluded file.
let editor_for_regular = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "1.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let editor_for_excluded_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// Client A updates their selections in those editors
editor_for_regular.update(cx_a, |editor, cx| {
editor.handle_input("a", cx);
editor.handle_input("b", cx);
editor.handle_input("c", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![3..2]);
});
editor_for_excluded_a.update(cx_a, |editor, cx| {
editor.select_all(&Default::default(), cx);
editor.handle_input("new commit message", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![18..17]);
});
// When client B starts following client A, currently visible file is replicated
workspace_b
.update(cx_b, |workspace, cx| {
workspace.follow(peer_id_a, cx).unwrap()
})
.await
.unwrap();
let editor_for_excluded_b = workspace_b.read_with(cx_b, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
.downcast::<Editor>()
.unwrap()
});
assert_eq!(
cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
Some((worktree_id, ".git/COMMIT_EDITMSG").into())
);
assert_eq!(
editor_for_excluded_b.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
vec![18..17]
);
// Changes from B to the excluded file are replicated in A's editor
editor_for_excluded_b.update(cx_b, |editor, cx| {
editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
});
deterministic.run_until_parked();
editor_for_excluded_a.update(cx_a, |editor, cx| {
assert_eq!(
editor.text(cx),
"new commit messag\nCo-Authored-By: B <b@b.b>"
);
});
}
fn visible_push_notifications( fn visible_push_notifications(
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> { ) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {

View file

@ -2981,11 +2981,10 @@ async fn test_fs_operations(
let entry = project_b let entry = project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "c.txt"), false, cx)
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!( assert_eq!(
@ -3010,7 +3009,6 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx) project.rename_entry(entry.id, Path::new("d.txt"), cx)
}) })
.unwrap()
.await .await
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
@ -3034,11 +3032,10 @@ async fn test_fs_operations(
let dir_entry = project_b let dir_entry = project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR"), true, cx)
.create_entry((worktree_id, "DIR"), true, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!( assert_eq!(
@ -3061,25 +3058,19 @@ async fn test_fs_operations(
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap(); .unwrap();
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.unwrap()
}) })
.await .await
.unwrap(); .unwrap();
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap(); .unwrap();
@ -3120,9 +3111,7 @@ async fn test_fs_operations(
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.copy_entry(entry.id, Path::new("f.txt"), cx)
.copy_entry(entry.id, Path::new("f.txt"), cx)
.unwrap()
}) })
.await .await
.unwrap(); .unwrap();

View file

@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await; ensure_project_shared(&project, client, cx).await;
project project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx)) .update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
.unwrap()
.await?; .await?;
} }

View file

@ -364,8 +364,7 @@ async fn test_joining_channel_ancestor_member(
let active_call_b = cx_b.read(ActiveCall::global); let active_call_b = cx_b.read(ActiveCall::global);
assert!(active_call_b assert!(active_call_b
.update(cx_b, |active_call, cx| active_call .update(cx_b, |active_call, cx| active_call.join_channel(sub_id, cx))
.join_channel(sub_id, None, cx))
.await .await
.is_ok()); .is_ok());
} }
@ -395,9 +394,7 @@ async fn test_channel_room(
let active_call_b = cx_b.read(ActiveCall::global); let active_call_b = cx_b.read(ActiveCall::global);
active_call_a active_call_a
.update(cx_a, |active_call, cx| { .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
active_call.join_channel(zed_id, None, cx)
})
.await .await
.unwrap(); .unwrap();
@ -445,9 +442,7 @@ async fn test_channel_room(
}); });
active_call_b active_call_b
.update(cx_b, |active_call, cx| { .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
active_call.join_channel(zed_id, None, cx)
})
.await .await
.unwrap(); .unwrap();
@ -564,16 +559,12 @@ async fn test_channel_room(
}); });
active_call_a active_call_a
.update(cx_a, |active_call, cx| { .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
active_call.join_channel(zed_id, None, cx)
})
.await .await
.unwrap(); .unwrap();
active_call_b active_call_b
.update(cx_b, |active_call, cx| { .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
active_call.join_channel(zed_id, None, cx)
})
.await .await
.unwrap(); .unwrap();
@ -617,9 +608,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo
let active_call_a = cx_a.read(ActiveCall::global); let active_call_a = cx_a.read(ActiveCall::global);
active_call_a active_call_a
.update(cx_a, |active_call, cx| { .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
active_call.join_channel(zed_id, None, cx)
})
.await .await
.unwrap(); .unwrap();
@ -638,7 +627,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo
active_call_a active_call_a
.update(cx_a, |active_call, cx| { .update(cx_a, |active_call, cx| {
active_call.join_channel(rust_id, None, cx) active_call.join_channel(rust_id, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -804,7 +793,7 @@ async fn test_call_from_channel(
let active_call_b = cx_b.read(ActiveCall::global); let active_call_b = cx_b.read(ActiveCall::global);
active_call_a active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_id, None, cx)) .update(cx_a, |call, cx| call.join_channel(channel_id, cx))
.await .await
.unwrap(); .unwrap();
@ -1297,7 +1286,7 @@ async fn test_guest_access(
// Non-members should not be allowed to join // Non-members should not be allowed to join
assert!(active_call_b assert!(active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_a, None, cx)) .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await .await
.is_err()); .is_err());
@ -1319,7 +1308,7 @@ async fn test_guest_access(
// Client B joins channel A as a guest // Client B joins channel A as a guest
active_call_b active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_a, None, cx)) .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await .await
.unwrap(); .unwrap();
@ -1352,7 +1341,7 @@ async fn test_guest_access(
assert_channels_list_shape(client_b.channel_store(), cx_b, &[]); assert_channels_list_shape(client_b.channel_store(), cx_b, &[]);
active_call_b active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b, None, cx)) .update(cx_b, |call, cx| call.join_channel(channel_b, cx))
.await .await
.unwrap(); .unwrap();
@ -1383,7 +1372,7 @@ async fn test_invite_access(
// should not be allowed to join // should not be allowed to join
assert!(active_call_b assert!(active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx)) .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await .await
.is_err()); .is_err());
@ -1401,7 +1390,7 @@ async fn test_invite_access(
.unwrap(); .unwrap();
active_call_b active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx)) .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await .await
.unwrap(); .unwrap();

View file

@ -4,10 +4,12 @@
// use call::ActiveCall; // use call::ActiveCall;
// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification; // use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
// use editor::{Editor, ExcerptRange, MultiBuffer}; // use editor::{Editor, ExcerptRange, MultiBuffer};
// use gpui::{BackgroundExecutor, TestAppContext, View}; // use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
// use live_kit_client::MacOSDisplay; // use live_kit_client::MacOSDisplay;
// use project::project_settings::ProjectSettings;
// use rpc::proto::PeerId; // use rpc::proto::PeerId;
// use serde_json::json; // use serde_json::json;
// use settings::SettingsStore;
// use std::borrow::Cow; // use std::borrow::Cow;
// use workspace::{ // use workspace::{
// dock::{test::TestPanel, DockPosition}, // dock::{test::TestPanel, DockPosition},
@ -24,7 +26,7 @@
// cx_c: &mut TestAppContext, // cx_c: &mut TestAppContext,
// cx_d: &mut TestAppContext, // cx_d: &mut TestAppContext,
// ) { // ) {
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// let client_c = server.create_client(cx_c, "user_c").await; // let client_c = server.create_client(cx_c, "user_c").await;
@ -71,12 +73,22 @@
// .unwrap(); // .unwrap();
// let window_a = client_a.build_workspace(&project_a, cx_a); // let window_a = client_a.build_workspace(&project_a, cx_a);
// let workspace_a = window_a.root(cx_a); // let workspace_a = window_a.root(cx_a).unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b); // let window_b = client_b.build_workspace(&project_b, cx_b);
// let workspace_b = window_b.root(cx_b); // let workspace_b = window_b.root(cx_b).unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
// let cx_c = &mut cx_c;
// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
// let cx_d = &mut cx_d;
// // Client A opens some editors. // // Client A opens some editors.
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone()); // let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let editor_a1 = workspace_a // let editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| { // .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx) // workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -132,8 +144,8 @@
// .await // .await
// .unwrap(); // .unwrap();
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| { // let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace // workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
@ -145,19 +157,19 @@
// Some((worktree_id, "2.txt").into()) // Some((worktree_id, "2.txt").into())
// ); // );
// assert_eq!( // assert_eq!(
// editor_b2.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)), // editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![2..1] // vec![2..1]
// ); // );
// assert_eq!( // assert_eq!(
// editor_b1.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)), // editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![3..2] // vec![3..2]
// ); // );
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// let active_call_c = cx_c.read(ActiveCall::global); // let active_call_c = cx_c.read(ActiveCall::global);
// let project_c = client_c.build_remote_project(project_id, cx_c).await; // let project_c = client_c.build_remote_project(project_id, cx_c).await;
// let window_c = client_c.build_workspace(&project_c, cx_c); // let window_c = client_c.build_workspace(&project_c, cx_c);
// let workspace_c = window_c.root(cx_c); // let workspace_c = window_c.root(cx_c).unwrap();
// active_call_c // active_call_c
// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx)) // .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
// .await // .await
@ -172,10 +184,13 @@
// .await // .await
// .unwrap(); // .unwrap();
// cx_d.foreground().run_until_parked(); // cx_d.executor().run_until_parked();
// let active_call_d = cx_d.read(ActiveCall::global); // let active_call_d = cx_d.read(ActiveCall::global);
// let project_d = client_d.build_remote_project(project_id, cx_d).await; // let project_d = client_d.build_remote_project(project_id, cx_d).await;
// let workspace_d = client_d.build_workspace(&project_d, cx_d).root(cx_d); // let workspace_d = client_d
// .build_workspace(&project_d, cx_d)
// .root(cx_d)
// .unwrap();
// active_call_d // active_call_d
// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx)) // .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
// .await // .await
@ -183,7 +198,7 @@
// drop(project_d); // drop(project_d);
// // All clients see that clients B and C are following client A. // // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { // for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!( // assert_eq!(
// followers_by_leader(project_id, cx), // followers_by_leader(project_id, cx),
@ -198,7 +213,7 @@
// }); // });
// // All clients see that clients B is following client A. // // All clients see that clients B is following client A.
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { // for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!( // assert_eq!(
// followers_by_leader(project_id, cx), // followers_by_leader(project_id, cx),
@ -216,7 +231,7 @@
// .unwrap(); // .unwrap();
// // All clients see that clients B and C are following client A. // // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { // for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!( // assert_eq!(
// followers_by_leader(project_id, cx), // followers_by_leader(project_id, cx),
@ -240,7 +255,7 @@
// .unwrap(); // .unwrap();
// // All clients see that D is following C // // All clients see that D is following C
// cx_d.foreground().run_until_parked(); // cx_d.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { // for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!( // assert_eq!(
// followers_by_leader(project_id, cx), // followers_by_leader(project_id, cx),
@ -257,7 +272,7 @@
// cx_c.drop_last(workspace_c); // cx_c.drop_last(workspace_c);
// // Clients A and B see that client B is following A, and client C is not present in the followers. // // Clients A and B see that client B is following A, and client C is not present in the followers.
// cx_c.foreground().run_until_parked(); // cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { // for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!( // assert_eq!(
// followers_by_leader(project_id, cx), // followers_by_leader(project_id, cx),
@ -271,12 +286,15 @@
// workspace.activate_item(&editor_a1, cx) // workspace.activate_item(&editor_a1, cx)
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id()); // assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// }); // });
// // When client A opens a multibuffer, client B does so as well. // // When client A opens a multibuffer, client B does so as well.
// let multibuffer_a = cx_a.add_model(|cx| { // let multibuffer_a = cx_a.build_model(|cx| {
// let buffer_a1 = project_a.update(cx, |project, cx| { // let buffer_a1 = project_a.update(cx, |project, cx| {
// project // project
// .get_open_buffer(&(worktree_id, "1.txt").into(), cx) // .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
@ -308,12 +326,12 @@
// }); // });
// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| { // let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
// let editor = // let editor =
// cx.add_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx)); // cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// workspace.add_item(Box::new(editor.clone()), cx); // workspace.add_item(Box::new(editor.clone()), cx);
// editor // editor
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// let multibuffer_editor_b = workspace_b.read_with(cx_b, |workspace, cx| { // let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace // workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
@ -321,8 +339,8 @@
// .unwrap() // .unwrap()
// }); // });
// assert_eq!( // assert_eq!(
// multibuffer_editor_a.read_with(cx_a, |editor, cx| editor.text(cx)), // multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
// multibuffer_editor_b.read_with(cx_b, |editor, cx| editor.text(cx)), // multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
// ); // );
// // When client A navigates back and forth, client B does so as well. // // When client A navigates back and forth, client B does so as well.
@ -333,8 +351,11 @@
// .await // .await
// .unwrap(); // .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id()); // assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// }); // });
// workspace_a // workspace_a
@ -344,8 +365,11 @@
// .await // .await
// .unwrap(); // .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b2.id()); // assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b2.item_id()
// );
// }); // });
// workspace_a // workspace_a
@ -355,8 +379,11 @@
// .await // .await
// .unwrap(); // .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id()); // assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// }); // });
// // Changes to client A's editor are reflected on client B. // // Changes to client A's editor are reflected on client B.
@ -364,20 +391,20 @@
// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2])); // editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| { // editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]); // assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
// }); // });
// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx)); // editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
// executor.run_until_parked(); // executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO")); // editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
// editor_a1.update(cx_a, |editor, cx| { // editor_a1.update(cx_a, |editor, cx| {
// editor.change_selections(None, cx, |s| s.select_ranges([3..3])); // editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
// editor.set_scroll_position(vec2f(0., 100.), cx); // editor.set_scroll_position(point(0., 100.), cx);
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| { // editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[3..3]); // assert_eq!(editor.selections.ranges(cx), &[3..3]);
// }); // });
@ -390,11 +417,11 @@
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, cx| workspace // workspace_b.update(cx_b, |workspace, cx| workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
// .id()), // .item_id()),
// editor_b1.id() // editor_b1.item_id()
// ); // );
// // Client A starts following client B. // // Client A starts following client B.
@ -405,15 +432,15 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)), // workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// Some(peer_id_b) // Some(peer_id_b)
// ); // );
// assert_eq!( // assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace // workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
// .id()), // .item_id()),
// editor_a1.id() // editor_a1.item_id()
// ); // );
// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane. // // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
@ -432,7 +459,7 @@
// .await // .await
// .unwrap(); // .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// let shared_screen = workspace_a.read_with(cx_a, |workspace, cx| { // let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
// workspace // workspace
// .active_item(cx) // .active_item(cx)
// .expect("no active item") // .expect("no active item")
@ -446,8 +473,11 @@
// .await // .await
// .unwrap(); // .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| { // workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_a1.id()) // assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_a1.item_id()
// )
// }); // });
// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer. // // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
@ -455,26 +485,26 @@
// workspace.activate_item(&multibuffer_editor_b, cx) // workspace.activate_item(&multibuffer_editor_b, cx)
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| { // workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!( // assert_eq!(
// workspace.active_item(cx).unwrap().id(), // workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.id() // multibuffer_editor_a.item_id()
// ) // )
// }); // });
// // Client B activates a panel, and the previously-opened screen-sharing item gets activated. // // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
// let panel = window_b.add_view(cx_b, |_| TestPanel::new(DockPosition::Left)); // let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// workspace_b.update(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// workspace.add_panel(panel, cx); // workspace.add_panel(panel, cx);
// workspace.toggle_panel_focus::<TestPanel>(cx); // workspace.toggle_panel_focus::<TestPanel>(cx);
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// assert_eq!( // assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace // workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
// .id()), // .item_id()),
// shared_screen.id() // shared_screen.item_id()
// ); // );
// // Toggling the focus back to the pane causes client A to return to the multibuffer. // // Toggling the focus back to the pane causes client A to return to the multibuffer.
@ -482,16 +512,16 @@
// workspace.toggle_panel_focus::<TestPanel>(cx); // workspace.toggle_panel_focus::<TestPanel>(cx);
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| { // workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!( // assert_eq!(
// workspace.active_item(cx).unwrap().id(), // workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.id() // multibuffer_editor_a.item_id()
// ) // )
// }); // });
// // Client B activates an item that doesn't implement following, // // Client B activates an item that doesn't implement following,
// // so the previously-opened screen-sharing item gets activated. // // so the previously-opened screen-sharing item gets activated.
// let unfollowable_item = window_b.add_view(cx_b, |_| TestItem::new()); // let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
// workspace_b.update(cx_b, |workspace, cx| { // workspace_b.update(cx_b, |workspace, cx| {
// workspace.active_pane().update(cx, |pane, cx| { // workspace.active_pane().update(cx, |pane, cx| {
// pane.add_item(Box::new(unfollowable_item), true, true, None, cx) // pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
@ -499,18 +529,18 @@
// }); // });
// executor.run_until_parked(); // executor.run_until_parked();
// assert_eq!( // assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace // workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
// .id()), // .item_id()),
// shared_screen.id() // shared_screen.item_id()
// ); // );
// // Following interrupts when client B disconnects. // // Following interrupts when client B disconnects.
// client_b.disconnect(&cx_b.to_async()); // client_b.disconnect(&cx_b.to_async());
// executor.advance_clock(RECONNECT_TIMEOUT); // executor.advance_clock(RECONNECT_TIMEOUT);
// assert_eq!( // assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)), // workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// None // None
// ); // );
// } // }
@ -521,7 +551,7 @@
// cx_a: &mut TestAppContext, // cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext, // cx_b: &mut TestAppContext,
// ) { // ) {
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// server // server
@ -560,13 +590,19 @@
// .await // .await
// .unwrap(); // .unwrap();
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a); // let workspace_a = client_a
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone()); // .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b); // let workspace_b = client_b
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone()); // .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
// let client_b_id = project_a.read_with(cx_a, |project, _| { // let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id // project.collaborators().values().next().unwrap().peer_id
// }); // });
@ -584,7 +620,7 @@
// .await // .await
// .unwrap(); // .unwrap();
// let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| { // let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
// pane.update(cx, |pane, cx| { // pane.update(cx, |pane, cx| {
// pane.items() // pane.items()
// .map(|item| { // .map(|item| {
@ -642,7 +678,7 @@
// cx_a: &mut TestAppContext, // cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext, // cx_b: &mut TestAppContext,
// ) { // ) {
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// server // server
@ -685,7 +721,10 @@
// .unwrap(); // .unwrap();
// // Client A opens a file. // // Client A opens a file.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a); // let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// workspace_a // workspace_a
// .update(cx_a, |workspace, cx| { // .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx) // workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -696,7 +735,10 @@
// .unwrap(); // .unwrap();
// // Client B opens a different file. // // Client B opens a different file.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b); // let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// workspace_b // workspace_b
// .update(cx_b, |workspace, cx| { // .update(cx_b, |workspace, cx| {
// workspace.open_path((worktree_id, "2.txt"), None, true, cx) // workspace.open_path((worktree_id, "2.txt"), None, true, cx)
@ -1167,7 +1209,7 @@
// cx_b: &mut TestAppContext, // cx_b: &mut TestAppContext,
// ) { // ) {
// // 2 clients connect to a server. // // 2 clients connect to a server.
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// server // server
@ -1207,8 +1249,17 @@
// .await // .await
// .unwrap(); // .unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// // Client A opens some editors. // // Client A opens some editors.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a); // let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let _editor_a1 = workspace_a // let _editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| { // .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx) // workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -1219,9 +1270,12 @@
// .unwrap(); // .unwrap();
// // Client B starts following client A. // // Client B starts following client A.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b); // let workspace_b = client_b
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone()); // .build_workspace(&project_b, cx_b)
// let leader_id = project_b.read_with(cx_b, |project, _| { // .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
// let leader_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id // project.collaborators().values().next().unwrap().peer_id
// }); // });
// workspace_b // workspace_b
@ -1231,10 +1285,10 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| { // let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace // workspace
// .active_item(cx) // .active_item(cx)
// .unwrap() // .unwrap()
@ -1245,7 +1299,7 @@
// // When client B moves, it automatically stops following client A. // // When client B moves, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx)); // editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None // None
// ); // );
@ -1256,14 +1310,14 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
// // When client B edits, it automatically stops following client A. // // When client B edits, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx)); // editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None // None
// ); // );
@ -1274,16 +1328,16 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
// // When client B scrolls, it automatically stops following client A. // // When client B scrolls, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| { // editor_b2.update(cx_b, |editor, cx| {
// editor.set_scroll_position(vec2f(0., 3.), cx) // editor.set_scroll_position(point(0., 3.), cx)
// }); // });
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None // None
// ); // );
@ -1294,7 +1348,7 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
@ -1303,13 +1357,13 @@
// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx) // workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
// }); // });
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx)); // workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id) // Some(leader_id)
// ); // );
@ -1321,7 +1375,7 @@
// .await // .await
// .unwrap(); // .unwrap();
// assert_eq!( // assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), // workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None // None
// ); // );
// } // }
@ -1332,7 +1386,7 @@
// cx_a: &mut TestAppContext, // cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext, // cx_b: &mut TestAppContext,
// ) { // ) {
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// server // server
@ -1345,20 +1399,26 @@
// client_a.fs().insert_tree("/a", json!({})).await; // client_a.fs().insert_tree("/a", json!({})).await;
// let (project_a, _) = client_a.build_local_project("/a", cx_a).await; // let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a); // let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let project_id = active_call_a // let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) // .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await // .await
// .unwrap(); // .unwrap();
// let project_b = client_b.build_remote_project(project_id, cx_b).await; // let project_b = client_b.build_remote_project(project_id, cx_b).await;
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b); // let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// executor.run_until_parked(); // executor.run_until_parked();
// let client_a_id = project_b.read_with(cx_b, |project, _| { // let client_a_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id // project.collaborators().values().next().unwrap().peer_id
// }); // });
// let client_b_id = project_a.read_with(cx_a, |project, _| { // let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id // project.collaborators().values().next().unwrap().peer_id
// }); // });
@ -1370,13 +1430,13 @@
// }); // });
// futures::try_join!(a_follow_b, b_follow_a).unwrap(); // futures::try_join!(a_follow_b, b_follow_a).unwrap();
// workspace_a.read_with(cx_a, |workspace, _| { // workspace_a.update(cx_a, |workspace, _| {
// assert_eq!( // assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()), // workspace.leader_for_pane(workspace.active_pane()),
// Some(client_b_id) // Some(client_b_id)
// ); // );
// }); // });
// workspace_b.read_with(cx_b, |workspace, _| { // workspace_b.update(cx_b, |workspace, _| {
// assert_eq!( // assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()), // workspace.leader_for_pane(workspace.active_pane()),
// Some(client_a_id) // Some(client_a_id)
@ -1398,7 +1458,7 @@
// // b opens a different file in project 2, a follows b // // b opens a different file in project 2, a follows b
// // b opens a different file in project 1, a cannot follow b // // b opens a different file in project 1, a cannot follow b
// // b shares the project, a joins the project and follows b // // b shares the project, a joins the project and follows b
// let mut server = TestServer::start(&executor).await; // let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await; // let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await; // let client_b = server.create_client(cx_b, "user_b").await;
// cx_a.update(editor::init); // cx_a.update(editor::init);
@ -1435,8 +1495,14 @@
// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await; // let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await; // let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a); // let workspace_a = client_a
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b); // .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx)); // cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx)); // cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
@ -1455,6 +1521,12 @@
// .await // .await
// .unwrap(); // .unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// workspace_a // workspace_a
// .update(cx_a, |workspace, cx| { // .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx) // workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
@ -1476,11 +1548,12 @@
// let workspace_b_project_a = cx_b // let workspace_b_project_a = cx_b
// .windows() // .windows()
// .iter() // .iter()
// .max_by_key(|window| window.id()) // .max_by_key(|window| window.item_id())
// .unwrap() // .unwrap()
// .downcast::<Workspace>() // .downcast::<Workspace>()
// .unwrap() // .unwrap()
// .root(cx_b); // .root(cx_b)
// .unwrap();
// // assert that b is following a in project a in w.rs // // assert that b is following a in project a in w.rs
// workspace_b_project_a.update(cx_b, |workspace, cx| { // workspace_b_project_a.update(cx_b, |workspace, cx| {
@ -1534,7 +1607,7 @@
// workspace.leader_for_pane(workspace.active_pane()) // workspace.leader_for_pane(workspace.active_pane())
// ); // );
// let item = workspace.active_pane().read(cx).active_item().unwrap(); // let item = workspace.active_pane().read(cx).active_item().unwrap();
// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs")); // assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
// }); // });
// // b moves to y.rs in b's project, a is still following but can't yet see // // b moves to y.rs in b's project, a is still following but can't yet see
@ -1578,11 +1651,12 @@
// let workspace_a_project_b = cx_a // let workspace_a_project_b = cx_a
// .windows() // .windows()
// .iter() // .iter()
// .max_by_key(|window| window.id()) // .max_by_key(|window| window.item_id())
// .unwrap() // .unwrap()
// .downcast::<Workspace>() // .downcast::<Workspace>()
// .unwrap() // .unwrap()
// .root(cx_a); // .root(cx_a)
// .unwrap();
// workspace_a_project_b.update(cx_a, |workspace, cx| { // workspace_a_project_b.update(cx_a, |workspace, cx| {
// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id)); // assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
@ -1596,12 +1670,151 @@
// }); // });
// } // }
// #[gpui::test]
// async fn test_following_into_excluded_file(
// executor: BackgroundExecutor,
// mut cx_a: &mut TestAppContext,
// mut cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// for cx in [&mut cx_a, &mut cx_b] {
// cx.update(|cx| {
// cx.update_global::<SettingsStore, _>(|store, cx| {
// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
// });
// });
// });
// }
// server
// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
// .await;
// let active_call_a = cx_a.read(ActiveCall::global);
// let active_call_b = cx_b.read(ActiveCall::global);
// cx_a.update(editor::init);
// cx_b.update(editor::init);
// client_a
// .fs()
// .insert_tree(
// "/a",
// json!({
// ".git": {
// "COMMIT_EDITMSG": "write your commit message here",
// },
// "1.txt": "one\none\none",
// "2.txt": "two\ntwo\ntwo",
// "3.txt": "three\nthree\nthree",
// }),
// )
// .await;
// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
// active_call_a
// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
// .await
// .unwrap();
// let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await
// .unwrap();
// let project_b = client_b.build_remote_project(project_id, cx_b).await;
// active_call_b
// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
// .await
// .unwrap();
// let window_a = client_a.build_workspace(&project_a, cx_a);
// let workspace_a = window_a.root(cx_a).unwrap();
// let peer_id_a = client_a.peer_id().unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b);
// let workspace_b = window_b.root(cx_b).unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// // Client A opens editors for a regular file and an excluded file.
// let editor_for_regular = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
// })
// .await
// .unwrap()
// .downcast::<Editor>()
// .unwrap();
// let editor_for_excluded_a = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
// })
// .await
// .unwrap()
// .downcast::<Editor>()
// .unwrap();
// // Client A updates their selections in those editors
// editor_for_regular.update(cx_a, |editor, cx| {
// editor.handle_input("a", cx);
// editor.handle_input("b", cx);
// editor.handle_input("c", cx);
// editor.select_left(&Default::default(), cx);
// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
// });
// editor_for_excluded_a.update(cx_a, |editor, cx| {
// editor.select_all(&Default::default(), cx);
// editor.handle_input("new commit message", cx);
// editor.select_left(&Default::default(), cx);
// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
// });
// // When client B starts following client A, currently visible file is replicated
// workspace_b
// .update(cx_b, |workspace, cx| {
// workspace.follow(peer_id_a, cx).unwrap()
// })
// .await
// .unwrap();
// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
// .downcast::<Editor>()
// .unwrap()
// });
// assert_eq!(
// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
// );
// assert_eq!(
// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![18..17]
// );
// // Changes from B to the excluded file are replicated in A's editor
// editor_for_excluded_b.update(cx_b, |editor, cx| {
// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
// });
// executor.run_until_parked();
// editor_for_excluded_a.update(cx_a, |editor, cx| {
// assert_eq!(
// editor.text(cx),
// "new commit messag\nCo-Authored-By: B <b@b.b>"
// );
// });
// }
// fn visible_push_notifications( // fn visible_push_notifications(
// cx: &mut TestAppContext, // cx: &mut TestAppContext,
// ) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> { // ) -> Vec<gpui::View<ProjectSharedNotification>> {
// let mut ret = Vec::new(); // let mut ret = Vec::new();
// for window in cx.windows() { // for window in cx.windows() {
// window.read_with(cx, |window| { // window.update(cx, |window| {
// if let Some(handle) = window // if let Some(handle) = window
// .root_view() // .root_view()
// .clone() // .clone()
@ -1645,8 +1858,8 @@
// }) // })
// } // }
// fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> { // fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
// workspace.read_with(cx, |workspace, cx| { // workspace.update(cx, |workspace, cx| {
// let active_pane = workspace.active_pane(); // let active_pane = workspace.active_pane();
// workspace // workspace
// .panes() // .panes()

View file

@ -510,10 +510,9 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
// Simultaneously join channel 1 and then channel 2 // Simultaneously join channel 1 and then channel 2
active_call_a active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx)) .update(cx_a, |call, cx| call.join_channel(channel_1, cx))
.detach(); .detach();
let join_channel_2 = let join_channel_2 = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, cx));
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, None, cx));
join_channel_2.await.unwrap(); join_channel_2.await.unwrap();
@ -539,8 +538,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
call.invite(client_c.user_id().unwrap(), None, cx) call.invite(client_c.user_id().unwrap(), None, cx)
}); });
let join_channel = let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
b_invite.await.unwrap(); b_invite.await.unwrap();
c_invite.await.unwrap(); c_invite.await.unwrap();
@ -569,8 +567,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
.unwrap(); .unwrap();
// Simultaneously join channel 1 and call user B and user C from client A. // Simultaneously join channel 1 and call user B and user C from client A.
let join_channel = let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
let b_invite = active_call_a.update(cx_a, |call, cx| { let b_invite = active_call_a.update(cx_a, |call, cx| {
call.invite(client_b.user_id().unwrap(), None, cx) call.invite(client_b.user_id().unwrap(), None, cx)
@ -2784,11 +2781,10 @@ async fn test_fs_operations(
let entry = project_b let entry = project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "c.txt"), false, cx)
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
@ -2815,8 +2811,8 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx) project.rename_entry(entry.id, Path::new("d.txt"), cx)
}) })
.unwrap()
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
@ -2841,11 +2837,10 @@ async fn test_fs_operations(
let dir_entry = project_b let dir_entry = project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR"), true, cx)
.create_entry((worktree_id, "DIR"), true, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
@ -2870,27 +2865,24 @@ async fn test_fs_operations(
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {
@ -2931,11 +2923,10 @@ async fn test_fs_operations(
project_b project_b
.update(cx_b, |project, cx| { .update(cx_b, |project, cx| {
project project.copy_entry(entry.id, Path::new("f.txt"), cx)
.copy_entry(entry.id, Path::new("f.txt"), cx)
.unwrap()
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
worktree_a.read_with(cx_a, |worktree, _| { worktree_a.read_with(cx_a, |worktree, _| {

View file

@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await; ensure_project_shared(&project, client, cx).await;
project project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx)) .update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
.unwrap()
.await?; .await?;
} }

View file

@ -221,7 +221,6 @@ impl TestServer {
fs: fs.clone(), fs: fs.clone(),
build_window_options: |_, _, _| Default::default(), build_window_options: |_, _, _| Default::default(),
node_runtime: FakeNodeRuntime::new(), node_runtime: FakeNodeRuntime::new(),
call_factory: |_| Box::new(workspace::TestCallHandler),
}); });
cx.update(|cx| { cx.update(|cx| {

File diff suppressed because it is too large Load diff

View file

@ -31,9 +31,9 @@ use std::sync::Arc;
use call::ActiveCall; use call::ActiveCall;
use client::{Client, UserStore}; use client::{Client, UserStore};
use gpui::{ use gpui::{
div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model, MouseButton, actions, div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model,
ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled, Subscription, MouseButton, ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled,
ViewContext, VisualContext, WeakView, WindowBounds, Subscription, ViewContext, VisualContext, WeakView, WindowBounds,
}; };
use project::{Project, RepositoryEntry}; use project::{Project, RepositoryEntry};
use theme::ActiveTheme; use theme::ActiveTheme;
@ -49,6 +49,14 @@ use crate::face_pile::FacePile;
const MAX_PROJECT_NAME_LENGTH: usize = 40; const MAX_PROJECT_NAME_LENGTH: usize = 40;
const MAX_BRANCH_NAME_LENGTH: usize = 40; const MAX_BRANCH_NAME_LENGTH: usize = 40;
actions!(
ShareProject,
UnshareProject,
ToggleUserMenu,
ToggleProjectMenu,
SwitchBranch
);
// actions!( // actions!(
// collab, // collab,
// [ // [
@ -91,37 +99,23 @@ impl Render for CollabTitlebarItem {
type Element = Stateful<Div>; type Element = Stateful<Div>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element { fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
let is_in_room = self let room = ActiveCall::global(cx).read(cx).room();
.workspace let is_in_room = room.is_some();
.update(cx, |this, cx| this.call_state().is_in_room(cx))
.unwrap_or_default();
let is_shared = is_in_room && self.project.read(cx).is_shared(); let is_shared = is_in_room && self.project.read(cx).is_shared();
let current_user = self.user_store.read(cx).current_user(); let current_user = self.user_store.read(cx).current_user();
let client = self.client.clone(); let client = self.client.clone();
let users = self let remote_participants = room.map(|room| {
.workspace room.read(cx)
.update(cx, |this, cx| this.call_state().remote_participants(cx)) .remote_participants()
.log_err() .values()
.flatten(); .map(|participant| (participant.user.clone(), participant.peer_id))
let is_muted = self .collect::<Vec<_>>()
.workspace });
.update(cx, |this, cx| this.call_state().is_muted(cx)) let is_muted = room.map_or(false, |room| room.read(cx).is_muted(cx));
.log_err() let is_deafened = room
.flatten() .and_then(|room| room.read(cx).is_deafened())
.unwrap_or_default(); .unwrap_or(false);
let is_deafened = self let speakers_icon = if is_deafened {
.workspace
.update(cx, |this, cx| this.call_state().is_deafened(cx))
.log_err()
.flatten()
.unwrap_or_default();
let speakers_icon = if self
.workspace
.update(cx, |this, cx| this.call_state().is_deafened(cx))
.log_err()
.flatten()
.unwrap_or_default()
{
ui::Icon::AudioOff ui::Icon::AudioOff
} else { } else {
ui::Icon::AudioOn ui::Icon::AudioOn
@ -157,7 +151,7 @@ impl Render for CollabTitlebarItem {
.children(self.render_project_branch(cx)), .children(self.render_project_branch(cx)),
) )
.when_some( .when_some(
users.zip(current_user.clone()), remote_participants.zip(current_user.clone()),
|this, (remote_participants, current_user)| { |this, (remote_participants, current_user)| {
let mut pile = FacePile::default(); let mut pile = FacePile::default();
pile.extend( pile.extend(
@ -168,25 +162,30 @@ impl Render for CollabTitlebarItem {
div().child(Avatar::data(avatar.clone())).into_any_element() div().child(Avatar::data(avatar.clone())).into_any_element()
}) })
.into_iter() .into_iter()
.chain(remote_participants.into_iter().flat_map(|(user, peer_id)| { .chain(remote_participants.into_iter().filter_map(
user.avatar.as_ref().map(|avatar| { |(user, peer_id)| {
div() let avatar = user.avatar.as_ref()?;
.child( Some(
Avatar::data(avatar.clone()).into_element().into_any(), div()
) .child(
.on_mouse_down(MouseButton::Left, { Avatar::data(avatar.clone())
let workspace = workspace.clone(); .into_element()
move |_, cx| { .into_any(),
workspace )
.update(cx, |this, cx| { .on_mouse_down(MouseButton::Left, {
this.open_shared_screen(peer_id, cx); let workspace = workspace.clone();
}) move |_, cx| {
.log_err(); workspace
} .update(cx, |this, cx| {
}) this.open_shared_screen(peer_id, cx);
.into_any_element() })
}) .log_err();
})), }
})
.into_any_element(),
)
},
)),
); );
this.child(pile.render(cx)) this.child(pile.render(cx))
}, },
@ -204,20 +203,24 @@ impl Render for CollabTitlebarItem {
"toggle_sharing", "toggle_sharing",
if is_shared { "Unshare" } else { "Share" }, if is_shared { "Unshare" } else { "Share" },
) )
.style(ButtonStyle::Subtle), .style(ButtonStyle::Subtle)
.on_click(cx.listener(
move |this, _, cx| {
if is_shared {
this.unshare_project(&Default::default(), cx);
} else {
this.share_project(&Default::default(), cx);
}
},
)),
) )
.child( .child(
IconButton::new("leave-call", ui::Icon::Exit) IconButton::new("leave-call", ui::Icon::Exit)
.style(ButtonStyle::Subtle) .style(ButtonStyle::Subtle)
.on_click({ .on_click(move |_, cx| {
let workspace = workspace.clone(); ActiveCall::global(cx)
move |_, cx| { .update(cx, |call, cx| call.hang_up(cx))
workspace .detach_and_log_err(cx);
.update(cx, |this, cx| {
this.call_state().hang_up(cx).detach();
})
.log_err();
}
}), }),
), ),
) )
@ -235,15 +238,8 @@ impl Render for CollabTitlebarItem {
) )
.style(ButtonStyle::Subtle) .style(ButtonStyle::Subtle)
.selected(is_muted) .selected(is_muted)
.on_click({ .on_click(move |_, cx| {
let workspace = workspace.clone(); crate::toggle_mute(&Default::default(), cx)
move |_, cx| {
workspace
.update(cx, |this, cx| {
this.call_state().toggle_mute(cx);
})
.log_err();
}
}), }),
) )
.child( .child(
@ -258,26 +254,15 @@ impl Render for CollabTitlebarItem {
cx, cx,
) )
}) })
.on_click({ .on_click(move |_, cx| {
let workspace = workspace.clone(); crate::toggle_mute(&Default::default(), cx)
move |_, cx| {
workspace
.update(cx, |this, cx| {
this.call_state().toggle_deafen(cx);
})
.log_err();
}
}), }),
) )
.child( .child(
IconButton::new("screen-share", ui::Icon::Screen) IconButton::new("screen-share", ui::Icon::Screen)
.style(ButtonStyle::Subtle) .style(ButtonStyle::Subtle)
.on_click(move |_, cx| { .on_click(move |_, cx| {
workspace crate::toggle_screen_sharing(&Default::default(), cx)
.update(cx, |this, cx| {
this.call_state().toggle_screen_share(cx);
})
.log_err();
}), }),
) )
.pl_2(), .pl_2(),
@ -451,46 +436,19 @@ impl CollabTitlebarItem {
// render_project_owner -> resolve if you are in a room -> Option<foo> // render_project_owner -> resolve if you are in a room -> Option<foo>
pub fn render_project_owner(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> { pub fn render_project_owner(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
// TODO: We can't finish implementing this until project sharing works let host = self.project.read(cx).host()?;
// - [ ] Show the project owner when the project is remote (maybe done) let host = self.user_store.read(cx).get_cached_user(host.user_id)?;
// - [x] Show the project owner when the project is local let participant_index = self
// - [ ] Show the project owner with a lock icon when the project is local and unshared .user_store
.read(cx)
let remote_id = self.project.read(cx).remote_id(); .participant_indices()
let is_local = remote_id.is_none(); .get(&host.id)?;
let is_shared = self.project.read(cx).is_shared();
let (user_name, participant_index) = {
if let Some(host) = self.project.read(cx).host() {
debug_assert!(!is_local);
let (Some(host_user), Some(participant_index)) = (
self.user_store.read(cx).get_cached_user(host.user_id),
self.user_store
.read(cx)
.participant_indices()
.get(&host.user_id),
) else {
return None;
};
(host_user.github_login.clone(), participant_index.0)
} else {
debug_assert!(is_local);
let name = self
.user_store
.read(cx)
.current_user()
.map(|user| user.github_login.clone())?;
(name, 0)
}
};
Some( Some(
div().border().border_color(gpui::red()).child( div().border().border_color(gpui::red()).child(
Button::new( Button::new("project_owner_trigger", host.github_login.clone())
"project_owner_trigger", .color(Color::Player(participant_index.0))
format!("{user_name} ({})", !is_shared), .style(ButtonStyle::Subtle)
) .tooltip(move |cx| Tooltip::text("Toggle following", cx)),
.color(Color::Player(participant_index))
.style(ButtonStyle::Subtle)
.tooltip(move |cx| Tooltip::text("Toggle following", cx)),
), ),
) )
} }
@ -730,21 +688,21 @@ impl CollabTitlebarItem {
cx.notify(); cx.notify();
} }
// fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) { fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) {
// let active_call = ActiveCall::global(cx); let active_call = ActiveCall::global(cx);
// let project = self.project.clone(); let project = self.project.clone();
// active_call active_call
// .update(cx, |call, cx| call.share_project(project, cx)) .update(cx, |call, cx| call.share_project(project, cx))
// .detach_and_log_err(cx); .detach_and_log_err(cx);
// } }
// fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) { fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) {
// let active_call = ActiveCall::global(cx); let active_call = ActiveCall::global(cx);
// let project = self.project.clone(); let project = self.project.clone();
// active_call active_call
// .update(cx, |call, cx| call.unshare_project(project, cx)) .update(cx, |call, cx| call.unshare_project(project, cx))
// .log_err(); .log_err();
// } }
// pub fn toggle_user_menu(&mut self, _: &ToggleUserMenu, cx: &mut ViewContext<Self>) { // pub fn toggle_user_menu(&mut self, _: &ToggleUserMenu, cx: &mut ViewContext<Self>) {
// self.user_menu.update(cx, |user_menu, cx| { // self.user_menu.update(cx, |user_menu, cx| {

View file

@ -9,22 +9,21 @@ mod panel_settings;
use std::{rc::Rc, sync::Arc}; use std::{rc::Rc, sync::Arc};
use call::{report_call_event_for_room, ActiveCall, Room};
pub use collab_panel::CollabPanel; pub use collab_panel::CollabPanel;
pub use collab_titlebar_item::CollabTitlebarItem; pub use collab_titlebar_item::CollabTitlebarItem;
use gpui::{ use gpui::{
point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, WindowBounds, WindowKind, actions, point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, Task, WindowBounds,
WindowOptions, WindowKind, WindowOptions,
}; };
pub use panel_settings::{ pub use panel_settings::{
ChatPanelSettings, CollaborationPanelSettings, NotificationPanelSettings, ChatPanelSettings, CollaborationPanelSettings, NotificationPanelSettings,
}; };
use settings::Settings; use settings::Settings;
use util::ResultExt;
use workspace::AppState; use workspace::AppState;
// actions!( actions!(ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall);
// collab,
// [ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
// );
pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) { pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
CollaborationPanelSettings::register(cx); CollaborationPanelSettings::register(cx);
@ -42,61 +41,61 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(toggle_deafen); // cx.add_global_action(toggle_deafen);
} }
// pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) { pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
// let call = ActiveCall::global(cx).read(cx); let call = ActiveCall::global(cx).read(cx);
// if let Some(room) = call.room().cloned() { if let Some(room) = call.room().cloned() {
// let client = call.client(); let client = call.client();
// let toggle_screen_sharing = room.update(cx, |room, cx| { let toggle_screen_sharing = room.update(cx, |room, cx| {
// if room.is_screen_sharing() { if room.is_screen_sharing() {
// report_call_event_for_room( report_call_event_for_room(
// "disable screen share", "disable screen share",
// room.id(), room.id(),
// room.channel_id(), room.channel_id(),
// &client, &client,
// cx, cx,
// ); );
// Task::ready(room.unshare_screen(cx)) Task::ready(room.unshare_screen(cx))
// } else { } else {
// report_call_event_for_room( report_call_event_for_room(
// "enable screen share", "enable screen share",
// room.id(), room.id(),
// room.channel_id(), room.channel_id(),
// &client, &client,
// cx, cx,
// ); );
// room.share_screen(cx) room.share_screen(cx)
// } }
// }); });
// toggle_screen_sharing.detach_and_log_err(cx); toggle_screen_sharing.detach_and_log_err(cx);
// } }
// } }
// pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) { pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
// let call = ActiveCall::global(cx).read(cx); let call = ActiveCall::global(cx).read(cx);
// if let Some(room) = call.room().cloned() { if let Some(room) = call.room().cloned() {
// let client = call.client(); let client = call.client();
// room.update(cx, |room, cx| { room.update(cx, |room, cx| {
// let operation = if room.is_muted(cx) { let operation = if room.is_muted(cx) {
// "enable microphone" "enable microphone"
// } else { } else {
// "disable microphone" "disable microphone"
// }; };
// report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx); report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx);
// room.toggle_mute(cx) room.toggle_mute(cx)
// }) })
// .map(|task| task.detach_and_log_err(cx)) .map(|task| task.detach_and_log_err(cx))
// .log_err(); .log_err();
// } }
// } }
// pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) { pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
// if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() { if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
// room.update(cx, Room::toggle_deafen) room.update(cx, Room::toggle_deafen)
// .map(|task| task.detach_and_log_err(cx)) .map(|task| task.detach_and_log_err(cx))
// .log_err(); .log_err();
// } }
// } }
fn notification_window_options( fn notification_window_options(
screen: Rc<dyn PlatformDisplay>, screen: Rc<dyn PlatformDisplay>,

View file

@ -311,7 +311,11 @@ impl PickerDelegate for CommandPaletteDelegate {
command.name.clone(), command.name.clone(),
r#match.positions.clone(), r#match.positions.clone(),
)) ))
.children(KeyBinding::for_action(&*command.action, cx)), .children(KeyBinding::for_action_in(
&*command.action,
&self.previous_focus_handle,
cx,
)),
), ),
) )
} }

View file

@ -45,6 +45,6 @@ fs = { path = "../fs", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] } language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] } rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] } settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] } util = { path = "../util", features = ["test-support"] }

View file

@ -1002,229 +1002,231 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
} }
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use gpui::{executor::Deterministic, TestAppContext}; use gpui::TestAppContext;
// #[gpui::test(iterations = 10)] #[gpui::test(iterations = 10)]
// async fn test_buffer_management(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) { async fn test_buffer_management(cx: &mut TestAppContext) {
// deterministic.forbid_parking(); let (copilot, mut lsp) = Copilot::fake(cx);
// let (copilot, mut lsp) = Copilot::fake(cx);
// let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello")); let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Hello"));
// let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap(); let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx)); .parse()
// assert_eq!( .unwrap();
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>() copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
// .await, assert_eq!(
// lsp::DidOpenTextDocumentParams { lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// text_document: lsp::TextDocumentItem::new( .await,
// buffer_1_uri.clone(), lsp::DidOpenTextDocumentParams {
// "plaintext".into(), text_document: lsp::TextDocumentItem::new(
// 0, buffer_1_uri.clone(),
// "Hello".into() "plaintext".into(),
// ), 0,
// } "Hello".into()
// ); ),
}
);
// let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye")); let buffer_2 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Goodbye"));
// let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap(); let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx)); .parse()
// assert_eq!( .unwrap();
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>() copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
// .await, assert_eq!(
// lsp::DidOpenTextDocumentParams { lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// text_document: lsp::TextDocumentItem::new( .await,
// buffer_2_uri.clone(), lsp::DidOpenTextDocumentParams {
// "plaintext".into(), text_document: lsp::TextDocumentItem::new(
// 0, buffer_2_uri.clone(),
// "Goodbye".into() "plaintext".into(),
// ), 0,
// } "Goodbye".into()
// ); ),
}
);
// buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx)); buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx));
// assert_eq!( assert_eq!(
// lsp.receive_notification::<lsp::notification::DidChangeTextDocument>() lsp.receive_notification::<lsp::notification::DidChangeTextDocument>()
// .await, .await,
// lsp::DidChangeTextDocumentParams { lsp::DidChangeTextDocumentParams {
// text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1), text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1),
// content_changes: vec![lsp::TextDocumentContentChangeEvent { content_changes: vec![lsp::TextDocumentContentChangeEvent {
// range: Some(lsp::Range::new( range: Some(lsp::Range::new(
// lsp::Position::new(0, 5), lsp::Position::new(0, 5),
// lsp::Position::new(0, 5) lsp::Position::new(0, 5)
// )), )),
// range_length: None, range_length: None,
// text: " world".into(), text: " world".into(),
// }], }],
// } }
// ); );
// // Ensure updates to the file are reflected in the LSP. // Ensure updates to the file are reflected in the LSP.
// buffer_1 buffer_1.update(cx, |buffer, cx| {
// .update(cx, |buffer, cx| { buffer.file_updated(
// buffer.file_updated( Arc::new(File {
// Arc::new(File { abs_path: "/root/child/buffer-1".into(),
// abs_path: "/root/child/buffer-1".into(), path: Path::new("child/buffer-1").into(),
// path: Path::new("child/buffer-1").into(), }),
// }), cx,
// cx, )
// ) });
// }) assert_eq!(
// .await; lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
// assert_eq!( .await,
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>() lsp::DidCloseTextDocumentParams {
// .await, text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
// lsp::DidCloseTextDocumentParams { }
// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri), );
// } let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
// ); assert_eq!(
// let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap(); lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// assert_eq!( .await,
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>() lsp::DidOpenTextDocumentParams {
// .await, text_document: lsp::TextDocumentItem::new(
// lsp::DidOpenTextDocumentParams { buffer_1_uri.clone(),
// text_document: lsp::TextDocumentItem::new( "plaintext".into(),
// buffer_1_uri.clone(), 1,
// "plaintext".into(), "Hello world".into()
// 1, ),
// "Hello world".into() }
// ), );
// }
// );
// // Ensure all previously-registered buffers are closed when signing out. // Ensure all previously-registered buffers are closed when signing out.
// lsp.handle_request::<request::SignOut, _, _>(|_, _| async { lsp.handle_request::<request::SignOut, _, _>(|_, _| async {
// Ok(request::SignOutResult {}) Ok(request::SignOutResult {})
// }); });
// copilot copilot
// .update(cx, |copilot, cx| copilot.sign_out(cx)) .update(cx, |copilot, cx| copilot.sign_out(cx))
// .await .await
// .unwrap(); .unwrap();
// assert_eq!( // todo!() po: these notifications now happen in reverse order?
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>() assert_eq!(
// .await, lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
// lsp::DidCloseTextDocumentParams { .await,
// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()), lsp::DidCloseTextDocumentParams {
// } text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()),
// ); }
// assert_eq!( );
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>() assert_eq!(
// .await, lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
// lsp::DidCloseTextDocumentParams { .await,
// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()), lsp::DidCloseTextDocumentParams {
// } text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()),
// ); }
);
// // Ensure all previously-registered buffers are re-opened when signing in. // Ensure all previously-registered buffers are re-opened when signing in.
// lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async { lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async {
// Ok(request::SignInInitiateResult::AlreadySignedIn { Ok(request::SignInInitiateResult::AlreadySignedIn {
// user: "user-1".into(), user: "user-1".into(),
// }) })
// }); });
// copilot copilot
// .update(cx, |copilot, cx| copilot.sign_in(cx)) .update(cx, |copilot, cx| copilot.sign_in(cx))
// .await .await
// .unwrap(); .unwrap();
// assert_eq!(
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// .await,
// lsp::DidOpenTextDocumentParams {
// text_document: lsp::TextDocumentItem::new(
// buffer_2_uri.clone(),
// "plaintext".into(),
// 0,
// "Goodbye".into()
// ),
// }
// );
// assert_eq!(
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// .await,
// lsp::DidOpenTextDocumentParams {
// text_document: lsp::TextDocumentItem::new(
// buffer_1_uri.clone(),
// "plaintext".into(),
// 0,
// "Hello world".into()
// ),
// }
// );
// // Dropping a buffer causes it to be closed on the LSP side as well. assert_eq!(
// cx.update(|_| drop(buffer_2)); lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
// assert_eq!( .await,
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>() lsp::DidOpenTextDocumentParams {
// .await, text_document: lsp::TextDocumentItem::new(
// lsp::DidCloseTextDocumentParams { buffer_1_uri.clone(),
// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri), "plaintext".into(),
// } 0,
// ); "Hello world".into()
// } ),
}
);
assert_eq!(
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
.await,
lsp::DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
buffer_2_uri.clone(),
"plaintext".into(),
0,
"Goodbye".into()
),
}
);
// Dropping a buffer causes it to be closed on the LSP side as well.
cx.update(|_| drop(buffer_2));
assert_eq!(
lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await,
lsp::DidCloseTextDocumentParams {
text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri),
}
);
}
// struct File { struct File {
// abs_path: PathBuf, abs_path: PathBuf,
// path: Arc<Path>, path: Arc<Path>,
// } }
// impl language2::File for File { impl language::File for File {
// fn as_local(&self) -> Option<&dyn language2::LocalFile> { fn as_local(&self) -> Option<&dyn language::LocalFile> {
// Some(self) Some(self)
// } }
// fn mtime(&self) -> std::time::SystemTime { fn mtime(&self) -> std::time::SystemTime {
// unimplemented!() unimplemented!()
// } }
// fn path(&self) -> &Arc<Path> { fn path(&self) -> &Arc<Path> {
// &self.path &self.path
// } }
// fn full_path(&self, _: &AppContext) -> PathBuf { fn full_path(&self, _: &AppContext) -> PathBuf {
// unimplemented!() unimplemented!()
// } }
// fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr { fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr {
// unimplemented!() unimplemented!()
// } }
// fn is_deleted(&self) -> bool { fn is_deleted(&self) -> bool {
// unimplemented!() unimplemented!()
// } }
// fn as_any(&self) -> &dyn std::any::Any { fn as_any(&self) -> &dyn std::any::Any {
// unimplemented!() unimplemented!()
// } }
// fn to_proto(&self) -> rpc::proto::File { fn to_proto(&self) -> rpc::proto::File {
// unimplemented!() unimplemented!()
// } }
// fn worktree_id(&self) -> usize { fn worktree_id(&self) -> usize {
// 0 0
// } }
// } }
// impl language::LocalFile for File { impl language::LocalFile for File {
// fn abs_path(&self, _: &AppContext) -> PathBuf { fn abs_path(&self, _: &AppContext) -> PathBuf {
// self.abs_path.clone() self.abs_path.clone()
// } }
// fn load(&self, _: &AppContext) -> Task<Result<String>> { fn load(&self, _: &AppContext) -> Task<Result<String>> {
// unimplemented!() unimplemented!()
// } }
// fn buffer_reloaded( fn buffer_reloaded(
// &self, &self,
// _: u64, _: u64,
// _: &clock::Global, _: &clock::Global,
// _: language::RopeFingerprint, _: language::RopeFingerprint,
// _: language::LineEnding, _: language::LineEnding,
// _: std::time::SystemTime, _: std::time::SystemTime,
// _: &mut AppContext, _: &mut AppContext,
// ) { ) {
// unimplemented!() unimplemented!()
// } }
// } }
// } }

View file

@ -201,9 +201,8 @@ impl CopilotButton {
url: COPILOT_SETTINGS_URL.to_string(), url: COPILOT_SETTINGS_URL.to_string(),
} }
.boxed_clone(), .boxed_clone(),
cx,
) )
.action("Sign Out", SignOut.boxed_clone(), cx) .action("Sign Out", SignOut.boxed_clone())
}); });
} }

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -741,49 +741,48 @@ impl WrapSnapshot {
} }
fn check_invariants(&self) { fn check_invariants(&self) {
// todo!() #[cfg(test)]
// #[cfg(test)] {
// { assert_eq!(
// assert_eq!( TabPoint::from(self.transforms.summary().input.lines),
// TabPoint::from(self.transforms.summary().input.lines), self.tab_snapshot.max_point()
// self.tab_snapshot.max_point() );
// );
// { {
// let mut transforms = self.transforms.cursor::<()>().peekable(); let mut transforms = self.transforms.cursor::<()>().peekable();
// while let Some(transform) = transforms.next() { while let Some(transform) = transforms.next() {
// if let Some(next_transform) = transforms.peek() { if let Some(next_transform) = transforms.peek() {
// assert!(transform.is_isomorphic() != next_transform.is_isomorphic()); assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
// } }
// } }
// } }
// let text = language::Rope::from(self.text().as_str()); let text = language::Rope::from(self.text().as_str());
// let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0); let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
// let mut expected_buffer_rows = Vec::new(); let mut expected_buffer_rows = Vec::new();
// let mut prev_tab_row = 0; let mut prev_tab_row = 0;
// for display_row in 0..=self.max_point().row() { for display_row in 0..=self.max_point().row() {
// let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0)); let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
// if tab_point.row() == prev_tab_row && display_row != 0 { if tab_point.row() == prev_tab_row && display_row != 0 {
// expected_buffer_rows.push(None); expected_buffer_rows.push(None);
// } else { } else {
// expected_buffer_rows.push(input_buffer_rows.next().unwrap()); expected_buffer_rows.push(input_buffer_rows.next().unwrap());
// } }
// prev_tab_row = tab_point.row(); prev_tab_row = tab_point.row();
// assert_eq!(self.line_len(display_row), text.line_len(display_row)); assert_eq!(self.line_len(display_row), text.line_len(display_row));
// } }
// for start_display_row in 0..expected_buffer_rows.len() { for start_display_row in 0..expected_buffer_rows.len() {
// assert_eq!( assert_eq!(
// self.buffer_rows(start_display_row as u32) self.buffer_rows(start_display_row as u32)
// .collect::<Vec<_>>(), .collect::<Vec<_>>(),
// &expected_buffer_rows[start_display_row..], &expected_buffer_rows[start_display_row..],
// "invalid buffer_rows({}..)", "invalid buffer_rows({}..)",
// start_display_row start_display_row
// ); );
// } }
// } }
} }
} }
@ -1026,337 +1025,334 @@ fn consolidate_wrap_edits(edits: &mut Vec<WrapEdit>) {
} }
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use crate::{ use crate::{
// display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap}, display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
// MultiBuffer, MultiBuffer,
// }; };
// use gpui::test::observe; use gpui::{font, px, test::observe, Platform};
// use rand::prelude::*; use rand::prelude::*;
// use settings::SettingsStore; use settings::SettingsStore;
// use smol::stream::StreamExt; use smol::stream::StreamExt;
// use std::{cmp, env, num::NonZeroU32}; use std::{cmp, env, num::NonZeroU32};
// use text::Rope; use text::Rope;
use theme::LoadThemes;
// #[gpui::test(iterations = 100)] #[gpui::test(iterations = 100)]
// async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) { async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
// init_test(cx); // todo!() this test is flaky
init_test(cx);
// cx.foreground().set_block_on_ticks(0..=50); cx.background_executor.set_block_on_ticks(0..=50);
// let operations = env::var("OPERATIONS") let operations = env::var("OPERATIONS")
// .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
// .unwrap_or(10); .unwrap_or(10);
// let font_cache = cx.font_cache().clone(); let text_system = cx.read(|cx| cx.text_system().clone());
// let font_system = cx.platform().fonts(); let mut wrap_width = if rng.gen_bool(0.1) {
// let mut wrap_width = if rng.gen_bool(0.1) { None
// None } else {
// } else { Some(px(rng.gen_range(0.0..=1000.0)))
// Some(rng.gen_range(0.0..=1000.0)) };
// }; let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
// let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); let font = font("Helvetica");
// let family_id = font_cache let font_id = text_system.font_id(&font).unwrap();
// .load_family(&["Helvetica"], &Default::default()) let font_size = px(14.0);
// .unwrap();
// let font_id = font_cache
// .select_font(family_id, &Default::default())
// .unwrap();
// let font_size = 14.0;
// log::info!("Tab size: {}", tab_size); log::info!("Tab size: {}", tab_size);
// log::info!("Wrap width: {:?}", wrap_width); log::info!("Wrap width: {:?}", wrap_width);
// let buffer = cx.update(|cx| { let buffer = cx.update(|cx| {
// if rng.gen() { if rng.gen() {
// MultiBuffer::build_random(&mut rng, cx) MultiBuffer::build_random(&mut rng, cx)
// } else { } else {
// let len = rng.gen_range(0..10); let len = rng.gen_range(0..10);
// let text = util::RandomCharIter::new(&mut rng) let text = util::RandomCharIter::new(&mut rng)
// .take(len) .take(len)
// .collect::<String>(); .collect::<String>();
// MultiBuffer::build_simple(&text, cx) MultiBuffer::build_simple(&text, cx)
// } }
// }); });
// let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
// log::info!("Buffer text: {:?}", buffer_snapshot.text()); log::info!("Buffer text: {:?}", buffer_snapshot.text());
// let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
// log::info!("InlayMap text: {:?}", inlay_snapshot.text()); log::info!("InlayMap text: {:?}", inlay_snapshot.text());
// let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
// log::info!("FoldMap text: {:?}", fold_snapshot.text()); log::info!("FoldMap text: {:?}", fold_snapshot.text());
// let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
// let tabs_snapshot = tab_map.set_max_expansion_column(32); let tabs_snapshot = tab_map.set_max_expansion_column(32);
// log::info!("TabMap text: {:?}", tabs_snapshot.text()); log::info!("TabMap text: {:?}", tabs_snapshot.text());
// let mut line_wrapper = LineWrapper::new(font_id, font_size, font_system); let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size).unwrap();
// let unwrapped_text = tabs_snapshot.text(); let unwrapped_text = tabs_snapshot.text();
// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
// let (wrap_map, _) = let (wrap_map, _) =
// cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx)); cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font, font_size, wrap_width, cx));
// let mut notifications = observe(&wrap_map, cx); let mut notifications = observe(&wrap_map, cx);
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
// notifications.next().await.unwrap(); notifications.next().await.unwrap();
// } }
// let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| { let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
// assert!(!map.is_rewrapping()); assert!(!map.is_rewrapping());
// map.sync(tabs_snapshot.clone(), Vec::new(), cx) map.sync(tabs_snapshot.clone(), Vec::new(), cx)
// }); });
// let actual_text = initial_snapshot.text(); let actual_text = initial_snapshot.text();
// assert_eq!( assert_eq!(
// actual_text, expected_text, actual_text, expected_text,
// "unwrapped text is: {:?}", "unwrapped text is: {:?}",
// unwrapped_text unwrapped_text
// ); );
// log::info!("Wrapped text: {:?}", actual_text); log::info!("Wrapped text: {:?}", actual_text);
// let mut next_inlay_id = 0; let mut next_inlay_id = 0;
// let mut edits = Vec::new(); let mut edits = Vec::new();
// for _i in 0..operations { for _i in 0..operations {
// log::info!("{} ==============================================", _i); log::info!("{} ==============================================", _i);
// let mut buffer_edits = Vec::new(); let mut buffer_edits = Vec::new();
// match rng.gen_range(0..=100) { match rng.gen_range(0..=100) {
// 0..=19 => { 0..=19 => {
// wrap_width = if rng.gen_bool(0.2) { wrap_width = if rng.gen_bool(0.2) {
// None None
// } else { } else {
// Some(rng.gen_range(0.0..=1000.0)) Some(px(rng.gen_range(0.0..=1000.0)))
// }; };
// log::info!("Setting wrap width to {:?}", wrap_width); log::info!("Setting wrap width to {:?}", wrap_width);
// wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
// } }
// 20..=39 => { 20..=39 => {
// for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) { for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
// let (tabs_snapshot, tab_edits) = let (tabs_snapshot, tab_edits) =
// tab_map.sync(fold_snapshot, fold_edits, tab_size); tab_map.sync(fold_snapshot, fold_edits, tab_size);
// let (mut snapshot, wrap_edits) = let (mut snapshot, wrap_edits) =
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
// snapshot.check_invariants(); snapshot.check_invariants();
// snapshot.verify_chunks(&mut rng); snapshot.verify_chunks(&mut rng);
// edits.push((snapshot, wrap_edits)); edits.push((snapshot, wrap_edits));
// } }
// } }
// 40..=59 => { 40..=59 => {
// let (inlay_snapshot, inlay_edits) = let (inlay_snapshot, inlay_edits) =
// inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
// let (tabs_snapshot, tab_edits) = let (tabs_snapshot, tab_edits) =
// tab_map.sync(fold_snapshot, fold_edits, tab_size); tab_map.sync(fold_snapshot, fold_edits, tab_size);
// let (mut snapshot, wrap_edits) = let (mut snapshot, wrap_edits) =
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
// snapshot.check_invariants(); snapshot.check_invariants();
// snapshot.verify_chunks(&mut rng); snapshot.verify_chunks(&mut rng);
// edits.push((snapshot, wrap_edits)); edits.push((snapshot, wrap_edits));
// } }
// _ => { _ => {
// buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
// let subscription = buffer.subscribe(); let subscription = buffer.subscribe();
// let edit_count = rng.gen_range(1..=5); let edit_count = rng.gen_range(1..=5);
// buffer.randomly_mutate(&mut rng, edit_count, cx); buffer.randomly_mutate(&mut rng, edit_count, cx);
// buffer_snapshot = buffer.snapshot(cx); buffer_snapshot = buffer.snapshot(cx);
// buffer_edits.extend(subscription.consume()); buffer_edits.extend(subscription.consume());
// }); });
// } }
// } }
// log::info!("Buffer text: {:?}", buffer_snapshot.text()); log::info!("Buffer text: {:?}", buffer_snapshot.text());
// let (inlay_snapshot, inlay_edits) = let (inlay_snapshot, inlay_edits) =
// inlay_map.sync(buffer_snapshot.clone(), buffer_edits); inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
// log::info!("InlayMap text: {:?}", inlay_snapshot.text()); log::info!("InlayMap text: {:?}", inlay_snapshot.text());
// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
// log::info!("FoldMap text: {:?}", fold_snapshot.text()); log::info!("FoldMap text: {:?}", fold_snapshot.text());
// let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
// log::info!("TabMap text: {:?}", tabs_snapshot.text()); log::info!("TabMap text: {:?}", tabs_snapshot.text());
// let unwrapped_text = tabs_snapshot.text(); let unwrapped_text = tabs_snapshot.text();
// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
// let (mut snapshot, wrap_edits) = let (mut snapshot, wrap_edits) =
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx)); wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
// snapshot.check_invariants(); snapshot.check_invariants();
// snapshot.verify_chunks(&mut rng); snapshot.verify_chunks(&mut rng);
// edits.push((snapshot, wrap_edits)); edits.push((snapshot, wrap_edits));
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) { if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
// log::info!("Waiting for wrapping to finish"); log::info!("Waiting for wrapping to finish");
// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
// notifications.next().await.unwrap(); notifications.next().await.unwrap();
// } }
// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
// } }
// if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
// let (mut wrapped_snapshot, wrap_edits) = let (mut wrapped_snapshot, wrap_edits) =
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx)); wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
// let actual_text = wrapped_snapshot.text(); let actual_text = wrapped_snapshot.text();
// let actual_longest_row = wrapped_snapshot.longest_row(); let actual_longest_row = wrapped_snapshot.longest_row();
// log::info!("Wrapping finished: {:?}", actual_text); log::info!("Wrapping finished: {:?}", actual_text);
// wrapped_snapshot.check_invariants(); wrapped_snapshot.check_invariants();
// wrapped_snapshot.verify_chunks(&mut rng); wrapped_snapshot.verify_chunks(&mut rng);
// edits.push((wrapped_snapshot.clone(), wrap_edits)); edits.push((wrapped_snapshot.clone(), wrap_edits));
// assert_eq!( assert_eq!(
// actual_text, expected_text, actual_text, expected_text,
// "unwrapped text is: {:?}", "unwrapped text is: {:?}",
// unwrapped_text unwrapped_text
// ); );
// let mut summary = TextSummary::default(); let mut summary = TextSummary::default();
// for (ix, item) in wrapped_snapshot for (ix, item) in wrapped_snapshot
// .transforms .transforms
// .items(&()) .items(&())
// .into_iter() .into_iter()
// .enumerate() .enumerate()
// { {
// summary += &item.summary.output; summary += &item.summary.output;
// log::info!("{} summary: {:?}", ix, item.summary.output,); log::info!("{} summary: {:?}", ix, item.summary.output,);
// } }
// if tab_size.get() == 1 if tab_size.get() == 1
// || !wrapped_snapshot || !wrapped_snapshot
// .tab_snapshot .tab_snapshot
// .fold_snapshot .fold_snapshot
// .text() .text()
// .contains('\t') .contains('\t')
// { {
// let mut expected_longest_rows = Vec::new(); let mut expected_longest_rows = Vec::new();
// let mut longest_line_len = -1; let mut longest_line_len = -1;
// for (row, line) in expected_text.split('\n').enumerate() { for (row, line) in expected_text.split('\n').enumerate() {
// let line_char_count = line.chars().count() as isize; let line_char_count = line.chars().count() as isize;
// if line_char_count > longest_line_len { if line_char_count > longest_line_len {
// expected_longest_rows.clear(); expected_longest_rows.clear();
// longest_line_len = line_char_count; longest_line_len = line_char_count;
// } }
// if line_char_count >= longest_line_len { if line_char_count >= longest_line_len {
// expected_longest_rows.push(row as u32); expected_longest_rows.push(row as u32);
// } }
// } }
// assert!( assert!(
// expected_longest_rows.contains(&actual_longest_row), expected_longest_rows.contains(&actual_longest_row),
// "incorrect longest row {}. expected {:?} with length {}", "incorrect longest row {}. expected {:?} with length {}",
// actual_longest_row, actual_longest_row,
// expected_longest_rows, expected_longest_rows,
// longest_line_len, longest_line_len,
// ) )
// } }
// } }
// } }
// let mut initial_text = Rope::from(initial_snapshot.text().as_str()); let mut initial_text = Rope::from(initial_snapshot.text().as_str());
// for (snapshot, patch) in edits { for (snapshot, patch) in edits {
// let snapshot_text = Rope::from(snapshot.text().as_str()); let snapshot_text = Rope::from(snapshot.text().as_str());
// for edit in &patch { for edit in &patch {
// let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0)); let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
// let old_end = initial_text.point_to_offset(cmp::min( let old_end = initial_text.point_to_offset(cmp::min(
// Point::new(edit.new.start + edit.old.len() as u32, 0), Point::new(edit.new.start + edit.old.len() as u32, 0),
// initial_text.max_point(), initial_text.max_point(),
// )); ));
// let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0)); let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
// let new_end = snapshot_text.point_to_offset(cmp::min( let new_end = snapshot_text.point_to_offset(cmp::min(
// Point::new(edit.new.end, 0), Point::new(edit.new.end, 0),
// snapshot_text.max_point(), snapshot_text.max_point(),
// )); ));
// let new_text = snapshot_text let new_text = snapshot_text
// .chunks_in_range(new_start..new_end) .chunks_in_range(new_start..new_end)
// .collect::<String>(); .collect::<String>();
// initial_text.replace(old_start..old_end, &new_text); initial_text.replace(old_start..old_end, &new_text);
// } }
// assert_eq!(initial_text.to_string(), snapshot_text.to_string()); assert_eq!(initial_text.to_string(), snapshot_text.to_string());
// } }
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
// log::info!("Waiting for wrapping to finish"); log::info!("Waiting for wrapping to finish");
// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
// notifications.next().await.unwrap(); notifications.next().await.unwrap();
// } }
// } }
// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
// } }
// fn init_test(cx: &mut gpui::TestAppContext) { fn init_test(cx: &mut gpui::TestAppContext) {
// cx.foreground().forbid_parking(); cx.update(|cx| {
// cx.update(|cx| { let settings = SettingsStore::test(cx);
// cx.set_global(SettingsStore::test(cx)); cx.set_global(settings);
// theme::init((), cx); theme::init(LoadThemes::JustBase, cx);
// }); });
// } }
// fn wrap_text( fn wrap_text(
// unwrapped_text: &str, unwrapped_text: &str,
// wrap_width: Option<f32>, wrap_width: Option<Pixels>,
// line_wrapper: &mut LineWrapper, line_wrapper: &mut LineWrapper,
// ) -> String { ) -> String {
// if let Some(wrap_width) = wrap_width { if let Some(wrap_width) = wrap_width {
// let mut wrapped_text = String::new(); let mut wrapped_text = String::new();
// for (row, line) in unwrapped_text.split('\n').enumerate() { for (row, line) in unwrapped_text.split('\n').enumerate() {
// if row > 0 { if row > 0 {
// wrapped_text.push('\n') wrapped_text.push('\n')
// } }
// let mut prev_ix = 0; let mut prev_ix = 0;
// for boundary in line_wrapper.wrap_line(line, wrap_width) { for boundary in line_wrapper.wrap_line(line, wrap_width) {
// wrapped_text.push_str(&line[prev_ix..boundary.ix]); wrapped_text.push_str(&line[prev_ix..boundary.ix]);
// wrapped_text.push('\n'); wrapped_text.push('\n');
// wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize)); wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
// prev_ix = boundary.ix; prev_ix = boundary.ix;
// } }
// wrapped_text.push_str(&line[prev_ix..]); wrapped_text.push_str(&line[prev_ix..]);
// } }
// wrapped_text wrapped_text
// } else { } else {
// unwrapped_text.to_string() unwrapped_text.to_string()
// } }
// } }
// impl WrapSnapshot { impl WrapSnapshot {
// pub fn text(&self) -> String { pub fn text(&self) -> String {
// self.text_chunks(0).collect() self.text_chunks(0).collect()
// } }
// pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> { pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
// self.chunks( self.chunks(
// wrap_row..self.max_point().row() + 1, wrap_row..self.max_point().row() + 1,
// false, false,
// Highlights::default(), Highlights::default(),
// ) )
// .map(|h| h.text) .map(|h| h.text)
// } }
// fn verify_chunks(&mut self, rng: &mut impl Rng) { fn verify_chunks(&mut self, rng: &mut impl Rng) {
// for _ in 0..5 { for _ in 0..5 {
// let mut end_row = rng.gen_range(0..=self.max_point().row()); let mut end_row = rng.gen_range(0..=self.max_point().row());
// let start_row = rng.gen_range(0..=end_row); let start_row = rng.gen_range(0..=end_row);
// end_row += 1; end_row += 1;
// let mut expected_text = self.text_chunks(start_row).collect::<String>(); let mut expected_text = self.text_chunks(start_row).collect::<String>();
// if expected_text.ends_with('\n') { if expected_text.ends_with('\n') {
// expected_text.push('\n'); expected_text.push('\n');
// } }
// let mut expected_text = expected_text let mut expected_text = expected_text
// .lines() .lines()
// .take((end_row - start_row) as usize) .take((end_row - start_row) as usize)
// .collect::<Vec<_>>() .collect::<Vec<_>>()
// .join("\n"); .join("\n");
// if end_row <= self.max_point().row() { if end_row <= self.max_point().row() {
// expected_text.push('\n'); expected_text.push('\n');
// } }
// let actual_text = self let actual_text = self
// .chunks(start_row..end_row, true, Highlights::default()) .chunks(start_row..end_row, true, Highlights::default())
// .map(|c| c.text) .map(|c| c.text)
// .collect::<String>(); .collect::<String>();
// assert_eq!( assert_eq!(
// expected_text, expected_text,
// actual_text, actual_text,
// "chunks != highlighted_chunks for rows {:?}", "chunks != highlighted_chunks for rows {:?}",
// start_row..end_row start_row..end_row
// ); );
// } }
// } }
// } }
// } }

View file

@ -3486,7 +3486,7 @@ impl Editor {
drop(context_menu); drop(context_menu);
this.discard_copilot_suggestion(cx); this.discard_copilot_suggestion(cx);
cx.notify(); cx.notify();
} else if this.completion_tasks.is_empty() { } else if this.completion_tasks.len() <= 1 {
// If there are no more completion tasks and the last menu was // If there are no more completion tasks and the last menu was
// empty, we should hide it. If it was already hidden, we should // empty, we should hide it. If it was already hidden, we should
// also show the copilot suggestion when available. // also show the copilot suggestion when available.
@ -8240,6 +8240,11 @@ impl Editor {
self.style = Some(style); self.style = Some(style);
} }
#[cfg(any(test, feature = "test-support"))]
pub fn style(&self) -> Option<&EditorStyle> {
self.style.as_ref()
}
pub fn set_wrap_width(&self, width: Option<Pixels>, cx: &mut AppContext) -> bool { pub fn set_wrap_width(&self, width: Option<Pixels>, cx: &mut AppContext) -> bool {
self.display_map self.display_map
.update(cx, |map, cx| map.set_wrap_width(width, cx)) .update(cx, |map, cx| map.set_wrap_width(width, cx))

File diff suppressed because it is too large Load diff

View file

@ -330,7 +330,7 @@ impl EditorElement {
}); });
} }
fn modifiers_changed( pub(crate) fn modifiers_changed(
editor: &mut Editor, editor: &mut Editor,
event: &ModifiersChangedEvent, event: &ModifiersChangedEvent,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
@ -1755,7 +1755,7 @@ impl EditorElement {
let gutter_width; let gutter_width;
let gutter_margin; let gutter_margin;
if snapshot.show_gutter { if snapshot.show_gutter {
let descent = cx.text_system().descent(font_id, font_size).unwrap(); let descent = cx.text_system().descent(font_id, font_size);
let gutter_padding_factor = 3.5; let gutter_padding_factor = 3.5;
gutter_padding = (em_width * gutter_padding_factor).round(); gutter_padding = (em_width * gutter_padding_factor).round();
@ -3227,448 +3227,491 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: Pixels) -> f32 {
(delta.pow(1.2) / 300.0).into() (delta.pow(1.2) / 300.0).into()
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use crate::{ use crate::{
// display_map::{BlockDisposition, BlockProperties}, display_map::{BlockDisposition, BlockProperties},
// editor_tests::{init_test, update_test_language_settings}, editor_tests::{init_test, update_test_language_settings},
// Editor, MultiBuffer, Editor, MultiBuffer,
// }; };
// use gpui::TestAppContext; use gpui::{EmptyView, TestAppContext};
// use language::language_settings; use language::language_settings;
// use log::info; use log::info;
// use std::{num::NonZeroU32, sync::Arc}; use std::{num::NonZeroU32, sync::Arc};
// use util::test::sample_text; use util::test::sample_text;
// #[gpui::test] #[gpui::test]
// fn test_layout_line_numbers(cx: &mut TestAppContext) { fn test_shape_line_numbers(cx: &mut TestAppContext) {
// init_test(cx, |_| {}); init_test(cx, |_| {});
// let editor = cx let window = cx.add_window(|cx| {
// .add_window(|cx| { let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
// let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); Editor::new(EditorMode::Full, buffer, None, cx)
// Editor::new(EditorMode::Full, buffer, None, None, cx) });
// })
// .root(cx);
// let element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
// let layouts = editor.update(cx, |editor, cx| { let editor = window.root(cx).unwrap();
// let snapshot = editor.snapshot(cx); let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
// element let element = EditorElement::new(&editor, style);
// .layout_line_numbers(
// 0..6,
// &Default::default(),
// DisplayPoint::new(0, 0),
// false,
// &snapshot,
// cx,
// )
// .0
// });
// assert_eq!(layouts.len(), 6);
// let relative_rows = editor.update(cx, |editor, cx| { let layouts = window
// let snapshot = editor.snapshot(cx); .update(cx, |editor, cx| {
// element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3)) let snapshot = editor.snapshot(cx);
// }); element
// assert_eq!(relative_rows[&0], 3); .shape_line_numbers(
// assert_eq!(relative_rows[&1], 2); 0..6,
// assert_eq!(relative_rows[&2], 1); &Default::default(),
// // current line has no relative number DisplayPoint::new(0, 0),
// assert_eq!(relative_rows[&4], 1); false,
// assert_eq!(relative_rows[&5], 2); &snapshot,
cx,
)
.0
})
.unwrap();
assert_eq!(layouts.len(), 6);
// // works if cursor is before screen let relative_rows = window
// let relative_rows = editor.update(cx, |editor, cx| { .update(cx, |editor, cx| {
// let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
})
.unwrap();
assert_eq!(relative_rows[&0], 3);
assert_eq!(relative_rows[&1], 2);
assert_eq!(relative_rows[&2], 1);
// current line has no relative number
assert_eq!(relative_rows[&4], 1);
assert_eq!(relative_rows[&5], 2);
// element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1)) // works if cursor is before screen
// }); let relative_rows = window
// assert_eq!(relative_rows.len(), 3); .update(cx, |editor, cx| {
// assert_eq!(relative_rows[&3], 2); let snapshot = editor.snapshot(cx);
// assert_eq!(relative_rows[&4], 3);
// assert_eq!(relative_rows[&5], 4);
// // works if cursor is after screen element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
// let relative_rows = editor.update(cx, |editor, cx| { })
// let snapshot = editor.snapshot(cx); .unwrap();
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&3], 2);
assert_eq!(relative_rows[&4], 3);
assert_eq!(relative_rows[&5], 4);
// element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6)) // works if cursor is after screen
// }); let relative_rows = window
// assert_eq!(relative_rows.len(), 3); .update(cx, |editor, cx| {
// assert_eq!(relative_rows[&0], 5); let snapshot = editor.snapshot(cx);
// assert_eq!(relative_rows[&1], 4);
// assert_eq!(relative_rows[&2], 3);
// }
// #[gpui::test] element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
// async fn test_vim_visual_selections(cx: &mut TestAppContext) { })
// init_test(cx, |_| {}); .unwrap();
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&0], 5);
assert_eq!(relative_rows[&1], 4);
assert_eq!(relative_rows[&2], 3);
}
// let editor = cx #[gpui::test]
// .add_window(|cx| { async fn test_vim_visual_selections(cx: &mut TestAppContext) {
// let buffer = MultiBuffer::build_simple(&(sample_text(6, 6, 'a') + "\n"), cx); init_test(cx, |_| {});
// Editor::new(EditorMode::Full, buffer, None, None, cx)
// })
// .root(cx);
// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
// let (_, state) = editor.update(cx, |editor, cx| {
// editor.cursor_shape = CursorShape::Block;
// editor.change_selections(None, cx, |s| {
// s.select_ranges([
// Point::new(0, 0)..Point::new(1, 0),
// Point::new(3, 2)..Point::new(3, 3),
// Point::new(5, 6)..Point::new(6, 0),
// ]);
// });
// element.layout(
// SizeConstraint::new(point(500., 500.), point(500., 500.)),
// editor,
// cx,
// )
// });
// assert_eq!(state.selections.len(), 1);
// let local_selections = &state.selections[0].1;
// assert_eq!(local_selections.len(), 3);
// // moves cursor back one line
// assert_eq!(local_selections[0].head, DisplayPoint::new(0, 6));
// assert_eq!(
// local_selections[0].range,
// DisplayPoint::new(0, 0)..DisplayPoint::new(1, 0)
// );
// // moves cursor back one column let window = cx.add_window(|cx| {
// assert_eq!( let buffer = MultiBuffer::build_simple(&(sample_text(6, 6, 'a') + "\n"), cx);
// local_selections[1].range, Editor::new(EditorMode::Full, buffer, None, cx)
// DisplayPoint::new(3, 2)..DisplayPoint::new(3, 3) });
// ); let editor = window.root(cx).unwrap();
// assert_eq!(local_selections[1].head, DisplayPoint::new(3, 2)); let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
let mut element = EditorElement::new(&editor, style);
// // leaves cursor on the max point window
// assert_eq!( .update(cx, |editor, cx| {
// local_selections[2].range, editor.cursor_shape = CursorShape::Block;
// DisplayPoint::new(5, 6)..DisplayPoint::new(6, 0) editor.change_selections(None, cx, |s| {
// ); s.select_ranges([
// assert_eq!(local_selections[2].head, DisplayPoint::new(6, 0)); Point::new(0, 0)..Point::new(1, 0),
Point::new(3, 2)..Point::new(3, 3),
Point::new(5, 6)..Point::new(6, 0),
]);
});
})
.unwrap();
let state = cx
.update_window(window.into(), |_, cx| {
element.compute_layout(
Bounds {
origin: point(px(500.), px(500.)),
size: size(px(500.), px(500.)),
},
cx,
)
})
.unwrap();
// // active lines does not include 1 (even though the range of the selection does) assert_eq!(state.selections.len(), 1);
// assert_eq!( let local_selections = &state.selections[0].1;
// state.active_rows.keys().cloned().collect::<Vec<u32>>(), assert_eq!(local_selections.len(), 3);
// vec![0, 3, 5, 6] // moves cursor back one line
// ); assert_eq!(local_selections[0].head, DisplayPoint::new(0, 6));
assert_eq!(
local_selections[0].range,
DisplayPoint::new(0, 0)..DisplayPoint::new(1, 0)
);
// // multi-buffer support // moves cursor back one column
// // in DisplayPoint co-ordinates, this is what we're dealing with: assert_eq!(
// // 0: [[file local_selections[1].range,
// // 1: header]] DisplayPoint::new(3, 2)..DisplayPoint::new(3, 3)
// // 2: aaaaaa );
// // 3: bbbbbb assert_eq!(local_selections[1].head, DisplayPoint::new(3, 2));
// // 4: cccccc
// // 5:
// // 6: ...
// // 7: ffffff
// // 8: gggggg
// // 9: hhhhhh
// // 10:
// // 11: [[file
// // 12: header]]
// // 13: bbbbbb
// // 14: cccccc
// // 15: dddddd
// let editor = cx
// .add_window(|cx| {
// let buffer = MultiBuffer::build_multi(
// [
// (
// &(sample_text(8, 6, 'a') + "\n"),
// vec![
// Point::new(0, 0)..Point::new(3, 0),
// Point::new(4, 0)..Point::new(7, 0),
// ],
// ),
// (
// &(sample_text(8, 6, 'a') + "\n"),
// vec![Point::new(1, 0)..Point::new(3, 0)],
// ),
// ],
// cx,
// );
// Editor::new(EditorMode::Full, buffer, None, None, cx)
// })
// .root(cx);
// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
// let (_, state) = editor.update(cx, |editor, cx| {
// editor.cursor_shape = CursorShape::Block;
// editor.change_selections(None, cx, |s| {
// s.select_display_ranges([
// DisplayPoint::new(4, 0)..DisplayPoint::new(7, 0),
// DisplayPoint::new(10, 0)..DisplayPoint::new(13, 0),
// ]);
// });
// element.layout(
// SizeConstraint::new(point(500., 500.), point(500., 500.)),
// editor,
// cx,
// )
// });
// assert_eq!(state.selections.len(), 1); // leaves cursor on the max point
// let local_selections = &state.selections[0].1; assert_eq!(
// assert_eq!(local_selections.len(), 2); local_selections[2].range,
DisplayPoint::new(5, 6)..DisplayPoint::new(6, 0)
);
assert_eq!(local_selections[2].head, DisplayPoint::new(6, 0));
// // moves cursor on excerpt boundary back a line // active lines does not include 1 (even though the range of the selection does)
// // and doesn't allow selection to bleed through assert_eq!(
// assert_eq!( state.active_rows.keys().cloned().collect::<Vec<u32>>(),
// local_selections[0].range, vec![0, 3, 5, 6]
// DisplayPoint::new(4, 0)..DisplayPoint::new(6, 0) );
// );
// assert_eq!(local_selections[0].head, DisplayPoint::new(5, 0));
// // moves cursor on buffer boundary back two lines // multi-buffer support
// // and doesn't allow selection to bleed through // in DisplayPoint co-ordinates, this is what we're dealing with:
// assert_eq!( // 0: [[file
// local_selections[1].range, // 1: header]]
// DisplayPoint::new(10, 0)..DisplayPoint::new(11, 0) // 2: aaaaaa
// ); // 3: bbbbbb
// assert_eq!(local_selections[1].head, DisplayPoint::new(10, 0)); // 4: cccccc
// } // 5:
// 6: ...
// 7: ffffff
// 8: gggggg
// 9: hhhhhh
// 10:
// 11: [[file
// 12: header]]
// 13: bbbbbb
// 14: cccccc
// 15: dddddd
let window = cx.add_window(|cx| {
let buffer = MultiBuffer::build_multi(
[
(
&(sample_text(8, 6, 'a') + "\n"),
vec![
Point::new(0, 0)..Point::new(3, 0),
Point::new(4, 0)..Point::new(7, 0),
],
),
(
&(sample_text(8, 6, 'a') + "\n"),
vec![Point::new(1, 0)..Point::new(3, 0)],
),
],
cx,
);
Editor::new(EditorMode::Full, buffer, None, cx)
});
let editor = window.root(cx).unwrap();
let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
let mut element = EditorElement::new(&editor, style);
let state = window.update(cx, |editor, cx| {
editor.cursor_shape = CursorShape::Block;
editor.change_selections(None, cx, |s| {
s.select_display_ranges([
DisplayPoint::new(4, 0)..DisplayPoint::new(7, 0),
DisplayPoint::new(10, 0)..DisplayPoint::new(13, 0),
]);
});
});
// #[gpui::test] let state = cx
// fn test_layout_with_placeholder_text_and_blocks(cx: &mut TestAppContext) { .update_window(window.into(), |_, cx| {
// init_test(cx, |_| {}); element.compute_layout(
Bounds {
origin: point(px(500.), px(500.)),
size: size(px(500.), px(500.)),
},
cx,
)
})
.unwrap();
assert_eq!(state.selections.len(), 1);
let local_selections = &state.selections[0].1;
assert_eq!(local_selections.len(), 2);
// let editor = cx // moves cursor on excerpt boundary back a line
// .add_window(|cx| { // and doesn't allow selection to bleed through
// let buffer = MultiBuffer::build_simple("", cx); assert_eq!(
// Editor::new(EditorMode::Full, buffer, None, None, cx) local_selections[0].range,
// }) DisplayPoint::new(4, 0)..DisplayPoint::new(6, 0)
// .root(cx); );
assert_eq!(local_selections[0].head, DisplayPoint::new(5, 0));
dbg!("Hi");
// moves cursor on buffer boundary back two lines
// and doesn't allow selection to bleed through
assert_eq!(
local_selections[1].range,
DisplayPoint::new(10, 0)..DisplayPoint::new(11, 0)
);
assert_eq!(local_selections[1].head, DisplayPoint::new(10, 0));
}
// editor.update(cx, |editor, cx| { #[gpui::test]
// editor.set_placeholder_text("hello", cx); fn test_layout_with_placeholder_text_and_blocks(cx: &mut TestAppContext) {
// editor.insert_blocks( init_test(cx, |_| {});
// [BlockProperties {
// style: BlockStyle::Fixed,
// disposition: BlockDisposition::Above,
// height: 3,
// position: Anchor::min(),
// render: Arc::new(|_| Empty::new().into_any),
// }],
// None,
// cx,
// );
// // Blur the editor so that it displays placeholder text. let window = cx.add_window(|cx| {
// cx.blur(); let buffer = MultiBuffer::build_simple("", cx);
// }); Editor::new(EditorMode::Full, buffer, None, cx)
});
let editor = window.root(cx).unwrap();
let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
window
.update(cx, |editor, cx| {
editor.set_placeholder_text("hello", cx);
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
disposition: BlockDisposition::Above,
height: 3,
position: Anchor::min(),
render: Arc::new(|_| div().into_any()),
}],
None,
cx,
);
// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx))); // Blur the editor so that it displays placeholder text.
// let (size, mut state) = editor.update(cx, |editor, cx| { cx.blur();
// element.layout( })
// SizeConstraint::new(point(500., 500.), point(500., 500.)), .unwrap();
// editor,
// cx,
// )
// });
// assert_eq!(state.position_map.line_layouts.len(), 4); let mut element = EditorElement::new(&editor, style);
// assert_eq!( let mut state = cx
// state .update_window(window.into(), |_, cx| {
// .line_number_layouts element.compute_layout(
// .iter() Bounds {
// .map(Option::is_some) origin: point(px(500.), px(500.)),
// .collect::<Vec<_>>(), size: size(px(500.), px(500.)),
// &[false, false, false, true] },
// ); cx,
)
})
.unwrap();
let size = state.position_map.size;
// // Don't panic. assert_eq!(state.position_map.line_layouts.len(), 4);
// let bounds = Bounds::<Pixels>::new(Default::default(), size); assert_eq!(
// editor.update(cx, |editor, cx| { state
// element.paint(bounds, bounds, &mut state, editor, cx); .line_numbers
// }); .iter()
// } .map(Option::is_some)
.collect::<Vec<_>>(),
&[false, false, false, true]
);
// #[gpui::test] // Don't panic.
// fn test_all_invisibles_drawing(cx: &mut TestAppContext) { let bounds = Bounds::<Pixels>::new(Default::default(), size);
// const TAB_SIZE: u32 = 4; cx.update_window(window.into(), |_, cx| {
element.paint(bounds, &mut (), cx);
})
.unwrap()
}
// let input_text = "\t \t|\t| a b"; #[gpui::test]
// let expected_invisibles = vec![ fn test_all_invisibles_drawing(cx: &mut TestAppContext) {
// Invisible::Tab { const TAB_SIZE: u32 = 4;
// line_start_offset: 0,
// },
// Invisible::Whitespace {
// line_offset: TAB_SIZE as usize,
// },
// Invisible::Tab {
// line_start_offset: TAB_SIZE as usize + 1,
// },
// Invisible::Tab {
// line_start_offset: TAB_SIZE as usize * 2 + 1,
// },
// Invisible::Whitespace {
// line_offset: TAB_SIZE as usize * 3 + 1,
// },
// Invisible::Whitespace {
// line_offset: TAB_SIZE as usize * 3 + 3,
// },
// ];
// assert_eq!(
// expected_invisibles.len(),
// input_text
// .chars()
// .filter(|initial_char| initial_char.is_whitespace())
// .count(),
// "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
// );
// init_test(cx, |s| { let input_text = "\t \t|\t| a b";
// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All); let expected_invisibles = vec![
// s.defaults.tab_size = NonZeroU32::new(TAB_SIZE); Invisible::Tab {
// }); line_start_offset: 0,
},
Invisible::Whitespace {
line_offset: TAB_SIZE as usize,
},
Invisible::Tab {
line_start_offset: TAB_SIZE as usize + 1,
},
Invisible::Tab {
line_start_offset: TAB_SIZE as usize * 2 + 1,
},
Invisible::Whitespace {
line_offset: TAB_SIZE as usize * 3 + 1,
},
Invisible::Whitespace {
line_offset: TAB_SIZE as usize * 3 + 3,
},
];
assert_eq!(
expected_invisibles.len(),
input_text
.chars()
.filter(|initial_char| initial_char.is_whitespace())
.count(),
"Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
);
// let actual_invisibles = init_test(cx, |s| {
// collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, 500.0); s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
s.defaults.tab_size = NonZeroU32::new(TAB_SIZE);
});
// assert_eq!(expected_invisibles, actual_invisibles); let actual_invisibles =
// } collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, px(500.0));
// #[gpui::test] assert_eq!(expected_invisibles, actual_invisibles);
// fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) { }
// init_test(cx, |s| {
// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
// s.defaults.tab_size = NonZeroU32::new(4);
// });
// for editor_mode_without_invisibles in [ #[gpui::test]
// EditorMode::SingleLine, fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) {
// EditorMode::AutoHeight { max_lines: 100 }, init_test(cx, |s| {
// ] { s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
// let invisibles = collect_invisibles_from_new_editor( s.defaults.tab_size = NonZeroU32::new(4);
// cx, });
// editor_mode_without_invisibles,
// "\t\t\t| | a b",
// 500.0,
// );
// assert!(invisibles.is_empty,
// "For editor mode {editor_mode_without_invisibles:?} no invisibles was expected but got {invisibles:?}");
// }
// }
// #[gpui::test] for editor_mode_without_invisibles in [
// fn test_wrapped_invisibles_drawing(cx: &mut TestAppContext) { EditorMode::SingleLine,
// let tab_size = 4; EditorMode::AutoHeight { max_lines: 100 },
// let input_text = "a\tbcd ".repeat(9); ] {
// let repeated_invisibles = [ let invisibles = collect_invisibles_from_new_editor(
// Invisible::Tab { cx,
// line_start_offset: 1, editor_mode_without_invisibles,
// }, "\t\t\t| | a b",
// Invisible::Whitespace { px(500.0),
// line_offset: tab_size as usize + 3, );
// }, assert!(invisibles.is_empty(),
// Invisible::Whitespace { "For editor mode {editor_mode_without_invisibles:?} no invisibles was expected but got {invisibles:?}");
// line_offset: tab_size as usize + 4, }
// }, }
// Invisible::Whitespace {
// line_offset: tab_size as usize + 5,
// },
// ];
// let expected_invisibles = std::iter::once(repeated_invisibles)
// .cycle()
// .take(9)
// .flatten()
// .collect::<Vec<_>>();
// assert_eq!(
// expected_invisibles.len(),
// input_text
// .chars()
// .filter(|initial_char| initial_char.is_whitespace())
// .count(),
// "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
// );
// info!("Expected invisibles: {expected_invisibles:?}");
// init_test(cx, |_| {}); #[gpui::test]
fn test_wrapped_invisibles_drawing(cx: &mut TestAppContext) {
let tab_size = 4;
let input_text = "a\tbcd ".repeat(9);
let repeated_invisibles = [
Invisible::Tab {
line_start_offset: 1,
},
Invisible::Whitespace {
line_offset: tab_size as usize + 3,
},
Invisible::Whitespace {
line_offset: tab_size as usize + 4,
},
Invisible::Whitespace {
line_offset: tab_size as usize + 5,
},
];
let expected_invisibles = std::iter::once(repeated_invisibles)
.cycle()
.take(9)
.flatten()
.collect::<Vec<_>>();
assert_eq!(
expected_invisibles.len(),
input_text
.chars()
.filter(|initial_char| initial_char.is_whitespace())
.count(),
"Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
);
info!("Expected invisibles: {expected_invisibles:?}");
// // Put the same string with repeating whitespace pattern into editors of various size, init_test(cx, |_| {});
// // take deliberately small steps during resizing, to put all whitespace kinds near the wrap point.
// let resize_step = 10.0;
// let mut editor_width = 200.0;
// while editor_width <= 1000.0 {
// update_test_language_settings(cx, |s| {
// s.defaults.tab_size = NonZeroU32::new(tab_size);
// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
// s.defaults.preferred_line_length = Some(editor_width as u32);
// s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength);
// });
// let actual_invisibles = // Put the same string with repeating whitespace pattern into editors of various size,
// collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, editor_width); // take deliberately small steps during resizing, to put all whitespace kinds near the wrap point.
let resize_step = 10.0;
let mut editor_width = 200.0;
while editor_width <= 1000.0 {
update_test_language_settings(cx, |s| {
s.defaults.tab_size = NonZeroU32::new(tab_size);
s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
s.defaults.preferred_line_length = Some(editor_width as u32);
s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength);
});
// // Whatever the editor size is, ensure it has the same invisible kinds in the same order let actual_invisibles = collect_invisibles_from_new_editor(
// // (no good guarantees about the offsets: wrapping could trigger padding and its tests should check the offsets). cx,
// let mut i = 0; EditorMode::Full,
// for (actual_index, actual_invisible) in actual_invisibles.iter().enumerate() { &input_text,
// i = actual_index; px(editor_width),
// match expected_invisibles.get(i) { );
// Some(expected_invisible) => match (expected_invisible, actual_invisible) {
// (Invisible::Whitespace { .. }, Invisible::Whitespace { .. })
// | (Invisible::Tab { .. }, Invisible::Tab { .. }) => {}
// _ => {
// panic!("At index {i}, expected invisible {expected_invisible:?} does not match actual {actual_invisible:?} by kind. Actual invisibles: {actual_invisibles:?}")
// }
// },
// None => panic!("Unexpected extra invisible {actual_invisible:?} at index {i}"),
// }
// }
// let missing_expected_invisibles = &expected_invisibles[i + 1..];
// assert!(
// missing_expected_invisibles.is_empty,
// "Missing expected invisibles after index {i}: {missing_expected_invisibles:?}"
// );
// editor_width += resize_step; // Whatever the editor size is, ensure it has the same invisible kinds in the same order
// } // (no good guarantees about the offsets: wrapping could trigger padding and its tests should check the offsets).
// } let mut i = 0;
for (actual_index, actual_invisible) in actual_invisibles.iter().enumerate() {
i = actual_index;
match expected_invisibles.get(i) {
Some(expected_invisible) => match (expected_invisible, actual_invisible) {
(Invisible::Whitespace { .. }, Invisible::Whitespace { .. })
| (Invisible::Tab { .. }, Invisible::Tab { .. }) => {}
_ => {
panic!("At index {i}, expected invisible {expected_invisible:?} does not match actual {actual_invisible:?} by kind. Actual invisibles: {actual_invisibles:?}")
}
},
None => panic!("Unexpected extra invisible {actual_invisible:?} at index {i}"),
}
}
let missing_expected_invisibles = &expected_invisibles[i + 1..];
assert!(
missing_expected_invisibles.is_empty(),
"Missing expected invisibles after index {i}: {missing_expected_invisibles:?}"
);
// fn collect_invisibles_from_new_editor( editor_width += resize_step;
// cx: &mut TestAppContext, }
// editor_mode: EditorMode, }
// input_text: &str,
// editor_width: f32,
// ) -> Vec<Invisible> {
// info!(
// "Creating editor with mode {editor_mode:?}, width {editor_width} and text '{input_text}'"
// );
// let editor = cx
// .add_window(|cx| {
// let buffer = MultiBuffer::build_simple(&input_text, cx);
// Editor::new(editor_mode, buffer, None, None, cx)
// })
// .root(cx);
// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx))); fn collect_invisibles_from_new_editor(
// let (_, layout_state) = editor.update(cx, |editor, cx| { cx: &mut TestAppContext,
// editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); editor_mode: EditorMode,
// editor.set_wrap_width(Some(editor_width), cx); input_text: &str,
editor_width: Pixels,
) -> Vec<Invisible> {
info!(
"Creating editor with mode {editor_mode:?}, width {}px and text '{input_text}'",
editor_width.0
);
let window = cx.add_window(|cx| {
let buffer = MultiBuffer::build_simple(&input_text, cx);
Editor::new(editor_mode, buffer, None, cx)
});
let editor = window.root(cx).unwrap();
let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
let mut element = EditorElement::new(&editor, style);
window
.update(cx, |editor, cx| {
editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx);
editor.set_wrap_width(Some(editor_width), cx);
})
.unwrap();
let layout_state = cx
.update_window(window.into(), |_, cx| {
element.compute_layout(
Bounds {
origin: point(px(500.), px(500.)),
size: size(px(500.), px(500.)),
},
cx,
)
})
.unwrap();
// element.layout( layout_state
// SizeConstraint::new(point(editor_width, 500.), point(editor_width, 500.)), .position_map
// editor, .line_layouts
// cx, .iter()
// ) .map(|line_with_invisibles| &line_with_invisibles.invisibles)
// }); .flatten()
.cloned()
// layout_state .collect()
// .position_map }
// .line_layouts }
// .iter()
// .map(|line_with_invisibles| &line_with_invisibles.invisibles)
// .flatten()
// .cloned()
// .collect()
// }
// }
pub fn register_action<T: Action>( pub fn register_action<T: Action>(
view: &View<Editor>, view: &View<Editor>,
@ -3714,7 +3757,7 @@ fn compute_auto_height_layout(
let gutter_width; let gutter_width;
let gutter_margin; let gutter_margin;
if snapshot.show_gutter { if snapshot.show_gutter {
let descent = cx.text_system().descent(font_id, font_size).unwrap(); let descent = cx.text_system().descent(font_id, font_size);
let gutter_padding_factor = 3.5; let gutter_padding_factor = 3.5;
gutter_padding = (em_width * gutter_padding_factor).round(); gutter_padding = (em_width * gutter_padding_factor).round();
gutter_width = max_line_number_width + gutter_padding * 2.0; gutter_width = max_line_number_width + gutter_padding * 2.0;

View file

@ -88,195 +88,195 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
} }
} }
// #[cfg(any(test, feature = "test_support"))] #[cfg(test)]
// mod tests { mod tests {
// // use crate::editor_tests::init_test; use crate::editor_tests::init_test;
// use crate::Point; use crate::Point;
// use gpui::TestAppContext; use gpui::{Context, TestAppContext};
// use multi_buffer::{ExcerptRange, MultiBuffer}; use multi_buffer::{ExcerptRange, MultiBuffer};
// use project::{FakeFs, Project}; use project::{FakeFs, Project};
// use unindent::Unindent; use unindent::Unindent;
// #[gpui::test] #[gpui::test]
// async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
// use git::diff::DiffHunkStatus; use git::diff::DiffHunkStatus;
// init_test(cx, |_| {}); init_test(cx, |_| {});
// let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background_executor.clone());
// let project = Project::test(fs, [], cx).await; let project = Project::test(fs, [], cx).await;
// // buffer has two modified hunks with two rows each // buffer has two modified hunks with two rows each
// let buffer_1 = project let buffer_1 = project
// .update(cx, |project, cx| { .update(cx, |project, cx| {
// project.create_buffer( project.create_buffer(
// " "
// 1.zero 1.zero
// 1.ONE 1.ONE
// 1.TWO 1.TWO
// 1.three 1.three
// 1.FOUR 1.FOUR
// 1.FIVE 1.FIVE
// 1.six 1.six
// " "
// .unindent() .unindent()
// .as_str(), .as_str(),
// None, None,
// cx, cx,
// ) )
// }) })
// .unwrap(); .unwrap();
// buffer_1.update(cx, |buffer, cx| { buffer_1.update(cx, |buffer, cx| {
// buffer.set_diff_base( buffer.set_diff_base(
// Some( Some(
// " "
// 1.zero 1.zero
// 1.one 1.one
// 1.two 1.two
// 1.three 1.three
// 1.four 1.four
// 1.five 1.five
// 1.six 1.six
// " "
// .unindent(), .unindent(),
// ), ),
// cx, cx,
// ); );
// }); });
// // buffer has a deletion hunk and an insertion hunk // buffer has a deletion hunk and an insertion hunk
// let buffer_2 = project let buffer_2 = project
// .update(cx, |project, cx| { .update(cx, |project, cx| {
// project.create_buffer( project.create_buffer(
// " "
// 2.zero 2.zero
// 2.one 2.one
// 2.two 2.two
// 2.three 2.three
// 2.four 2.four
// 2.five 2.five
// 2.six 2.six
// " "
// .unindent() .unindent()
// .as_str(), .as_str(),
// None, None,
// cx, cx,
// ) )
// }) })
// .unwrap(); .unwrap();
// buffer_2.update(cx, |buffer, cx| { buffer_2.update(cx, |buffer, cx| {
// buffer.set_diff_base( buffer.set_diff_base(
// Some( Some(
// " "
// 2.zero 2.zero
// 2.one 2.one
// 2.one-and-a-half 2.one-and-a-half
// 2.two 2.two
// 2.three 2.three
// 2.four 2.four
// 2.six 2.six
// " "
// .unindent(), .unindent(),
// ), ),
// cx, cx,
// ); );
// }); });
// cx.foreground().run_until_parked(); cx.background_executor.run_until_parked();
// let multibuffer = cx.add_model(|cx| { let multibuffer = cx.build_model(|cx| {
// let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
// multibuffer.push_excerpts( multibuffer.push_excerpts(
// buffer_1.clone(), buffer_1.clone(),
// [ [
// // excerpt ends in the middle of a modified hunk // excerpt ends in the middle of a modified hunk
// ExcerptRange { ExcerptRange {
// context: Point::new(0, 0)..Point::new(1, 5), context: Point::new(0, 0)..Point::new(1, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// // excerpt begins in the middle of a modified hunk // excerpt begins in the middle of a modified hunk
// ExcerptRange { ExcerptRange {
// context: Point::new(5, 0)..Point::new(6, 5), context: Point::new(5, 0)..Point::new(6, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// ], ],
// cx, cx,
// ); );
// multibuffer.push_excerpts( multibuffer.push_excerpts(
// buffer_2.clone(), buffer_2.clone(),
// [ [
// // excerpt ends at a deletion // excerpt ends at a deletion
// ExcerptRange { ExcerptRange {
// context: Point::new(0, 0)..Point::new(1, 5), context: Point::new(0, 0)..Point::new(1, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// // excerpt starts at a deletion // excerpt starts at a deletion
// ExcerptRange { ExcerptRange {
// context: Point::new(2, 0)..Point::new(2, 5), context: Point::new(2, 0)..Point::new(2, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// // excerpt fully contains a deletion hunk // excerpt fully contains a deletion hunk
// ExcerptRange { ExcerptRange {
// context: Point::new(1, 0)..Point::new(2, 5), context: Point::new(1, 0)..Point::new(2, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// // excerpt fully contains an insertion hunk // excerpt fully contains an insertion hunk
// ExcerptRange { ExcerptRange {
// context: Point::new(4, 0)..Point::new(6, 5), context: Point::new(4, 0)..Point::new(6, 5),
// primary: Default::default(), primary: Default::default(),
// }, },
// ], ],
// cx, cx,
// ); );
// multibuffer multibuffer
// }); });
// let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
// assert_eq!( assert_eq!(
// snapshot.text(), snapshot.text(),
// " "
// 1.zero 1.zero
// 1.ONE 1.ONE
// 1.FIVE 1.FIVE
// 1.six 1.six
// 2.zero 2.zero
// 2.one 2.one
// 2.two 2.two
// 2.one 2.one
// 2.two 2.two
// 2.four 2.four
// 2.five 2.five
// 2.six" 2.six"
// .unindent() .unindent()
// ); );
// let expected = [ let expected = [
// (DiffHunkStatus::Modified, 1..2), (DiffHunkStatus::Modified, 1..2),
// (DiffHunkStatus::Modified, 2..3), (DiffHunkStatus::Modified, 2..3),
// //TODO: Define better when and where removed hunks show up at range extremities //TODO: Define better when and where removed hunks show up at range extremities
// (DiffHunkStatus::Removed, 6..6), (DiffHunkStatus::Removed, 6..6),
// (DiffHunkStatus::Removed, 8..8), (DiffHunkStatus::Removed, 8..8),
// (DiffHunkStatus::Added, 10..11), (DiffHunkStatus::Added, 10..11),
// ]; ];
// assert_eq!( assert_eq!(
// snapshot snapshot
// .git_diff_hunks_in_range(0..12) .git_diff_hunks_in_range(0..12)
// .map(|hunk| (hunk.status(), hunk.buffer_range)) .map(|hunk| (hunk.status(), hunk.buffer_range))
// .collect::<Vec<_>>(), .collect::<Vec<_>>(),
// &expected, &expected,
// ); );
// assert_eq!( assert_eq!(
// snapshot snapshot
// .git_diff_hunks_in_range_rev(0..12) .git_diff_hunks_in_range_rev(0..12)
// .map(|hunk| (hunk.status(), hunk.buffer_range)) .map(|hunk| (hunk.status(), hunk.buffer_range))
// .collect::<Vec<_>>(), .collect::<Vec<_>>(),
// expected expected
// .iter() .iter()
// .rev() .rev()
// .cloned() .cloned()
// .collect::<Vec<_>>() .collect::<Vec<_>>()
// .as_slice(), .as_slice(),
// ); );
// } }
// } }

View file

@ -5,7 +5,7 @@ use crate::{Editor, RangeToAnchorExt};
enum MatchingBracketHighlight {} enum MatchingBracketHighlight {}
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewContext<Editor>) { pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
// editor.clear_background_highlights::<MatchingBracketHighlight>(cx); editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
let newest_selection = editor.selections.newest::<usize>(cx); let newest_selection = editor.selections.newest::<usize>(cx);
// Don't highlight brackets if the selection isn't empty // Don't highlight brackets if the selection isn't empty
@ -30,109 +30,109 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
} }
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext}; use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
// use indoc::indoc; use indoc::indoc;
// use language::{BracketPair, BracketPairConfig, Language, LanguageConfig}; use language::{BracketPair, BracketPairConfig, Language, LanguageConfig};
// #[gpui::test] #[gpui::test]
// async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) { async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) {
// init_test(cx, |_| {}); init_test(cx, |_| {});
// let mut cx = EditorLspTestContext::new( let mut cx = EditorLspTestContext::new(
// Language::new( Language::new(
// LanguageConfig { LanguageConfig {
// name: "Rust".into(), name: "Rust".into(),
// path_suffixes: vec!["rs".to_string()], path_suffixes: vec!["rs".to_string()],
// brackets: BracketPairConfig { brackets: BracketPairConfig {
// pairs: vec![ pairs: vec![
// BracketPair { BracketPair {
// start: "{".to_string(), start: "{".to_string(),
// end: "}".to_string(), end: "}".to_string(),
// close: false, close: false,
// newline: true, newline: true,
// }, },
// BracketPair { BracketPair {
// start: "(".to_string(), start: "(".to_string(),
// end: ")".to_string(), end: ")".to_string(),
// close: false, close: false,
// newline: true, newline: true,
// }, },
// ], ],
// ..Default::default() ..Default::default()
// }, },
// ..Default::default() ..Default::default()
// }, },
// Some(tree_sitter_rust::language()), Some(tree_sitter_rust::language()),
// ) )
// .with_brackets_query(indoc! {r#" .with_brackets_query(indoc! {r#"
// ("{" @open "}" @close) ("{" @open "}" @close)
// ("(" @open ")" @close) ("(" @open ")" @close)
// "#}) "#})
// .unwrap(), .unwrap(),
// Default::default(), Default::default(),
// cx, cx,
// ) )
// .await; .await;
// // positioning cursor inside bracket highlights both // positioning cursor inside bracket highlights both
// cx.set_state(indoc! {r#" cx.set_state(indoc! {r#"
// pub fn test("Test ˇargument") { pub fn test("Test ˇargument") {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#" cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
// pub fn test«(»"Test argument"«)» { pub fn test«(»"Test argument"«)» {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// cx.set_state(indoc! {r#" cx.set_state(indoc! {r#"
// pub fn test("Test argument") { pub fn test("Test argument") {
// another_test(1, ˇ2, 3); another_test(1, ˇ2, 3);
// } }
// "#}); "#});
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#" cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
// pub fn test("Test argument") { pub fn test("Test argument") {
// another_test«(»1, 2, 3«)»; another_test«(»1, 2, 3«)»;
// } }
// "#}); "#});
// cx.set_state(indoc! {r#" cx.set_state(indoc! {r#"
// pub fn test("Test argument") { pub fn test("Test argument") {
// anotherˇ_test(1, 2, 3); anotherˇ_test(1, 2, 3);
// } }
// "#}); "#});
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#" cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
// pub fn test("Test argument") «{» pub fn test("Test argument") «{»
// another_test(1, 2, 3); another_test(1, 2, 3);
// «}» «}»
// "#}); "#});
// // positioning outside of brackets removes highlight // positioning outside of brackets removes highlight
// cx.set_state(indoc! {r#" cx.set_state(indoc! {r#"
// pub fˇn test("Test argument") { pub fˇn test("Test argument") {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#" cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
// pub fn test("Test argument") { pub fn test("Test argument") {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// // non empty selection dismisses highlight // non empty selection dismisses highlight
// cx.set_state(indoc! {r#" cx.set_state(indoc! {r#"
// pub fn test("Te«st argˇ»ument") { pub fn test("Te«st argˇ»ument") {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#" cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
// pub fn test("Test argument") { pub fn test("Test argument") {
// another_test(1, 2, 3); another_test(1, 2, 3);
// } }
// "#}); "#});
// } }
// } }

View file

@ -2432,13 +2432,13 @@ pub mod tests {
let language = Arc::new(language); let language = Arc::new(language);
let fs = FakeFs::new(cx.background_executor.clone()); let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree( fs.insert_tree(
"/a", "/a",
json!({ json!({
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")), "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")), "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
}), }),
) )
.await; .await;
let project = Project::test(fs, ["/a".as_ref()], cx).await; let project = Project::test(fs, ["/a".as_ref()], cx).await;
project.update(cx, |project, _| { project.update(cx, |project, _| {
project.languages().add(Arc::clone(&language)) project.languages().add(Arc::clone(&language))
@ -2598,24 +2598,22 @@ pub mod tests {
cx.executor().run_until_parked(); cx.executor().run_until_parked();
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
let expected_hints = vec![ let expected_hints = vec![
"main hint #0".to_string(), "main hint #0".to_string(),
"main hint #1".to_string(), "main hint #1".to_string(),
"main hint #2".to_string(), "main hint #2".to_string(),
"main hint #3".to_string(), "main hint #3".to_string(),
// todo!() there used to be no these hints, but new gpui2 presumably scrolls a bit farther "main hint #4".to_string(),
// (or renders less?) note that tests below pass "main hint #5".to_string(),
"main hint #4".to_string(), ];
"main hint #5".to_string(), assert_eq!(
]; expected_hints,
assert_eq!( cached_hint_labels(editor),
expected_hints, "When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
cached_hint_labels(editor), );
"When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints" assert_eq!(expected_hints, visible_hint_labels(editor, cx));
); assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt hints should bump the verison");
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); });
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt hints should bump the verison");
});
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| { editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@ -2630,23 +2628,23 @@ pub mod tests {
}); });
cx.executor().run_until_parked(); cx.executor().run_until_parked();
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
let expected_hints = vec![ let expected_hints = vec![
"main hint #0".to_string(), "main hint #0".to_string(),
"main hint #1".to_string(), "main hint #1".to_string(),
"main hint #2".to_string(), "main hint #2".to_string(),
"main hint #3".to_string(), "main hint #3".to_string(),
"main hint #4".to_string(), "main hint #4".to_string(),
"main hint #5".to_string(), "main hint #5".to_string(),
"other hint #0".to_string(), "other hint #0".to_string(),
"other hint #1".to_string(), "other hint #1".to_string(),
"other hint #2".to_string(), "other hint #2".to_string(),
]; ];
assert_eq!(expected_hints, cached_hint_labels(editor), assert_eq!(expected_hints, cached_hint_labels(editor),
"With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits"); "With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); assert_eq!(expected_hints, visible_hint_labels(editor, cx));
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(),
"Due to every excerpt having one hint, we update cache per new excerpt scrolled"); "Due to every excerpt having one hint, we update cache per new excerpt scrolled");
}); });
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| { editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@ -2658,26 +2656,26 @@ pub mod tests {
)); ));
cx.executor().run_until_parked(); cx.executor().run_until_parked();
let last_scroll_update_version = editor.update(cx, |editor, cx| { let last_scroll_update_version = editor.update(cx, |editor, cx| {
let expected_hints = vec![ let expected_hints = vec![
"main hint #0".to_string(), "main hint #0".to_string(),
"main hint #1".to_string(), "main hint #1".to_string(),
"main hint #2".to_string(), "main hint #2".to_string(),
"main hint #3".to_string(), "main hint #3".to_string(),
"main hint #4".to_string(), "main hint #4".to_string(),
"main hint #5".to_string(), "main hint #5".to_string(),
"other hint #0".to_string(), "other hint #0".to_string(),
"other hint #1".to_string(), "other hint #1".to_string(),
"other hint #2".to_string(), "other hint #2".to_string(),
"other hint #3".to_string(), "other hint #3".to_string(),
"other hint #4".to_string(), "other hint #4".to_string(),
"other hint #5".to_string(), "other hint #5".to_string(),
]; ];
assert_eq!(expected_hints, cached_hint_labels(editor), assert_eq!(expected_hints, cached_hint_labels(editor),
"After multibuffer was scrolled to the end, all hints for all excerpts should be fetched"); "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); assert_eq!(expected_hints, visible_hint_labels(editor, cx));
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len()); assert_eq!(editor.inlay_hint_cache().version, expected_hints.len());
expected_hints.len() expected_hints.len()
}).unwrap(); }).unwrap();
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| { editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@ -2686,30 +2684,31 @@ pub mod tests {
}); });
cx.executor().run_until_parked(); cx.executor().run_until_parked();
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
let expected_hints = vec![ let expected_hints = vec![
"main hint #0".to_string(), "main hint #0".to_string(),
"main hint #1".to_string(), "main hint #1".to_string(),
"main hint #2".to_string(), "main hint #2".to_string(),
"main hint #3".to_string(), "main hint #3".to_string(),
"main hint #4".to_string(), "main hint #4".to_string(),
"main hint #5".to_string(), "main hint #5".to_string(),
"other hint #0".to_string(), "other hint #0".to_string(),
"other hint #1".to_string(), "other hint #1".to_string(),
"other hint #2".to_string(), "other hint #2".to_string(),
"other hint #3".to_string(), "other hint #3".to_string(),
"other hint #4".to_string(), "other hint #4".to_string(),
"other hint #5".to_string(), "other hint #5".to_string(),
]; ];
assert_eq!(expected_hints, cached_hint_labels(editor), assert_eq!(expected_hints, cached_hint_labels(editor),
"After multibuffer was scrolled to the end, further scrolls up should not bring more hints"); "After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); assert_eq!(expected_hints, visible_hint_labels(editor, cx));
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scolled buffer"); assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scolled buffer");
}); });
editor_edited.store(true, Ordering::Release); editor_edited.store(true, Ordering::Release);
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| { editor.change_selections(None, cx, |s| {
s.select_ranges([Point::new(56, 0)..Point::new(56, 0)]) // TODO if this gets set to hint boundary (e.g. 56) we sometimes get an extra cache version bump, why?
s.select_ranges([Point::new(57, 0)..Point::new(57, 0)])
}); });
editor.handle_input("++++more text++++", cx); editor.handle_input("++++more text++++", cx);
}); });
@ -2729,15 +2728,15 @@ pub mod tests {
expected_hints, expected_hints,
cached_hint_labels(editor), cached_hint_labels(editor),
"After multibuffer edit, editor gets scolled back to the last selection; \ "After multibuffer edit, editor gets scolled back to the last selection; \
all hints should be invalidated and requeried for all of its visible excerpts" all hints should be invalidated and requeried for all of its visible excerpts"
); );
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); assert_eq!(expected_hints, visible_hint_labels(editor, cx));
let current_cache_version = editor.inlay_hint_cache().version; let current_cache_version = editor.inlay_hint_cache().version;
let minimum_expected_version = last_scroll_update_version + expected_hints.len(); assert_eq!(
assert!( current_cache_version,
current_cache_version == minimum_expected_version || current_cache_version == minimum_expected_version + 1, last_scroll_update_version + expected_hints.len(),
"Due to every excerpt having one hint, cache should update per new excerpt received + 1 potential sporadic update" "We should have updated cache N times == N of new hints arrived (separately from each excerpt)"
); );
}); });
} }

File diff suppressed because it is too large Load diff

View file

@ -37,19 +37,18 @@ pub fn deploy_context_menu(
}); });
let context_menu = ui::ContextMenu::build(cx, |menu, cx| { let context_menu = ui::ContextMenu::build(cx, |menu, cx| {
menu.action("Rename Symbol", Box::new(Rename), cx) menu.action("Rename Symbol", Box::new(Rename))
.action("Go to Definition", Box::new(GoToDefinition), cx) .action("Go to Definition", Box::new(GoToDefinition))
.action("Go to Type Definition", Box::new(GoToTypeDefinition), cx) .action("Go to Type Definition", Box::new(GoToTypeDefinition))
.action("Find All References", Box::new(FindAllReferences), cx) .action("Find All References", Box::new(FindAllReferences))
.action( .action(
"Code Actions", "Code Actions",
Box::new(ToggleCodeActions { Box::new(ToggleCodeActions {
deployed_from_indicator: false, deployed_from_indicator: false,
}), }),
cx,
) )
.separator() .separator()
.action("Reveal in Finder", Box::new(RevealInFinder), cx) .action("Reveal in Finder", Box::new(RevealInFinder))
}); });
let context_menu_focus = context_menu.focus_handle(cx); let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus); cx.focus(&context_menu_focus);
@ -69,42 +68,43 @@ pub fn deploy_context_menu(
cx.notify(); cx.notify();
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext}; use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
// use indoc::indoc; use indoc::indoc;
// #[gpui::test] #[gpui::test]
// async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) { async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) {
// init_test(cx, |_| {}); init_test(cx, |_| {});
// let mut cx = EditorLspTestContext::new_rust( let mut cx = EditorLspTestContext::new_rust(
// lsp::ServerCapabilities { lsp::ServerCapabilities {
// hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
// ..Default::default() ..Default::default()
// }, },
// cx, cx,
// ) )
// .await; .await;
// cx.set_state(indoc! {" cx.set_state(indoc! {"
// fn teˇst() { fn teˇst() {
// do_work(); do_work();
// } }
// "}); "});
// let point = cx.display_point(indoc! {" let point = cx.display_point(indoc! {"
// fn test() { fn test() {
// do_wˇork(); do_wˇork();
// } }
// "}); "});
// cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx)); cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_none()));
cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx));
// cx.assert_editor_state(indoc! {" cx.assert_editor_state(indoc! {"
// fn test() { fn test() {
// do_wˇork(); do_wˇork();
// } }
// "}); "});
// cx.editor(|editor, app| assert!(editor.mouse_context_menu.read(app).visible())); cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_some()));
// } }
// } }

View file

@ -452,483 +452,475 @@ pub fn split_display_range_by_lines(
result result
} }
// #[cfg(test)] #[cfg(test)]
// mod tests { mod tests {
// use super::*; use super::*;
// use crate::{ use crate::{
// display_map::Inlay, display_map::Inlay,
// test::{}, test::{editor_test_context::EditorTestContext, marked_display_snapshot},
// Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer, Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer,
// }; };
// use project::Project; use gpui::{font, Context as _};
// use settings::SettingsStore; use project::Project;
// use util::post_inc; use settings::SettingsStore;
use util::post_inc;
// #[gpui::test] #[gpui::test]
// fn test_previous_word_start(cx: &mut gpui::AppContext) { fn test_previous_word_start(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) { fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// previous_word_start(&snapshot, display_points[1]), previous_word_start(&snapshot, display_points[1]),
// display_points[0] display_points[0]
// ); );
// } }
// assert("\nˇ ˇlorem", cx); assert("\nˇ ˇlorem", cx);
// assert("ˇ\nˇ lorem", cx); assert("ˇ\nˇ lorem", cx);
// assert(" ˇloremˇ", cx); assert(" ˇloremˇ", cx);
// assert("ˇ ˇlorem", cx); assert("ˇ ˇlorem", cx);
// assert(" ˇlorˇem", cx); assert(" ˇlorˇem", cx);
// assert("\nlorem\nˇ ˇipsum", cx); assert("\nlorem\nˇ ˇipsum", cx);
// assert("\n\nˇ\nˇ", cx); assert("\n\nˇ\nˇ", cx);
// assert(" ˇlorem ˇipsum", cx); assert(" ˇlorem ˇipsum", cx);
// assert("loremˇ-ˇipsum", cx); assert("loremˇ-ˇipsum", cx);
// assert("loremˇ-#$@ˇipsum", cx); assert("loremˇ-#$@ˇipsum", cx);
// assert("ˇlorem_ˇipsum", cx); assert("ˇlorem_ˇipsum", cx);
// assert(" ˇdefγˇ", cx); assert(" ˇdefγˇ", cx);
// assert(" ˇbcΔˇ", cx); assert(" ˇbcΔˇ", cx);
// assert(" abˇ——ˇcd", cx); assert(" abˇ——ˇcd", cx);
// } }
// #[gpui::test] #[gpui::test]
// fn test_previous_subword_start(cx: &mut gpui::AppContext) { fn test_previous_subword_start(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) { fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// previous_subword_start(&snapshot, display_points[1]), previous_subword_start(&snapshot, display_points[1]),
// display_points[0] display_points[0]
// ); );
// } }
// // Subword boundaries are respected // Subword boundaries are respected
// assert("lorem_ˇipˇsum", cx); assert("lorem_ˇipˇsum", cx);
// assert("lorem_ˇipsumˇ", cx); assert("lorem_ˇipsumˇ", cx);
// assert("ˇlorem_ˇipsum", cx); assert("ˇlorem_ˇipsum", cx);
// assert("lorem_ˇipsum_ˇdolor", cx); assert("lorem_ˇipsum_ˇdolor", cx);
// assert("loremˇIpˇsum", cx); assert("loremˇIpˇsum", cx);
// assert("loremˇIpsumˇ", cx); assert("loremˇIpsumˇ", cx);
// // Word boundaries are still respected // Word boundaries are still respected
// assert("\nˇ ˇlorem", cx); assert("\nˇ ˇlorem", cx);
// assert(" ˇloremˇ", cx); assert(" ˇloremˇ", cx);
// assert(" ˇlorˇem", cx); assert(" ˇlorˇem", cx);
// assert("\nlorem\nˇ ˇipsum", cx); assert("\nlorem\nˇ ˇipsum", cx);
// assert("\n\nˇ\nˇ", cx); assert("\n\nˇ\nˇ", cx);
// assert(" ˇlorem ˇipsum", cx); assert(" ˇlorem ˇipsum", cx);
// assert("loremˇ-ˇipsum", cx); assert("loremˇ-ˇipsum", cx);
// assert("loremˇ-#$@ˇipsum", cx); assert("loremˇ-#$@ˇipsum", cx);
// assert(" ˇdefγˇ", cx); assert(" ˇdefγˇ", cx);
// assert(" bcˇΔˇ", cx); assert(" bcˇΔˇ", cx);
// assert(" ˇbcδˇ", cx); assert(" ˇbcδˇ", cx);
// assert(" abˇ——ˇcd", cx); assert(" abˇ——ˇcd", cx);
// } }
// #[gpui::test] #[gpui::test]
// fn test_find_preceding_boundary(cx: &mut gpui::AppContext) { fn test_find_preceding_boundary(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert( fn assert(
// marked_text: &str, marked_text: &str,
// cx: &mut gpui::AppContext, cx: &mut gpui::AppContext,
// is_boundary: impl FnMut(char, char) -> bool, is_boundary: impl FnMut(char, char) -> bool,
// ) { ) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// find_preceding_boundary( find_preceding_boundary(
// &snapshot, &snapshot,
// display_points[1], display_points[1],
// FindRange::MultiLine, FindRange::MultiLine,
// is_boundary is_boundary
// ), ),
// display_points[0] display_points[0]
// ); );
// } }
// assert("abcˇdef\ngh\nijˇk", cx, |left, right| { assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
// left == 'c' && right == 'd' left == 'c' && right == 'd'
// }); });
// assert("abcdef\nˇgh\nijˇk", cx, |left, right| { assert("abcdef\nˇgh\nijˇk", cx, |left, right| {
// left == '\n' && right == 'g' left == '\n' && right == 'g'
// }); });
// let mut line_count = 0; let mut line_count = 0;
// assert("abcdef\nˇgh\nijˇk", cx, |left, _| { assert("abcdef\nˇgh\nijˇk", cx, |left, _| {
// if left == '\n' { if left == '\n' {
// line_count += 1; line_count += 1;
// line_count == 2 line_count == 2
// } else { } else {
// false false
// } }
// }); });
// } }
// #[gpui::test] #[gpui::test]
// fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) { fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// let input_text = "abcdefghijklmnopqrstuvwxys"; let input_text = "abcdefghijklmnopqrstuvwxys";
// let family_id = cx let font = font("Helvetica");
// .font_cache() let font_size = px(14.0);
// .load_family(&["Helvetica"], &Default::default()) let buffer = MultiBuffer::build_simple(input_text, cx);
// .unwrap(); let buffer_snapshot = buffer.read(cx).snapshot(cx);
// let font_id = cx let display_map =
// .font_cache() cx.build_model(|cx| DisplayMap::new(buffer, font, font_size, None, 1, 1, cx));
// .select_font(family_id, &Default::default())
// .unwrap();
// let font_size = 14.0;
// let buffer = MultiBuffer::build_simple(input_text, cx);
// let buffer_snapshot = buffer.read(cx).snapshot(cx);
// let display_map =
// cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, None, 1, 1, cx));
// // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary
// let mut id = 0; let mut id = 0;
// let inlays = (0..buffer_snapshot.len()) let inlays = (0..buffer_snapshot.len())
// .map(|offset| { .map(|offset| {
// [ [
// Inlay { Inlay {
// id: InlayId::Suggestion(post_inc(&mut id)), id: InlayId::Suggestion(post_inc(&mut id)),
// position: buffer_snapshot.anchor_at(offset, Bias::Left), position: buffer_snapshot.anchor_at(offset, Bias::Left),
// text: format!("test").into(), text: format!("test").into(),
// }, },
// Inlay { Inlay {
// id: InlayId::Suggestion(post_inc(&mut id)), id: InlayId::Suggestion(post_inc(&mut id)),
// position: buffer_snapshot.anchor_at(offset, Bias::Right), position: buffer_snapshot.anchor_at(offset, Bias::Right),
// text: format!("test").into(), text: format!("test").into(),
// }, },
// Inlay { Inlay {
// id: InlayId::Hint(post_inc(&mut id)), id: InlayId::Hint(post_inc(&mut id)),
// position: buffer_snapshot.anchor_at(offset, Bias::Left), position: buffer_snapshot.anchor_at(offset, Bias::Left),
// text: format!("test").into(), text: format!("test").into(),
// }, },
// Inlay { Inlay {
// id: InlayId::Hint(post_inc(&mut id)), id: InlayId::Hint(post_inc(&mut id)),
// position: buffer_snapshot.anchor_at(offset, Bias::Right), position: buffer_snapshot.anchor_at(offset, Bias::Right),
// text: format!("test").into(), text: format!("test").into(),
// }, },
// ] ]
// }) })
// .flatten() .flatten()
// .collect(); .collect();
// let snapshot = display_map.update(cx, |map, cx| { let snapshot = display_map.update(cx, |map, cx| {
// map.splice_inlays(Vec::new(), inlays, cx); map.splice_inlays(Vec::new(), inlays, cx);
// map.snapshot(cx) map.snapshot(cx)
// }); });
// assert_eq!( assert_eq!(
// find_preceding_boundary( find_preceding_boundary(
// &snapshot, &snapshot,
// buffer_snapshot.len().to_display_point(&snapshot), buffer_snapshot.len().to_display_point(&snapshot),
// FindRange::MultiLine, FindRange::MultiLine,
// |left, _| left == 'e', |left, _| left == 'e',
// ), ),
// snapshot snapshot
// .buffer_snapshot .buffer_snapshot
// .offset_to_point(5) .offset_to_point(5)
// .to_display_point(&snapshot), .to_display_point(&snapshot),
// "Should not stop at inlays when looking for boundaries" "Should not stop at inlays when looking for boundaries"
// ); );
// } }
// #[gpui::test] #[gpui::test]
// fn test_next_word_end(cx: &mut gpui::AppContext) { fn test_next_word_end(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) { fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// next_word_end(&snapshot, display_points[0]), next_word_end(&snapshot, display_points[0]),
// display_points[1] display_points[1]
// ); );
// } }
// assert("\nˇ loremˇ", cx); assert("\nˇ loremˇ", cx);
// assert(" ˇloremˇ", cx); assert(" ˇloremˇ", cx);
// assert(" lorˇemˇ", cx); assert(" lorˇemˇ", cx);
// assert(" loremˇ ˇ\nipsum\n", cx); assert(" loremˇ ˇ\nipsum\n", cx);
// assert("\nˇ\nˇ\n\n", cx); assert("\nˇ\nˇ\n\n", cx);
// assert("loremˇ ipsumˇ ", cx); assert("loremˇ ipsumˇ ", cx);
// assert("loremˇ-ˇipsum", cx); assert("loremˇ-ˇipsum", cx);
// assert("loremˇ#$@-ˇipsum", cx); assert("loremˇ#$@-ˇipsum", cx);
// assert("loremˇ_ipsumˇ", cx); assert("loremˇ_ipsumˇ", cx);
// assert(" ˇbcΔˇ", cx); assert(" ˇbcΔˇ", cx);
// assert(" abˇ——ˇcd", cx); assert(" abˇ——ˇcd", cx);
// } }
// #[gpui::test] #[gpui::test]
// fn test_next_subword_end(cx: &mut gpui::AppContext) { fn test_next_subword_end(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) { fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// next_subword_end(&snapshot, display_points[0]), next_subword_end(&snapshot, display_points[0]),
// display_points[1] display_points[1]
// ); );
// } }
// // Subword boundaries are respected // Subword boundaries are respected
// assert("loˇremˇ_ipsum", cx); assert("loˇremˇ_ipsum", cx);
// assert("ˇloremˇ_ipsum", cx); assert("ˇloremˇ_ipsum", cx);
// assert("loremˇ_ipsumˇ", cx); assert("loremˇ_ipsumˇ", cx);
// assert("loremˇ_ipsumˇ_dolor", cx); assert("loremˇ_ipsumˇ_dolor", cx);
// assert("loˇremˇIpsum", cx); assert("loˇremˇIpsum", cx);
// assert("loremˇIpsumˇDolor", cx); assert("loremˇIpsumˇDolor", cx);
// // Word boundaries are still respected // Word boundaries are still respected
// assert("\nˇ loremˇ", cx); assert("\nˇ loremˇ", cx);
// assert(" ˇloremˇ", cx); assert(" ˇloremˇ", cx);
// assert(" lorˇemˇ", cx); assert(" lorˇemˇ", cx);
// assert(" loremˇ ˇ\nipsum\n", cx); assert(" loremˇ ˇ\nipsum\n", cx);
// assert("\nˇ\nˇ\n\n", cx); assert("\nˇ\nˇ\n\n", cx);
// assert("loremˇ ipsumˇ ", cx); assert("loremˇ ipsumˇ ", cx);
// assert("loremˇ-ˇipsum", cx); assert("loremˇ-ˇipsum", cx);
// assert("loremˇ#$@-ˇipsum", cx); assert("loremˇ#$@-ˇipsum", cx);
// assert("loremˇ_ipsumˇ", cx); assert("loremˇ_ipsumˇ", cx);
// assert(" ˇbcˇΔ", cx); assert(" ˇbcˇΔ", cx);
// assert(" abˇ——ˇcd", cx); assert(" abˇ——ˇcd", cx);
// } }
// #[gpui::test] #[gpui::test]
// fn test_find_boundary(cx: &mut gpui::AppContext) { fn test_find_boundary(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert( fn assert(
// marked_text: &str, marked_text: &str,
// cx: &mut gpui::AppContext, cx: &mut gpui::AppContext,
// is_boundary: impl FnMut(char, char) -> bool, is_boundary: impl FnMut(char, char) -> bool,
// ) { ) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// find_boundary( find_boundary(
// &snapshot, &snapshot,
// display_points[0], display_points[0],
// FindRange::MultiLine, FindRange::MultiLine,
// is_boundary is_boundary
// ), ),
// display_points[1] display_points[1]
// ); );
// } }
// assert("abcˇdef\ngh\nijˇk", cx, |left, right| { assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
// left == 'j' && right == 'k' left == 'j' && right == 'k'
// }); });
// assert("abˇcdef\ngh\nˇijk", cx, |left, right| { assert("abˇcdef\ngh\nˇijk", cx, |left, right| {
// left == '\n' && right == 'i' left == '\n' && right == 'i'
// }); });
// let mut line_count = 0; let mut line_count = 0;
// assert("abcˇdef\ngh\nˇijk", cx, |left, _| { assert("abcˇdef\ngh\nˇijk", cx, |left, _| {
// if left == '\n' { if left == '\n' {
// line_count += 1; line_count += 1;
// line_count == 2 line_count == 2
// } else { } else {
// false false
// } }
// }); });
// } }
// #[gpui::test] #[gpui::test]
// fn test_surrounding_word(cx: &mut gpui::AppContext) { fn test_surrounding_word(cx: &mut gpui::AppContext) {
// init_test(cx); init_test(cx);
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) { fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
// assert_eq!( assert_eq!(
// surrounding_word(&snapshot, display_points[1]), surrounding_word(&snapshot, display_points[1]),
// display_points[0]..display_points[2], display_points[0]..display_points[2],
// "{}", "{}",
// marked_text.to_string() marked_text.to_string()
// ); );
// } }
// assert("ˇˇloremˇ ipsum", cx); assert("ˇˇloremˇ ipsum", cx);
// assert("ˇloˇremˇ ipsum", cx); assert("ˇloˇremˇ ipsum", cx);
// assert("ˇloremˇˇ ipsum", cx); assert("ˇloremˇˇ ipsum", cx);
// assert("loremˇ ˇ ˇipsum", cx); assert("loremˇ ˇ ˇipsum", cx);
// assert("lorem\nˇˇˇ\nipsum", cx); assert("lorem\nˇˇˇ\nipsum", cx);
// assert("lorem\nˇˇipsumˇ", cx); assert("lorem\nˇˇipsumˇ", cx);
// assert("loremˇ,ˇˇ ipsum", cx); assert("loremˇ,ˇˇ ipsum", cx);
// assert("ˇloremˇˇ, ipsum", cx); assert("ˇloremˇˇ, ipsum", cx);
// } }
// #[gpui::test] #[gpui::test]
// async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) { async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
// cx.update(|cx| { cx.update(|cx| {
// init_test(cx); init_test(cx);
// }); });
// let mut cx = EditorTestContext::new(cx).await; let mut cx = EditorTestContext::new(cx).await;
// let editor = cx.editor.clone(); let editor = cx.editor.clone();
// let window = cx.window.clone(); let window = cx.window.clone();
// cx.update_window(window, |cx| { cx.update_window(window, |_, cx| {
// let text_layout_details = let text_layout_details =
// editor.read_with(cx, |editor, cx| editor.text_layout_details(cx)); editor.update(cx, |editor, cx| editor.text_layout_details(cx));
// let family_id = cx let font = font("Helvetica");
// .font_cache()
// .load_family(&["Helvetica"], &Default::default())
// .unwrap();
// let font_id = cx
// .font_cache()
// .select_font(family_id, &Default::default())
// .unwrap();
// let buffer = let buffer = cx
// cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn")); .build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abc\ndefg\nhijkl\nmn"));
// let multibuffer = cx.add_model(|cx| { let multibuffer = cx.build_model(|cx| {
// let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
// multibuffer.push_excerpts( multibuffer.push_excerpts(
// buffer.clone(), buffer.clone(),
// [ [
// ExcerptRange { ExcerptRange {
// context: Point::new(0, 0)..Point::new(1, 4), context: Point::new(0, 0)..Point::new(1, 4),
// primary: None, primary: None,
// }, },
// ExcerptRange { ExcerptRange {
// context: Point::new(2, 0)..Point::new(3, 2), context: Point::new(2, 0)..Point::new(3, 2),
// primary: None, primary: None,
// }, },
// ], ],
// cx, cx,
// ); );
// multibuffer multibuffer
// }); });
// let display_map = let display_map =
// cx.add_model(|cx| DisplayMap::new(multibuffer, font_id, 14.0, None, 2, 2, cx)); cx.build_model(|cx| DisplayMap::new(multibuffer, font, px(14.0), None, 2, 2, cx));
// let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
// assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn"); assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
// let col_2_x = snapshot.x_for_point(DisplayPoint::new(2, 2), &text_layout_details); let col_2_x =
snapshot.x_for_display_point(DisplayPoint::new(2, 2), &text_layout_details);
// // Can't move up into the first excerpt's header // Can't move up into the first excerpt's header
// assert_eq!( assert_eq!(
// up( up(
// &snapshot, &snapshot,
// DisplayPoint::new(2, 2), DisplayPoint::new(2, 2),
// SelectionGoal::HorizontalPosition(col_2_x), SelectionGoal::HorizontalPosition(col_2_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(2, 0), DisplayPoint::new(2, 0),
// SelectionGoal::HorizontalPosition(0.0) SelectionGoal::HorizontalPosition(0.0)
// ), ),
// ); );
// assert_eq!( assert_eq!(
// up( up(
// &snapshot, &snapshot,
// DisplayPoint::new(2, 0), DisplayPoint::new(2, 0),
// SelectionGoal::None, SelectionGoal::None,
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(2, 0), DisplayPoint::new(2, 0),
// SelectionGoal::HorizontalPosition(0.0) SelectionGoal::HorizontalPosition(0.0)
// ), ),
// ); );
// let col_4_x = snapshot.x_for_point(DisplayPoint::new(3, 4), &text_layout_details); let col_4_x =
snapshot.x_for_display_point(DisplayPoint::new(3, 4), &text_layout_details);
// // Move up and down within first excerpt // Move up and down within first excerpt
// assert_eq!( assert_eq!(
// up( up(
// &snapshot, &snapshot,
// DisplayPoint::new(3, 4), DisplayPoint::new(3, 4),
// SelectionGoal::HorizontalPosition(col_4_x), SelectionGoal::HorizontalPosition(col_4_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(2, 3), DisplayPoint::new(2, 3),
// SelectionGoal::HorizontalPosition(col_4_x) SelectionGoal::HorizontalPosition(col_4_x.0)
// ), ),
// ); );
// assert_eq!( assert_eq!(
// down( down(
// &snapshot, &snapshot,
// DisplayPoint::new(2, 3), DisplayPoint::new(2, 3),
// SelectionGoal::HorizontalPosition(col_4_x), SelectionGoal::HorizontalPosition(col_4_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(3, 4), DisplayPoint::new(3, 4),
// SelectionGoal::HorizontalPosition(col_4_x) SelectionGoal::HorizontalPosition(col_4_x.0)
// ), ),
// ); );
// let col_5_x = snapshot.x_for_point(DisplayPoint::new(6, 5), &text_layout_details); let col_5_x =
snapshot.x_for_display_point(DisplayPoint::new(6, 5), &text_layout_details);
// // Move up and down across second excerpt's header // Move up and down across second excerpt's header
// assert_eq!( assert_eq!(
// up( up(
// &snapshot, &snapshot,
// DisplayPoint::new(6, 5), DisplayPoint::new(6, 5),
// SelectionGoal::HorizontalPosition(col_5_x), SelectionGoal::HorizontalPosition(col_5_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(3, 4), DisplayPoint::new(3, 4),
// SelectionGoal::HorizontalPosition(col_5_x) SelectionGoal::HorizontalPosition(col_5_x.0)
// ), ),
// ); );
// assert_eq!( assert_eq!(
// down( down(
// &snapshot, &snapshot,
// DisplayPoint::new(3, 4), DisplayPoint::new(3, 4),
// SelectionGoal::HorizontalPosition(col_5_x), SelectionGoal::HorizontalPosition(col_5_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(6, 5), DisplayPoint::new(6, 5),
// SelectionGoal::HorizontalPosition(col_5_x) SelectionGoal::HorizontalPosition(col_5_x.0)
// ), ),
// ); );
// let max_point_x = snapshot.x_for_point(DisplayPoint::new(7, 2), &text_layout_details); let max_point_x =
snapshot.x_for_display_point(DisplayPoint::new(7, 2), &text_layout_details);
// // Can't move down off the end // Can't move down off the end
// assert_eq!( assert_eq!(
// down( down(
// &snapshot, &snapshot,
// DisplayPoint::new(7, 0), DisplayPoint::new(7, 0),
// SelectionGoal::HorizontalPosition(0.0), SelectionGoal::HorizontalPosition(0.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(7, 2), DisplayPoint::new(7, 2),
// SelectionGoal::HorizontalPosition(max_point_x) SelectionGoal::HorizontalPosition(max_point_x.0)
// ), ),
// ); );
// assert_eq!( assert_eq!(
// down( down(
// &snapshot, &snapshot,
// DisplayPoint::new(7, 2), DisplayPoint::new(7, 2),
// SelectionGoal::HorizontalPosition(max_point_x), SelectionGoal::HorizontalPosition(max_point_x.0),
// false, false,
// &text_layout_details &text_layout_details
// ), ),
// ( (
// DisplayPoint::new(7, 2), DisplayPoint::new(7, 2),
// SelectionGoal::HorizontalPosition(max_point_x) SelectionGoal::HorizontalPosition(max_point_x.0)
// ), ),
// ); );
// }); });
// } }
// fn init_test(cx: &mut gpui::AppContext) { fn init_test(cx: &mut gpui::AppContext) {
// cx.set_global(SettingsStore::test(cx)); let settings_store = SettingsStore::test(cx);
// theme::init(cx); cx.set_global(settings_store);
// language::init(cx); theme::init(theme::LoadThemes::JustBase, cx);
// crate::init(cx); language::init(cx);
// Project::init_settings(cx); crate::init(cx);
// } Project::init_settings(cx);
// } }
}

View file

@ -358,7 +358,7 @@ impl AppContext {
{ {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let handle = entity.downgrade(); let handle = entity.downgrade();
self.observers.insert( let (subscription, activate) = self.observers.insert(
entity_id, entity_id,
Box::new(move |cx| { Box::new(move |cx| {
if let Some(handle) = E::upgrade_from(&handle) { if let Some(handle) = E::upgrade_from(&handle) {
@ -367,7 +367,9 @@ impl AppContext {
false false
} }
}), }),
) );
self.defer(move |_| activate());
subscription
} }
pub fn subscribe<T, E, Evt>( pub fn subscribe<T, E, Evt>(
@ -398,8 +400,7 @@ impl AppContext {
{ {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let entity = entity.downgrade(); let entity = entity.downgrade();
let (subscription, activate) = self.event_listeners.insert(
self.event_listeners.insert(
entity_id, entity_id,
( (
TypeId::of::<Evt>(), TypeId::of::<Evt>(),
@ -412,7 +413,9 @@ impl AppContext {
} }
}), }),
), ),
) );
self.defer(move |_| activate());
subscription
} }
pub fn windows(&self) -> Vec<AnyWindowHandle> { pub fn windows(&self) -> Vec<AnyWindowHandle> {
@ -873,13 +876,15 @@ impl AppContext {
&mut self, &mut self,
mut f: impl FnMut(&mut Self) + 'static, mut f: impl FnMut(&mut Self) + 'static,
) -> Subscription { ) -> Subscription {
self.global_observers.insert( let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(), TypeId::of::<G>(),
Box::new(move |cx| { Box::new(move |cx| {
f(cx); f(cx);
true true
}), }),
) );
self.defer(move |_| activate());
subscription
} }
/// Move the global of the given type to the stack. /// Move the global of the given type to the stack.
@ -903,7 +908,7 @@ impl AppContext {
&mut self, &mut self,
on_new: impl 'static + Fn(&mut V, &mut ViewContext<V>), on_new: impl 'static + Fn(&mut V, &mut ViewContext<V>),
) -> Subscription { ) -> Subscription {
self.new_view_observers.insert( let (subscription, activate) = self.new_view_observers.insert(
TypeId::of::<V>(), TypeId::of::<V>(),
Box::new(move |any_view: AnyView, cx: &mut WindowContext| { Box::new(move |any_view: AnyView, cx: &mut WindowContext| {
any_view any_view
@ -913,7 +918,9 @@ impl AppContext {
on_new(view_state, cx); on_new(view_state, cx);
}) })
}), }),
) );
activate();
subscription
} }
pub fn observe_release<E, T>( pub fn observe_release<E, T>(
@ -925,13 +932,15 @@ impl AppContext {
E: Entity<T>, E: Entity<T>,
T: 'static, T: 'static,
{ {
self.release_listeners.insert( let (subscription, activate) = self.release_listeners.insert(
handle.entity_id(), handle.entity_id(),
Box::new(move |entity, cx| { Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type"); let entity = entity.downcast_mut().expect("invalid entity type");
on_release(entity, cx) on_release(entity, cx)
}), }),
) );
activate();
subscription
} }
pub(crate) fn push_text_style(&mut self, text_style: TextStyleRefinement) { pub(crate) fn push_text_style(&mut self, text_style: TextStyleRefinement) {
@ -996,13 +1005,15 @@ impl AppContext {
where where
Fut: 'static + Future<Output = ()>, Fut: 'static + Future<Output = ()>,
{ {
self.quit_observers.insert( let (subscription, activate) = self.quit_observers.insert(
(), (),
Box::new(move |cx| { Box::new(move |cx| {
let future = on_quit(cx); let future = on_quit(cx);
async move { future.await }.boxed_local() async move { future.await }.boxed_local()
}), }),
) );
activate();
subscription
} }
} }

View file

@ -482,10 +482,6 @@ impl<T: 'static> WeakModel<T> {
/// Update the entity referenced by this model with the given function if /// Update the entity referenced by this model with the given function if
/// the referenced entity still exists. Returns an error if the entity has /// the referenced entity still exists. Returns an error if the entity has
/// been released. /// been released.
///
/// The update function receives a context appropriate for its environment.
/// When updating in an `AppContext`, it receives a `ModelContext`.
/// When updating an a `WindowContext`, it receives a `ViewContext`.
pub fn update<C, R>( pub fn update<C, R>(
&self, &self,
cx: &mut C, cx: &mut C,
@ -501,6 +497,21 @@ impl<T: 'static> WeakModel<T> {
.map(|this| cx.update_model(&this, update)), .map(|this| cx.update_model(&this, update)),
) )
} }
/// Reads the entity referenced by this model with the given function if
/// the referenced entity still exists. Returns an error if the entity has
/// been released.
pub fn read_with<C, R>(&self, cx: &C, read: impl FnOnce(&T, &AppContext) -> R) -> Result<R>
where
C: Context,
Result<C::Result<R>>: crate::Flatten<R>,
{
crate::Flatten::flatten(
self.upgrade()
.ok_or_else(|| anyhow!("entity release"))
.map(|this| cx.read_model(&this, read)),
)
}
} }
impl<T> Hash for WeakModel<T> { impl<T> Hash for WeakModel<T> {

View file

@ -88,13 +88,15 @@ impl<'a, T: 'static> ModelContext<'a, T> {
where where
T: 'static, T: 'static,
{ {
self.app.release_listeners.insert( let (subscription, activate) = self.app.release_listeners.insert(
self.model_state.entity_id, self.model_state.entity_id,
Box::new(move |this, cx| { Box::new(move |this, cx| {
let this = this.downcast_mut().expect("invalid entity type"); let this = this.downcast_mut().expect("invalid entity type");
on_release(this, cx); on_release(this, cx);
}), }),
) );
activate();
subscription
} }
pub fn observe_release<T2, E>( pub fn observe_release<T2, E>(
@ -109,7 +111,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
{ {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let this = self.weak_model(); let this = self.weak_model();
self.app.release_listeners.insert( let (subscription, activate) = self.app.release_listeners.insert(
entity_id, entity_id,
Box::new(move |entity, cx| { Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type"); let entity = entity.downcast_mut().expect("invalid entity type");
@ -117,7 +119,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
this.update(cx, |this, cx| on_release(this, entity, cx)); this.update(cx, |this, cx| on_release(this, entity, cx));
} }
}), }),
) );
activate();
subscription
} }
pub fn observe_global<G: 'static>( pub fn observe_global<G: 'static>(
@ -128,10 +132,12 @@ impl<'a, T: 'static> ModelContext<'a, T> {
T: 'static, T: 'static,
{ {
let handle = self.weak_model(); let handle = self.weak_model();
self.global_observers.insert( let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(), TypeId::of::<G>(),
Box::new(move |cx| handle.update(cx, |view, cx| f(view, cx)).is_ok()), Box::new(move |cx| handle.update(cx, |view, cx| f(view, cx)).is_ok()),
) );
self.defer(move |_| activate());
subscription
} }
pub fn on_app_quit<Fut>( pub fn on_app_quit<Fut>(
@ -143,7 +149,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
T: 'static, T: 'static,
{ {
let handle = self.weak_model(); let handle = self.weak_model();
self.app.quit_observers.insert( let (subscription, activate) = self.app.quit_observers.insert(
(), (),
Box::new(move |cx| { Box::new(move |cx| {
let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok(); let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok();
@ -154,7 +160,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
} }
.boxed_local() .boxed_local()
}), }),
) );
activate();
subscription
} }
pub fn notify(&mut self) { pub fn notify(&mut self) {

View file

@ -1,13 +1,13 @@
use crate::{ use crate::{
div, Action, AnyView, AnyWindowHandle, AppCell, AppContext, AsyncAppContext, div, Action, AnyView, AnyWindowHandle, AppCell, AppContext, AsyncAppContext,
BackgroundExecutor, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent, BackgroundExecutor, Bounds, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent,
KeyDownEvent, Keystroke, Model, ModelContext, Render, Result, Task, TestDispatcher, KeyDownEvent, Keystroke, Model, ModelContext, Pixels, PlatformWindow, Point, Render, Result,
TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext, VisualContext, WindowContext, Size, Task, TestDispatcher, TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext,
WindowHandle, WindowOptions, VisualContext, WindowBounds, WindowContext, WindowHandle, WindowOptions,
}; };
use anyhow::{anyhow, bail}; use anyhow::{anyhow, bail};
use futures::{Stream, StreamExt}; use futures::{Stream, StreamExt};
use std::{future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration}; use std::{future::Future, mem, ops::Deref, rc::Rc, sync::Arc, time::Duration};
#[derive(Clone)] #[derive(Clone)]
pub struct TestAppContext { pub struct TestAppContext {
@ -170,6 +170,45 @@ impl TestAppContext {
self.test_platform.has_pending_prompt() self.test_platform.has_pending_prompt()
} }
pub fn simulate_window_resize(&self, window_handle: AnyWindowHandle, size: Size<Pixels>) {
let (mut handlers, scale_factor) = self
.app
.borrow_mut()
.update_window(window_handle, |_, cx| {
let platform_window = cx.window.platform_window.as_test().unwrap();
let scale_factor = platform_window.scale_factor();
match &mut platform_window.bounds {
WindowBounds::Fullscreen | WindowBounds::Maximized => {
platform_window.bounds = WindowBounds::Fixed(Bounds {
origin: Point::default(),
size: size.map(|pixels| f64::from(pixels).into()),
});
}
WindowBounds::Fixed(bounds) => {
bounds.size = size.map(|pixels| f64::from(pixels).into());
}
}
(
mem::take(&mut platform_window.handlers.lock().resize),
scale_factor,
)
})
.unwrap();
for handler in &mut handlers {
handler(size, scale_factor);
}
self.app
.borrow_mut()
.update_window(window_handle, |_, cx| {
let platform_window = cx.window.platform_window.as_test().unwrap();
platform_window.handlers.lock().resize = handlers;
})
.unwrap();
}
pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R> pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
where where
Fut: Future<Output = R> + 'static, Fut: Future<Output = R> + 'static,
@ -343,12 +382,15 @@ impl TestAppContext {
use smol::future::FutureExt as _; use smol::future::FutureExt as _;
async { async {
while notifications.next().await.is_some() { loop {
if model.update(self, &mut predicate) { if model.update(self, &mut predicate) {
return Ok(()); return Ok(());
} }
if notifications.next().await.is_none() {
bail!("model dropped")
}
} }
bail!("model dropped")
} }
.race(timer.map(|_| Err(anyhow!("condition timed out")))) .race(timer.map(|_| Err(anyhow!("condition timed out"))))
.await .await

View file

@ -0,0 +1,48 @@
use crate::{Bounds, Element, IntoElement, Pixels, StyleRefinement, Styled, WindowContext};
pub fn canvas(callback: impl 'static + FnOnce(Bounds<Pixels>, &mut WindowContext)) -> Canvas {
Canvas {
paint_callback: Box::new(callback),
style: Default::default(),
}
}
pub struct Canvas {
paint_callback: Box<dyn FnOnce(Bounds<Pixels>, &mut WindowContext)>,
style: StyleRefinement,
}
impl IntoElement for Canvas {
type Element = Self;
fn element_id(&self) -> Option<crate::ElementId> {
None
}
fn into_element(self) -> Self::Element {
self
}
}
impl Element for Canvas {
type State = ();
fn layout(
&mut self,
_: Option<Self::State>,
cx: &mut WindowContext,
) -> (crate::LayoutId, Self::State) {
let layout_id = cx.request_layout(&self.style.clone().into(), []);
(layout_id, ())
}
fn paint(self, bounds: Bounds<Pixels>, _: &mut (), cx: &mut WindowContext) {
(self.paint_callback)(bounds, cx)
}
}
impl Styled for Canvas {
fn style(&mut self) -> &mut crate::StyleRefinement {
&mut self.style
}
}

View file

@ -221,20 +221,6 @@ pub trait InteractiveElement: Sized + Element {
/// Add a listener for the given action, fires during the bubble event phase /// Add a listener for the given action, fires during the bubble event phase
fn on_action<A: Action>(mut self, listener: impl Fn(&A, &mut WindowContext) + 'static) -> Self { fn on_action<A: Action>(mut self, listener: impl Fn(&A, &mut WindowContext) + 'static) -> Self {
// NOTE: this debug assert has the side-effect of working around
// a bug where a crate consisting only of action definitions does
// not register the actions in debug builds:
//
// https://github.com/rust-lang/rust/issues/47384
// https://github.com/mmastrac/rust-ctor/issues/280
//
// if we are relying on this side-effect still, removing the debug_assert!
// likely breaks the command_palette tests.
// debug_assert!(
// A::is_registered(),
// "{:?} is not registered as an action",
// A::qualified_name()
// );
self.interactivity().action_listeners.push(( self.interactivity().action_listeners.push((
TypeId::of::<A>(), TypeId::of::<A>(),
Box::new(move |action, phase, cx| { Box::new(move |action, phase, cx| {
@ -247,6 +233,23 @@ pub trait InteractiveElement: Sized + Element {
self self
} }
fn on_boxed_action(
mut self,
action: &Box<dyn Action>,
listener: impl Fn(&Box<dyn Action>, &mut WindowContext) + 'static,
) -> Self {
let action = action.boxed_clone();
self.interactivity().action_listeners.push((
(*action).type_id(),
Box::new(move |_, phase, cx| {
if phase == DispatchPhase::Bubble {
(listener)(&action, cx)
}
}),
));
self
}
fn on_key_down( fn on_key_down(
mut self, mut self,
listener: impl Fn(&KeyDownEvent, &mut WindowContext) + 'static, listener: impl Fn(&KeyDownEvent, &mut WindowContext) + 'static,

View file

@ -1,3 +1,4 @@
mod canvas;
mod div; mod div;
mod img; mod img;
mod overlay; mod overlay;
@ -5,6 +6,7 @@ mod svg;
mod text; mod text;
mod uniform_list; mod uniform_list;
pub use canvas::*;
pub use div::*; pub use div::*;
pub use img::*; pub use img::*;
pub use overlay::*; pub use overlay::*;

View file

@ -128,11 +128,19 @@ impl BackgroundExecutor {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
#[track_caller] #[track_caller]
pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R { pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
self.block_internal(false, future) if let Ok(value) = self.block_internal(false, future, usize::MAX) {
value
} else {
unreachable!()
}
} }
pub fn block<R>(&self, future: impl Future<Output = R>) -> R { pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
self.block_internal(true, future) if let Ok(value) = self.block_internal(true, future, usize::MAX) {
value
} else {
unreachable!()
}
} }
#[track_caller] #[track_caller]
@ -140,7 +148,8 @@ impl BackgroundExecutor {
&self, &self,
background_only: bool, background_only: bool,
future: impl Future<Output = R>, future: impl Future<Output = R>,
) -> R { mut max_ticks: usize,
) -> Result<R, ()> {
pin_mut!(future); pin_mut!(future);
let unparker = self.dispatcher.unparker(); let unparker = self.dispatcher.unparker();
let awoken = Arc::new(AtomicBool::new(false)); let awoken = Arc::new(AtomicBool::new(false));
@ -156,8 +165,13 @@ impl BackgroundExecutor {
loop { loop {
match future.as_mut().poll(&mut cx) { match future.as_mut().poll(&mut cx) {
Poll::Ready(result) => return result, Poll::Ready(result) => return Ok(result),
Poll::Pending => { Poll::Pending => {
if max_ticks == 0 {
return Err(());
}
max_ticks -= 1;
if !self.dispatcher.tick(background_only) { if !self.dispatcher.tick(background_only) {
if awoken.swap(false, SeqCst) { if awoken.swap(false, SeqCst) {
continue; continue;
@ -192,16 +206,25 @@ impl BackgroundExecutor {
return Err(future); return Err(future);
} }
#[cfg(any(test, feature = "test-support"))]
let max_ticks = self
.dispatcher
.as_test()
.map_or(usize::MAX, |dispatcher| dispatcher.gen_block_on_ticks());
#[cfg(not(any(test, feature = "test-support")))]
let max_ticks = usize::MAX;
let mut timer = self.timer(duration).fuse(); let mut timer = self.timer(duration).fuse();
let timeout = async { let timeout = async {
futures::select_biased! { futures::select_biased! {
value = future => Ok(value), value = future => Ok(value),
_ = timer => Err(()), _ = timer => Err(()),
} }
}; };
match self.block(timeout) { match self.block_internal(true, timeout, max_ticks) {
Ok(value) => Ok(value), Ok(Ok(value)) => Ok(value),
Err(_) => Err(future), _ => Err(future),
} }
} }
@ -281,6 +304,11 @@ impl BackgroundExecutor {
pub fn is_main_thread(&self) -> bool { pub fn is_main_thread(&self) -> bool {
self.dispatcher.is_main_thread() self.dispatcher.is_main_thread()
} }
#[cfg(any(test, feature = "test-support"))]
pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
self.dispatcher.as_test().unwrap().set_block_on_ticks(range);
}
} }
impl ForegroundExecutor { impl ForegroundExecutor {

View file

@ -21,7 +21,7 @@ mod subscription;
mod svg_renderer; mod svg_renderer;
mod taffy; mod taffy;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
mod test; pub mod test;
mod text_system; mod text_system;
mod util; mod util;
mod view; mod view;

View file

@ -16,7 +16,7 @@ pub struct DispatchNodeId(usize);
pub(crate) struct DispatchTree { pub(crate) struct DispatchTree {
node_stack: Vec<DispatchNodeId>, node_stack: Vec<DispatchNodeId>,
context_stack: Vec<KeyContext>, pub(crate) context_stack: Vec<KeyContext>,
nodes: Vec<DispatchNode>, nodes: Vec<DispatchNode>,
focusable_node_ids: HashMap<FocusId, DispatchNodeId>, focusable_node_ids: HashMap<FocusId, DispatchNodeId>,
keystroke_matchers: HashMap<SmallVec<[KeyContext; 4]>, KeystrokeMatcher>, keystroke_matchers: HashMap<SmallVec<[KeyContext; 4]>, KeystrokeMatcher>,
@ -163,11 +163,25 @@ impl DispatchTree {
actions actions
} }
pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> { pub fn bindings_for_action(
&self,
action: &dyn Action,
context_stack: &Vec<KeyContext>,
) -> Vec<KeyBinding> {
self.keymap self.keymap
.lock() .lock()
.bindings_for_action(action.type_id()) .bindings_for_action(action.type_id())
.filter(|candidate| candidate.action.partial_eq(action)) .filter(|candidate| {
if !candidate.action.partial_eq(action) {
return false;
}
for i in 1..context_stack.len() {
if candidate.matches_context(&context_stack[0..=i]) {
return true;
}
}
return false;
})
.cloned() .cloned()
.collect() .collect()
} }

View file

@ -44,7 +44,7 @@ pub(crate) fn current_platform() -> Rc<dyn Platform> {
Rc::new(MacPlatform::new()) Rc::new(MacPlatform::new())
} }
pub(crate) trait Platform: 'static { pub trait Platform: 'static {
fn background_executor(&self) -> BackgroundExecutor; fn background_executor(&self) -> BackgroundExecutor;
fn foreground_executor(&self) -> ForegroundExecutor; fn foreground_executor(&self) -> ForegroundExecutor;
fn text_system(&self) -> Arc<dyn PlatformTextSystem>; fn text_system(&self) -> Arc<dyn PlatformTextSystem>;
@ -128,7 +128,7 @@ impl Debug for DisplayId {
unsafe impl Send for DisplayId {} unsafe impl Send for DisplayId {}
pub(crate) trait PlatformWindow { pub trait PlatformWindow {
fn bounds(&self) -> WindowBounds; fn bounds(&self) -> WindowBounds;
fn content_size(&self) -> Size<Pixels>; fn content_size(&self) -> Size<Pixels>;
fn scale_factor(&self) -> f32; fn scale_factor(&self) -> f32;
@ -160,7 +160,7 @@ pub(crate) trait PlatformWindow {
fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas>; fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas>;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
fn as_test(&self) -> Option<&TestWindow> { fn as_test(&mut self) -> Option<&mut TestWindow> {
None None
} }
} }

View file

@ -7,6 +7,7 @@ use parking_lot::Mutex;
use rand::prelude::*; use rand::prelude::*;
use std::{ use std::{
future::Future, future::Future,
ops::RangeInclusive,
pin::Pin, pin::Pin,
sync::Arc, sync::Arc,
task::{Context, Poll}, task::{Context, Poll},
@ -36,6 +37,7 @@ struct TestDispatcherState {
allow_parking: bool, allow_parking: bool,
waiting_backtrace: Option<Backtrace>, waiting_backtrace: Option<Backtrace>,
deprioritized_task_labels: HashSet<TaskLabel>, deprioritized_task_labels: HashSet<TaskLabel>,
block_on_ticks: RangeInclusive<usize>,
} }
impl TestDispatcher { impl TestDispatcher {
@ -53,6 +55,7 @@ impl TestDispatcher {
allow_parking: false, allow_parking: false,
waiting_backtrace: None, waiting_backtrace: None,
deprioritized_task_labels: Default::default(), deprioritized_task_labels: Default::default(),
block_on_ticks: 0..=1000,
}; };
TestDispatcher { TestDispatcher {
@ -82,8 +85,8 @@ impl TestDispatcher {
} }
pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> { pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
pub struct YieldNow { struct YieldNow {
count: usize, pub(crate) count: usize,
} }
impl Future for YieldNow { impl Future for YieldNow {
@ -142,6 +145,16 @@ impl TestDispatcher {
pub fn rng(&self) -> StdRng { pub fn rng(&self) -> StdRng {
self.state.lock().random.clone() self.state.lock().random.clone()
} }
pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
self.state.lock().block_on_ticks = range;
}
pub fn gen_block_on_ticks(&self) -> usize {
let mut lock = self.state.lock();
let block_on_ticks = lock.block_on_ticks.clone();
lock.random.gen_range(block_on_ticks)
}
} }
impl Clone for TestDispatcher { impl Clone for TestDispatcher {

View file

@ -19,7 +19,7 @@ pub(crate) struct TestWindowHandlers {
} }
pub struct TestWindow { pub struct TestWindow {
bounds: WindowBounds, pub(crate) bounds: WindowBounds,
current_scene: Mutex<Option<Scene>>, current_scene: Mutex<Option<Scene>>,
display: Rc<dyn PlatformDisplay>, display: Rc<dyn PlatformDisplay>,
pub(crate) window_title: Option<String>, pub(crate) window_title: Option<String>,
@ -170,7 +170,7 @@ impl PlatformWindow for TestWindow {
self.sprite_atlas.clone() self.sprite_atlas.clone()
} }
fn as_test(&self) -> Option<&TestWindow> { fn as_test(&mut self) -> Option<&mut TestWindow> {
Some(self) Some(self)
} }
} }

View file

@ -198,7 +198,7 @@ impl SceneBuilder {
} }
} }
pub(crate) struct Scene { pub struct Scene {
pub shadows: Vec<Shadow>, pub shadows: Vec<Shadow>,
pub quads: Vec<Quad>, pub quads: Vec<Quad>,
pub paths: Vec<Path<ScaledPixels>>, pub paths: Vec<Path<ScaledPixels>>,
@ -214,7 +214,7 @@ impl Scene {
&self.paths &self.paths
} }
pub fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> { pub(crate) fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
BatchIterator { BatchIterator {
shadows: &self.shadows, shadows: &self.shadows,
shadows_start: 0, shadows_start: 0,

View file

@ -208,8 +208,9 @@ impl TextStyle {
} }
} }
/// Returns the rounded line height in pixels.
pub fn line_height_in_pixels(&self, rem_size: Pixels) -> Pixels { pub fn line_height_in_pixels(&self, rem_size: Pixels) -> Pixels {
self.line_height.to_pixels(self.font_size, rem_size) self.line_height.to_pixels(self.font_size, rem_size).round()
} }
pub fn to_run(&self, len: usize) -> TextRun { pub fn to_run(&self, len: usize) -> TextRun {

View file

@ -1,6 +1,6 @@
use collections::{BTreeMap, BTreeSet}; use collections::{BTreeMap, BTreeSet};
use parking_lot::Mutex; use parking_lot::Mutex;
use std::{fmt::Debug, mem, sync::Arc}; use std::{cell::Cell, fmt::Debug, mem, rc::Rc, sync::Arc};
use util::post_inc; use util::post_inc;
pub(crate) struct SubscriberSet<EmitterKey, Callback>( pub(crate) struct SubscriberSet<EmitterKey, Callback>(
@ -14,11 +14,16 @@ impl<EmitterKey, Callback> Clone for SubscriberSet<EmitterKey, Callback> {
} }
struct SubscriberSetState<EmitterKey, Callback> { struct SubscriberSetState<EmitterKey, Callback> {
subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Callback>>>, subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Subscriber<Callback>>>>,
dropped_subscribers: BTreeSet<(EmitterKey, usize)>, dropped_subscribers: BTreeSet<(EmitterKey, usize)>,
next_subscriber_id: usize, next_subscriber_id: usize,
} }
struct Subscriber<Callback> {
active: Rc<Cell<bool>>,
callback: Callback,
}
impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback> impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback>
where where
EmitterKey: 'static + Ord + Clone + Debug, EmitterKey: 'static + Ord + Clone + Debug,
@ -32,16 +37,33 @@ where
}))) })))
} }
pub fn insert(&self, emitter_key: EmitterKey, callback: Callback) -> Subscription { /// Inserts a new `[Subscription]` for the given `emitter_key`. By default, subscriptions
/// are inert, meaning that they won't be listed when calling `[SubscriberSet::remove]` or `[SubscriberSet::retain]`.
/// This method returns a tuple of a `[Subscription]` and an `impl FnOnce`, and you can use the latter
/// to activate the `[Subscription]`.
#[must_use]
pub fn insert(
&self,
emitter_key: EmitterKey,
callback: Callback,
) -> (Subscription, impl FnOnce()) {
let active = Rc::new(Cell::new(false));
let mut lock = self.0.lock(); let mut lock = self.0.lock();
let subscriber_id = post_inc(&mut lock.next_subscriber_id); let subscriber_id = post_inc(&mut lock.next_subscriber_id);
lock.subscribers lock.subscribers
.entry(emitter_key.clone()) .entry(emitter_key.clone())
.or_default() .or_default()
.get_or_insert_with(|| Default::default()) .get_or_insert_with(|| Default::default())
.insert(subscriber_id, callback); .insert(
subscriber_id,
Subscriber {
active: active.clone(),
callback,
},
);
let this = self.0.clone(); let this = self.0.clone();
Subscription {
let subscription = Subscription {
unsubscribe: Some(Box::new(move || { unsubscribe: Some(Box::new(move || {
let mut lock = this.lock(); let mut lock = this.lock();
let Some(subscribers) = lock.subscribers.get_mut(&emitter_key) else { let Some(subscribers) = lock.subscribers.get_mut(&emitter_key) else {
@ -63,7 +85,8 @@ where
lock.dropped_subscribers lock.dropped_subscribers
.insert((emitter_key, subscriber_id)); .insert((emitter_key, subscriber_id));
})), })),
} };
(subscription, move || active.set(true))
} }
pub fn remove(&self, emitter: &EmitterKey) -> impl IntoIterator<Item = Callback> { pub fn remove(&self, emitter: &EmitterKey) -> impl IntoIterator<Item = Callback> {
@ -73,6 +96,13 @@ where
.map(|s| s.into_values()) .map(|s| s.into_values())
.into_iter() .into_iter()
.flatten() .flatten()
.filter_map(|subscriber| {
if subscriber.active.get() {
Some(subscriber.callback)
} else {
None
}
})
} }
/// Call the given callback for each subscriber to the given emitter. /// Call the given callback for each subscriber to the given emitter.
@ -91,7 +121,13 @@ where
return; return;
}; };
subscribers.retain(|_, callback| f(callback)); subscribers.retain(|_, subscriber| {
if subscriber.active.get() {
f(&mut subscriber.callback)
} else {
true
}
});
let mut lock = self.0.lock(); let mut lock = self.0.lock();
// Add any new subscribers that were added while invoking the callback. // Add any new subscribers that were added while invoking the callback.

View file

@ -1,5 +1,7 @@
use crate::TestDispatcher; use crate::{Entity, Subscription, TestAppContext, TestDispatcher};
use futures::StreamExt as _;
use rand::prelude::*; use rand::prelude::*;
use smol::channel;
use std::{ use std::{
env, env,
panic::{self, RefUnwindSafe}, panic::{self, RefUnwindSafe},
@ -49,3 +51,30 @@ pub fn run_test(
} }
} }
} }
pub struct Observation<T> {
rx: channel::Receiver<T>,
_subscription: Subscription,
}
impl<T: 'static> futures::Stream for Observation<T> {
type Item = T;
fn poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Option<Self::Item>> {
self.rx.poll_next_unpin(cx)
}
}
pub fn observe<T: 'static>(entity: &impl Entity<T>, cx: &mut TestAppContext) -> Observation<()> {
let (tx, rx) = smol::channel::unbounded();
let _subscription = cx.update(|cx| {
cx.observe(entity, move |_, _| {
let _ = smol::block_on(tx.send(()));
})
});
Observation { rx, _subscription }
}

View file

@ -72,7 +72,7 @@ impl TextSystem {
} }
} }
pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Result<Bounds<Pixels>> { pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Bounds<Pixels> {
self.read_metrics(font_id, |metrics| metrics.bounding_box(font_size)) self.read_metrics(font_id, |metrics| metrics.bounding_box(font_size))
} }
@ -89,9 +89,9 @@ impl TextSystem {
let bounds = self let bounds = self
.platform_text_system .platform_text_system
.typographic_bounds(font_id, glyph_id)?; .typographic_bounds(font_id, glyph_id)?;
self.read_metrics(font_id, |metrics| { Ok(self.read_metrics(font_id, |metrics| {
(bounds / metrics.units_per_em as f32 * font_size.0).map(px) (bounds / metrics.units_per_em as f32 * font_size.0).map(px)
}) }))
} }
pub fn advance(&self, font_id: FontId, font_size: Pixels, ch: char) -> Result<Size<Pixels>> { pub fn advance(&self, font_id: FontId, font_size: Pixels, ch: char) -> Result<Size<Pixels>> {
@ -100,28 +100,28 @@ impl TextSystem {
.glyph_for_char(font_id, ch) .glyph_for_char(font_id, ch)
.ok_or_else(|| anyhow!("glyph not found for character '{}'", ch))?; .ok_or_else(|| anyhow!("glyph not found for character '{}'", ch))?;
let result = self.platform_text_system.advance(font_id, glyph_id)? let result = self.platform_text_system.advance(font_id, glyph_id)?
/ self.units_per_em(font_id)? as f32; / self.units_per_em(font_id) as f32;
Ok(result * font_size) Ok(result * font_size)
} }
pub fn units_per_em(&self, font_id: FontId) -> Result<u32> { pub fn units_per_em(&self, font_id: FontId) -> u32 {
self.read_metrics(font_id, |metrics| metrics.units_per_em as u32) self.read_metrics(font_id, |metrics| metrics.units_per_em as u32)
} }
pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> { pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.cap_height(font_size)) self.read_metrics(font_id, |metrics| metrics.cap_height(font_size))
} }
pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> { pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.x_height(font_size)) self.read_metrics(font_id, |metrics| metrics.x_height(font_size))
} }
pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> { pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.ascent(font_size)) self.read_metrics(font_id, |metrics| metrics.ascent(font_size))
} }
pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> { pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.descent(font_size)) self.read_metrics(font_id, |metrics| metrics.descent(font_size))
} }
@ -130,24 +130,24 @@ impl TextSystem {
font_id: FontId, font_id: FontId,
font_size: Pixels, font_size: Pixels,
line_height: Pixels, line_height: Pixels,
) -> Result<Pixels> { ) -> Pixels {
let ascent = self.ascent(font_id, font_size)?; let ascent = self.ascent(font_id, font_size);
let descent = self.descent(font_id, font_size)?; let descent = self.descent(font_id, font_size);
let padding_top = (line_height - ascent - descent) / 2.; let padding_top = (line_height - ascent - descent) / 2.;
Ok(padding_top + ascent) padding_top + ascent
} }
fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> Result<T> { fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> T {
let lock = self.font_metrics.upgradable_read(); let lock = self.font_metrics.upgradable_read();
if let Some(metrics) = lock.get(&font_id) { if let Some(metrics) = lock.get(&font_id) {
Ok(read(metrics)) read(metrics)
} else { } else {
let mut lock = RwLockUpgradableReadGuard::upgrade(lock); let mut lock = RwLockUpgradableReadGuard::upgrade(lock);
let metrics = lock let metrics = lock
.entry(font_id) .entry(font_id)
.or_insert_with(|| self.platform_text_system.font_metrics(font_id)); .or_insert_with(|| self.platform_text_system.font_metrics(font_id));
Ok(read(metrics)) read(metrics)
} }
} }

View file

@ -101,9 +101,7 @@ fn paint_line(
let mut glyph_origin = origin; let mut glyph_origin = origin;
let mut prev_glyph_position = Point::default(); let mut prev_glyph_position = Point::default();
for (run_ix, run) in layout.runs.iter().enumerate() { for (run_ix, run) in layout.runs.iter().enumerate() {
let max_glyph_size = text_system let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size;
.bounding_box(run.font_id, layout.font_size)?
.size;
for (glyph_ix, glyph) in run.glyphs.iter().enumerate() { for (glyph_ix, glyph) in run.glyphs.iter().enumerate() {
glyph_origin.x += glyph.position.x - prev_glyph_position.x; glyph_origin.x += glyph.position.x - prev_glyph_position.x;

View file

@ -490,7 +490,7 @@ impl<'a> WindowContext<'a> {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let entity = entity.downgrade(); let entity = entity.downgrade();
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.app.event_listeners.insert( let (subscription, activate) = self.app.event_listeners.insert(
entity_id, entity_id,
( (
TypeId::of::<Evt>(), TypeId::of::<Evt>(),
@ -508,7 +508,9 @@ impl<'a> WindowContext<'a> {
.unwrap_or(false) .unwrap_or(false)
}), }),
), ),
) );
self.app.defer(move |_| activate());
subscription
} }
/// Create an `AsyncWindowContext`, which has a static lifetime and can be held across /// Create an `AsyncWindowContext`, which has a static lifetime and can be held across
@ -1348,6 +1350,8 @@ impl<'a> WindowContext<'a> {
.dispatch_tree .dispatch_tree
.dispatch_path(node_id); .dispatch_path(node_id);
let mut actions: Vec<Box<dyn Action>> = Vec::new();
// Capture phase // Capture phase
let mut context_stack: SmallVec<[KeyContext; 16]> = SmallVec::new(); let mut context_stack: SmallVec<[KeyContext; 16]> = SmallVec::new();
self.propagate_event = true; self.propagate_event = true;
@ -1382,22 +1386,26 @@ impl<'a> WindowContext<'a> {
let node = self.window.current_frame.dispatch_tree.node(*node_id); let node = self.window.current_frame.dispatch_tree.node(*node_id);
if !node.context.is_empty() { if !node.context.is_empty() {
if let Some(key_down_event) = event.downcast_ref::<KeyDownEvent>() { if let Some(key_down_event) = event.downcast_ref::<KeyDownEvent>() {
if let Some(action) = self if let Some(found) = self
.window .window
.current_frame .current_frame
.dispatch_tree .dispatch_tree
.dispatch_key(&key_down_event.keystroke, &context_stack) .dispatch_key(&key_down_event.keystroke, &context_stack)
{ {
self.dispatch_action_on_node(*node_id, action); actions.push(found.boxed_clone())
if !self.propagate_event {
return;
}
} }
} }
context_stack.pop(); context_stack.pop();
} }
} }
for action in actions {
self.dispatch_action_on_node(node_id, action);
if !self.propagate_event {
return;
}
}
} }
} }
@ -1425,7 +1433,6 @@ impl<'a> WindowContext<'a> {
} }
} }
} }
// Bubble phase // Bubble phase
for node_id in dispatch_path.iter().rev() { for node_id in dispatch_path.iter().rev() {
let node = self.window.current_frame.dispatch_tree.node(*node_id); let node = self.window.current_frame.dispatch_tree.node(*node_id);
@ -1453,10 +1460,12 @@ impl<'a> WindowContext<'a> {
f: impl Fn(&mut WindowContext<'_>) + 'static, f: impl Fn(&mut WindowContext<'_>) + 'static,
) -> Subscription { ) -> Subscription {
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.global_observers.insert( let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(), TypeId::of::<G>(),
Box::new(move |cx| window_handle.update(cx, |_, cx| f(cx)).is_ok()), Box::new(move |cx| window_handle.update(cx, |_, cx| f(cx)).is_ok()),
) );
self.app.defer(move |_| activate());
subscription
} }
pub fn activate_window(&self) { pub fn activate_window(&self) {
@ -1493,9 +1502,30 @@ impl<'a> WindowContext<'a> {
pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> { pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> {
self.window self.window
.current_frame .previous_frame
.dispatch_tree .dispatch_tree
.bindings_for_action(action) .bindings_for_action(
action,
&self.window.previous_frame.dispatch_tree.context_stack,
)
}
pub fn bindings_for_action_in(
&self,
action: &dyn Action,
focus_handle: &FocusHandle,
) -> Vec<KeyBinding> {
let dispatch_tree = &self.window.previous_frame.dispatch_tree;
let Some(node_id) = dispatch_tree.focusable_node_id(focus_handle.id) else {
return vec![];
};
let context_stack = dispatch_tree
.dispatch_path(node_id)
.into_iter()
.map(|node_id| dispatch_tree.node(node_id).context.clone())
.collect();
dispatch_tree.bindings_for_action(action, &context_stack)
} }
pub fn listener_for<V: Render, E>( pub fn listener_for<V: Render, E>(
@ -2096,7 +2126,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let entity = entity.downgrade(); let entity = entity.downgrade();
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.app.observers.insert( let (subscription, activate) = self.app.observers.insert(
entity_id, entity_id,
Box::new(move |cx| { Box::new(move |cx| {
window_handle window_handle
@ -2110,7 +2140,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
}) })
.unwrap_or(false) .unwrap_or(false)
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
pub fn subscribe<V2, E, Evt>( pub fn subscribe<V2, E, Evt>(
@ -2127,7 +2159,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let handle = entity.downgrade(); let handle = entity.downgrade();
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.app.event_listeners.insert( let (subscription, activate) = self.app.event_listeners.insert(
entity_id, entity_id,
( (
TypeId::of::<Evt>(), TypeId::of::<Evt>(),
@ -2145,7 +2177,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
.unwrap_or(false) .unwrap_or(false)
}), }),
), ),
) );
self.app.defer(move |_| activate());
subscription
} }
pub fn on_release( pub fn on_release(
@ -2153,13 +2187,15 @@ impl<'a, V: 'static> ViewContext<'a, V> {
on_release: impl FnOnce(&mut V, &mut WindowContext) + 'static, on_release: impl FnOnce(&mut V, &mut WindowContext) + 'static,
) -> Subscription { ) -> Subscription {
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.app.release_listeners.insert( let (subscription, activate) = self.app.release_listeners.insert(
self.view.model.entity_id, self.view.model.entity_id,
Box::new(move |this, cx| { Box::new(move |this, cx| {
let this = this.downcast_mut().expect("invalid entity type"); let this = this.downcast_mut().expect("invalid entity type");
let _ = window_handle.update(cx, |_, cx| on_release(this, cx)); let _ = window_handle.update(cx, |_, cx| on_release(this, cx));
}), }),
) );
activate();
subscription
} }
pub fn observe_release<V2, E>( pub fn observe_release<V2, E>(
@ -2175,7 +2211,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let view = self.view().downgrade(); let view = self.view().downgrade();
let entity_id = entity.entity_id(); let entity_id = entity.entity_id();
let window_handle = self.window.handle; let window_handle = self.window.handle;
self.app.release_listeners.insert( let (subscription, activate) = self.app.release_listeners.insert(
entity_id, entity_id,
Box::new(move |entity, cx| { Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type"); let entity = entity.downcast_mut().expect("invalid entity type");
@ -2183,7 +2219,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
view.update(cx, |this, cx| on_release(this, entity, cx)) view.update(cx, |this, cx| on_release(this, entity, cx))
}); });
}), }),
) );
activate();
subscription
} }
pub fn notify(&mut self) { pub fn notify(&mut self) {
@ -2198,10 +2236,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static, mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
self.window.bounds_observers.insert( let (subscription, activate) = self.window.bounds_observers.insert(
(), (),
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()), Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
) );
activate();
subscription
} }
pub fn observe_window_activation( pub fn observe_window_activation(
@ -2209,10 +2249,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static, mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
self.window.activation_observers.insert( let (subscription, activate) = self.window.activation_observers.insert(
(), (),
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()), Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
) );
activate();
subscription
} }
/// Register a listener to be called when the given focus handle receives focus. /// Register a listener to be called when the given focus handle receives focus.
@ -2225,7 +2267,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
let focus_id = handle.id; let focus_id = handle.id;
self.window.focus_listeners.insert( let (subscription, activate) = self.window.focus_listeners.insert(
(), (),
Box::new(move |event, cx| { Box::new(move |event, cx| {
view.update(cx, |view, cx| { view.update(cx, |view, cx| {
@ -2235,7 +2277,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
}) })
.is_ok() .is_ok()
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
/// Register a listener to be called when the given focus handle or one of its descendants receives focus. /// Register a listener to be called when the given focus handle or one of its descendants receives focus.
@ -2248,7 +2292,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
let focus_id = handle.id; let focus_id = handle.id;
self.window.focus_listeners.insert( let (subscription, activate) = self.window.focus_listeners.insert(
(), (),
Box::new(move |event, cx| { Box::new(move |event, cx| {
view.update(cx, |view, cx| { view.update(cx, |view, cx| {
@ -2262,7 +2306,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
}) })
.is_ok() .is_ok()
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
/// Register a listener to be called when the given focus handle loses focus. /// Register a listener to be called when the given focus handle loses focus.
@ -2275,7 +2321,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
let focus_id = handle.id; let focus_id = handle.id;
self.window.focus_listeners.insert( let (subscription, activate) = self.window.focus_listeners.insert(
(), (),
Box::new(move |event, cx| { Box::new(move |event, cx| {
view.update(cx, |view, cx| { view.update(cx, |view, cx| {
@ -2285,7 +2331,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
}) })
.is_ok() .is_ok()
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
/// Register a listener to be called when the given focus handle or one of its descendants loses focus. /// Register a listener to be called when the given focus handle or one of its descendants loses focus.
@ -2298,7 +2346,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription { ) -> Subscription {
let view = self.view.downgrade(); let view = self.view.downgrade();
let focus_id = handle.id; let focus_id = handle.id;
self.window.focus_listeners.insert( let (subscription, activate) = self.window.focus_listeners.insert(
(), (),
Box::new(move |event, cx| { Box::new(move |event, cx| {
view.update(cx, |view, cx| { view.update(cx, |view, cx| {
@ -2312,7 +2360,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
}) })
.is_ok() .is_ok()
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
pub fn spawn<Fut, R>( pub fn spawn<Fut, R>(
@ -2343,14 +2393,16 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription { ) -> Subscription {
let window_handle = self.window.handle; let window_handle = self.window.handle;
let view = self.view().downgrade(); let view = self.view().downgrade();
self.global_observers.insert( let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(), TypeId::of::<G>(),
Box::new(move |cx| { Box::new(move |cx| {
window_handle window_handle
.update(cx, |_, cx| view.update(cx, |view, cx| f(view, cx)).is_ok()) .update(cx, |_, cx| view.update(cx, |view, cx| f(view, cx)).is_ok())
.unwrap_or(false) .unwrap_or(false)
}), }),
) );
self.app.defer(move |_| activate());
subscription
} }
pub fn on_mouse_event<Event: 'static>( pub fn on_mouse_event<Event: 'static>(
@ -2708,6 +2760,7 @@ pub enum ElementId {
Integer(usize), Integer(usize),
Name(SharedString), Name(SharedString),
FocusHandle(FocusId), FocusHandle(FocusId),
NamedInteger(SharedString, usize),
} }
impl ElementId { impl ElementId {
@ -2757,3 +2810,9 @@ impl<'a> From<&'a FocusHandle> for ElementId {
ElementId::FocusHandle(handle.id) ElementId::FocusHandle(handle.id)
} }
} }
impl From<(&'static str, EntityId)> for ElementId {
fn from((name, id): (&'static str, EntityId)) -> Self {
ElementId::NamedInteger(name.into(), id.as_u64() as usize)
}
}

View file

@ -1121,20 +1121,22 @@ impl Project {
project_path: impl Into<ProjectPath>, project_path: impl Into<ProjectPath>,
is_directory: bool, is_directory: bool,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into(); let project_path = project_path.into();
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?; let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(Ok(None));
};
if self.is_local() { if self.is_local() {
Some(worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
worktree worktree
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.create_entry(project_path.path, is_directory, cx) .create_entry(project_path.path, is_directory, cx)
})) })
} else { } else {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move { cx.spawn_weak(|_, mut cx| async move {
let response = client let response = client
.request(proto::CreateProjectEntry { .request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(), worktree_id: project_path.worktree_id.to_proto(),
@ -1143,19 +1145,20 @@ impl Project {
is_directory, is_directory,
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })
) .await
}) .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1164,8 +1167,10 @@ impl Project {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?; let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into(); let new_path = new_path.into();
if self.is_local() { if self.is_local() {
worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
@ -1178,7 +1183,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move { cx.spawn_weak(|_, mut cx| async move {
let response = client let response = client
.request(proto::CopyProjectEntry { .request(proto::CopyProjectEntry {
project_id, project_id,
@ -1186,19 +1191,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(), new_path: new_path.to_string_lossy().into(),
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })
) .await
}) .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1207,8 +1213,10 @@ impl Project {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?; let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into(); let new_path = new_path.into();
if self.is_local() { if self.is_local() {
worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
@ -1221,7 +1229,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move { cx.spawn_weak(|_, mut cx| async move {
let response = client let response = client
.request(proto::RenameProjectEntry { .request(proto::RenameProjectEntry {
project_id, project_id,
@ -1229,19 +1237,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(), new_path: new_path.to_string_lossy().into(),
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })
) .await
}) .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1658,19 +1667,15 @@ impl Project {
pub fn open_path( pub fn open_path(
&mut self, &mut self,
path: impl Into<ProjectPath>, path: ProjectPath,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> { ) -> Task<Result<(Option<ProjectEntryId>, AnyModelHandle)>> {
let project_path = path.into(); let task = self.open_buffer(path.clone(), cx);
let task = self.open_buffer(project_path.clone(), cx);
cx.spawn_weak(|_, cx| async move { cx.spawn_weak(|_, cx| async move {
let buffer = task.await?; let buffer = task.await?;
let project_entry_id = buffer let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
.read_with(&cx, |buffer, cx| { File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx)) });
})
.with_context(|| format!("no project entry for {project_path:?}"))?;
let buffer: &AnyModelHandle = &buffer; let buffer: &AnyModelHandle = &buffer;
Ok((project_entry_id, buffer.clone())) Ok((project_entry_id, buffer.clone()))
}) })
@ -1985,8 +1990,10 @@ impl Project {
remote_id, remote_id,
); );
self.local_buffer_ids_by_entry_id if let Some(entry_id) = file.entry_id {
.insert(file.entry_id, remote_id); self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
} }
} }
@ -2441,24 +2448,25 @@ impl Project {
return None; return None;
}; };
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) { let remote_id = buffer.read(cx).remote_id();
Some(_) => { if let Some(entry_id) = file.entry_id {
return None; match self.local_buffer_ids_by_entry_id.get(&entry_id) {
Some(_) => {
return None;
}
None => {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
} }
None => { };
let remote_id = buffer.read(cx).remote_id(); self.local_buffer_ids_by_path.insert(
self.local_buffer_ids_by_entry_id ProjectPath {
.insert(file.entry_id, remote_id); worktree_id: file.worktree_id(cx),
path: file.path.clone(),
self.local_buffer_ids_by_path.insert( },
ProjectPath { remote_id,
worktree_id: file.worktree_id(cx), );
path: file.path.clone(),
},
remote_id,
);
}
}
} }
_ => {} _ => {}
} }
@ -5776,11 +5784,6 @@ impl Project {
while let Some(ignored_abs_path) = while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front() ignored_paths_to_process.pop_front()
{ {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(&ignored_abs_path)
{
continue;
}
if let Some(fs_metadata) = fs if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path) .metadata(&ignored_abs_path)
.await .await
@ -5808,6 +5811,13 @@ impl Project {
} }
} }
} else if !fs_metadata.is_symlink { } else if !fs_metadata.is_symlink {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(
ignored_entry.path.to_path_buf(),
)
{
continue;
}
let matches = if let Some(file) = fs let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path) .open_sync(&ignored_abs_path)
.await .await
@ -6208,10 +6218,13 @@ impl Project {
return; return;
} }
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) { let new_file = if let Some(entry) = old_file
.entry_id
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
{
File { File {
is_local: true, is_local: true,
entry_id: entry.id, entry_id: Some(entry.id),
mtime: entry.mtime, mtime: entry.mtime,
path: entry.path.clone(), path: entry.path.clone(),
worktree: worktree_handle.clone(), worktree: worktree_handle.clone(),
@ -6220,7 +6233,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) { } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File { File {
is_local: true, is_local: true,
entry_id: entry.id, entry_id: Some(entry.id),
mtime: entry.mtime, mtime: entry.mtime,
path: entry.path.clone(), path: entry.path.clone(),
worktree: worktree_handle.clone(), worktree: worktree_handle.clone(),
@ -6250,10 +6263,12 @@ impl Project {
); );
} }
if new_file.entry_id != *entry_id { if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id); self.local_buffer_ids_by_entry_id.remove(entry_id);
self.local_buffer_ids_by_entry_id if let Some(entry_id) = new_file.entry_id {
.insert(new_file.entry_id, buffer_id); self.local_buffer_ids_by_entry_id
.insert(entry_id, buffer_id);
}
} }
if new_file != *old_file { if new_file != *old_file {
@ -6816,7 +6831,7 @@ impl Project {
}) })
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }
@ -6840,11 +6855,10 @@ impl Project {
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.rename_entry(entry_id, new_path, cx) .rename_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry")) })
})?
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }
@ -6868,11 +6882,10 @@ impl Project {
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.copy_entry(entry_id, new_path, cx) .copy_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry")) })
})?
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }

View file

@ -4050,6 +4050,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
); );
} }
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
".git": {},
".gitignore": "**/target\n/node_modules\n",
"target": {
"index.txt": "index_key:index_value"
},
"node_modules": {
"eslint": {
"index.ts": "const eslint_key = 'eslint value'",
"package.json": r#"{ "some_key": "some value" }"#,
},
"prettier": {
"index.ts": "const prettier_key = 'prettier value'",
"package.json": r#"{ "other_key": "other value" }"#,
},
},
"package.json": r#"{ "main_key": "main value" }"#,
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let query = "key";
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
"Only one non-ignored file should have the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([
("package.json".to_string(), vec![8..11]),
("target/index.txt".to_string(), vec![6..9]),
(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
),
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
("node_modules/eslint/package.json".to_string(), vec![8..11]),
]),
"Unrestricted search with ignored directories should find every file with the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(
query,
false,
false,
true,
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
)
.unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
)]),
"With search including ignored prettier directory and excluding TS files, only one file should be found"
);
}
#[test] #[test]
fn test_glob_literal_prefix() { fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), ""); assert_eq!(glob_literal_prefix("**/*.js"), "");

View file

@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool { pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path { match file_path {
Some(file_path) => { Some(file_path) => {
!self let mut path = file_path.to_path_buf();
.files_to_exclude() loop {
.iter() if self
.any(|exclude_glob| exclude_glob.is_match(file_path)) .files_to_exclude()
&& (self.files_to_include().is_empty() .iter()
.any(|exclude_glob| exclude_glob.is_match(&path))
{
return false;
} else if self.files_to_include().is_empty()
|| self || self
.files_to_include() .files_to_include()
.iter() .iter()
.any(|include_glob| include_glob.is_match(file_path))) .any(|include_glob| include_glob.is_match(&path))
{
return true;
} else if !path.pop() {
return false;
}
}
} }
None => self.files_to_include().is_empty(), None => self.files_to_include().is_empty(),
} }

View file

@ -960,8 +960,6 @@ impl LocalWorktree {
cx.spawn(|this, cx| async move { cx.spawn(|this, cx| async move {
let text = fs.load(&abs_path).await?; let text = fs.load(&abs_path).await?;
let entry = entry.await?;
let mut index_task = None; let mut index_task = None;
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot()); let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
if let Some(repo) = snapshot.repository_for_path(&path) { if let Some(repo) = snapshot.repository_for_path(&path) {
@ -981,18 +979,43 @@ impl LocalWorktree {
None None
}; };
Ok(( match entry.await? {
File { Some(entry) => Ok((
entry_id: entry.id, File {
worktree: this, entry_id: Some(entry.id),
path: entry.path, worktree: this,
mtime: entry.mtime, path: entry.path,
is_local: true, mtime: entry.mtime,
is_deleted: false, is_local: true,
}, is_deleted: false,
text, },
diff_base, text,
)) diff_base,
)),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?
.with_context(|| {
format!("Excluded file {abs_path:?} got removed during loading")
})?;
Ok((
File {
entry_id: None,
worktree: this,
path,
mtime: metadata.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
}
}
}) })
} }
@ -1013,17 +1036,37 @@ impl LocalWorktree {
let text = buffer.as_rope().clone(); let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint(); let fingerprint = text.fingerprint();
let version = buffer.version(); let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx); let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
cx.as_mut().spawn(|mut cx| async move { cx.as_mut().spawn(|mut cx| async move {
let entry = save.await?; let entry = save.await?;
let (entry_id, mtime, path) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, metadata.mtime, path)
}
};
if has_changed_file { if has_changed_file {
let new_file = Arc::new(File { let new_file = Arc::new(File {
entry_id: entry.id, entry_id,
worktree: handle, worktree: handle,
path: entry.path, path,
mtime: entry.mtime, mtime,
is_local: true, is_local: true,
is_deleted: false, is_deleted: false,
}); });
@ -1049,13 +1092,13 @@ impl LocalWorktree {
project_id, project_id,
buffer_id, buffer_id,
version: serialize_version(&version), version: serialize_version(&version),
mtime: Some(entry.mtime.into()), mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint), fingerprint: serialize_fingerprint(fingerprint),
})?; })?;
} }
buffer_handle.update(&mut cx, |buffer, cx| { buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx); buffer.did_save(version.clone(), fingerprint, mtime, cx);
}); });
Ok(()) Ok(())
@ -1080,7 +1123,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>, path: impl Into<Arc<Path>>,
is_dir: bool, is_dir: bool,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
let path = path.into(); let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path); let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
@ -1097,7 +1140,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
write.await?; write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| { let (result, refreshes) = this.update(&mut cx, |this, cx| {
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new(); let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap(); let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() { for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") { if refresh_path == Path::new("") {
@ -1124,14 +1167,14 @@ impl LocalWorktree {
}) })
} }
pub fn write_file( pub(crate) fn write_file(
&self, &self,
path: impl Into<Arc<Path>>, path: impl Into<Arc<Path>>,
text: Rope, text: Rope,
line_ending: LineEnding, line_ending: LineEnding,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
let path = path.into(); let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let write = cx let write = cx
@ -1190,8 +1233,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone(); let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into(); let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
@ -1201,7 +1247,7 @@ impl LocalWorktree {
.await .await
}); });
Some(cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
rename.await?; rename.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut() this.as_local_mut()
@ -1209,7 +1255,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx) .refresh_entry(new_path.clone(), Some(old_path), cx)
}) })
.await .await
})) })
} }
pub fn copy_entry( pub fn copy_entry(
@ -1217,8 +1263,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone(); let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into(); let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
@ -1233,7 +1282,7 @@ impl LocalWorktree {
.await .await
}); });
Some(cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
copy.await?; copy.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut() this.as_local_mut()
@ -1241,7 +1290,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx) .refresh_entry(new_path.clone(), None, cx)
}) })
.await .await
})) })
} }
pub fn expand_entry( pub fn expand_entry(
@ -1277,7 +1326,10 @@ impl LocalWorktree {
path: Arc<Path>, path: Arc<Path>,
old_path: Option<Arc<Path>>, old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
if self.is_path_excluded(path.to_path_buf()) {
return Task::ready(Ok(None));
}
let paths = if let Some(old_path) = old_path.as_ref() { let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()] vec![old_path.clone(), path.clone()]
} else { } else {
@ -1286,13 +1338,15 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths); let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn_weak(move |this, mut cx| async move { cx.spawn_weak(move |this, mut cx| async move {
refresh.recv().await; refresh.recv().await;
this.upgrade(&cx) let new_entry = this
.upgrade(&cx)
.ok_or_else(|| anyhow!("worktree was dropped"))? .ok_or_else(|| anyhow!("worktree was dropped"))?
.update(&mut cx, |this, _| { .update(&mut cx, |this, _| {
this.entry_for_path(path) this.entry_for_path(path)
.cloned() .cloned()
.ok_or_else(|| anyhow!("failed to read path after update")) .ok_or_else(|| anyhow!("failed to read path after update"))
}) })?;
Ok(Some(new_entry))
}) })
} }
@ -2226,10 +2280,19 @@ impl LocalSnapshot {
paths paths
} }
pub fn is_path_excluded(&self, abs_path: &Path) -> bool { pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
self.file_scan_exclusions loop {
.iter() if self
.any(|exclude_matcher| exclude_matcher.is_match(abs_path)) .file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(&path))
{
return true;
}
if !path.pop() {
return false;
}
}
} }
} }
@ -2458,8 +2521,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
} else { } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path) let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
|| snapshot.is_path_excluded(&git_dir_abs_path);
if git_dir_excluded if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{ {
@ -2666,7 +2728,7 @@ pub struct File {
pub worktree: ModelHandle<Worktree>, pub worktree: ModelHandle<Worktree>,
pub path: Arc<Path>, pub path: Arc<Path>,
pub mtime: SystemTime, pub mtime: SystemTime,
pub(crate) entry_id: ProjectEntryId, pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool, pub(crate) is_local: bool,
pub(crate) is_deleted: bool, pub(crate) is_deleted: bool,
} }
@ -2735,7 +2797,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File { fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File { rpc::proto::File {
worktree_id: self.worktree.id() as u64, worktree_id: self.worktree.id() as u64,
entry_id: self.entry_id.to_proto(), entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(), path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()), mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted, is_deleted: self.is_deleted,
@ -2793,7 +2855,7 @@ impl File {
worktree, worktree,
path: entry.path.clone(), path: entry.path.clone(),
mtime: entry.mtime, mtime: entry.mtime,
entry_id: entry.id, entry_id: Some(entry.id),
is_local: true, is_local: true,
is_deleted: false, is_deleted: false,
}) })
@ -2818,7 +2880,7 @@ impl File {
worktree, worktree,
path: Path::new(&proto.path).into(), path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(), mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
entry_id: ProjectEntryId::from_proto(proto.entry_id), entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false, is_local: false,
is_deleted: proto.is_deleted, is_deleted: proto.is_deleted,
}) })
@ -2836,7 +2898,7 @@ impl File {
if self.is_deleted { if self.is_deleted {
None None
} else { } else {
Some(self.entry_id) self.entry_id
} }
} }
} }
@ -3338,16 +3400,7 @@ impl BackgroundScanner {
return false; return false;
} }
// FS events may come for files which parent directory is excluded, need to check ignore those. if snapshot.is_path_excluded(relative_path.to_path_buf()) {
let mut path_to_test = abs_path.clone();
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|| snapshot.is_path_excluded(&relative_path);
while !excluded_file_event && path_to_test.pop() {
if snapshot.is_path_excluded(&path_to_test) {
excluded_file_event = true;
}
}
if excluded_file_event {
if !is_git_related { if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}"); log::debug!("ignoring FS event for excluded path {relative_path:?}");
} }
@ -3531,7 +3584,7 @@ impl BackgroundScanner {
let state = self.state.lock(); let state = self.state.lock();
let snapshot = &state.snapshot; let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone(); root_abs_path = snapshot.abs_path().clone();
if snapshot.is_path_excluded(&job.abs_path) { if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path); log::error!("skipping excluded directory {:?}", job.path);
return Ok(()); return Ok(());
} }
@ -3603,8 +3656,8 @@ impl BackgroundScanner {
{ {
let mut state = self.state.lock(); let mut state = self.state.lock();
if state.snapshot.is_path_excluded(&child_abs_path) { let relative_path = job.path.join(child_name);
let relative_path = job.path.join(child_name); if state.snapshot.is_path_excluded(relative_path.clone()) {
log::debug!("skipping excluded child entry {relative_path:?}"); log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path); state.remove_path(&relative_path);
continue; continue;

View file

@ -1052,11 +1052,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[ &[
".git/HEAD", ".git/HEAD",
".git/foo", ".git/foo",
"node_modules",
"node_modules/.DS_Store", "node_modules/.DS_Store",
"node_modules/prettier", "node_modules/prettier",
"node_modules/prettier/package.json", "node_modules/prettier/package.json",
], ],
&["target", "node_modules"], &["target"],
&[ &[
".DS_Store", ".DS_Store",
"src/.DS_Store", "src/.DS_Store",
@ -1106,6 +1107,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD", ".git/HEAD",
".git/foo", ".git/foo",
".git/new_file", ".git/new_file",
"node_modules",
"node_modules/.DS_Store", "node_modules/.DS_Store",
"node_modules/prettier", "node_modules/prettier",
"node_modules/prettier/package.json", "node_modules/prettier/package.json",
@ -1114,7 +1116,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file", "build_output/new_file",
"test_output/new_file", "test_output/new_file",
], ],
&["target", "node_modules", "test_output"], &["target", "test_output"],
&[ &[
".DS_Store", ".DS_Store",
"src/.DS_Store", "src/.DS_Store",
@ -1174,6 +1176,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx) .create_entry("a/e".as_ref(), true, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_dir()); assert!(entry.is_dir());
@ -1222,6 +1225,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx) .create_entry("a/b/c/d.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1257,6 +1261,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx) .create_entry("a/b/c/d.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1275,6 +1280,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx) .create_entry("a/b/c/e.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1291,6 +1297,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx) .create_entry("d/e/f/g.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1616,14 +1623,14 @@ fn randomly_mutate_worktree(
entry.id.0, entry.id.0,
new_path new_path
); );
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); let task = worktree.rename_entry(entry.id, new_path, cx);
cx.foreground().spawn(async move { cx.foreground().spawn(async move {
task.await?; task.await?.unwrap();
Ok(()) Ok(())
}) })
} }
_ => { _ => {
let task = if entry.is_dir() { if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng)); let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3); let is_dir = rng.gen_bool(0.3);
log::info!( log::info!(
@ -1631,15 +1638,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" }, if is_dir { "dir" } else { "file" },
child_path, child_path,
); );
worktree.create_entry(child_path, is_dir, cx) let task = worktree.create_entry(child_path, is_dir, cx);
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
} else { } else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) let task =
}; worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
cx.foreground().spawn(async move { cx.foreground().spawn(async move {
task.await?; task.await?;
Ok(()) Ok(())
}) })
}
} }
} }
} }

View file

@ -1151,20 +1151,22 @@ impl Project {
project_path: impl Into<ProjectPath>, project_path: impl Into<ProjectPath>,
is_directory: bool, is_directory: bool,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into(); let project_path = project_path.into();
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?; let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(Ok(None));
};
if self.is_local() { if self.is_local() {
Some(worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
worktree worktree
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.create_entry(project_path.path, is_directory, cx) .create_entry(project_path.path, is_directory, cx)
})) })
} else { } else {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move { cx.spawn(move |_, mut cx| async move {
let response = client let response = client
.request(proto::CreateProjectEntry { .request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(), worktree_id: project_path.worktree_id.to_proto(),
@ -1173,19 +1175,20 @@ impl Project {
is_directory, is_directory,
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })?
) .await
})? .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1194,8 +1197,10 @@ impl Project {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?; let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into(); let new_path = new_path.into();
if self.is_local() { if self.is_local() {
worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
@ -1208,7 +1213,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move { cx.spawn(move |_, mut cx| async move {
let response = client let response = client
.request(proto::CopyProjectEntry { .request(proto::CopyProjectEntry {
project_id, project_id,
@ -1216,19 +1221,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(), new_path: new_path.to_string_lossy().into(),
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })?
) .await
})? .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1237,8 +1243,10 @@ impl Project {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?; let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into(); let new_path = new_path.into();
if self.is_local() { if self.is_local() {
worktree.update(cx, |worktree, cx| { worktree.update(cx, |worktree, cx| {
@ -1251,7 +1259,7 @@ impl Project {
let client = self.client.clone(); let client = self.client.clone();
let project_id = self.remote_id().unwrap(); let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move { cx.spawn(move |_, mut cx| async move {
let response = client let response = client
.request(proto::RenameProjectEntry { .request(proto::RenameProjectEntry {
project_id, project_id,
@ -1259,19 +1267,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(), new_path: new_path.to_string_lossy().into(),
}) })
.await?; .await?;
let entry = response match response.entry {
.entry Some(entry) => worktree
.ok_or_else(|| anyhow!("missing entry in response"))?; .update(&mut cx, |worktree, cx| {
worktree worktree.as_remote_mut().unwrap().insert_entry(
.update(&mut cx, |worktree, cx| { entry,
worktree.as_remote_mut().unwrap().insert_entry( response.worktree_scan_id as usize,
entry, cx,
response.worktree_scan_id as usize, )
cx, })?
) .await
})? .map(Some),
.await None => Ok(None),
})) }
})
} }
} }
@ -1688,18 +1697,15 @@ impl Project {
pub fn open_path( pub fn open_path(
&mut self, &mut self,
path: impl Into<ProjectPath>, path: ProjectPath,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<(ProjectEntryId, AnyModel)>> { ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
let project_path = path.into(); let task = self.open_buffer(path.clone(), cx);
let task = self.open_buffer(project_path.clone(), cx); cx.spawn(move |_, cx| async move {
cx.spawn(move |_, mut cx| async move {
let buffer = task.await?; let buffer = task.await?;
let project_entry_id = buffer let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
.update(&mut cx, |buffer, cx| { File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx)) })?;
})?
.with_context(|| format!("no project entry for {project_path:?}"))?;
let buffer: &AnyModel = &buffer; let buffer: &AnyModel = &buffer;
Ok((project_entry_id, buffer.clone())) Ok((project_entry_id, buffer.clone()))
@ -2018,8 +2024,10 @@ impl Project {
remote_id, remote_id,
); );
self.local_buffer_ids_by_entry_id if let Some(entry_id) = file.entry_id {
.insert(file.entry_id, remote_id); self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
} }
} }
@ -2474,24 +2482,25 @@ impl Project {
return None; return None;
}; };
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) { let remote_id = buffer.read(cx).remote_id();
Some(_) => { if let Some(entry_id) = file.entry_id {
return None; match self.local_buffer_ids_by_entry_id.get(&entry_id) {
Some(_) => {
return None;
}
None => {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
} }
None => { };
let remote_id = buffer.read(cx).remote_id(); self.local_buffer_ids_by_path.insert(
self.local_buffer_ids_by_entry_id ProjectPath {
.insert(file.entry_id, remote_id); worktree_id: file.worktree_id(cx),
path: file.path.clone(),
self.local_buffer_ids_by_path.insert( },
ProjectPath { remote_id,
worktree_id: file.worktree_id(cx), );
path: file.path.clone(),
},
remote_id,
);
}
}
} }
_ => {} _ => {}
} }
@ -5845,11 +5854,6 @@ impl Project {
while let Some(ignored_abs_path) = while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front() ignored_paths_to_process.pop_front()
{ {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(&ignored_abs_path)
{
continue;
}
if let Some(fs_metadata) = fs if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path) .metadata(&ignored_abs_path)
.await .await
@ -5877,6 +5881,13 @@ impl Project {
} }
} }
} else if !fs_metadata.is_symlink { } else if !fs_metadata.is_symlink {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(
ignored_entry.path.to_path_buf(),
)
{
continue;
}
let matches = if let Some(file) = fs let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path) .open_sync(&ignored_abs_path)
.await .await
@ -6278,10 +6289,13 @@ impl Project {
return; return;
} }
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) { let new_file = if let Some(entry) = old_file
.entry_id
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
{
File { File {
is_local: true, is_local: true,
entry_id: entry.id, entry_id: Some(entry.id),
mtime: entry.mtime, mtime: entry.mtime,
path: entry.path.clone(), path: entry.path.clone(),
worktree: worktree_handle.clone(), worktree: worktree_handle.clone(),
@ -6290,7 +6304,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) { } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File { File {
is_local: true, is_local: true,
entry_id: entry.id, entry_id: Some(entry.id),
mtime: entry.mtime, mtime: entry.mtime,
path: entry.path.clone(), path: entry.path.clone(),
worktree: worktree_handle.clone(), worktree: worktree_handle.clone(),
@ -6320,10 +6334,12 @@ impl Project {
); );
} }
if new_file.entry_id != *entry_id { if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id); self.local_buffer_ids_by_entry_id.remove(entry_id);
self.local_buffer_ids_by_entry_id if let Some(entry_id) = new_file.entry_id {
.insert(new_file.entry_id, buffer_id); self.local_buffer_ids_by_entry_id
.insert(entry_id, buffer_id);
}
} }
if new_file != *old_file { if new_file != *old_file {
@ -6890,7 +6906,7 @@ impl Project {
})? })?
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }
@ -6914,11 +6930,10 @@ impl Project {
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.rename_entry(entry_id, new_path, cx) .rename_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry")) })?
})??
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }
@ -6942,11 +6957,10 @@ impl Project {
.as_local_mut() .as_local_mut()
.unwrap() .unwrap()
.copy_entry(entry_id, new_path, cx) .copy_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry")) })?
})??
.await?; .await?;
Ok(proto::ProjectEntryResponse { Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()), entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64, worktree_scan_id: worktree_scan_id as u64,
}) })
} }

View file

@ -4182,6 +4182,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
); );
} }
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
".gitignore": "**/target\n/node_modules\n",
"target": {
"index.txt": "index_key:index_value"
},
"node_modules": {
"eslint": {
"index.ts": "const eslint_key = 'eslint value'",
"package.json": r#"{ "some_key": "some value" }"#,
},
"prettier": {
"index.ts": "const prettier_key = 'prettier value'",
"package.json": r#"{ "other_key": "other value" }"#,
},
},
"package.json": r#"{ "main_key": "main value" }"#,
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let query = "key";
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
"Only one non-ignored file should have the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([
("package.json".to_string(), vec![8..11]),
("target/index.txt".to_string(), vec![6..9]),
(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
),
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
("node_modules/eslint/package.json".to_string(), vec![8..11]),
]),
"Unrestricted search with ignored directories should find every file with the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(
query,
false,
false,
true,
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
)
.unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
)]),
"With search including ignored prettier directory and excluding TS files, only one file should be found"
);
}
#[test] #[test]
fn test_glob_literal_prefix() { fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), ""); assert_eq!(glob_literal_prefix("**/*.js"), "");

View file

@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool { pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path { match file_path {
Some(file_path) => { Some(file_path) => {
!self let mut path = file_path.to_path_buf();
.files_to_exclude() loop {
.iter() if self
.any(|exclude_glob| exclude_glob.is_match(file_path)) .files_to_exclude()
&& (self.files_to_include().is_empty() .iter()
.any(|exclude_glob| exclude_glob.is_match(&path))
{
return false;
} else if self.files_to_include().is_empty()
|| self || self
.files_to_include() .files_to_include()
.iter() .iter()
.any(|include_glob| include_glob.is_match(file_path))) .any(|include_glob| include_glob.is_match(&path))
{
return true;
} else if !path.pop() {
return false;
}
}
} }
None => self.files_to_include().is_empty(), None => self.files_to_include().is_empty(),
} }

View file

@ -958,8 +958,6 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?; let text = fs.load(&abs_path).await?;
let entry = entry.await?;
let mut index_task = None; let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?; let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) { if let Some(repo) = snapshot.repository_for_path(&path) {
@ -982,18 +980,43 @@ impl LocalWorktree {
let worktree = this let worktree = this
.upgrade() .upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?; .ok_or_else(|| anyhow!("worktree was dropped"))?;
Ok(( match entry.await? {
File { Some(entry) => Ok((
entry_id: entry.id, File {
worktree, entry_id: Some(entry.id),
path: entry.path, worktree,
mtime: entry.mtime, path: entry.path,
is_local: true, mtime: entry.mtime,
is_deleted: false, is_local: true,
}, is_deleted: false,
text, },
diff_base, text,
)) diff_base,
)),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?
.with_context(|| {
format!("Excluded file {abs_path:?} got removed during loading")
})?;
Ok((
File {
entry_id: None,
worktree,
path,
mtime: metadata.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
}
}
}) })
} }
@ -1013,18 +1036,38 @@ impl LocalWorktree {
let text = buffer.as_rope().clone(); let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint(); let fingerprint = text.fingerprint();
let version = buffer.version(); let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx); let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
let entry = save.await?; let entry = save.await?;
let this = this.upgrade().context("worktree dropped")?; let this = this.upgrade().context("worktree dropped")?;
let (entry_id, mtime, path) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, metadata.mtime, path)
}
};
if has_changed_file { if has_changed_file {
let new_file = Arc::new(File { let new_file = Arc::new(File {
entry_id: entry.id, entry_id,
worktree: this, worktree: this,
path: entry.path, path,
mtime: entry.mtime, mtime,
is_local: true, is_local: true,
is_deleted: false, is_deleted: false,
}); });
@ -1050,13 +1093,13 @@ impl LocalWorktree {
project_id, project_id,
buffer_id, buffer_id,
version: serialize_version(&version), version: serialize_version(&version),
mtime: Some(entry.mtime.into()), mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint), fingerprint: serialize_fingerprint(fingerprint),
})?; })?;
} }
buffer_handle.update(&mut cx, |buffer, cx| { buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx); buffer.did_save(version.clone(), fingerprint, mtime, cx);
})?; })?;
Ok(()) Ok(())
@ -1081,7 +1124,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>, path: impl Into<Arc<Path>>,
is_dir: bool, is_dir: bool,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
let path = path.into(); let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path); let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
@ -1098,7 +1141,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
write.await?; write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| { let (result, refreshes) = this.update(&mut cx, |this, cx| {
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new(); let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap(); let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() { for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") { if refresh_path == Path::new("") {
@ -1125,14 +1168,14 @@ impl LocalWorktree {
}) })
} }
pub fn write_file( pub(crate) fn write_file(
&self, &self,
path: impl Into<Arc<Path>>, path: impl Into<Arc<Path>>,
text: Rope, text: Rope,
line_ending: LineEnding, line_ending: LineEnding,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
let path = path.into(); let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path); let abs_path = self.absolutize(&path);
let fs = self.fs.clone(); let fs = self.fs.clone();
let write = cx let write = cx
@ -1191,8 +1234,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone(); let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into(); let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
@ -1202,7 +1248,7 @@ impl LocalWorktree {
.await .await
}); });
Some(cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
rename.await?; rename.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut() this.as_local_mut()
@ -1210,7 +1256,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx) .refresh_entry(new_path.clone(), Some(old_path), cx)
})? })?
.await .await
})) })
} }
pub fn copy_entry( pub fn copy_entry(
@ -1218,8 +1264,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>, new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> { ) -> Task<Result<Option<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone(); let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into(); let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path); let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path); let abs_new_path = self.absolutize(&new_path);
@ -1234,7 +1283,7 @@ impl LocalWorktree {
.await .await
}); });
Some(cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
copy.await?; copy.await?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.as_local_mut() this.as_local_mut()
@ -1242,7 +1291,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx) .refresh_entry(new_path.clone(), None, cx)
})? })?
.await .await
})) })
} }
pub fn expand_entry( pub fn expand_entry(
@ -1278,7 +1327,10 @@ impl LocalWorktree {
path: Arc<Path>, path: Arc<Path>,
old_path: Option<Arc<Path>>, old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>, cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> { ) -> Task<Result<Option<Entry>>> {
if self.is_path_excluded(path.to_path_buf()) {
return Task::ready(Ok(None));
}
let paths = if let Some(old_path) = old_path.as_ref() { let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()] vec![old_path.clone(), path.clone()]
} else { } else {
@ -1287,11 +1339,12 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths); let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
refresh.recv().await; refresh.recv().await;
this.update(&mut cx, |this, _| { let new_entry = this.update(&mut cx, |this, _| {
this.entry_for_path(path) this.entry_for_path(path)
.cloned() .cloned()
.ok_or_else(|| anyhow!("failed to read path after update")) .ok_or_else(|| anyhow!("failed to read path after update"))
})? })??;
Ok(Some(new_entry))
}) })
} }
@ -2222,10 +2275,19 @@ impl LocalSnapshot {
paths paths
} }
pub fn is_path_excluded(&self, abs_path: &Path) -> bool { pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
self.file_scan_exclusions loop {
.iter() if self
.any(|exclude_matcher| exclude_matcher.is_match(abs_path)) .file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(&path))
{
return true;
}
if !path.pop() {
return false;
}
}
} }
} }
@ -2455,8 +2517,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
} else { } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path) let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
|| snapshot.is_path_excluded(&git_dir_abs_path);
if git_dir_excluded if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{ {
@ -2663,7 +2724,7 @@ pub struct File {
pub worktree: Model<Worktree>, pub worktree: Model<Worktree>,
pub path: Arc<Path>, pub path: Arc<Path>,
pub mtime: SystemTime, pub mtime: SystemTime,
pub(crate) entry_id: ProjectEntryId, pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool, pub(crate) is_local: bool,
pub(crate) is_deleted: bool, pub(crate) is_deleted: bool,
} }
@ -2732,7 +2793,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File { fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File { rpc::proto::File {
worktree_id: self.worktree.entity_id().as_u64(), worktree_id: self.worktree.entity_id().as_u64(),
entry_id: self.entry_id.to_proto(), entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(), path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()), mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted, is_deleted: self.is_deleted,
@ -2790,7 +2851,7 @@ impl File {
worktree, worktree,
path: entry.path.clone(), path: entry.path.clone(),
mtime: entry.mtime, mtime: entry.mtime,
entry_id: entry.id, entry_id: Some(entry.id),
is_local: true, is_local: true,
is_deleted: false, is_deleted: false,
}) })
@ -2815,7 +2876,7 @@ impl File {
worktree, worktree,
path: Path::new(&proto.path).into(), path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(), mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
entry_id: ProjectEntryId::from_proto(proto.entry_id), entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false, is_local: false,
is_deleted: proto.is_deleted, is_deleted: proto.is_deleted,
}) })
@ -2833,7 +2894,7 @@ impl File {
if self.is_deleted { if self.is_deleted {
None None
} else { } else {
Some(self.entry_id) self.entry_id
} }
} }
} }
@ -3329,16 +3390,7 @@ impl BackgroundScanner {
return false; return false;
} }
// FS events may come for files which parent directory is excluded, need to check ignore those. if snapshot.is_path_excluded(relative_path.to_path_buf()) {
let mut path_to_test = abs_path.clone();
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|| snapshot.is_path_excluded(&relative_path);
while !excluded_file_event && path_to_test.pop() {
if snapshot.is_path_excluded(&path_to_test) {
excluded_file_event = true;
}
}
if excluded_file_event {
if !is_git_related { if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}"); log::debug!("ignoring FS event for excluded path {relative_path:?}");
} }
@ -3522,7 +3574,7 @@ impl BackgroundScanner {
let state = self.state.lock(); let state = self.state.lock();
let snapshot = &state.snapshot; let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone(); root_abs_path = snapshot.abs_path().clone();
if snapshot.is_path_excluded(&job.abs_path) { if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path); log::error!("skipping excluded directory {:?}", job.path);
return Ok(()); return Ok(());
} }
@ -3593,9 +3645,9 @@ impl BackgroundScanner {
} }
{ {
let relative_path = job.path.join(child_name);
let mut state = self.state.lock(); let mut state = self.state.lock();
if state.snapshot.is_path_excluded(&child_abs_path) { if state.snapshot.is_path_excluded(relative_path.clone()) {
let relative_path = job.path.join(child_name);
log::debug!("skipping excluded child entry {relative_path:?}"); log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path); state.remove_path(&relative_path);
continue; continue;

View file

@ -1055,11 +1055,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[ &[
".git/HEAD", ".git/HEAD",
".git/foo", ".git/foo",
"node_modules",
"node_modules/.DS_Store", "node_modules/.DS_Store",
"node_modules/prettier", "node_modules/prettier",
"node_modules/prettier/package.json", "node_modules/prettier/package.json",
], ],
&["target", "node_modules"], &["target"],
&[ &[
".DS_Store", ".DS_Store",
"src/.DS_Store", "src/.DS_Store",
@ -1109,6 +1110,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD", ".git/HEAD",
".git/foo", ".git/foo",
".git/new_file", ".git/new_file",
"node_modules",
"node_modules/.DS_Store", "node_modules/.DS_Store",
"node_modules/prettier", "node_modules/prettier",
"node_modules/prettier/package.json", "node_modules/prettier/package.json",
@ -1117,7 +1119,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file", "build_output/new_file",
"test_output/new_file", "test_output/new_file",
], ],
&["target", "node_modules", "test_output"], &["target", "test_output"],
&[ &[
".DS_Store", ".DS_Store",
"src/.DS_Store", "src/.DS_Store",
@ -1177,6 +1179,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx) .create_entry("a/e".as_ref(), true, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_dir()); assert!(entry.is_dir());
@ -1226,6 +1229,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx) .create_entry("a/b/c/d.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1261,6 +1265,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx) .create_entry("a/b/c/d.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1279,6 +1284,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx) .create_entry("a/b/c/e.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1295,6 +1301,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx) .create_entry("d/e/f/g.txt".as_ref(), false, cx)
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
assert!(entry.is_file()); assert!(entry.is_file());
@ -1620,14 +1627,14 @@ fn randomly_mutate_worktree(
entry.id.0, entry.id.0,
new_path new_path
); );
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); let task = worktree.rename_entry(entry.id, new_path, cx);
cx.background_executor().spawn(async move { cx.background_executor().spawn(async move {
task.await?; task.await?.unwrap();
Ok(()) Ok(())
}) })
} }
_ => { _ => {
let task = if entry.is_dir() { if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng)); let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3); let is_dir = rng.gen_bool(0.3);
log::info!( log::info!(
@ -1635,15 +1642,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" }, if is_dir { "dir" } else { "file" },
child_path, child_path,
); );
worktree.create_entry(child_path, is_dir, cx) let task = worktree.create_entry(child_path, is_dir, cx);
cx.background_executor().spawn(async move {
task.await?;
Ok(())
})
} else { } else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) let task =
}; worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
cx.background_executor().spawn(async move { cx.background_executor().spawn(async move {
task.await?; task.await?;
Ok(()) Ok(())
}) })
}
} }
} }
} }

View file

@ -621,7 +621,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID; edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| { edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx) project.create_entry((worktree_id, &new_path), is_dir, cx)
})?; });
} else { } else {
let new_path = if let Some(parent) = entry.path.clone().parent() { let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename) parent.join(&filename)
@ -635,7 +635,7 @@ impl ProjectPanel {
edited_entry_id = entry.id; edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| { edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx) project.rename_entry(entry.id, new_path.as_path(), cx)
})?; });
}; };
edit_state.processing_filename = Some(filename); edit_state.processing_filename = Some(filename);
@ -648,21 +648,22 @@ impl ProjectPanel {
cx.notify(); cx.notify();
})?; })?;
let new_entry = new_entry?; if let Some(new_entry) = new_entry? {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection { if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id { if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id; selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id; selection.entry_id = new_entry.id;
this.expand_to_selection(cx); this.expand_to_selection(cx);
}
} }
} this.update_visible_entries(None, cx);
this.update_visible_entries(None, cx); if is_new_entry && !is_dir {
if is_new_entry && !is_dir { this.open_entry(new_entry.id, true, cx);
this.open_entry(new_entry.id, true, cx); }
} cx.notify();
cx.notify(); })?;
})?; }
Ok(()) Ok(())
})) }))
} }
@ -935,15 +936,17 @@ impl ProjectPanel {
} }
if clipboard_entry.is_cut() { if clipboard_entry.is_cut() {
if let Some(task) = self.project.update(cx, |project, cx| { self.project
project.rename_entry(clipboard_entry.entry_id(), new_path, cx) .update(cx, |project, cx| {
}) { project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
task.detach_and_log_err(cx) })
} .detach_and_log_err(cx)
} else if let Some(task) = self.project.update(cx, |project, cx| { } else {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx) self.project
}) { .update(cx, |project, cx| {
task.detach_and_log_err(cx) project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
} }
} }
None None
@ -1026,7 +1029,7 @@ impl ProjectPanel {
let mut new_path = destination_path.to_path_buf(); let mut new_path = destination_path.to_path_buf();
new_path.push(entry_path.path.file_name()?); new_path.push(entry_path.path.file_name()?);
if new_path != entry_path.path.as_ref() { if new_path != entry_path.path.as_ref() {
let task = project.rename_entry(entry_to_move, new_path, cx)?; let task = project.rename_entry(entry_to_move, new_path, cx);
cx.foreground().spawn(task).detach_and_log_err(cx); cx.foreground().spawn(task).detach_and_log_err(cx);
} }

View file

@ -397,7 +397,6 @@ impl ProjectPanel {
menu = menu.action( menu = menu.action(
"Add Folder to Project", "Add Folder to Project",
Box::new(workspace::AddFolderToProject), Box::new(workspace::AddFolderToProject),
cx,
); );
if is_root { if is_root {
menu = menu.entry( menu = menu.entry(
@ -412,35 +411,35 @@ impl ProjectPanel {
} }
menu = menu menu = menu
.action("New File", Box::new(NewFile), cx) .action("New File", Box::new(NewFile))
.action("New Folder", Box::new(NewDirectory), cx) .action("New Folder", Box::new(NewDirectory))
.separator() .separator()
.action("Cut", Box::new(Cut), cx) .action("Cut", Box::new(Cut))
.action("Copy", Box::new(Copy), cx); .action("Copy", Box::new(Copy));
if let Some(clipboard_entry) = self.clipboard_entry { if let Some(clipboard_entry) = self.clipboard_entry {
if clipboard_entry.worktree_id() == worktree_id { if clipboard_entry.worktree_id() == worktree_id {
menu = menu.action("Paste", Box::new(Paste), cx); menu = menu.action("Paste", Box::new(Paste));
} }
} }
menu = menu menu = menu
.separator() .separator()
.action("Copy Path", Box::new(CopyPath), cx) .action("Copy Path", Box::new(CopyPath))
.action("Copy Relative Path", Box::new(CopyRelativePath), cx) .action("Copy Relative Path", Box::new(CopyRelativePath))
.separator() .separator()
.action("Reveal in Finder", Box::new(RevealInFinder), cx); .action("Reveal in Finder", Box::new(RevealInFinder));
if is_dir { if is_dir {
menu = menu menu = menu
.action("Open in Terminal", Box::new(OpenInTerminal), cx) .action("Open in Terminal", Box::new(OpenInTerminal))
.action("Search Inside", Box::new(NewSearchInDirectory), cx) .action("Search Inside", Box::new(NewSearchInDirectory))
} }
menu = menu.separator().action("Rename", Box::new(Rename), cx); menu = menu.separator().action("Rename", Box::new(Rename));
if !is_root { if !is_root {
menu = menu.action("Delete", Box::new(Delete), cx); menu = menu.action("Delete", Box::new(Delete));
} }
menu menu
@ -611,7 +610,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID; edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| { edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx) project.create_entry((worktree_id, &new_path), is_dir, cx)
})?; });
} else { } else {
let new_path = if let Some(parent) = entry.path.clone().parent() { let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename) parent.join(&filename)
@ -625,7 +624,7 @@ impl ProjectPanel {
edited_entry_id = entry.id; edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| { edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx) project.rename_entry(entry.id, new_path.as_path(), cx)
})?; });
}; };
edit_state.processing_filename = Some(filename); edit_state.processing_filename = Some(filename);
@ -638,21 +637,22 @@ impl ProjectPanel {
cx.notify(); cx.notify();
})?; })?;
let new_entry = new_entry?; if let Some(new_entry) = new_entry? {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection { if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id { if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id; selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id; selection.entry_id = new_entry.id;
this.expand_to_selection(cx); this.expand_to_selection(cx);
}
} }
} this.update_visible_entries(None, cx);
this.update_visible_entries(None, cx); if is_new_entry && !is_dir {
if is_new_entry && !is_dir { this.open_entry(new_entry.id, true, cx);
this.open_entry(new_entry.id, true, cx); }
} cx.notify();
cx.notify(); })?;
})?; }
Ok(()) Ok(())
})) }))
} }
@ -932,15 +932,17 @@ impl ProjectPanel {
} }
if clipboard_entry.is_cut() { if clipboard_entry.is_cut() {
if let Some(task) = self.project.update(cx, |project, cx| { self.project
project.rename_entry(clipboard_entry.entry_id(), new_path, cx) .update(cx, |project, cx| {
}) { project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
task.detach_and_log_err(cx); })
} .detach_and_log_err(cx)
} else if let Some(task) = self.project.update(cx, |project, cx| { } else {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx) self.project
}) { .update(cx, |project, cx| {
task.detach_and_log_err(cx); project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
} }
Some(()) Some(())
@ -1026,7 +1028,7 @@ impl ProjectPanel {
// let mut new_path = destination_path.to_path_buf(); // let mut new_path = destination_path.to_path_buf();
// new_path.push(entry_path.path.file_name()?); // new_path.push(entry_path.path.file_name()?);
// if new_path != entry_path.path.as_ref() { // if new_path != entry_path.path.as_ref() {
// let task = project.rename_entry(entry_to_move, new_path, cx)?; // let task = project.rename_entry(entry_to_move, new_path, cx);
// cx.foreground_executor().spawn(task).detach_and_log_err(cx); // cx.foreground_executor().spawn(task).detach_and_log_err(cx);
// } // }

View file

@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
} }
message ProjectEntryResponse { message ProjectEntryResponse {
Entry entry = 1; optional Entry entry = 1;
uint64 worktree_scan_id = 2; uint64 worktree_scan_id = 2;
} }
@ -1357,7 +1357,7 @@ message User {
message File { message File {
uint64 worktree_id = 1; uint64 worktree_id = 1;
uint64 entry_id = 2; optional uint64 entry_id = 2;
string path = 3; string path = 3;
Timestamp mtime = 4; Timestamp mtime = 4;
bool is_deleted = 5; bool is_deleted = 5;

View file

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*; pub use peer::*;
mod macros; mod macros;
pub const PROTOCOL_VERSION: u32 = 66; pub const PROTOCOL_VERSION: u32 = 67;

View file

@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
} }
message ProjectEntryResponse { message ProjectEntryResponse {
Entry entry = 1; optional Entry entry = 1;
uint64 worktree_scan_id = 2; uint64 worktree_scan_id = 2;
} }
@ -1357,7 +1357,7 @@ message User {
message File { message File {
uint64 worktree_id = 1; uint64 worktree_id = 1;
uint64 entry_id = 2; optional uint64 entry_id = 2;
string path = 3; string path = 3;
Timestamp mtime = 4; Timestamp mtime = 4;
bool is_deleted = 5; bool is_deleted = 5;

View file

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*; pub use peer::*;
mod macros; mod macros;
pub const PROTOCOL_VERSION: u32 = 66; pub const PROTOCOL_VERSION: u32 = 67;

View file

@ -0,0 +1,69 @@
[package]
name = "semantic_index2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/semantic_index.rs"
doctest = false
[dependencies]
ai = { package = "ai2", path = "../ai2" }
collections = { path = "../collections" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
project = { package = "project2", path = "../project2" }
workspace = { package = "workspace2", path = "../workspace2" }
util = { path = "../util" }
rpc = { package = "rpc2", path = "../rpc2" }
settings = { package = "settings2", path = "../settings2" }
anyhow.workspace = true
postage.workspace = true
futures.workspace = true
ordered-float.workspace = true
smol.workspace = true
rusqlite.workspace = true
log.workspace = true
tree-sitter.workspace = true
lazy_static.workspace = true
serde.workspace = true
serde_json.workspace = true
async-trait.workspace = true
tiktoken-rs.workspace = true
parking_lot.workspace = true
rand.workspace = true
schemars.workspace = true
globset.workspace = true
sha1 = "0.10.5"
ndarray = { version = "0.15.0" }
[dev-dependencies]
ai = { package = "ai2", path = "../ai2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] }
project = { package = "project2", path = "../project2", features = ["test-support"] }
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"]}
rust-embed = { version = "8.0", features = ["include-exclude"] }
client = { package = "client2", path = "../client2" }
node_runtime = { path = "../node_runtime"}
pretty_assertions.workspace = true
rand.workspace = true
unindent.workspace = true
tempdir.workspace = true
ctor.workspace = true
env_logger.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-json.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-toml.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-php.workspace = true

View file

@ -0,0 +1,20 @@
# Semantic Index
## Evaluation
### Metrics
nDCG@k:
- "The value of NDCG is determined by comparing the relevance of the items returned by the search engine to the relevance of the item that a hypothetical "ideal" search engine would return.
- "The relevance of result is represented by a score (also known as a 'grade') that is assigned to the search query. The scores of these results are then discounted based on their position in the search results -- did they get recommended first or last?"
MRR@k:
- "Mean reciprocal rank quantifies the rank of the first relevant item found in teh recommendation list."
MAP@k:
- "Mean average precision averages the precision@k metric at each relevant item position in the recommendation list.
Resources:
- [Evaluating recommendation metrics](https://www.shaped.ai/blog/evaluating-recommendation-systems-map-mmr-ndcg)
- [Math Walkthrough](https://towardsdatascience.com/demystifying-ndcg-bee3be58cfe0)

View file

@ -0,0 +1,114 @@
{
"repo": "https://github.com/AntonOsika/gpt-engineer.git",
"commit": "7735a6445bae3611c62f521e6464c67c957f87c2",
"assertions": [
{
"query": "How do I contribute to this project?",
"matches": [
".github/CONTRIBUTING.md:1",
"ROADMAP.md:48"
]
},
{
"query": "What version of the openai package is active?",
"matches": [
"pyproject.toml:14"
]
},
{
"query": "Ask user for clarification",
"matches": [
"gpt_engineer/steps.py:69"
]
},
{
"query": "generate tests for python code",
"matches": [
"gpt_engineer/steps.py:153"
]
},
{
"query": "get item from database based on key",
"matches": [
"gpt_engineer/db.py:42",
"gpt_engineer/db.py:68"
]
},
{
"query": "prompt user to select files",
"matches": [
"gpt_engineer/file_selector.py:171",
"gpt_engineer/file_selector.py:306",
"gpt_engineer/file_selector.py:289",
"gpt_engineer/file_selector.py:234"
]
},
{
"query": "send to rudderstack",
"matches": [
"gpt_engineer/collect.py:11",
"gpt_engineer/collect.py:38"
]
},
{
"query": "parse code blocks from chat messages",
"matches": [
"gpt_engineer/chat_to_files.py:10",
"docs/intro/chat_parsing.md:1"
]
},
{
"query": "how do I use the docker cli?",
"matches": [
"docker/README.md:1"
]
},
{
"query": "ask the user if the code ran successfully?",
"matches": [
"gpt_engineer/learning.py:54"
]
},
{
"query": "how is consent granted by the user?",
"matches": [
"gpt_engineer/learning.py:107",
"gpt_engineer/learning.py:130",
"gpt_engineer/learning.py:152"
]
},
{
"query": "what are all the different steps the agent can take?",
"matches": [
"docs/intro/steps_module.md:1",
"gpt_engineer/steps.py:391"
]
},
{
"query": "ask the user for clarification?",
"matches": [
"gpt_engineer/steps.py:69"
]
},
{
"query": "what models are available?",
"matches": [
"gpt_engineer/ai.py:315",
"gpt_engineer/ai.py:341",
"docs/open-models.md:1"
]
},
{
"query": "what is the current focus of the project?",
"matches": [
"ROADMAP.md:11"
]
},
{
"query": "does the agent know how to fix code?",
"matches": [
"gpt_engineer/steps.py:367"
]
}
]
}

View file

@ -0,0 +1,104 @@
{
"repo": "https://github.com/tree-sitter/tree-sitter.git",
"commit": "46af27796a76c72d8466627d499f2bca4af958ee",
"assertions": [
{
"query": "What attributes are available for the tags configuration struct?",
"matches": [
"tags/src/lib.rs:24"
]
},
{
"query": "create a new tag configuration",
"matches": [
"tags/src/lib.rs:119"
]
},
{
"query": "generate tags based on config",
"matches": [
"tags/src/lib.rs:261"
]
},
{
"query": "match on ts quantifier in rust",
"matches": [
"lib/binding_rust/lib.rs:139"
]
},
{
"query": "cli command to generate tags",
"matches": [
"cli/src/tags.rs:10"
]
},
{
"query": "what version of the tree-sitter-tags package is active?",
"matches": [
"tags/Cargo.toml:4"
]
},
{
"query": "Insert a new parse state",
"matches": [
"cli/src/generate/build_tables/build_parse_table.rs:153"
]
},
{
"query": "Handle conflict when numerous actions occur on the same symbol",
"matches": [
"cli/src/generate/build_tables/build_parse_table.rs:363",
"cli/src/generate/build_tables/build_parse_table.rs:442"
]
},
{
"query": "Match based on associativity of actions",
"matches": [
"cli/src/generate/build_tables/build_parse_table.rs:542"
]
},
{
"query": "Format token set display",
"matches": [
"cli/src/generate/build_tables/item.rs:246"
]
},
{
"query": "extract choices from rule",
"matches": [
"cli/src/generate/prepare_grammar/flatten_grammar.rs:124"
]
},
{
"query": "How do we identify if a symbol is being used?",
"matches": [
"cli/src/generate/prepare_grammar/flatten_grammar.rs:175"
]
},
{
"query": "How do we launch the playground?",
"matches": [
"cli/src/playground.rs:46"
]
},
{
"query": "How do we test treesitter query matches in rust?",
"matches": [
"cli/src/query_testing.rs:152",
"cli/src/tests/query_test.rs:781",
"cli/src/tests/query_test.rs:2163",
"cli/src/tests/query_test.rs:3781",
"cli/src/tests/query_test.rs:887"
]
},
{
"query": "What does the CLI do?",
"matches": [
"cli/README.md:10",
"cli/loader/README.md:3",
"docs/section-5-implementation.md:14",
"docs/section-5-implementation.md:18"
]
}
]
}

View file

@ -0,0 +1,603 @@
use crate::{
parsing::{Span, SpanDigest},
SEMANTIC_INDEX_VERSION,
};
use ai::embedding::Embedding;
use anyhow::{anyhow, Context, Result};
use collections::HashMap;
use futures::channel::oneshot;
use gpui::BackgroundExecutor;
use ndarray::{Array1, Array2};
use ordered_float::OrderedFloat;
use project::Fs;
use rpc::proto::Timestamp;
use rusqlite::params;
use rusqlite::types::Value;
use std::{
future::Future,
ops::Range,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::SystemTime,
};
use util::{paths::PathMatcher, TryFutureExt};
/// Returns the indices that order `data` from largest to smallest.
///
/// Uses a stable ascending sort followed by a reversal, so among equal
/// elements the higher original index comes first.
pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
    let mut order: Vec<usize> = Vec::with_capacity(data.len());
    order.extend(0..data.len());
    order.sort_by(|&a, &b| data[a].cmp(&data[b]));
    order.reverse();
    order
}
/// A row from the `files` table describing one indexed file.
#[derive(Debug)]
pub struct FileRecord {
    // Rowid of the file in the `files` table.
    pub id: usize,
    // Path relative to the worktree root, as stored in the database.
    pub relative_path: String,
    // Modification time recorded when the file was last indexed.
    pub mtime: Timestamp,
}

/// Cloneable handle to the SQLite-backed vector store.
///
/// A single background task owns the actual `rusqlite::Connection`; callers
/// enqueue closures over the `transactions` channel and await their results.
#[derive(Clone)]
pub struct VectorDatabase {
    // Filesystem location of the SQLite database file.
    path: Arc<Path>,
    // Sends transaction closures to the dedicated connection task.
    transactions:
        smol::channel::Sender<Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>>,
}
impl VectorDatabase {
    /// Opens (or creates) the database at `path`, spawns the background task
    /// that owns the SQLite connection, and runs schema initialization.
    ///
    /// The returned handle is cheap to clone; all queries are serialized
    /// through the spawned task's channel.
    pub async fn new(
        fs: Arc<dyn Fs>,
        path: Arc<Path>,
        executor: BackgroundExecutor,
    ) -> Result<Self> {
        // Ensure the parent directory exists before SQLite opens the file.
        if let Some(db_directory) = path.parent() {
            fs.create_dir(db_directory).await?;
        }
        let (transactions_tx, transactions_rx) = smol::channel::unbounded::<
            Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>,
        >();
        executor
            .spawn({
                let path = path.clone();
                async move {
                    let mut connection = rusqlite::Connection::open(&path)?;
                    // Pragmas tuned for a write-heavy local index: WAL journal,
                    // relaxed sync, large cache, and in-memory temp storage.
                    connection.pragma_update(None, "journal_mode", "wal")?;
                    connection.pragma_update(None, "synchronous", "normal")?;
                    connection.pragma_update(None, "cache_size", 1000000)?;
                    connection.pragma_update(None, "temp_store", "MEMORY")?;
                    // Execute queued transactions serially until every sender
                    // (i.e. every clone of this handle) is dropped.
                    while let Ok(transaction) = transactions_rx.recv().await {
                        transaction(&mut connection);
                    }
                    anyhow::Ok(())
                }
                // Errors from the connection task are logged, not propagated.
                .log_err()
            })
            .detach();
        let this = Self {
            transactions: transactions_tx,
            path,
        };
        this.initialize_database().await?;
        Ok(this)
    }
    /// Location of the underlying SQLite database file.
    pub fn path(&self) -> &Arc<Path> {
        &self.path
    }
    /// Runs `f` inside a SQLite transaction on the background connection
    /// task, committing on success and forwarding the closure's result.
    ///
    /// Resolves to an error if the connection task has shut down
    /// ("connection was dropped") or if beginning/committing the
    /// transaction fails.
    fn transact<F, T>(&self, f: F) -> impl Future<Output = Result<T>>
    where
        F: 'static + Send + FnOnce(&rusqlite::Transaction) -> Result<T>,
        T: 'static + Send,
    {
        let (tx, rx) = oneshot::channel();
        let transactions = self.transactions.clone();
        async move {
            if transactions
                .send(Box::new(|connection| {
                    let result = connection
                        .transaction()
                        .map_err(|err| anyhow!(err))
                        .and_then(|transaction| {
                            let result = f(&transaction)?;
                            // An error before this point rolls the
                            // transaction back when it is dropped.
                            transaction.commit()?;
                            Ok(result)
                        });
                    // The awaiting side may have been dropped; ignore failure.
                    let _ = tx.send(result);
                }))
                .await
                .is_err()
            {
                return Err(anyhow!("connection was dropped"))?;
            }
            rx.await?
        }
    }
    /// Creates the schema from scratch, dropping all existing tables whenever
    /// the stored version differs from `SEMANTIC_INDEX_VERSION`.
    ///
    /// The index is treated as a disposable cache, so a version bump simply
    /// wipes and re-creates everything rather than migrating.
    fn initialize_database(&self) -> impl Future<Output = Result<()>> {
        self.transact(|db| {
            // Needed for the `rarray()` table-valued function used by queries.
            rusqlite::vtab::array::load_module(&db)?;
            // Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
            let version_query = db.prepare("SELECT version from semantic_index_config");
            let version = version_query
                .and_then(|mut query| query.query_row([], |row| Ok(row.get::<_, i64>(0)?)));
            if version.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64) {
                log::trace!("vector database schema up to date");
                return Ok(());
            }
            log::trace!("vector database schema out of date. updating...");
            // We renamed the `documents` table to `spans`, so we want to drop
            // `documents` without recreating it if it exists.
            db.execute("DROP TABLE IF EXISTS documents", [])
                .context("failed to drop 'documents' table")?;
            db.execute("DROP TABLE IF EXISTS spans", [])
                .context("failed to drop 'spans' table")?;
            db.execute("DROP TABLE IF EXISTS files", [])
                .context("failed to drop 'files' table")?;
            db.execute("DROP TABLE IF EXISTS worktrees", [])
                .context("failed to drop 'worktrees' table")?;
            db.execute("DROP TABLE IF EXISTS semantic_index_config", [])
                .context("failed to drop 'semantic_index_config' table")?;
            // Initialize Vector Databasing Tables
            db.execute(
                "CREATE TABLE semantic_index_config (
                version INTEGER NOT NULL
                )",
                [],
            )?;
            db.execute(
                "INSERT INTO semantic_index_config (version) VALUES (?1)",
                params![SEMANTIC_INDEX_VERSION],
            )?;
            // NOTE(review): `Connection::execute` handles a single statement;
            // the trailing CREATE UNIQUE INDEX in this one SQL string may
            // never run — verify against the rusqlite version in use.
            db.execute(
                "CREATE TABLE worktrees (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                absolute_path VARCHAR NOT NULL
            );
            CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
            ",
                [],
            )?;
            db.execute(
                "CREATE TABLE files (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                worktree_id INTEGER NOT NULL,
                relative_path VARCHAR NOT NULL,
                mtime_seconds INTEGER NOT NULL,
                mtime_nanos INTEGER NOT NULL,
                FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
                )",
                [],
            )?;
            db.execute(
                "CREATE UNIQUE INDEX files_worktree_id_and_relative_path ON files (worktree_id, relative_path)",
                [],
            )?;
            db.execute(
                "CREATE TABLE spans (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                file_id INTEGER NOT NULL,
                start_byte INTEGER NOT NULL,
                end_byte INTEGER NOT NULL,
                name VARCHAR NOT NULL,
                embedding BLOB NOT NULL,
                digest BLOB NOT NULL,
                FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
                )",
                [],
            )?;
            // Speeds up digest-based embedding reuse lookups.
            db.execute(
                "CREATE INDEX spans_digest ON spans (digest)",
                [],
            )?;
            log::trace!("vector database initialized with updated schema.");
            Ok(())
        })
    }
pub fn delete_file(
&self,
worktree_id: i64,
delete_path: Arc<Path>,
) -> impl Future<Output = Result<()>> {
self.transact(move |db| {
db.execute(
"DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
params![worktree_id, delete_path.to_str()],
)?;
Ok(())
})
}
    /// Replaces the `files` row for `(worktree_id, path)` with the new
    /// `mtime` and inserts one `spans` row per parsed span.
    ///
    /// REPLACE deletes any prior row (cascading away its old spans) before
    /// inserting, so `last_insert_rowid` below is the fresh file id.
    pub fn insert_file(
        &self,
        worktree_id: i64,
        path: Arc<Path>,
        mtime: SystemTime,
        spans: Vec<Span>,
    ) -> impl Future<Output = Result<()>> {
        self.transact(move |db| {
            // Return the existing ID, if both the file and mtime match
            let mtime = Timestamp::from(mtime);
            db.execute(
                "
                REPLACE INTO files
                (worktree_id, relative_path, mtime_seconds, mtime_nanos)
                VALUES (?1, ?2, ?3, ?4)
                ",
                params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos],
            )?;
            // Id of the row just inserted by REPLACE above.
            let file_id = db.last_insert_rowid();
            let mut query = db.prepare(
                "
                INSERT INTO spans
                (file_id, start_byte, end_byte, name, embedding, digest)
                VALUES (?1, ?2, ?3, ?4, ?5, ?6)
                ",
            )?;
            for span in spans {
                query.execute(params![
                    file_id,
                    // Byte offsets are stored as strings.
                    span.range.start.to_string(),
                    span.range.end.to_string(),
                    span.name,
                    span.embedding,
                    span.digest
                ])?;
            }
            Ok(())
        })
    }
pub fn worktree_previously_indexed(
&self,
worktree_root_path: &Path,
) -> impl Future<Output = Result<bool>> {
let worktree_root_path = worktree_root_path.to_string_lossy().into_owned();
self.transact(move |db| {
let mut worktree_query =
db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
let worktree_id = worktree_query
.query_row(params![worktree_root_path], |row| Ok(row.get::<_, i64>(0)?));
if worktree_id.is_ok() {
return Ok(true);
} else {
return Ok(false);
}
})
}
pub fn embeddings_for_digests(
&self,
digests: Vec<SpanDigest>,
) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
self.transact(move |db| {
let mut query = db.prepare(
"
SELECT digest, embedding
FROM spans
WHERE digest IN rarray(?)
",
)?;
let mut embeddings_by_digest = HashMap::default();
let digests = Rc::new(
digests
.into_iter()
.map(|p| Value::Blob(p.0.to_vec()))
.collect::<Vec<_>>(),
);
let rows = query.query_map(params![digests], |row| {
Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
})?;
for row in rows {
if let Ok(row) = row {
embeddings_by_digest.insert(row.0, row.1);
}
}
Ok(embeddings_by_digest)
})
}
pub fn embeddings_for_files(
&self,
worktree_id_file_paths: HashMap<i64, Vec<Arc<Path>>>,
) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
self.transact(move |db| {
let mut query = db.prepare(
"
SELECT digest, embedding
FROM spans
LEFT JOIN files ON files.id = spans.file_id
WHERE files.worktree_id = ? AND files.relative_path IN rarray(?)
",
)?;
let mut embeddings_by_digest = HashMap::default();
for (worktree_id, file_paths) in worktree_id_file_paths {
let file_paths = Rc::new(
file_paths
.into_iter()
.map(|p| Value::Text(p.to_string_lossy().into_owned()))
.collect::<Vec<_>>(),
);
let rows = query.query_map(params![worktree_id, file_paths], |row| {
Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
})?;
for row in rows {
if let Ok(row) = row {
embeddings_by_digest.insert(row.0, row.1);
}
}
}
Ok(embeddings_by_digest)
})
}
pub fn find_or_create_worktree(
&self,
worktree_root_path: Arc<Path>,
) -> impl Future<Output = Result<i64>> {
self.transact(move |db| {
let mut worktree_query =
db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
let worktree_id = worktree_query
.query_row(params![worktree_root_path.to_string_lossy()], |row| {
Ok(row.get::<_, i64>(0)?)
});
if worktree_id.is_ok() {
return Ok(worktree_id?);
}
// If worktree_id is Err, insert new worktree
db.execute(
"INSERT into worktrees (absolute_path) VALUES (?1)",
params![worktree_root_path.to_string_lossy()],
)?;
Ok(db.last_insert_rowid())
})
}
    /// Returns the recorded modification time of every indexed file in the
    /// given worktree, keyed by its relative path.
    pub fn get_file_mtimes(
        &self,
        worktree_id: i64,
    ) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT relative_path, mtime_seconds, mtime_nanos
                FROM files
                WHERE worktree_id = ?1
                ORDER BY relative_path",
            )?;
            let mut result: HashMap<PathBuf, SystemTime> = HashMap::default();
            for row in statement.query_map(params![worktree_id], |row| {
                Ok((
                    row.get::<_, String>(0)?.into(),
                    // Reassemble the proto Timestamp, then convert to SystemTime.
                    Timestamp {
                        seconds: row.get(1)?,
                        nanos: row.get(2)?,
                    }
                    .into(),
                ))
            })? {
                let row = row?;
                result.insert(row.0, row.1);
            }
            Ok(result)
        })
    }
pub fn top_k_search(
&self,
query_embedding: &Embedding,
limit: usize,
file_ids: &[i64],
) -> impl Future<Output = Result<Vec<(i64, OrderedFloat<f32>)>>> {
let file_ids = file_ids.to_vec();
let query = query_embedding.clone().0;
let query = Array1::from_vec(query);
self.transact(move |db| {
let mut query_statement = db.prepare(
"
SELECT
id, embedding
FROM
spans
WHERE
file_id IN rarray(?)
",
)?;
let deserialized_rows = query_statement
.query_map(params![ids_to_sql(&file_ids)], |row| {
Ok((row.get::<_, usize>(0)?, row.get::<_, Embedding>(1)?))
})?
.filter_map(|row| row.ok())
.collect::<Vec<(usize, Embedding)>>();
if deserialized_rows.len() == 0 {
return Ok(Vec::new());
}
// Get Length of Embeddings Returned
let embedding_len = deserialized_rows[0].1 .0.len();
let batch_n = 1000;
let mut batches = Vec::new();
let mut batch_ids = Vec::new();
let mut batch_embeddings: Vec<f32> = Vec::new();
deserialized_rows.iter().for_each(|(id, embedding)| {
batch_ids.push(id);
batch_embeddings.extend(&embedding.0);
if batch_ids.len() == batch_n {
let embeddings = std::mem::take(&mut batch_embeddings);
let ids = std::mem::take(&mut batch_ids);
let array =
Array2::from_shape_vec((ids.len(), embedding_len.clone()), embeddings);
match array {
Ok(array) => {
batches.push((ids, array));
}
Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
}
}
});
if batch_ids.len() > 0 {
let array = Array2::from_shape_vec(
(batch_ids.len(), embedding_len),
batch_embeddings.clone(),
);
match array {
Ok(array) => {
batches.push((batch_ids.clone(), array));
}
Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
}
}
let mut ids: Vec<usize> = Vec::new();
let mut results = Vec::new();
for (batch_ids, array) in batches {
let scores = array
.dot(&query.t())
.to_vec()
.iter()
.map(|score| OrderedFloat(*score))
.collect::<Vec<OrderedFloat<f32>>>();
results.extend(scores);
ids.extend(batch_ids);
}
let sorted_idx = argsort(&results);
let mut sorted_results = Vec::new();
let last_idx = limit.min(sorted_idx.len());
for idx in &sorted_idx[0..last_idx] {
sorted_results.push((ids[*idx] as i64, results[*idx]))
}
Ok(sorted_results)
})
}
    /// Returns the ids of files in the given worktrees whose relative path
    /// matches the include globs (or all files when `includes` is empty)
    /// and matches none of the exclude globs.
    pub fn retrieve_included_file_ids(
        &self,
        worktree_ids: &[i64],
        includes: &[PathMatcher],
        excludes: &[PathMatcher],
    ) -> impl Future<Output = Result<Vec<i64>>> {
        let worktree_ids = worktree_ids.to_vec();
        let includes = includes.to_vec();
        let excludes = excludes.to_vec();
        self.transact(move |db| {
            let mut file_query = db.prepare(
                "
                SELECT
                    id, relative_path
                FROM
                    files
                WHERE
                    worktree_id IN rarray(?)
                ",
            )?;
            let mut file_ids = Vec::<i64>::new();
            let mut rows = file_query.query([ids_to_sql(&worktree_ids)])?;
            // Glob filtering happens in Rust rather than SQL.
            while let Some(row) = rows.next()? {
                let file_id = row.get(0)?;
                let relative_path = row.get_ref(1)?.as_str()?;
                let included =
                    includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
                let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
                if included && !excluded {
                    file_ids.push(file_id);
                }
            }
            anyhow::Ok(file_ids)
        })
    }
    /// Resolves span ids into `(worktree_id, relative_path, byte_range)`
    /// tuples, preserving the order of `ids`.
    ///
    /// Fails if any requested id is missing from the database.
    pub fn spans_for_ids(
        &self,
        ids: &[i64],
    ) -> impl Future<Output = Result<Vec<(i64, PathBuf, Range<usize>)>>> {
        let ids = ids.to_vec();
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT
                    spans.id,
                    files.worktree_id,
                    files.relative_path,
                    spans.start_byte,
                    spans.end_byte
                FROM
                    spans, files
                WHERE
                    spans.file_id = files.id AND
                    spans.id in rarray(?)
                ",
            )?;
            let result_iter = statement.query_map(params![ids_to_sql(&ids)], |row| {
                Ok((
                    row.get::<_, i64>(0)?,
                    row.get::<_, i64>(1)?,
                    row.get::<_, String>(2)?.into(),
                    row.get(3)?..row.get(4)?,
                ))
            })?;
            // Collect into a map first: SQL result order is unspecified,
            // while the output must follow the order of `ids`.
            let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
            for row in result_iter {
                let (id, worktree_id, path, range) = row?;
                values_by_id.insert(id, (worktree_id, path, range));
            }
            let mut results = Vec::with_capacity(ids.len());
            for id in &ids {
                let value = values_by_id
                    .remove(id)
                    .ok_or(anyhow!("missing span id {}", id))?;
                results.push(value);
            }
            Ok(results)
        })
    }
}
/// Converts a list of rowids into the `Rc<Vec<Value>>` form expected by
/// SQLite's `rarray()` table-valued function.
fn ids_to_sql(ids: &[i64]) -> Rc<Vec<rusqlite::types::Value>> {
    Rc::new(
        ids.iter()
            .copied()
            // Point-free conversion; a closure here would be redundant.
            .map(rusqlite::types::Value::from)
            .collect::<Vec<_>>(),
    )
}

View file

@ -0,0 +1,169 @@
use crate::{parsing::Span, JobHandle};
use ai::embedding::EmbeddingProvider;
use gpui::BackgroundExecutor;
use parking_lot::Mutex;
use smol::channel;
use std::{mem, ops::Range, path::Path, sync::Arc, time::SystemTime};
/// A file whose spans are queued for embedding, plus the handle that tracks
/// the outstanding indexing job.
#[derive(Clone)]
pub struct FileToEmbed {
    pub worktree_id: i64,
    pub path: Arc<Path>,
    pub mtime: SystemTime,
    // Parsed spans; each `span.embedding` is filled in as batches complete.
    pub spans: Vec<Span>,
    pub job_handle: JobHandle,
}

// Manual impl so `job_handle` can be omitted (via `finish_non_exhaustive`).
impl std::fmt::Debug for FileToEmbed {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FileToEmbed")
            .field("worktree_id", &self.worktree_id)
            .field("path", &self.path)
            .field("mtime", &self.mtime)
            .field("spans", &self.spans)
            .finish_non_exhaustive()
    }
}

// Equality ignores `job_handle`, mirroring the Debug impl above.
impl PartialEq for FileToEmbed {
    fn eq(&self, other: &Self) -> bool {
        self.worktree_id == other.worktree_id
            && self.path == other.path
            && self.mtime == other.mtime
            && self.spans == other.spans
    }
}

/// Accumulates spans from many files into provider-sized batches and
/// forwards fully-embedded files to `finished_files_rx`.
pub struct EmbeddingQueue {
    embedding_provider: Arc<dyn EmbeddingProvider>,
    // Fragments waiting to be sent as the next embedding request.
    pending_batch: Vec<FileFragmentToEmbed>,
    executor: BackgroundExecutor,
    // Token total of the spans currently in `pending_batch`.
    pending_batch_token_count: usize,
    finished_files_tx: channel::Sender<FileToEmbed>,
    finished_files_rx: channel::Receiver<FileToEmbed>,
}

/// A contiguous slice (`span_range`) of one file's spans, shared across
/// batches via the `Arc<Mutex<_>>` around the file.
#[derive(Clone)]
pub struct FileFragmentToEmbed {
    file: Arc<Mutex<FileToEmbed>>,
    span_range: Range<usize>,
}
impl EmbeddingQueue {
pub fn new(
embedding_provider: Arc<dyn EmbeddingProvider>,
executor: BackgroundExecutor,
) -> Self {
let (finished_files_tx, finished_files_rx) = channel::unbounded();
Self {
embedding_provider,
executor,
pending_batch: Vec::new(),
pending_batch_token_count: 0,
finished_files_tx,
finished_files_rx,
}
}
    /// Enqueues a file's spans, splitting them into fragments so that no
    /// batch exceeds the provider's per-request token budget. Batches are
    /// flushed eagerly as the budget is reached.
    ///
    /// Files with no spans bypass the queue and are reported finished
    /// immediately.
    pub fn push(&mut self, file: FileToEmbed) {
        if file.spans.is_empty() {
            self.finished_files_tx.try_send(file).unwrap();
            return;
        }
        let file = Arc::new(Mutex::new(file));
        // Start an empty fragment for this file; its range grows below.
        self.pending_batch.push(FileFragmentToEmbed {
            file: file.clone(),
            span_range: 0..0,
        });
        let mut fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
        for (ix, span) in file.lock().spans.iter().enumerate() {
            // Spans with cached embeddings cost nothing toward the budget.
            let span_token_count = if span.embedding.is_none() {
                span.token_count
            } else {
                0
            };
            let next_token_count = self.pending_batch_token_count + span_token_count;
            if next_token_count > self.embedding_provider.max_tokens_per_batch() {
                let range_end = fragment_range.end;
                // Ship the current batch, then open a fresh fragment for the
                // remainder of this file and re-borrow its range.
                self.flush();
                self.pending_batch.push(FileFragmentToEmbed {
                    file: file.clone(),
                    span_range: range_end..range_end,
                });
                fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
            }
            fragment_range.end = ix + 1;
            self.pending_batch_token_count += span_token_count;
        }
    }
    /// Sends the current pending batch to the embedding provider on the
    /// background executor, writing embeddings back into each fragment's
    /// spans and emitting a file on `finished_files_tx` once its final
    /// fragment completes (detected via `Arc::into_inner`).
    pub fn flush(&mut self) {
        let batch = mem::take(&mut self.pending_batch);
        self.pending_batch_token_count = 0;
        if batch.is_empty() {
            return;
        }
        let finished_files_tx = self.finished_files_tx.clone();
        let embedding_provider = self.embedding_provider.clone();
        self.executor
            .spawn(async move {
                // Gather only spans that still need an embedding.
                let mut spans = Vec::new();
                for fragment in &batch {
                    let file = fragment.file.lock();
                    spans.extend(
                        file.spans[fragment.span_range.clone()]
                            .iter()
                            .filter(|d| d.embedding.is_none())
                            .map(|d| d.content.clone()),
                    );
                }
                // If spans is 0, just send the fragment to the finished files if its the last one.
                if spans.is_empty() {
                    for fragment in batch.clone() {
                        // into_inner succeeds only for the last Arc, i.e.
                        // when no other fragment still references the file.
                        if let Some(file) = Arc::into_inner(fragment.file) {
                            finished_files_tx.try_send(file.into_inner()).unwrap();
                        }
                    }
                    return;
                };
                match embedding_provider.embed_batch(spans).await {
                    Ok(embeddings) => {
                        // Results come back in request order; zip them onto
                        // the spans that were missing embeddings.
                        let mut embeddings = embeddings.into_iter();
                        for fragment in batch {
                            for span in &mut fragment.file.lock().spans[fragment.span_range.clone()]
                                .iter_mut()
                                .filter(|d| d.embedding.is_none())
                            {
                                if let Some(embedding) = embeddings.next() {
                                    span.embedding = Some(embedding);
                                } else {
                                    log::error!("number of embeddings != number of documents");
                                }
                            }
                            if let Some(file) = Arc::into_inner(fragment.file) {
                                finished_files_tx.try_send(file.into_inner()).unwrap();
                            }
                        }
                    }
                    Err(error) => {
                        log::error!("{:?}", error);
                    }
                }
            })
            .detach();
    }
    /// Receiver for files whose spans have all been embedded.
    pub fn finished_files(&self) -> channel::Receiver<FileToEmbed> {
        self.finished_files_rx.clone()
    }
}

View file

@ -0,0 +1,414 @@
use ai::{
embedding::{Embedding, EmbeddingProvider},
models::TruncationDirection,
};
use anyhow::{anyhow, Result};
use language::{Grammar, Language};
use rusqlite::{
types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef},
ToSql,
};
use sha1::{Digest, Sha1};
use std::{
borrow::Cow,
cmp::{self, Reverse},
collections::HashSet,
ops::Range,
path::Path,
sync::Arc,
};
use tree_sitter::{Parser, QueryCursor};
/// SHA-1 digest of a span's rendered content; used to deduplicate spans and
/// to look up previously computed embeddings in the database.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanDigest(pub [u8; 20]);
impl FromSql for SpanDigest {
    /// Reads a digest back from a 20-byte SQLite blob, failing with
    /// `InvalidBlobSize` when the stored blob has the wrong length.
    fn column_result(value: ValueRef) -> FromSqlResult<Self> {
        let blob = value.as_blob()?;
        let bytes =
            blob.try_into()
                .map_err(|_| rusqlite::types::FromSqlError::InvalidBlobSize {
                    expected_size: 20,
                    blob_size: blob.len(),
                })?;
        // Tail expression; the explicit `return` was redundant.
        Ok(SpanDigest(bytes))
    }
}
impl ToSql for SpanDigest {
    /// Stores the digest as a raw 20-byte blob.
    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
        self.0.to_sql()
    }
}
impl From<&'_ str> for SpanDigest {
fn from(value: &'_ str) -> Self {
let mut sha1 = Sha1::new();
sha1.update(value);
Self(sha1.finalize().into())
}
}
/// One embeddable unit of a source file: a named item (or whole file),
/// its rendered prompt content, and the embedding computed for it.
#[derive(Debug, PartialEq, Clone)]
pub struct Span {
    pub name: String,
    // Byte range of the item in the original file.
    pub range: Range<usize>,
    // Template-rendered text that is actually sent for embedding.
    pub content: String,
    // Filled in later by the embedding queue; None until then.
    pub embedding: Option<Embedding>,
    // SHA-1 of `content`, used for dedup and embedding reuse.
    pub digest: SpanDigest,
    pub token_count: usize,
}

// Prompt template for a single code item extracted by tree-sitter.
const CODE_CONTEXT_TEMPLATE: &str =
    "The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
// Prompt template used when a whole file becomes one span.
const ENTIRE_FILE_TEMPLATE: &str =
    "The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
// Prompt template for Markdown / plain-text files.
const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
// Languages that are always embedded as one whole-file span.
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &[
    "TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML", "Scheme",
];

/// Extracts embeddable spans from source files using each language's
/// tree-sitter embedding query.
pub struct CodeContextRetriever {
    pub parser: Parser,
    pub cursor: QueryCursor,
    pub embedding_provider: Arc<dyn EmbeddingProvider>,
}

// Every match has an item, this represents the fundamental treesitter symbol and anchors the search
// Every match has one or more 'name' captures. These indicate the display range of the item for deduplication.
// If there are preceeding comments, we track this with a context capture
// If there is a piece that should be collapsed in hierarchical queries, we capture it with a collapse capture
// If there is a piece that should be kept inside a collapsed node, we capture it with a keep capture
#[derive(Debug, Clone)]
pub struct CodeContextMatch {
    // Column of the item's first line; used to dedent its content.
    pub start_col: usize,
    pub item_range: Option<Range<usize>>,
    pub name_range: Option<Range<usize>>,
    pub context_ranges: Vec<Range<usize>>,
    // Collapse ranges with any keep ranges already subtracted out.
    pub collapse_ranges: Vec<Range<usize>>,
}
impl CodeContextRetriever {
    /// Creates a retriever with a fresh tree-sitter parser and query cursor.
    pub fn new(embedding_provider: Arc<dyn EmbeddingProvider>) -> Self {
        Self {
            parser: Parser::new(),
            cursor: QueryCursor::new(),
            embedding_provider,
        }
    }
fn parse_entire_file(
&self,
relative_path: Option<&Path>,
language_name: Arc<str>,
content: &str,
) -> Result<Vec<Span>> {
let document_span = ENTIRE_FILE_TEMPLATE
.replace(
"<path>",
&relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
)
.replace("<language>", language_name.as_ref())
.replace("<item>", &content);
let digest = SpanDigest::from(document_span.as_str());
let model = self.embedding_provider.base_model();
let document_span = model.truncate(
&document_span,
model.capacity()?,
ai::models::TruncationDirection::End,
)?;
let token_count = model.count_tokens(&document_span)?;
Ok(vec![Span {
range: 0..content.len(),
content: document_span,
embedding: Default::default(),
name: language_name.to_string(),
digest,
token_count,
}])
}
    /// Renders a Markdown or plain-text file as a single span via
    /// `MARKDOWN_CONTEXT_TEMPLATE`, truncated to the model's capacity.
    fn parse_markdown_file(
        &self,
        relative_path: Option<&Path>,
        content: &str,
    ) -> Result<Vec<Span>> {
        let document_span = MARKDOWN_CONTEXT_TEMPLATE
            .replace(
                "<path>",
                &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
            )
            .replace("<item>", &content);
        // Digest is computed before truncation, over the full rendering.
        let digest = SpanDigest::from(document_span.as_str());
        let model = self.embedding_provider.base_model();
        let document_span = model.truncate(
            &document_span,
            model.capacity()?,
            ai::models::TruncationDirection::End,
        )?;
        let token_count = model.count_tokens(&document_span)?;
        Ok(vec![Span {
            range: 0..content.len(),
            content: document_span,
            embedding: None,
            name: "Markdown".to_string(),
            digest,
            token_count,
        }])
    }
    /// Runs the language's embedding query over `content` and converts each
    /// tree-sitter match into a `CodeContextMatch`.
    ///
    /// Fails when the grammar has no embedding query or parsing fails.
    fn get_matches_in_file(
        &mut self,
        content: &str,
        grammar: &Arc<Grammar>,
    ) -> Result<Vec<CodeContextMatch>> {
        let embedding_config = grammar
            .embedding_config
            .as_ref()
            .ok_or_else(|| anyhow!("no embedding queries"))?;
        self.parser.set_language(grammar.ts_language).unwrap();
        let tree = self
            .parser
            .parse(&content, None)
            .ok_or_else(|| anyhow!("parsing failed"))?;
        let mut captures: Vec<CodeContextMatch> = Vec::new();
        // Scratch buffers, reused across matches.
        let mut collapse_ranges: Vec<Range<usize>> = Vec::new();
        let mut keep_ranges: Vec<Range<usize>> = Vec::new();
        for mat in self.cursor.matches(
            &embedding_config.query,
            tree.root_node(),
            content.as_bytes(),
        ) {
            let mut start_col = 0;
            let mut item_range: Option<Range<usize>> = None;
            let mut name_range: Option<Range<usize>> = None;
            let mut context_ranges: Vec<Range<usize>> = Vec::new();
            collapse_ranges.clear();
            keep_ranges.clear();
            // Sort each capture into its role per the embedding config.
            for capture in mat.captures {
                if capture.index == embedding_config.item_capture_ix {
                    item_range = Some(capture.node.byte_range());
                    // Remember the item's column so its body can be dedented.
                    start_col = capture.node.start_position().column;
                } else if Some(capture.index) == embedding_config.name_capture_ix {
                    name_range = Some(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.context_capture_ix {
                    context_ranges.push(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.collapse_capture_ix {
                    collapse_ranges.push(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.keep_capture_ix {
                    keep_ranges.push(capture.node.byte_range());
                }
            }
            captures.push(CodeContextMatch {
                start_col,
                item_range,
                name_range,
                context_ranges,
                // Keep ranges punch holes in the collapse ranges.
                collapse_ranges: subtract_ranges(&collapse_ranges, &keep_ranges),
            });
        }
        Ok(captures)
    }
pub fn parse_file_with_template(
&mut self,
relative_path: Option<&Path>,
content: &str,
language: Arc<Language>,
) -> Result<Vec<Span>> {
let language_name = language.name();
if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
return self.parse_entire_file(relative_path, language_name, &content);
} else if ["Markdown", "Plain Text"].contains(&language_name.as_ref()) {
return self.parse_markdown_file(relative_path, &content);
}
let mut spans = self.parse_file(content, language)?;
for span in &mut spans {
let document_content = CODE_CONTEXT_TEMPLATE
.replace(
"<path>",
&relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
)
.replace("<language>", language_name.as_ref())
.replace("item", &span.content);
let model = self.embedding_provider.base_model();
let document_content = model.truncate(
&document_content,
model.capacity()?,
TruncationDirection::End,
)?;
let token_count = model.count_tokens(&document_content)?;
span.content = document_content;
span.token_count = token_count;
}
Ok(spans)
}
    /// Extracts one `Span` per embedding-query match, deduplicating by name
    /// range, collapsing nested items to the language's placeholder, and
    /// prepending any captured context (e.g. preceding comments).
    ///
    /// `token_count` is left at 0 here; it is filled in by
    /// `parse_file_with_template` after the prompt template is applied.
    pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Span>> {
        let grammar = language
            .grammar()
            .ok_or_else(|| anyhow!("no grammar for language"))?;
        // Iterate through query matches
        let matches = self.get_matches_in_file(content, grammar)?;
        let language_scope = language.default_scope();
        let placeholder = language_scope.collapsed_placeholder();
        let mut spans = Vec::new();
        let mut collapsed_ranges_within = Vec::new();
        let mut parsed_name_ranges = HashSet::new();
        for (i, context_match) in matches.iter().enumerate() {
            // Items which are collapsible but not embeddable have no item range
            let item_range = if let Some(item_range) = context_match.item_range.clone() {
                item_range
            } else {
                continue;
            };
            // Checks for deduplication
            let name;
            if let Some(name_range) = context_match.name_range.clone() {
                name = content
                    .get(name_range.clone())
                    .map_or(String::new(), |s| s.to_string());
                if parsed_name_ranges.contains(&name_range) {
                    continue;
                }
                parsed_name_ranges.insert(name_range);
            } else {
                name = String::new();
            }
            // Collect collapse ranges from later matches that fall inside
            // this item; matches are ordered, so stop at the first one that
            // falls outside.
            collapsed_ranges_within.clear();
            'outer: for remaining_match in &matches[(i + 1)..] {
                for collapsed_range in &remaining_match.collapse_ranges {
                    if item_range.start <= collapsed_range.start
                        && item_range.end >= collapsed_range.end
                    {
                        collapsed_ranges_within.push(collapsed_range.clone());
                    } else {
                        break 'outer;
                    }
                }
            }
            collapsed_ranges_within.sort_by_key(|r| (r.start, Reverse(r.end)));
            let mut span_content = String::new();
            // Context (e.g. doc comments) goes first, dedented to the item.
            for context_range in &context_match.context_ranges {
                add_content_from_range(
                    &mut span_content,
                    content,
                    context_range.clone(),
                    context_match.start_col,
                );
                span_content.push_str("\n");
            }
            // Walk the item, emitting text between collapse ranges and the
            // placeholder in place of each collapsed region.
            let mut offset = item_range.start;
            for collapsed_range in &collapsed_ranges_within {
                if collapsed_range.start > offset {
                    add_content_from_range(
                        &mut span_content,
                        content,
                        offset..collapsed_range.start,
                        context_match.start_col,
                    );
                    offset = collapsed_range.start;
                }
                if collapsed_range.end > offset {
                    span_content.push_str(placeholder);
                    offset = collapsed_range.end;
                }
            }
            // Tail of the item after the last collapsed region.
            if offset < item_range.end {
                add_content_from_range(
                    &mut span_content,
                    content,
                    offset..item_range.end,
                    context_match.start_col,
                );
            }
            let sha1 = SpanDigest::from(span_content.as_str());
            spans.push(Span {
                name,
                content: span_content,
                range: item_range.clone(),
                embedding: None,
                digest: sha1,
                // Counted later, once the prompt template is applied.
                token_count: 0,
            })
        }
        return Ok(spans);
    }
}
/// Removes `ranges_to_subtract` from `ranges`, returning the surviving
/// sub-ranges in order.
///
/// Both inputs are expected to be sorted; subtracted ranges are consumed
/// left-to-right as the cursor passes their end.
pub(crate) fn subtract_ranges(
    ranges: &[Range<usize>],
    ranges_to_subtract: &[Range<usize>],
) -> Vec<Range<usize>> {
    let mut kept = Vec::new();
    let mut holes = ranges_to_subtract.iter().peekable();
    for source in ranges {
        let mut cursor = source.start;
        while cursor < source.end {
            match holes.peek() {
                Some(hole) => {
                    if cursor < hole.start {
                        // Emit the gap before the next hole (clamped to source).
                        let stop = cmp::min(hole.start, source.end);
                        kept.push(cursor..stop);
                        cursor = stop;
                    } else {
                        // Inside the hole: skip forward without emitting.
                        cursor = cmp::min(hole.end, source.end);
                    }
                    // Once the hole is fully behind the cursor, retire it.
                    if cursor >= hole.end {
                        holes.next();
                    }
                }
                None => {
                    // No holes remain; keep the rest of this range.
                    kept.push(cursor..source.end);
                    cursor = source.end;
                }
            }
        }
    }
    kept
}
/// Appends `content[range]` to `output` line by line, stripping up to
/// `start_col` leading spaces from each line (dedenting to the item's
/// column), then drops the final trailing newline character.
fn add_content_from_range(
    output: &mut String,
    content: &str,
    range: Range<usize>,
    start_col: usize,
) {
    let snippet = content.get(range).unwrap_or("");
    for mut line in snippet.lines() {
        let mut stripped = 0;
        while stripped < start_col {
            if let Some(rest) = line.strip_prefix(' ') {
                line = rest;
                stripped += 1;
            } else {
                break;
            }
        }
        output.push_str(line);
        output.push('\n');
    }
    output.pop();
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,28 @@
use anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
/// Resolved settings for the semantic index feature.
#[derive(Deserialize, Debug)]
pub struct SemanticIndexSettings {
    // Whether semantic indexing/search is enabled.
    pub enabled: bool,
}

/// Settings-file representation; fields are optional so user files can
/// override only the keys they set.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct SemanticIndexSettingsContent {
    pub enabled: Option<bool>,
}

impl Settings for SemanticIndexSettings {
    // These settings live under the "semantic_index" key.
    const KEY: Option<&'static str> = Some("semantic_index");
    type FileContent = SemanticIndexSettingsContent;
    /// Merges defaults with user overrides via JSON merge.
    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1132,6 +1132,7 @@ mod tests {
}) })
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
(wt, entry) (wt, entry)

View file

@ -299,11 +299,8 @@ impl TerminalView {
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) { ) {
self.context_menu = Some(ContextMenu::build(cx, |menu, cx| { self.context_menu = Some(ContextMenu::build(cx, |menu, cx| {
menu.action("Clear", Box::new(Clear), cx).action( menu.action("Clear", Box::new(Clear))
"Close", .action("Close", Box::new(CloseActiveItem { save_intent: None }))
Box::new(CloseActiveItem { save_intent: None }),
cx,
)
})); }));
dbg!(&position); dbg!(&position);
// todo!() // todo!()
@ -1173,6 +1170,7 @@ mod tests {
}) })
}) })
.await .await
.unwrap()
.unwrap(); .unwrap();
(wt, entry) (wt, entry)

View file

@ -2,14 +2,14 @@ use feature_flags::FeatureFlagAppExt;
use fs::Fs; use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{ use gpui::{
actions, AppContext, DismissEvent, EventEmitter, FocusableView, Render, SharedString, View, actions, AppContext, DismissEvent, Div, EventEmitter, FocusableView, Render, SharedString,
ViewContext, VisualContext, WeakView, View, ViewContext, VisualContext, WeakView,
}; };
use picker::{Picker, PickerDelegate}; use picker::{Picker, PickerDelegate};
use settings::{update_settings_file, SettingsStore}; use settings::{update_settings_file, SettingsStore};
use std::sync::Arc; use std::sync::Arc;
use theme::{Theme, ThemeRegistry, ThemeSettings}; use theme::{Theme, ThemeRegistry, ThemeSettings};
use ui::{prelude::*, ListItem}; use ui::{prelude::*, v_stack, ListItem};
use util::ResultExt; use util::ResultExt;
use workspace::{ui::HighlightedLabel, Workspace}; use workspace::{ui::HighlightedLabel, Workspace};
@ -65,10 +65,10 @@ impl FocusableView for ThemeSelector {
} }
impl Render for ThemeSelector { impl Render for ThemeSelector {
type Element = View<Picker<ThemeSelectorDelegate>>; type Element = Div;
fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element { fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
self.picker.clone() v_stack().min_w_96().child(self.picker.clone())
} }
} }
@ -98,7 +98,7 @@ impl ThemeSelectorDelegate {
let original_theme = cx.theme().clone(); let original_theme = cx.theme().clone();
let staff_mode = cx.is_staff(); let staff_mode = cx.is_staff();
let registry = cx.global::<Arc<ThemeRegistry>>(); let registry = cx.global::<ThemeRegistry>();
let theme_names = registry.list(staff_mode).collect::<Vec<_>>(); let theme_names = registry.list(staff_mode).collect::<Vec<_>>();
//todo!(theme sorting) //todo!(theme sorting)
// theme_names.sort_unstable_by(|a, b| a.is_light.cmp(&b.is_light).then(a.name.cmp(&b.name))); // theme_names.sort_unstable_by(|a, b| a.is_light.cmp(&b.is_light).then(a.name.cmp(&b.name)));
@ -126,7 +126,7 @@ impl ThemeSelectorDelegate {
fn show_selected_theme(&mut self, cx: &mut ViewContext<Picker<ThemeSelectorDelegate>>) { fn show_selected_theme(&mut self, cx: &mut ViewContext<Picker<ThemeSelectorDelegate>>) {
if let Some(mat) = self.matches.get(self.selected_index) { if let Some(mat) = self.matches.get(self.selected_index) {
let registry = cx.global::<Arc<ThemeRegistry>>(); let registry = cx.global::<ThemeRegistry>();
match registry.get(&mat.string) { match registry.get(&mat.string) {
Ok(theme) => { Ok(theme) => {
Self::set_theme(theme, cx); Self::set_theme(theme, cx);

View file

@ -7,7 +7,7 @@ use gpui::{
IntoElement, Render, View, VisualContext, IntoElement, Render, View, VisualContext,
}; };
use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev};
use std::rc::Rc; use std::{rc::Rc, time::Duration};
pub enum ContextMenuItem { pub enum ContextMenuItem {
Separator, Separator,
@ -16,7 +16,7 @@ pub enum ContextMenuItem {
label: SharedString, label: SharedString,
icon: Option<Icon>, icon: Option<Icon>,
handler: Rc<dyn Fn(&mut WindowContext)>, handler: Rc<dyn Fn(&mut WindowContext)>,
key_binding: Option<KeyBinding>, action: Option<Box<dyn Action>>,
}, },
} }
@ -24,6 +24,7 @@ pub struct ContextMenu {
items: Vec<ContextMenuItem>, items: Vec<ContextMenuItem>,
focus_handle: FocusHandle, focus_handle: FocusHandle,
selected_index: Option<usize>, selected_index: Option<usize>,
delayed: bool,
} }
impl FocusableView for ContextMenu { impl FocusableView for ContextMenu {
@ -46,6 +47,7 @@ impl ContextMenu {
items: Default::default(), items: Default::default(),
focus_handle: cx.focus_handle(), focus_handle: cx.focus_handle(),
selected_index: None, selected_index: None,
delayed: false,
}, },
cx, cx,
) )
@ -70,36 +72,26 @@ impl ContextMenu {
self.items.push(ContextMenuItem::Entry { self.items.push(ContextMenuItem::Entry {
label: label.into(), label: label.into(),
handler: Rc::new(on_click), handler: Rc::new(on_click),
key_binding: None,
icon: None, icon: None,
action: None,
}); });
self self
} }
pub fn action( pub fn action(mut self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
mut self,
label: impl Into<SharedString>,
action: Box<dyn Action>,
cx: &mut WindowContext,
) -> Self {
self.items.push(ContextMenuItem::Entry { self.items.push(ContextMenuItem::Entry {
label: label.into(), label: label.into(),
key_binding: KeyBinding::for_action(&*action, cx), action: Some(action.boxed_clone()),
handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())), handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())),
icon: None, icon: None,
}); });
self self
} }
pub fn link( pub fn link(mut self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
mut self,
label: impl Into<SharedString>,
action: Box<dyn Action>,
cx: &mut WindowContext,
) -> Self {
self.items.push(ContextMenuItem::Entry { self.items.push(ContextMenuItem::Entry {
label: label.into(), label: label.into(),
key_binding: KeyBinding::for_action(&*action, cx), action: Some(action.boxed_clone()),
handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())), handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())),
icon: Some(Icon::Link), icon: Some(Icon::Link),
}); });
@ -161,6 +153,37 @@ impl ContextMenu {
self.select_last(&Default::default(), cx); self.select_last(&Default::default(), cx);
} }
} }
pub fn on_action_dispatch(&mut self, dispatched: &Box<dyn Action>, cx: &mut ViewContext<Self>) {
if let Some(ix) = self.items.iter().position(|item| {
if let ContextMenuItem::Entry {
action: Some(action),
..
} = item
{
action.partial_eq(&**dispatched)
} else {
false
}
}) {
self.selected_index = Some(ix);
self.delayed = true;
cx.notify();
let action = dispatched.boxed_clone();
cx.spawn(|this, mut cx| async move {
cx.background_executor()
.timer(Duration::from_millis(50))
.await;
this.update(&mut cx, |this, cx| {
cx.dispatch_action(action);
this.cancel(&Default::default(), cx)
})
})
.detach_and_log_err(cx);
} else {
cx.propagate()
}
}
} }
impl ContextMenuItem { impl ContextMenuItem {
@ -185,6 +208,22 @@ impl Render for ContextMenu {
.on_action(cx.listener(ContextMenu::select_prev)) .on_action(cx.listener(ContextMenu::select_prev))
.on_action(cx.listener(ContextMenu::confirm)) .on_action(cx.listener(ContextMenu::confirm))
.on_action(cx.listener(ContextMenu::cancel)) .on_action(cx.listener(ContextMenu::cancel))
.when(!self.delayed, |mut el| {
for item in self.items.iter() {
if let ContextMenuItem::Entry {
action: Some(action),
..
} = item
{
el = el.on_boxed_action(
action,
cx.listener(ContextMenu::on_action_dispatch),
);
}
}
el
})
.on_blur(cx.listener(|this, _, cx| this.cancel(&Default::default(), cx)))
.flex_none() .flex_none()
.child( .child(
List::new().children(self.items.iter().enumerate().map( List::new().children(self.items.iter().enumerate().map(
@ -196,8 +235,8 @@ impl Render for ContextMenu {
ContextMenuItem::Entry { ContextMenuItem::Entry {
label, label,
handler, handler,
key_binding,
icon, icon,
action,
} => { } => {
let handler = handler.clone(); let handler = handler.clone();
let dismiss = cx.listener(|_, _, cx| cx.emit(DismissEvent)); let dismiss = cx.listener(|_, _, cx| cx.emit(DismissEvent));
@ -218,11 +257,10 @@ impl Render for ContextMenu {
.w_full() .w_full()
.justify_between() .justify_between()
.child(label_element) .child(label_element)
.children( .children(action.as_ref().and_then(|action| {
key_binding KeyBinding::for_action(&**action, cx)
.clone() .map(|binding| div().ml_1().child(binding))
.map(|binding| div().ml_1().child(binding)), })),
),
) )
.selected(Some(ix) == self.selected_index) .selected(Some(ix) == self.selected_index)
.on_click(move |event, cx| { .on_click(move |event, cx| {

View file

@ -1,5 +1,5 @@
use crate::{h_stack, prelude::*, Icon, IconElement, IconSize}; use crate::{h_stack, prelude::*, Icon, IconElement, IconSize};
use gpui::{relative, rems, Action, Div, IntoElement, Keystroke}; use gpui::{relative, rems, Action, Div, FocusHandle, IntoElement, Keystroke};
#[derive(IntoElement, Clone)] #[derive(IntoElement, Clone)]
pub struct KeyBinding { pub struct KeyBinding {
@ -49,12 +49,21 @@ impl RenderOnce for KeyBinding {
impl KeyBinding { impl KeyBinding {
pub fn for_action(action: &dyn Action, cx: &mut WindowContext) -> Option<Self> { pub fn for_action(action: &dyn Action, cx: &mut WindowContext) -> Option<Self> {
// todo! this last is arbitrary, we want to prefer users key bindings over defaults,
// and vim over normal (in vim mode), etc.
let key_binding = cx.bindings_for_action(action).last().cloned()?; let key_binding = cx.bindings_for_action(action).last().cloned()?;
Some(Self::new(key_binding)) Some(Self::new(key_binding))
} }
// like for_action(), but lets you specify the context from which keybindings
// are matched.
pub fn for_action_in(
action: &dyn Action,
focus: &FocusHandle,
cx: &mut WindowContext,
) -> Option<Self> {
let key_binding = cx.bindings_for_action_in(action, focus).last().cloned()?;
Some(Self::new(key_binding))
}
fn icon_for_key(keystroke: &Keystroke) -> Option<Icon> { fn icon_for_key(keystroke: &Keystroke) -> Option<Icon> {
let mut icon: Option<Icon> = None; let mut icon: Option<Icon> = None;

View file

@ -1,7 +1,8 @@
use std::rc::Rc; use std::rc::Rc;
use gpui::{ use gpui::{
px, AnyElement, ClickEvent, Div, ImageSource, MouseButton, MouseDownEvent, Pixels, Stateful, px, AnyElement, AnyView, ClickEvent, Div, ImageSource, MouseButton, MouseDownEvent, Pixels,
Stateful,
}; };
use smallvec::SmallVec; use smallvec::SmallVec;
@ -21,6 +22,7 @@ pub struct ListItem {
inset: bool, inset: bool,
on_click: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>, on_click: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>,
on_toggle: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>, on_toggle: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>,
tooltip: Option<Box<dyn Fn(&mut WindowContext) -> AnyView + 'static>>,
on_secondary_mouse_down: Option<Rc<dyn Fn(&MouseDownEvent, &mut WindowContext) + 'static>>, on_secondary_mouse_down: Option<Rc<dyn Fn(&MouseDownEvent, &mut WindowContext) + 'static>>,
children: SmallVec<[AnyElement; 2]>, children: SmallVec<[AnyElement; 2]>,
} }
@ -38,6 +40,7 @@ impl ListItem {
on_click: None, on_click: None,
on_secondary_mouse_down: None, on_secondary_mouse_down: None,
on_toggle: None, on_toggle: None,
tooltip: None,
children: SmallVec::new(), children: SmallVec::new(),
} }
} }
@ -55,6 +58,11 @@ impl ListItem {
self self
} }
pub fn tooltip(mut self, tooltip: impl Fn(&mut WindowContext) -> AnyView + 'static) -> Self {
self.tooltip = Some(Box::new(tooltip));
self
}
pub fn inset(mut self, inset: bool) -> Self { pub fn inset(mut self, inset: bool) -> Self {
self.inset = inset; self.inset = inset;
self self
@ -149,6 +157,7 @@ impl RenderOnce for ListItem {
(on_mouse_down)(event, cx) (on_mouse_down)(event, cx)
}) })
}) })
.when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip))
.child( .child(
div() div()
.when(self.inset, |this| this.px_2()) .when(self.inset, |this| this.px_2())

View file

@ -219,9 +219,11 @@ impl PathMatcher {
} }
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool { pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) let other_path = other.as_ref();
|| self.glob.is_match(&other) other_path.starts_with(&self.maybe_path)
|| self.check_with_end_separator(other.as_ref()) || other_path.ends_with(&self.maybe_path)
|| self.glob.is_match(other_path)
|| self.check_with_end_separator(other_path)
} }
fn check_with_end_separator(&self, path: &Path) -> bool { fn check_with_end_separator(&self, path: &Path) -> bool {
@ -418,4 +420,14 @@ mod tests {
"Path matcher {path_matcher} should match {path:?}" "Path matcher {path_matcher} should match {path:?}"
); );
} }
#[test]
fn project_search() {
let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules");
let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
assert!(
path_matcher.is_match(&path),
"Path matcher {path_matcher} should match {path:?}"
);
}
} }

View file

@ -481,18 +481,21 @@ impl Pane {
pub(crate) fn open_item( pub(crate) fn open_item(
&mut self, &mut self,
project_entry_id: ProjectEntryId, project_entry_id: Option<ProjectEntryId>,
focus_item: bool, focus_item: bool,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>, build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> { ) -> Box<dyn ItemHandle> {
let mut existing_item = None; let mut existing_item = None;
for (index, item) in self.items.iter().enumerate() { if let Some(project_entry_id) = project_entry_id {
if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id] for (index, item) in self.items.iter().enumerate() {
{ if item.is_singleton(cx)
let item = item.boxed_clone(); && item.project_entry_ids(cx).as_slice() == [project_entry_id]
existing_item = Some((index, item)); {
break; let item = item.boxed_clone();
existing_item = Some((index, item));
break;
}
} }
} }

View file

@ -2129,13 +2129,13 @@ impl Workspace {
}) })
} }
pub(crate) fn load_path( fn load_path(
&mut self, &mut self,
path: ProjectPath, path: ProjectPath,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> Task< ) -> Task<
Result<( Result<(
ProjectEntryId, Option<ProjectEntryId>,
impl 'static + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>, impl 'static + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
)>, )>,
> { > {

View file

@ -20,6 +20,7 @@ test-support = [
[dependencies] [dependencies]
db = { path = "../db2", package = "db2" } db = { path = "../db2", package = "db2" }
call = { path = "../call2", package = "call2" }
client = { path = "../client2", package = "client2" } client = { path = "../client2", package = "client2" }
collections = { path = "../collections" } collections = { path = "../collections" }
# context_menu = { path = "../context_menu" } # context_menu = { path = "../context_menu" }
@ -36,7 +37,6 @@ theme = { path = "../theme2", package = "theme2" }
util = { path = "../util" } util = { path = "../util" }
ui = { package = "ui2", path = "../ui2" } ui = { package = "ui2", path = "../ui2" }
async-trait.workspace = true
async-recursion = "1.0.0" async-recursion = "1.0.0"
itertools = "0.10" itertools = "0.10"
bincode = "1.2.1" bincode = "1.2.1"

View file

@ -538,18 +538,21 @@ impl Pane {
pub(crate) fn open_item( pub(crate) fn open_item(
&mut self, &mut self,
project_entry_id: ProjectEntryId, project_entry_id: Option<ProjectEntryId>,
focus_item: bool, focus_item: bool,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>, build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> { ) -> Box<dyn ItemHandle> {
let mut existing_item = None; let mut existing_item = None;
for (index, item) in self.items.iter().enumerate() { if let Some(project_entry_id) = project_entry_id {
if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id] for (index, item) in self.items.iter().enumerate() {
{ if item.is_singleton(cx)
let item = item.boxed_clone(); && item.project_entry_ids(cx).as_slice() == [project_entry_id]
existing_item = Some((index, item)); {
break; let item = item.boxed_clone();
existing_item = Some((index, item));
break;
}
} }
} }
@ -1545,22 +1548,17 @@ impl Pane {
right_click_menu(ix).trigger(tab).menu(|cx| { right_click_menu(ix).trigger(tab).menu(|cx| {
ContextMenu::build(cx, |menu, cx| { ContextMenu::build(cx, |menu, cx| {
menu.action( menu.action("Close", CloseActiveItem { save_intent: None }.boxed_clone())
"Close", .action("Close Others", CloseInactiveItems.boxed_clone())
CloseActiveItem { save_intent: None }.boxed_clone(), .separator()
cx, .action("Close Left", CloseItemsToTheLeft.boxed_clone())
) .action("Close Right", CloseItemsToTheRight.boxed_clone())
.action("Close Others", CloseInactiveItems.boxed_clone(), cx) .separator()
.separator() .action("Close Clean", CloseCleanItems.boxed_clone())
.action("Close Left", CloseItemsToTheLeft.boxed_clone(), cx) .action(
.action("Close Right", CloseItemsToTheRight.boxed_clone(), cx) "Close All",
.separator() CloseAllItems { save_intent: None }.boxed_clone(),
.action("Close Clean", CloseCleanItems.boxed_clone(), cx) )
.action(
"Close All",
CloseAllItems { save_intent: None }.boxed_clone(),
cx,
)
}) })
}) })
} }
@ -1653,13 +1651,12 @@ impl Pane {
.icon_size(IconSize::Small) .icon_size(IconSize::Small)
.on_click(cx.listener(|this, _, cx| { .on_click(cx.listener(|this, _, cx| {
let menu = ContextMenu::build(cx, |menu, cx| { let menu = ContextMenu::build(cx, |menu, cx| {
menu.action("New File", NewFile.boxed_clone(), cx) menu.action("New File", NewFile.boxed_clone())
.action( .action(
"New Terminal", "New Terminal",
NewCenterTerminal.boxed_clone(), NewCenterTerminal.boxed_clone(),
cx,
) )
.action("New Search", NewSearch.boxed_clone(), cx) .action("New Search", NewSearch.boxed_clone())
}); });
cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| { cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| {
this.focus(cx); this.focus(cx);
@ -1677,10 +1674,10 @@ impl Pane {
.icon_size(IconSize::Small) .icon_size(IconSize::Small)
.on_click(cx.listener(|this, _, cx| { .on_click(cx.listener(|this, _, cx| {
let menu = ContextMenu::build(cx, |menu, cx| { let menu = ContextMenu::build(cx, |menu, cx| {
menu.action("Split Right", SplitRight.boxed_clone(), cx) menu.action("Split Right", SplitRight.boxed_clone())
.action("Split Left", SplitLeft.boxed_clone(), cx) .action("Split Left", SplitLeft.boxed_clone())
.action("Split Up", SplitUp.boxed_clone(), cx) .action("Split Up", SplitUp.boxed_clone())
.action("Split Down", SplitDown.boxed_clone(), cx) .action("Split Down", SplitDown.boxed_clone())
}); });
cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| { cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| {
this.focus(cx); this.focus(cx);

View file

@ -1,18 +1,20 @@
use crate::{AppState, FollowerState, Pane, Workspace}; use crate::{AppState, FollowerState, Pane, Workspace};
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, Result};
use call::{ActiveCall, ParticipantLocation};
use collections::HashMap; use collections::HashMap;
use db::sqlez::{ use db::sqlez::{
bindable::{Bind, Column, StaticColumnCount}, bindable::{Bind, Column, StaticColumnCount},
statement::Statement, statement::Statement,
}; };
use gpui::{ use gpui::{
point, size, AnyWeakView, Bounds, Div, IntoElement, Model, Pixels, Point, View, ViewContext, point, size, AnyWeakView, Bounds, Div, Entity as _, IntoElement, Model, Pixels, Point, View,
ViewContext,
}; };
use parking_lot::Mutex; use parking_lot::Mutex;
use project::Project; use project::Project;
use serde::Deserialize; use serde::Deserialize;
use std::sync::Arc; use std::sync::Arc;
use ui::prelude::*; use ui::{prelude::*, Button};
const HANDLE_HITBOX_SIZE: f32 = 4.0; const HANDLE_HITBOX_SIZE: f32 = 4.0;
const HORIZONTAL_MIN_SIZE: f32 = 80.; const HORIZONTAL_MIN_SIZE: f32 = 80.;
@ -126,6 +128,7 @@ impl PaneGroup {
&self, &self,
project: &Model<Project>, project: &Model<Project>,
follower_states: &HashMap<View<Pane>, FollowerState>, follower_states: &HashMap<View<Pane>, FollowerState>,
active_call: Option<&Model<ActiveCall>>,
active_pane: &View<Pane>, active_pane: &View<Pane>,
zoomed: Option<&AnyWeakView>, zoomed: Option<&AnyWeakView>,
app_state: &Arc<AppState>, app_state: &Arc<AppState>,
@ -135,6 +138,7 @@ impl PaneGroup {
project, project,
0, 0,
follower_states, follower_states,
active_call,
active_pane, active_pane,
zoomed, zoomed,
app_state, app_state,
@ -196,6 +200,7 @@ impl Member {
project: &Model<Project>, project: &Model<Project>,
basis: usize, basis: usize,
follower_states: &HashMap<View<Pane>, FollowerState>, follower_states: &HashMap<View<Pane>, FollowerState>,
active_call: Option<&Model<ActiveCall>>,
active_pane: &View<Pane>, active_pane: &View<Pane>,
zoomed: Option<&AnyWeakView>, zoomed: Option<&AnyWeakView>,
app_state: &Arc<AppState>, app_state: &Arc<AppState>,
@ -203,19 +208,89 @@ impl Member {
) -> impl IntoElement { ) -> impl IntoElement {
match self { match self {
Member::Pane(pane) => { Member::Pane(pane) => {
// todo!() let leader = follower_states.get(pane).and_then(|state| {
// let pane_element = if Some(pane.into()) == zoomed { let room = active_call?.read(cx).room()?.read(cx);
// None room.remote_participant_for_peer_id(state.leader_id)
// } else { });
// Some(pane)
// };
div().size_full().child(pane.clone()).into_any() let mut leader_border = None;
let mut leader_status_box = None;
if let Some(leader) = &leader {
let mut leader_color = cx
.theme()
.players()
.color_for_participant(leader.participant_index.0)
.cursor;
leader_color.fade_out(0.3);
leader_border = Some(leader_color);
// Stack::new() leader_status_box = match leader.location {
// .with_child(pane_element.contained().with_border(leader_border)) ParticipantLocation::SharedProject {
// .with_children(leader_status_box) project_id: leader_project_id,
// .into_any() } => {
if Some(leader_project_id) == project.read(cx).remote_id() {
None
} else {
let leader_user = leader.user.clone();
let leader_user_id = leader.user.id;
Some(
Button::new(
("leader-status", pane.entity_id()),
format!(
"Follow {} to their active project",
leader_user.github_login,
),
)
.on_click(cx.listener(
move |this, _, cx| {
crate::join_remote_project(
leader_project_id,
leader_user_id,
this.app_state().clone(),
cx,
)
.detach_and_log_err(cx);
},
)),
)
}
}
ParticipantLocation::UnsharedProject => Some(Button::new(
("leader-status", pane.entity_id()),
format!(
"{} is viewing an unshared Zed project",
leader.user.github_login
),
)),
ParticipantLocation::External => Some(Button::new(
("leader-status", pane.entity_id()),
format!(
"{} is viewing a window outside of Zed",
leader.user.github_login
),
)),
};
}
div()
.relative()
.size_full()
.child(pane.clone())
.when_some(leader_border, |this, color| {
this.border_2().border_color(color)
})
.when_some(leader_status_box, |this, status_box| {
this.child(
div()
.absolute()
.w_96()
.bottom_3()
.right_3()
.z_index(1)
.child(status_box),
)
})
.into_any()
// let el = div() // let el = div()
// .flex() // .flex()

View file

@ -1,5 +1,9 @@
use crate::participant::{Frame, RemoteVideoTrack}; use crate::{
item::{Item, ItemEvent},
ItemNavHistory, WorkspaceId,
};
use anyhow::Result; use anyhow::Result;
use call::participant::{Frame, RemoteVideoTrack};
use client::{proto::PeerId, User}; use client::{proto::PeerId, User};
use futures::StreamExt; use futures::StreamExt;
use gpui::{ use gpui::{
@ -9,7 +13,6 @@ use gpui::{
}; };
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use ui::{h_stack, Icon, IconElement}; use ui::{h_stack, Icon, IconElement};
use workspace::{item::Item, ItemNavHistory, WorkspaceId};
pub enum Event { pub enum Event {
Close, Close,
@ -56,7 +59,7 @@ impl SharedScreen {
} }
impl EventEmitter<Event> for SharedScreen {} impl EventEmitter<Event> for SharedScreen {}
impl EventEmitter<workspace::item::ItemEvent> for SharedScreen {} impl EventEmitter<ItemEvent> for SharedScreen {}
impl FocusableView for SharedScreen { impl FocusableView for SharedScreen {
fn focus_handle(&self, _: &AppContext) -> FocusHandle { fn focus_handle(&self, _: &AppContext) -> FocusHandle {

File diff suppressed because it is too large Load diff

View file

@ -615,8 +615,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.create_entry((tree_id, dir_path), true, cx) project.create_entry((tree_id, dir_path), true, cx)
}) })
.ok_or_else(|| anyhow!("worktree was removed"))? .await
.await?; .context("worktree was removed")?;
} }
} }
@ -625,8 +625,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.create_entry((tree_id, file_path), false, cx) project.create_entry((tree_id, file_path), false, cx)
}) })
.ok_or_else(|| anyhow!("worktree was removed"))? .await
.await?; .context("worktree was removed")?;
} }
let editor = workspace let editor = workspace
@ -763,7 +763,7 @@ mod tests {
AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle, AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
}; };
use language::LanguageRegistry; use language::LanguageRegistry;
use project::{Project, ProjectPath}; use project::{project_settings::ProjectSettings, Project, ProjectPath};
use serde_json::json; use serde_json::json;
use settings::{handle_settings_file_changes, watch_config_file, SettingsStore}; use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
use std::{ use std::{
@ -1308,6 +1308,122 @@ mod tests {
}); });
} }
#[gpui::test]
async fn test_opening_excluded_paths(cx: &mut TestAppContext) {
let app_state = init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]);
});
});
});
app_state
.fs
.as_fake()
.insert_tree(
"/root",
json!({
".gitignore": "ignored_dir\n",
".git": {
"HEAD": "ref: refs/heads/main",
},
"regular_dir": {
"file": "regular file contents",
},
"ignored_dir": {
"ignored_subdir": {
"file": "ignored subfile contents",
},
"file": "ignored file contents",
},
"excluded_dir": {
"file": "excluded file contents",
},
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
let workspace = window.root(cx);
let initial_entries = cx.read(|cx| workspace.file_project_paths(cx));
let paths_to_open = [
Path::new("/root/excluded_dir/file").to_path_buf(),
Path::new("/root/.git/HEAD").to_path_buf(),
Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(),
];
let (opened_workspace, new_items) = cx
.update(|cx| workspace::open_paths(&paths_to_open, &app_state, None, cx))
.await
.unwrap();
assert_eq!(
opened_workspace.id(),
workspace.id(),
"Excluded files in subfolders of a workspace root should be opened in the workspace"
);
let mut opened_paths = cx.read(|cx| {
assert_eq!(
new_items.len(),
paths_to_open.len(),
"Expect to get the same number of opened items as submitted paths to open"
);
new_items
.iter()
.zip(paths_to_open.iter())
.map(|(i, path)| {
match i {
Some(Ok(i)) => {
Some(i.project_path(cx).map(|p| p.path.display().to_string()))
}
Some(Err(e)) => panic!("Excluded file {path:?} failed to open: {e:?}"),
None => None,
}
.flatten()
})
.collect::<Vec<_>>()
});
opened_paths.sort();
assert_eq!(
opened_paths,
vec![
None,
Some(".git/HEAD".to_string()),
Some("excluded_dir/file".to_string()),
],
"Excluded files should get opened, excluded dir should not get opened"
);
let entries = cx.read(|cx| workspace.file_project_paths(cx));
assert_eq!(
initial_entries, entries,
"Workspace entries should not change after opening excluded files and directories paths"
);
cx.read(|cx| {
let pane = workspace.read(cx).active_pane().read(cx);
let mut opened_buffer_paths = pane
.items()
.map(|i| {
i.project_path(cx)
.expect("all excluded files that got open should have a path")
.path
.display()
.to_string()
})
.collect::<Vec<_>>();
opened_buffer_paths.sort();
assert_eq!(
opened_buffer_paths,
vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()],
"Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane"
);
});
}
#[gpui::test] #[gpui::test]
async fn test_save_conflicting_item(cx: &mut TestAppContext) { async fn test_save_conflicting_item(cx: &mut TestAppContext) {
let app_state = init_test(cx); let app_state = init_test(cx);

View file

@ -191,7 +191,6 @@ fn main() {
user_store: user_store.clone(), user_store: user_store.clone(),
fs, fs,
build_window_options, build_window_options,
call_factory: call::Call::new,
workspace_store, workspace_store,
node_runtime, node_runtime,
}); });

File diff suppressed because it is too large Load diff