Merge remote-tracking branch 'origin/main' into assistant-2

Antonio Scandurra 2023-05-30 15:11:06 +02:00
commit dc365472a6
26 changed files with 2222 additions and 1577 deletions

Cargo.lock (generated)

@@ -3749,9 +3749,9 @@ dependencies = [
 [[package]]
 name = "lsp-types"
-version = "0.91.1"
+version = "0.94.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2368312c59425dd133cb9a327afee65be0a633a8ce471d248e2202a48f8f68ae"
+checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
 dependencies = [
  "bitflags",
  "serde",


@@ -1,18 +1,6 @@
-You are #zed, a language model representing the collective understanding of an open source project called Zed. When a new human visits you, they'll send you their profile. You'll respond with an introduction tailored to their situation. For example, a new user might see something like this:
-Welcome to Zed! Zed is an innovative, open-source platform designed to enhance team communication and collaboration. At the heart of Zed are *contexts*, which create a dynamic digital representation of shared mental models. Contexts offer personalized starting points and the flexibility to edit and explore, enabling teams to align knowledge, streamline communication, and improve overall performance.
-As the #zed model, I'm happy to answer any questions. In fact, I will improve as a result of you doing so!
-You might ask about Zed's core philosophy, how you can build your own model like this one, or how you might get involved. Zed's open source!
-> [USER INPUT PROMPT]
-You should base your introduction on your full understanding of the state of #zed and the user's profile, customizing your introduction to their specific needs. Don't welcome them to Zed if they've been using Zed for 2 days. If they're returning after a while, welcome them back.
-User input begins on a line starting with >.
-Your output begins on a line starting with <.
+User input begins on a line starting with /.
 Don't apologize ever.
 Never say "I apologize".
-Use simple language and don't flatter the users. Spend your tokens on valuable information.
+Use simple language and don't flatter the users.
+Keep it short.
+Risk being rude.


@@ -10,7 +10,7 @@ use gpui::{
     WindowContext,
 };
 use isahc::{http::StatusCode, Request, RequestExt};
-use language::{language_settings::SoftWrap, Buffer, Language, LanguageRegistry};
+use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
 use std::{io, sync::Arc};
 use util::{post_inc, ResultExt, TryFutureExt};
 use workspace::{
@@ -36,7 +36,7 @@ pub enum AssistantPanelEvent {
 pub struct AssistantPanel {
     width: Option<f32>,
     pane: ViewHandle<Pane>,
-    workspace: WeakViewHandle<Workspace>,
+    languages: Arc<LanguageRegistry>,
     _subscriptions: Vec<Subscription>,
 }
@@ -52,6 +52,7 @@ impl AssistantPanel {
         let pane = cx.add_view(|cx| {
             let mut pane = Pane::new(
                 workspace.weak_handle(),
+                workspace.project().clone(),
                 workspace.app_state().background_actions,
                 Default::default(),
                 cx,
@@ -98,7 +99,7 @@ impl AssistantPanel {
         Self {
             pane,
-            workspace: workspace.weak_handle(),
+            languages: workspace.app_state().languages.clone(),
             width: None,
             _subscriptions: subscriptions,
         }
@@ -177,28 +178,11 @@ impl Panel for AssistantPanel {
     fn set_active(&mut self, active: bool, cx: &mut ViewContext<Self>) {
         if active && self.pane.read(cx).items_len() == 0 {
-            let workspace = self.workspace.clone();
-            let pane = self.pane.clone();
             let focus = self.has_focus(cx);
-            cx.spawn(|_, mut cx| async move {
-                let markdown = workspace
-                    .read_with(&cx, |workspace, _| {
-                        workspace
-                            .app_state()
-                            .languages
-                            .language_for_name("Markdown")
-                    })?
-                    .await?;
-                workspace.update(&mut cx, |workspace, cx| {
-                    let editor = Box::new(cx.add_view(|cx| {
-                        AssistantEditor::new(markdown, workspace.app_state().languages.clone(), cx)
-                    }));
-                    Pane::add_item(workspace, &pane, editor, true, focus, None, cx);
-                })?;
-                anyhow::Ok(())
-            })
-            .detach_and_log_err(cx);
+            let editor = cx.add_view(|cx| AssistantEditor::new(self.languages.clone(), cx));
+            self.pane.update(cx, |pane, cx| {
+                pane.add_item(Box::new(editor), true, focus, None, cx)
+            });
         }
     }
@@ -238,7 +222,6 @@ struct Assistant {
     messages_by_id: HashMap<ExcerptId, Message>,
     completion_count: usize,
     pending_completions: Vec<PendingCompletion>,
-    markdown: Arc<Language>,
     language_registry: Arc<LanguageRegistry>,
 }
@@ -247,18 +230,13 @@ impl Entity for Assistant {
 }
 
 impl Assistant {
-    fn new(
-        markdown: Arc<Language>,
-        language_registry: Arc<LanguageRegistry>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
+    fn new(language_registry: Arc<LanguageRegistry>, cx: &mut ModelContext<Self>) -> Self {
         let mut this = Self {
             buffer: cx.add_model(|_| MultiBuffer::new(0)),
             messages: Default::default(),
             messages_by_id: Default::default(),
             completion_count: Default::default(),
             pending_completions: Default::default(),
-            markdown,
             language_registry,
         };
         this.push_message(Role::User, cx);
@@ -323,7 +301,18 @@ impl Assistant {
     fn push_message(&mut self, role: Role, cx: &mut ModelContext<Self>) -> Message {
         let content = cx.add_model(|cx| {
             let mut buffer = Buffer::new(0, "", cx);
-            buffer.set_language(Some(self.markdown.clone()), cx);
+            let markdown = self.language_registry.language_for_name("Markdown");
+            cx.spawn_weak(|buffer, mut cx| async move {
+                let markdown = markdown.await?;
+                let buffer = buffer
+                    .upgrade(&cx)
+                    .ok_or_else(|| anyhow!("buffer was dropped"))?;
+                buffer.update(&mut cx, |buffer, cx| {
+                    buffer.set_language(Some(markdown), cx)
+                });
+                anyhow::Ok(())
+            })
+            .detach_and_log_err(cx);
             buffer.set_language_registry(self.language_registry.clone());
             buffer
         });
@@ -363,12 +352,8 @@ struct AssistantEditor {
 }
 
 impl AssistantEditor {
-    fn new(
-        markdown: Arc<Language>,
-        language_registry: Arc<LanguageRegistry>,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
-        let assistant = cx.add_model(|cx| Assistant::new(markdown, language_registry, cx));
+    fn new(language_registry: Arc<LanguageRegistry>, cx: &mut ViewContext<Self>) -> Self {
+        let assistant = cx.add_model(|cx| Assistant::new(language_registry, cx));
         let editor = cx.add_view(|cx| {
             let mut editor = Editor::for_multibuffer(assistant.read(cx).buffer.clone(), None, cx);
             editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);


@@ -5010,19 +5010,21 @@ async fn test_project_symbols(
         .unwrap();
     let fake_language_server = fake_language_servers.next().await.unwrap();
-    fake_language_server.handle_request::<lsp::request::WorkspaceSymbol, _, _>(|_, _| async move {
-        #[allow(deprecated)]
-        Ok(Some(vec![lsp::SymbolInformation {
-            name: "TWO".into(),
-            location: lsp::Location {
-                uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
-                range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
-            },
-            kind: lsp::SymbolKind::CONSTANT,
-            tags: None,
-            container_name: None,
-            deprecated: None,
-        }]))
+    fake_language_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(|_, _| async move {
+        Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![
+            #[allow(deprecated)]
+            lsp::SymbolInformation {
+                name: "TWO".into(),
+                location: lsp::Location {
+                    uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
+                    range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
+                },
+                kind: lsp::SymbolKind::CONSTANT,
+                tags: None,
+                container_name: None,
+                deprecated: None,
+            },
+        ])))
     });
 
     // Request the definition of a symbol as the guest.
@@ -6606,7 +6608,7 @@ async fn test_basic_following(
     // When client A navigates back and forth, client B does so as well.
     workspace_a
         .update(cx_a, |workspace, cx| {
-            workspace::Pane::go_back(workspace, None, cx)
+            workspace.go_back(workspace.active_pane().downgrade(), cx)
         })
         .await
         .unwrap();
@@ -6617,7 +6619,7 @@ async fn test_basic_following(
     workspace_a
         .update(cx_a, |workspace, cx| {
-            workspace::Pane::go_back(workspace, None, cx)
+            workspace.go_back(workspace.active_pane().downgrade(), cx)
         })
         .await
         .unwrap();
@@ -6628,7 +6630,7 @@ async fn test_basic_following(
     workspace_a
         .update(cx_a, |workspace, cx| {
-            workspace::Pane::go_forward(workspace, None, cx)
+            workspace.go_forward(workspace.active_pane().downgrade(), cx)
        })
         .await
         .unwrap();
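For reference, a minimal sketch of the response shape that drives the test change above. It assumes the lsp-types 0.94 crate used directly (the diff goes through Zed's `lsp::` re-exports); it is not code from this commit. Under 0.94 a workspace/symbol reply is an enum rather than a bare Vec<SymbolInformation>, so a fake server wraps its results in WorkspaceSymbolResponse::Flat and a client has to be ready for the Nested variant too:

    use lsp_types::{OneOf, WorkspaceSymbolResponse};

    // Extract concrete locations from either response variant.
    fn symbol_locations(response: WorkspaceSymbolResponse) -> Vec<lsp_types::Location> {
        match response {
            // The classic flat reply: SymbolInformation with a resolved Location.
            WorkspaceSymbolResponse::Flat(symbols) => {
                symbols.into_iter().map(|symbol| symbol.location).collect()
            }
            // The newer nested reply: WorkspaceSymbol, whose location may need resolving.
            WorkspaceSymbolResponse::Nested(symbols) => symbols
                .into_iter()
                .filter_map(|symbol| match symbol.location {
                    OneOf::Left(location) => Some(location),
                    // Right(_) would require a workspaceSymbol/resolve round trip.
                    OneOf::Right(_) => None,
                })
                .collect(),
        }
    }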


@@ -4938,12 +4938,12 @@ impl Editor {
     }
 
     fn push_to_nav_history(
-        &self,
+        &mut self,
         cursor_anchor: Anchor,
         new_position: Option<Point>,
         cx: &mut ViewContext<Self>,
     ) {
-        if let Some(nav_history) = &self.nav_history {
+        if let Some(nav_history) = self.nav_history.as_mut() {
             let buffer = self.buffer.read(cx).read(cx);
             let cursor_position = cursor_anchor.to_point(&buffer);
             let scroll_state = self.scroll_manager.anchor();


@@ -6,7 +6,7 @@ use gpui::{
 use picker::{Picker, PickerDelegate};
 use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
 use std::{
-    path::Path,
+    path::{Path, PathBuf},
     sync::{
         atomic::{self, AtomicBool},
         Arc,
@@ -25,11 +25,57 @@ pub struct FileFinderDelegate {
     latest_search_id: usize,
     latest_search_did_cancel: bool,
     latest_search_query: Option<PathLikeWithPosition<FileSearchQuery>>,
-    currently_opened_path: Option<ProjectPath>,
-    matches: Vec<PathMatch>,
-    selected: Option<(usize, Arc<Path>)>,
+    currently_opened_path: Option<FoundPath>,
+    matches: Matches,
+    selected_index: Option<usize>,
     cancel_flag: Arc<AtomicBool>,
-    history_items: Vec<ProjectPath>,
+    history_items: Vec<FoundPath>,
 }
+
+#[derive(Debug)]
+enum Matches {
+    History(Vec<FoundPath>),
+    Search(Vec<PathMatch>),
+}
+
+#[derive(Debug)]
+enum Match<'a> {
+    History(&'a FoundPath),
+    Search(&'a PathMatch),
+}
+
+impl Matches {
+    fn len(&self) -> usize {
+        match self {
+            Self::History(items) => items.len(),
+            Self::Search(items) => items.len(),
+        }
+    }
+
+    fn get(&self, index: usize) -> Option<Match<'_>> {
+        match self {
+            Self::History(items) => items.get(index).map(Match::History),
+            Self::Search(items) => items.get(index).map(Match::Search),
+        }
+    }
+}
+
+impl Default for Matches {
+    fn default() -> Self {
+        Self::History(Vec::new())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct FoundPath {
+    project: ProjectPath,
+    absolute: Option<PathBuf>,
+}
+
+impl FoundPath {
+    fn new(project: ProjectPath, absolute: Option<PathBuf>) -> Self {
+        Self { project, absolute }
+    }
+}
 
 actions!(file_finder, [Toggle]);
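A small usage sketch of the Matches/Match pair introduced above (hypothetical helper in the same module, not part of the commit): Matches::get hands back a borrowed Match<'_> view no matter which variant is active, which is why the delegate can now keep just an index for its selection instead of a cloned worktree id and path.

    // Sketch only: `describe` is an illustrative name, not code from this change.
    fn describe(matches: &Matches, selected_index: Option<usize>) -> Option<String> {
        let index = selected_index.unwrap_or(0);
        match matches.get(index)? {
            // History entries carry a ProjectPath plus an optional absolute path.
            Match::History(found_path) => Some(format!("{:?}", found_path.absolute)),
            // Search entries are fuzzy PathMatch results.
            Match::Search(path_match) => Some(path_match.path.to_string_lossy().into_owned()),
        }
    }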
@@ -43,10 +89,41 @@ const MAX_RECENT_SELECTIONS: usize = 20;
 fn toggle_file_finder(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
     workspace.toggle_modal(cx, |workspace, cx| {
-        let history_items = workspace.recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx);
+        let project = workspace.project().read(cx);
         let currently_opened_path = workspace
             .active_item(cx)
-            .and_then(|item| item.project_path(cx));
+            .and_then(|item| item.project_path(cx))
+            .map(|project_path| {
+                let abs_path = project
+                    .worktree_for_id(project_path.worktree_id, cx)
+                    .map(|worktree| worktree.read(cx).abs_path().join(&project_path.path));
+                FoundPath::new(project_path, abs_path)
+            });
+
+        // if exists, bubble the currently opened path to the top
+        let history_items = currently_opened_path
+            .clone()
+            .into_iter()
+            .chain(
+                workspace
+                    .recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx)
+                    .into_iter()
+                    .filter(|(history_path, _)| {
+                        Some(history_path)
+                            != currently_opened_path
+                                .as_ref()
+                                .map(|found_path| &found_path.project)
+                    })
+                    .filter(|(_, history_abs_path)| {
+                        history_abs_path.as_ref()
+                            != currently_opened_path
+                                .as_ref()
+                                .and_then(|found_path| found_path.absolute.as_ref())
+                    })
+                    .map(|(history_path, abs_path)| FoundPath::new(history_path, abs_path)),
+            )
+            .collect();
 
         let project = workspace.project().clone();
         let workspace = cx.handle().downgrade();
@@ -87,37 +164,11 @@ impl FileSearchQuery {
 }
 
 impl FileFinderDelegate {
-    fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
-        let path = &path_match.path;
-        let path_string = path.to_string_lossy();
-        let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
-        let path_positions = path_match.positions.clone();
-
-        let file_name = path.file_name().map_or_else(
-            || path_match.path_prefix.to_string(),
-            |file_name| file_name.to_string_lossy().to_string(),
-        );
-        let file_name_start = path_match.path_prefix.chars().count() + path_string.chars().count()
-            - file_name.chars().count();
-        let file_name_positions = path_positions
-            .iter()
-            .filter_map(|pos| {
-                if pos >= &file_name_start {
-                    Some(pos - file_name_start)
-                } else {
-                    None
-                }
-            })
-            .collect();
-
-        (file_name, file_name_positions, full_path, path_positions)
-    }
-
-    pub fn new(
+    fn new(
         workspace: WeakViewHandle<Workspace>,
         project: ModelHandle<Project>,
-        currently_opened_path: Option<ProjectPath>,
-        history_items: Vec<ProjectPath>,
+        currently_opened_path: Option<FoundPath>,
+        history_items: Vec<FoundPath>,
         cx: &mut ViewContext<FileFinder>,
     ) -> Self {
         cx.observe(&project, |picker, _, cx| {
@@ -132,8 +183,8 @@ impl FileFinderDelegate {
             latest_search_did_cancel: false,
             latest_search_query: None,
             currently_opened_path,
-            matches: Vec::new(),
-            selected: None,
+            matches: Matches::default(),
+            selected_index: None,
             cancel_flag: Arc::new(AtomicBool::new(false)),
             history_items,
         }
@@ -147,7 +198,7 @@ impl FileFinderDelegate {
         let relative_to = self
             .currently_opened_path
             .as_ref()
-            .map(|project_path| Arc::clone(&project_path.path));
+            .map(|found_path| Arc::clone(&found_path.project.path));
         let worktrees = self
             .project
             .read(cx)
@@ -188,13 +239,13 @@ impl FileFinderDelegate {
                 .update(&mut cx, |picker, cx| {
                     picker
                         .delegate_mut()
-                        .set_matches(search_id, did_cancel, query, matches, cx)
+                        .set_search_matches(search_id, did_cancel, query, matches, cx)
                 })
                 .log_err();
         })
     }
 
-    fn set_matches(
+    fn set_search_matches(
         &mut self,
         search_id: usize,
         did_cancel: bool,
@@ -211,15 +262,126 @@ impl FileFinderDelegate {
                 .as_ref()
                 .map(|query| query.path_like.path_query())
             {
-                util::extend_sorted(&mut self.matches, matches.into_iter(), 100, |a, b| b.cmp(a));
+                match &mut self.matches {
+                    Matches::History(_) => self.matches = Matches::Search(matches),
+                    Matches::Search(search_matches) => {
+                        util::extend_sorted(search_matches, matches.into_iter(), 100, |a, b| {
+                            b.cmp(a)
+                        })
+                    }
+                }
             } else {
-                self.matches = matches;
+                self.matches = Matches::Search(matches);
             }
             self.latest_search_query = Some(query);
             self.latest_search_did_cancel = did_cancel;
             cx.notify();
         }
     }
fn labels_for_match(
&self,
path_match: Match,
cx: &AppContext,
ix: usize,
) -> (String, Vec<usize>, String, Vec<usize>) {
let (file_name, file_name_positions, full_path, full_path_positions) = match path_match {
Match::History(found_path) => {
let worktree_id = found_path.project.worktree_id;
let project_relative_path = &found_path.project.path;
let has_worktree = self
.project
.read(cx)
.worktree_for_id(worktree_id, cx)
.is_some();
if !has_worktree {
if let Some(absolute_path) = &found_path.absolute {
return (
absolute_path
.file_name()
.map_or_else(
|| project_relative_path.to_string_lossy(),
|file_name| file_name.to_string_lossy(),
)
.to_string(),
Vec::new(),
absolute_path.to_string_lossy().to_string(),
Vec::new(),
);
}
}
let mut path = Arc::clone(project_relative_path);
if project_relative_path.as_ref() == Path::new("") {
if let Some(absolute_path) = &found_path.absolute {
path = Arc::from(absolute_path.as_path());
}
}
self.labels_for_path_match(&PathMatch {
score: ix as f64,
positions: Vec::new(),
worktree_id: worktree_id.to_usize(),
path,
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
})
}
Match::Search(path_match) => self.labels_for_path_match(path_match),
};
if file_name_positions.is_empty() {
if let Some(user_home_path) = std::env::var("HOME").ok() {
let user_home_path = user_home_path.trim();
if !user_home_path.is_empty() {
if (&full_path).starts_with(user_home_path) {
return (
file_name,
file_name_positions,
full_path.replace(user_home_path, "~"),
full_path_positions,
);
}
}
}
}
(
file_name,
file_name_positions,
full_path,
full_path_positions,
)
}
fn labels_for_path_match(
&self,
path_match: &PathMatch,
) -> (String, Vec<usize>, String, Vec<usize>) {
let path = &path_match.path;
let path_string = path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let path_positions = path_match.positions.clone();
let file_name = path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
let file_name_start = path_match.path_prefix.chars().count() + path_string.chars().count()
- file_name.chars().count();
let file_name_positions = path_positions
.iter()
.filter_map(|pos| {
if pos >= &file_name_start {
Some(pos - file_name_start)
} else {
None
}
})
.collect();
(file_name, file_name_positions, full_path, path_positions)
}
 }
 
 impl PickerDelegate for FileFinderDelegate {
@@ -232,45 +394,35 @@ impl PickerDelegate for FileFinderDelegate {
     }
 
     fn selected_index(&self) -> usize {
-        if let Some(selected) = self.selected.as_ref() {
-            for (ix, path_match) in self.matches.iter().enumerate() {
-                if (path_match.worktree_id, path_match.path.as_ref())
-                    == (selected.0, selected.1.as_ref())
-                {
-                    return ix;
-                }
-            }
-        }
-        0
+        self.selected_index.unwrap_or(0)
     }
 
     fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<FileFinder>) {
-        let mat = &self.matches[ix];
-        self.selected = Some((mat.worktree_id, mat.path.clone()));
+        self.selected_index = Some(ix);
         cx.notify();
     }
     fn update_matches(&mut self, raw_query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
         if raw_query.is_empty() {
+            let project = self.project.read(cx);
             self.latest_search_id = post_inc(&mut self.search_count);
-            self.matches.clear();
-
-            self.matches = self
-                .currently_opened_path
-                .iter() // if exists, bubble the currently opened path to the top
-                .chain(self.history_items.iter().filter(|history_item| {
-                    Some(*history_item) != self.currently_opened_path.as_ref()
-                }))
-                .enumerate()
-                .map(|(i, history_item)| PathMatch {
-                    score: i as f64,
-                    positions: Vec::new(),
-                    worktree_id: history_item.worktree_id.to_usize(),
-                    path: Arc::clone(&history_item.path),
-                    path_prefix: "".into(),
-                    distance_to_relative_ancestor: usize::MAX,
-                })
-                .collect();
+            self.matches = Matches::History(
+                self.history_items
+                    .iter()
+                    .filter(|history_item| {
+                        project
+                            .worktree_for_id(history_item.project.worktree_id, cx)
+                            .is_some()
+                            || (project.is_local()
+                                && history_item
+                                    .absolute
+                                    .as_ref()
+                                    .filter(|abs_path| abs_path.exists())
+                                    .is_some())
+                    })
+                    .cloned()
+                    .collect(),
+            );
 
             cx.notify();
             Task::ready(())
         } else {
@@ -293,16 +445,52 @@ impl PickerDelegate for FileFinderDelegate {
     fn confirm(&mut self, cx: &mut ViewContext<FileFinder>) {
         if let Some(m) = self.matches.get(self.selected_index()) {
             if let Some(workspace) = self.workspace.upgrade(cx) {
-                let project_path = ProjectPath {
-                    worktree_id: WorktreeId::from_usize(m.worktree_id),
-                    path: m.path.clone(),
-                };
-                let open_task = workspace.update(cx, |workspace, cx| {
-                    workspace.open_path(project_path.clone(), None, true, cx)
+                let open_task = workspace.update(cx, |workspace, cx| match m {
+                    Match::History(history_match) => {
+                        let worktree_id = history_match.project.worktree_id;
+                        if workspace
+                            .project()
+                            .read(cx)
+                            .worktree_for_id(worktree_id, cx)
+                            .is_some()
+                        {
+                            workspace.open_path(
+                                ProjectPath {
+                                    worktree_id,
+                                    path: Arc::clone(&history_match.project.path),
+                                },
+                                None,
+                                true,
+                                cx,
+                            )
+                        } else {
+                            match history_match.absolute.as_ref() {
+                                Some(abs_path) => {
+                                    workspace.open_abs_path(abs_path.to_path_buf(), false, cx)
+                                }
+                                None => workspace.open_path(
+                                    ProjectPath {
+                                        worktree_id,
+                                        path: Arc::clone(&history_match.project.path),
+                                    },
+                                    None,
+                                    true,
+                                    cx,
+                                ),
+                            }
+                        }
+                    }
+                    Match::Search(m) => workspace.open_path(
+                        ProjectPath {
+                            worktree_id: WorktreeId::from_usize(m.worktree_id),
+                            path: m.path.clone(),
+                        },
+                        None,
+                        true,
+                        cx,
+                    ),
                 });
+                let workspace = workspace.downgrade();
                 let row = self
                     .latest_search_query
                     .as_ref()
@@ -333,6 +521,7 @@ impl PickerDelegate for FileFinderDelegate {
                     }
                 }
                 workspace
-                    .downgrade()
                     .update(&mut cx, |workspace, cx| workspace.dismiss_modal(cx))
                     .log_err();
@@ -352,11 +541,14 @@ impl PickerDelegate for FileFinderDelegate {
         selected: bool,
         cx: &AppContext,
     ) -> AnyElement<Picker<Self>> {
-        let path_match = &self.matches[ix];
+        let path_match = self
+            .matches
+            .get(ix)
+            .expect("Invalid matches state: no element for index {ix}");
         let theme = theme::current(cx);
         let style = theme.picker.item.style_for(mouse_state, selected);
         let (file_name, file_name_positions, full_path, full_path_positions) =
-            self.labels_for_match(path_match);
+            self.labels_for_match(path_match, cx, ix);
 
         Flex::column()
             .with_child(
                 Label::new(file_name, style.label.clone()).with_highlights(file_name_positions),
@@ -373,7 +565,7 @@ impl PickerDelegate for FileFinderDelegate {
 #[cfg(test)]
 mod tests {
-    use std::{assert_eq, collections::HashMap, time::Duration};
+    use std::{assert_eq, collections::HashMap, path::Path, time::Duration};
 
     use super::*;
     use editor::Editor;
@@ -649,12 +841,16 @@ mod tests {
         finder.update(cx, |finder, cx| {
             let delegate = finder.delegate_mut();
-            let matches = delegate.matches.clone();
+            let matches = match &delegate.matches {
+                Matches::Search(path_matches) => path_matches,
+                _ => panic!("Search matches expected"),
+            }
+            .clone();
 
             // Simulate a search being cancelled after the time limit,
             // returning only a subset of the matches that would have been found.
             drop(delegate.spawn_search(query.clone(), cx));
-            delegate.set_matches(
+            delegate.set_search_matches(
                 delegate.latest_search_id,
                 true, // did-cancel
                 query.clone(),
@@ -664,7 +860,7 @@ mod tests {
             // Simulate another cancellation.
             drop(delegate.spawn_search(query.clone(), cx));
-            delegate.set_matches(
+            delegate.set_search_matches(
                 delegate.latest_search_id,
                 true, // did-cancel
                 query.clone(),
@@ -672,7 +868,12 @@ mod tests {
                 cx,
             );
 
-            assert_eq!(delegate.matches, matches[0..4])
+            match &delegate.matches {
+                Matches::Search(new_matches) => {
+                    assert_eq!(new_matches.as_slice(), &matches[0..4])
+                }
+                _ => panic!("Search matches expected"),
+            };
         });
     }
@@ -772,10 +973,14 @@ mod tests {
         cx.read(|cx| {
             let finder = finder.read(cx);
             let delegate = finder.delegate();
-            assert_eq!(delegate.matches.len(), 1);
+            let matches = match &delegate.matches {
+                Matches::Search(path_matches) => path_matches,
+                _ => panic!("Search matches expected"),
+            };
+            assert_eq!(matches.len(), 1);
 
             let (file_name, file_name_positions, full_path, full_path_positions) =
-                delegate.labels_for_match(&delegate.matches[0]);
+                delegate.labels_for_path_match(&matches[0]);
             assert_eq!(file_name, "the-file");
             assert_eq!(file_name_positions, &[0, 1, 4]);
             assert_eq!(full_path, "the-file");
@@ -876,10 +1081,10 @@ mod tests {
         // When workspace has an active item, sort items which are closer to that item
         // first when they have the same name. In this case, b.txt is closer to dir2's a.txt
         // so that one should be sorted earlier
-        let b_path = Some(ProjectPath {
+        let b_path = Some(dummy_found_path(ProjectPath {
             worktree_id,
             path: Arc::from(Path::new("/root/dir2/b.txt")),
-        });
+        }));
         let (_, finder) = cx.add_window(|cx| {
             Picker::new(
                 FileFinderDelegate::new(
@@ -901,8 +1106,12 @@ mod tests {
         finder.read_with(cx, |f, _| {
             let delegate = f.delegate();
-            assert_eq!(delegate.matches[0].path.as_ref(), Path::new("dir2/a.txt"));
-            assert_eq!(delegate.matches[1].path.as_ref(), Path::new("dir1/a.txt"));
+            let matches = match &delegate.matches {
+                Matches::Search(path_matches) => path_matches,
+                _ => panic!("Search matches expected"),
+            };
+            assert_eq!(matches[0].path.as_ref(), Path::new("dir2/a.txt"));
+            assert_eq!(matches[1].path.as_ref(), Path::new("dir1/a.txt"));
         });
     }
@@ -1012,10 +1221,13 @@ mod tests {
         .await;
         assert_eq!(
             history_after_first,
-            vec![ProjectPath {
-                worktree_id,
-                path: Arc::from(Path::new("test/first.rs")),
-            }],
+            vec![FoundPath::new(
+                ProjectPath {
+                    worktree_id,
+                    path: Arc::from(Path::new("test/first.rs")),
+                },
+                Some(PathBuf::from("/src/test/first.rs"))
+            )],
             "Should show 1st opened item in the history when opening the 2nd item"
         );
@@ -1032,14 +1244,20 @@ mod tests {
         assert_eq!(
             history_after_second,
             vec![
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/second.rs")),
-                },
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/first.rs")),
-                },
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/second.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/second.rs"))
+                ),
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/first.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/first.rs"))
+                ),
             ],
             "Should show 1st and 2nd opened items in the history when opening the 3rd item. \
             2nd item should be the first in the history, as the last opened."
@@ -1058,18 +1276,27 @@ mod tests {
         assert_eq!(
             history_after_third,
             vec![
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/third.rs")),
-                },
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/second.rs")),
-                },
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/first.rs")),
-                },
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/third.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/third.rs"))
+                ),
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/second.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/second.rs"))
+                ),
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/first.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/first.rs"))
+                ),
             ],
             "Should show 1st, 2nd and 3rd opened items in the history when opening the 2nd item again. \
             3rd item should be the first in the history, as the last opened."
@@ -1088,24 +1315,162 @@ mod tests {
         assert_eq!(
             history_after_second_again,
             vec![
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/second.rs")),
-                },
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/third.rs")),
-                },
-                ProjectPath {
-                    worktree_id,
-                    path: Arc::from(Path::new("test/first.rs")),
-                },
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/second.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/second.rs"))
+                ),
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/third.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/third.rs"))
+                ),
+                FoundPath::new(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("test/first.rs")),
+                    },
+                    Some(PathBuf::from("/src/test/first.rs"))
+                ),
             ],
             "Should show 1st, 2nd and 3rd opened items in the history when opening the 3rd item again. \
             2nd item, as the last opened, 3rd item should go next as it was opened right before."
         );
     }
#[gpui::test]
async fn test_external_files_history(
deterministic: Arc<gpui::executor::Deterministic>,
cx: &mut gpui::TestAppContext,
) {
let app_state = init_test(cx);
app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
"test": {
"first.rs": "// First Rust file",
"second.rs": "// Second Rust file",
}
}),
)
.await;
app_state
.fs
.as_fake()
.insert_tree(
"/external-src",
json!({
"test": {
"third.rs": "// Third Rust file",
"fourth.rs": "// Fourth Rust file",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
cx.update(|cx| {
project.update(cx, |project, cx| {
project.find_or_create_local_worktree("/external-src", false, cx)
})
})
.detach();
deterministic.run_until_parked();
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
let worktree_id = cx.read(|cx| {
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 1,);
WorktreeId::from_usize(worktrees[0].id())
});
workspace
.update(cx, |workspace, cx| {
workspace.open_abs_path(PathBuf::from("/external-src/test/third.rs"), false, cx)
})
.detach();
deterministic.run_until_parked();
let external_worktree_id = cx.read(|cx| {
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
assert_eq!(
worktrees.len(),
2,
"External file should get opened in a new worktree"
);
WorktreeId::from_usize(
worktrees
.into_iter()
.find(|worktree| worktree.id() != worktree_id.to_usize())
.expect("New worktree should have a different id")
.id(),
)
});
close_active_item(&workspace, &deterministic, cx).await;
let initial_history_items = open_close_queried_buffer(
"sec",
1,
"second.rs",
window_id,
&workspace,
&deterministic,
cx,
)
.await;
assert_eq!(
initial_history_items,
vec![FoundPath::new(
ProjectPath {
worktree_id: external_worktree_id,
path: Arc::from(Path::new("")),
},
Some(PathBuf::from("/external-src/test/third.rs"))
)],
"Should show external file with its full path in the history after it was open"
);
let updated_history_items = open_close_queried_buffer(
"fir",
1,
"first.rs",
window_id,
&workspace,
&deterministic,
cx,
)
.await;
assert_eq!(
updated_history_items,
vec![
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/second.rs")),
},
Some(PathBuf::from("/src/test/second.rs"))
),
FoundPath::new(
ProjectPath {
worktree_id: external_worktree_id,
path: Arc::from(Path::new("")),
},
Some(PathBuf::from("/external-src/test/third.rs"))
),
],
"Should keep external file with history updates",
);
}
     async fn open_close_queried_buffer(
         input: &str,
         expected_matches: usize,
@@ -1114,7 +1479,7 @@ mod tests {
         workspace: &ViewHandle<Workspace>,
         deterministic: &gpui::executor::Deterministic,
         cx: &mut gpui::TestAppContext,
-    ) -> Vec<ProjectPath> {
+    ) -> Vec<FoundPath> {
         cx.dispatch_action(window_id, Toggle);
         let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
         finder
@@ -1152,6 +1517,16 @@ mod tests {
             );
         });
 
+        close_active_item(workspace, deterministic, cx).await;
+        history_items
+    }
+
+    async fn close_active_item(
+        workspace: &ViewHandle<Workspace>,
+        deterministic: &gpui::executor::Deterministic,
+        cx: &mut TestAppContext,
+    ) {
         let mut original_items = HashMap::new();
         cx.read(|cx| {
             for pane in workspace.read(cx).panes() {
@@ -1161,6 +1536,8 @@ mod tests {
                 assert!(insertion_result.is_none(), "Pane id {pane_id} collision");
             }
         });
+
+        let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
         active_pane
             .update(cx, |pane, cx| {
                 pane.close_active_item(&workspace::CloseActiveItem, cx)
@@ -1185,8 +1562,10 @@ mod tests {
                 }
             }
         });
-
-        history_items
+        assert!(
+            original_items.len() <= 1,
+            "At most one panel should got closed"
+        );
     }
     fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
@@ -1216,4 +1595,11 @@ mod tests {
         })
         .unwrap()
     }
+
+    fn dummy_found_path(project_path: ProjectPath) -> FoundPath {
+        FoundPath {
+            project: project_path,
+            absolute: None,
+        }
+    }
 }


@@ -434,7 +434,9 @@ impl<T: Entity> ModelHandle<T> {
             Duration::from_secs(1)
         };
 
+        let executor = cx.background().clone();
         async move {
+            executor.start_waiting();
             let notification = crate::util::timeout(duration, rx.next())
                 .await
                 .expect("next notification timed out");


@@ -876,6 +876,14 @@ impl Background {
             }
         }
     }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn start_waiting(&self) {
+        match self {
+            Self::Deterministic { executor, .. } => executor.start_waiting(),
+            _ => panic!("this method can only be called on a deterministic executor"),
+        }
+    }
 }
 
 impl Default for Background {


@@ -796,6 +796,12 @@ impl LanguageRegistry {
         http_client: Arc<dyn HttpClient>,
         cx: &mut AppContext,
     ) -> Option<PendingLanguageServer> {
+        let server_id = self.state.write().next_language_server_id();
+        log::info!(
+            "starting language server name:{}, path:{root_path:?}, id:{server_id}",
+            adapter.name.0
+        );
+
         #[cfg(any(test, feature = "test-support"))]
         if language.fake_adapter.is_some() {
             let task = cx.spawn(|cx| async move {
@@ -825,7 +831,6 @@ impl LanguageRegistry {
                 Ok(server)
             });
 
-            let server_id = self.state.write().next_language_server_id();
             return Some(PendingLanguageServer { server_id, task });
         }
 
@@ -834,7 +839,6 @@ impl LanguageRegistry {
             .clone()
             .ok_or_else(|| anyhow!("language server download directory has not been assigned"))
             .log_err()?;
-
         let this = self.clone();
         let language = language.clone();
         let http_client = http_client.clone();
@@ -843,7 +847,6 @@ impl LanguageRegistry {
         let adapter = adapter.clone();
         let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
         let login_shell_env_loaded = self.login_shell_env_loaded.clone();
-        let server_id = self.state.write().next_language_server_id();
         let task = cx.spawn(|cx| async move {
             login_shell_env_loaded.await;


@@ -20,7 +20,7 @@ anyhow.workspace = true
 async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true }
 futures.workspace = true
 log.workspace = true
-lsp-types = "0.91"
+lsp-types = "0.94"
 parking_lot.workspace = true
 postage.workspace = true
 serde.workspace = true


@@ -361,13 +361,18 @@ impl LanguageServer {
                 capabilities: ClientCapabilities {
                     workspace: Some(WorkspaceClientCapabilities {
                         configuration: Some(true),
-                        did_change_watched_files: Some(DynamicRegistrationClientCapabilities {
+                        did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities {
                             dynamic_registration: Some(true),
+                            relative_pattern_support: Some(true),
                         }),
                         did_change_configuration: Some(DynamicRegistrationClientCapabilities {
                             dynamic_registration: Some(true),
                         }),
                         workspace_folders: Some(true),
+                        symbol: Some(WorkspaceSymbolClientCapabilities {
+                            resolve_support: None,
+                            ..WorkspaceSymbolClientCapabilities::default()
+                        }),
                         ..Default::default()
                     }),
                     text_document: Some(TextDocumentClientCapabilities {
@@ -849,10 +854,12 @@ impl FakeLanguageServer {
         T: request::Request,
         T::Result: 'static + Send,
     {
+        self.server.executor.start_waiting();
         self.server.request::<T>(params).await
     }
 
     pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
+        self.server.executor.start_waiting();
         self.try_receive_notification::<T>().await.unwrap()
     }
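To illustrate what the new relative_pattern_support capability opts into, a sketch under the assumption of lsp-types 0.94's GlobPattern and RelativePattern types (not code from this commit): servers may now register file watchers whose pattern is relative to a base URI, so a client has to handle both variants when it extracts the glob text, as the project-side change further below does.

    use lsp_types::{FileSystemWatcher, GlobPattern};

    // Return the textual glob regardless of which GlobPattern variant the server sent.
    fn watcher_glob(watcher: &FileSystemWatcher) -> &str {
        match &watcher.glob_pattern {
            GlobPattern::String(glob) => glob,
            GlobPattern::Relative(relative) => &relative.pattern,
        }
    }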


@@ -1524,6 +1524,7 @@ impl LspCommand for GetCodeActions {
             context: lsp::CodeActionContext {
                 diagnostics: relevant_diagnostics,
                 only: language_server.code_action_kinds(),
+                ..lsp::CodeActionContext::default()
             },
         }
     }
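The `..lsp::CodeActionContext::default()` added above is the usual struct-update idiom for staying source-compatible when a struct gains fields across an lsp-types upgrade. A hedged sketch of the same pattern in isolation (assuming 0.94's CodeActionContext, which also carries an optional trigger kind; the function name is illustrative only):

    use lsp_types::{CodeActionContext, CodeActionKind, Diagnostic};

    // Fill in the fields we care about and let any newer ones default.
    fn code_action_context(
        diagnostics: Vec<Diagnostic>,
        only: Option<Vec<CodeActionKind>>,
    ) -> CodeActionContext {
        CodeActionContext {
            diagnostics,
            only,
            ..CodeActionContext::default()
        }
    }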


@@ -40,6 +40,7 @@ use language::{
     PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
     Transaction, Unclipped,
 };
+use log::error;
 use lsp::{
     DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
     DocumentHighlightKind, LanguageServer, LanguageServerId,
@@ -1459,7 +1460,7 @@ impl Project {
         };
 
         cx.foreground().spawn(async move {
-            pump_loading_buffer_reciever(loading_watch)
+            wait_for_loading_buffer(loading_watch)
                 .await
                 .map_err(|error| anyhow!("{}", error))
         })
@@ -3017,10 +3018,12 @@ impl Project {
                 if let Some(worktree) = worktree.upgrade(cx) {
                     let worktree = worktree.read(cx);
                     if let Some(abs_path) = worktree.abs_path().to_str() {
-                        if let Some(suffix) = watcher
-                            .glob_pattern
-                            .strip_prefix(abs_path)
-                            .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
+                        if let Some(suffix) = match &watcher.glob_pattern {
+                            lsp::GlobPattern::String(s) => s,
+                            lsp::GlobPattern::Relative(rp) => &rp.pattern,
+                        }
+                        .strip_prefix(abs_path)
+                        .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
                         {
                             if let Some(glob) = Glob::new(suffix).log_err() {
                                 builders
@@ -3218,7 +3221,7 @@ impl Project {
     ) -> Result<(), anyhow::Error> {
         let (worktree, relative_path) = self
             .find_local_worktree(&abs_path, cx)
-            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
+            .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
 
         let project_path = ProjectPath {
             worktree_id: worktree.read(cx).id(),
@@ -3759,7 +3762,7 @@ impl Project {
                 let worktree_abs_path = worktree.abs_path().clone();
                 requests.push(
                     server
-                        .request::<lsp::request::WorkspaceSymbol>(
+                        .request::<lsp::request::WorkspaceSymbolRequest>(
                             lsp::WorkspaceSymbolParams {
                                 query: query.to_string(),
                                 ..Default::default()
@@ -3767,12 +3770,32 @@ impl Project {
                         )
                         .log_err()
                         .map(move |response| {
+                            let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
+                                lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
+                                    flat_responses.into_iter().map(|lsp_symbol| {
+                                        (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
+                                    }).collect::<Vec<_>>()
+                                }
+                                lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
+                                    nested_responses.into_iter().filter_map(|lsp_symbol| {
+                                        let location = match lsp_symbol.location {
+                                            lsp::OneOf::Left(location) => location,
+                                            lsp::OneOf::Right(_) => {
+                                                error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
+                                                return None
+                                            }
+                                        };
+                                        Some((lsp_symbol.name, lsp_symbol.kind, location))
+                                    }).collect::<Vec<_>>()
+                                }
+                            }).unwrap_or_default();
+
                             (
                                 adapter,
                                 language,
                                 worktree_id,
                                 worktree_abs_path,
-                                response.unwrap_or_default(),
+                                lsp_symbols,
                             )
                         }),
                 );
@@ -3794,53 +3817,54 @@ impl Project {
                 adapter_language,
                 source_worktree_id,
                 worktree_abs_path,
-                response,
+                lsp_symbols,
             ) in responses
             {
-                symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
-                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
-                    let mut worktree_id = source_worktree_id;
-                    let path;
-                    if let Some((worktree, rel_path)) =
-                        this.find_local_worktree(&abs_path, cx)
-                    {
-                        worktree_id = worktree.read(cx).id();
-                        path = rel_path;
-                    } else {
-                        path = relativize_path(&worktree_abs_path, &abs_path);
-                    }
-
-                    let project_path = ProjectPath {
-                        worktree_id,
-                        path: path.into(),
-                    };
-                    let signature = this.symbol_signature(&project_path);
-                    let adapter_language = adapter_language.clone();
-                    let language = this
-                        .languages
-                        .language_for_file(&project_path.path, None)
-                        .unwrap_or_else(move |_| adapter_language);
-                    let language_server_name = adapter.name.clone();
-                    Some(async move {
-                        let language = language.await;
-                        let label = language
-                            .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
-                            .await;
-                        Symbol {
-                            language_server_name,
-                            source_worktree_id,
-                            path: project_path,
-                            label: label.unwrap_or_else(|| {
-                                CodeLabel::plain(lsp_symbol.name.clone(), None)
-                            }),
-                            kind: lsp_symbol.kind,
-                            name: lsp_symbol.name,
-                            range: range_from_lsp(lsp_symbol.location.range),
-                            signature,
-                        }
-                    })
-                }));
+                symbols.extend(lsp_symbols.into_iter().filter_map(
+                    |(symbol_name, symbol_kind, symbol_location)| {
+                        let abs_path = symbol_location.uri.to_file_path().ok()?;
+                        let mut worktree_id = source_worktree_id;
+                        let path;
+                        if let Some((worktree, rel_path)) =
+                            this.find_local_worktree(&abs_path, cx)
+                        {
+                            worktree_id = worktree.read(cx).id();
+                            path = rel_path;
+                        } else {
+                            path = relativize_path(&worktree_abs_path, &abs_path);
+                        }
+
+                        let project_path = ProjectPath {
+                            worktree_id,
+                            path: path.into(),
+                        };
+                        let signature = this.symbol_signature(&project_path);
+                        let adapter_language = adapter_language.clone();
+                        let language = this
+                            .languages
+                            .language_for_file(&project_path.path, None)
+                            .unwrap_or_else(move |_| adapter_language);
+                        let language_server_name = adapter.name.clone();
+                        Some(async move {
+                            let language = language.await;
+                            let label =
+                                language.label_for_symbol(&symbol_name, symbol_kind).await;
+                            Symbol {
+                                language_server_name,
+                                source_worktree_id,
+                                path: project_path,
+                                label: label.unwrap_or_else(|| {
+                                    CodeLabel::plain(symbol_name.clone(), None)
+                                }),
+                                kind: symbol_kind,
+                                name: symbol_name,
+                                range: range_from_lsp(symbol_location.range),
+                                signature,
+                            }
+                        })
+                    },
+                ));
             }
             symbols
         });
@@ -4847,7 +4871,7 @@ impl Project {
         if worktree.read(cx).is_local() {
             cx.subscribe(worktree, |this, worktree, event, cx| match event {
                 worktree::Event::UpdatedEntries(changes) => {
-                    this.update_local_worktree_buffers(&worktree, &changes, cx);
+                    this.update_local_worktree_buffers(&worktree, changes, cx);
                     this.update_local_worktree_language_servers(&worktree, changes, cx);
                 }
                 worktree::Event::UpdatedGitRepositories(updated_repos) => {
@@ -4881,13 +4905,13 @@ impl Project {
     fn update_local_worktree_buffers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
         cx: &mut ModelContext<Self>,
     ) {
         let snapshot = worktree_handle.read(cx).snapshot();
 
         let mut renamed_buffers = Vec::new();
-        for (path, entry_id) in changes.keys() {
+        for (path, entry_id, _) in changes {
             let worktree_id = worktree_handle.read(cx).id();
             let project_path = ProjectPath {
                 worktree_id,
@@ -4993,7 +5017,7 @@ impl Project {
     fn update_local_worktree_language_servers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
         cx: &mut ModelContext<Self>,
     ) {
         if changes.is_empty() {
@@ -5024,23 +5048,21 @@ impl Project {
                 let params = lsp::DidChangeWatchedFilesParams {
                     changes: changes
                         .iter()
-                        .filter_map(|((path, _), change)| {
-                            if watched_paths.is_match(&path) {
-                                Some(lsp::FileEvent {
-                                    uri: lsp::Url::from_file_path(abs_path.join(path))
-                                        .unwrap(),
-                                    typ: match change {
-                                        PathChange::Added => lsp::FileChangeType::CREATED,
-                                        PathChange::Removed => lsp::FileChangeType::DELETED,
-                                        PathChange::Updated
-                                        | PathChange::AddedOrUpdated => {
-                                            lsp::FileChangeType::CHANGED
-                                        }
-                                    },
-                                })
-                            } else {
-                                None
-                            }
+                        .filter_map(|(path, _, change)| {
+                            if !watched_paths.is_match(&path) {
+                                return None;
+                            }
+                            let typ = match change {
+                                PathChange::Loaded => return None,
+                                PathChange::Added => lsp::FileChangeType::CREATED,
+                                PathChange::Removed => lsp::FileChangeType::DELETED,
+                                PathChange::Updated => lsp::FileChangeType::CHANGED,
+                                PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
+                            };
+                            Some(lsp::FileEvent {
+                                uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
+                                typ,
+                            })
                         })
                         .collect(),
                 };
@@ -5059,98 +5081,102 @@ impl Project {
     fn update_local_worktree_buffers_git_repos(
         &mut self,
         worktree_handle: ModelHandle<Worktree>,
-        repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
+        changed_repos: &UpdatedGitRepositoriesSet,
         cx: &mut ModelContext<Self>,
     ) {
         debug_assert!(worktree_handle.read(cx).is_local());
 
-        // Setup the pending buffers
+        // Identify the loading buffers whose containing repository that has changed.
        let future_buffers = self
             .loading_buffers_by_path
             .iter()
-            .filter_map(|(path, receiver)| {
-                let path = &path.path;
-                let (work_directory, repo) = repos
-                    .iter()
-                    .find(|(work_directory, _)| path.starts_with(work_directory))?;
-
-                let repo_relative_path = path.strip_prefix(work_directory).log_err()?;
+            .filter_map(|(project_path, receiver)| {
+                if project_path.worktree_id != worktree_handle.read(cx).id() {
+                    return None;
+                }
+                let path = &project_path.path;
+                changed_repos.iter().find(|(work_dir, change)| {
+                    path.starts_with(work_dir) && change.git_dir_changed
+                })?;
                 let receiver = receiver.clone();
-                let repo_ptr = repo.repo_ptr.clone();
-                let repo_relative_path = repo_relative_path.to_owned();
+                let path = path.clone();
                 Some(async move {
-                    pump_loading_buffer_reciever(receiver)
+                    wait_for_loading_buffer(receiver)
                         .await
                         .ok()
-                        .map(|buffer| (buffer, repo_relative_path, repo_ptr))
+                        .map(|buffer| (buffer, path))
                 })
             })
-            .collect::<FuturesUnordered<_>>()
-            .filter_map(|result| async move {
-                let (buffer_handle, repo_relative_path, repo_ptr) = result?;
-
-                let lock = repo_ptr.lock();
-                lock.load_index_text(&repo_relative_path)
-                    .map(|diff_base| (diff_base, buffer_handle))
-            });
+            .collect::<FuturesUnordered<_>>();
+
+        // Identify the current buffers whose containing repository has changed.
+        let current_buffers = self
+            .opened_buffers
+            .values()
+            .filter_map(|buffer| {
+                let buffer = buffer.upgrade(cx)?;
+                let file = File::from_dyn(buffer.read(cx).file())?;
+                if file.worktree != worktree_handle {
+                    return None;
+                }
+                let path = file.path();
+                changed_repos.iter().find(|(work_dir, change)| {
+                    path.starts_with(work_dir) && change.git_dir_changed
+                })?;
+                Some((buffer, path.clone()))
+            })
+            .collect::<Vec<_>>();
 
-        let update_diff_base_fn = update_diff_base(self);
-        cx.spawn(|_, mut cx| async move {
-            let diff_base_tasks = cx
+        if future_buffers.len() + current_buffers.len() == 0 {
+            return;
+        }
+
+        let remote_id = self.remote_id();
+        let client = self.client.clone();
+        cx.spawn_weak(move |_, mut cx| async move {
+            // Wait for all of the buffers to load.
+            let future_buffers = future_buffers.collect::<Vec<_>>().await;
+
+            // Reload the diff base for every buffer whose containing git repository has changed.
+            let snapshot =
+                worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
+            let diff_bases_by_buffer = cx
                 .background()
-                .spawn(future_buffers.collect::<Vec<_>>())
+                .spawn(async move {
+                    future_buffers
+                        .into_iter()
+                        .filter_map(|e| e)
+                        .chain(current_buffers)
+                        .filter_map(|(buffer, path)| {
+                            let (work_directory, repo) =
+                                snapshot.repository_and_work_directory_for_path(&path)?;
+                            let repo = snapshot.get_local_repo(&repo)?;
+                            let relative_path = path.strip_prefix(&work_directory).ok()?;
+                            let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
+                            Some((buffer, base_text))
+                        })
+                        .collect::<Vec<_>>()
+                })
                 .await;
 
-            for (diff_base, buffer) in diff_base_tasks.into_iter() {
-                update_diff_base_fn(Some(diff_base), buffer, &mut cx);
+            // Assign the new diff bases on all of the buffers.
+            for (buffer, diff_base) in diff_bases_by_buffer {
+                let buffer_id = buffer.update(&mut cx, |buffer, cx| {
+                    buffer.set_diff_base(diff_base.clone(), cx);
+                    buffer.remote_id()
+                });
+                if let Some(project_id) = remote_id {
+                    client
+                        .send(proto::UpdateDiffBase {
+                            project_id,
+                            buffer_id,
+                            diff_base,
+                        })
+                        .log_err();
+                }
             }
         })
         .detach();
-
-        // And the current buffers
-        for (_, buffer) in &self.opened_buffers {
-            if let Some(buffer) = buffer.upgrade(cx) {
-                let file = match File::from_dyn(buffer.read(cx).file()) {
-                    Some(file) => file,
-                    None => continue,
-                };
-                if file.worktree != worktree_handle {
-                    continue;
-                }
-
-                let path = file.path().clone();
-                let worktree = worktree_handle.read(cx);
-                let (work_directory, repo) = match repos
-                    .iter()
-                    .find(|(work_directory, _)| path.starts_with(work_directory))
-                {
-                    Some(repo) => repo.clone(),
-                    None => continue,
-                };
-
-                let relative_repo = match path.strip_prefix(work_directory).log_err() {
-                    Some(relative_repo) => relative_repo.to_owned(),
-                    None => continue,
-                };
-
-                drop(worktree);
-
-                let update_diff_base_fn = update_diff_base(self);
-                let git_ptr = repo.repo_ptr.clone();
-                let diff_base_task = cx
-                    .background()
-                    .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
-
-                cx.spawn(|_, mut cx| async move {
-                    let diff_base = diff_base_task.await;
-                    update_diff_base_fn(diff_base, buffer, &mut cx);
-                })
-                .detach();
-            }
-        }
     }
pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) { pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
@ -5238,6 +5264,20 @@ impl Project {
Some(ProjectPath { worktree_id, path }) Some(ProjectPath { worktree_id, path })
} }
pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
let workspace_root = self
.worktree_for_id(project_path.worktree_id, cx)?
.read(cx)
.abs_path();
let project_path = project_path.path.as_ref();
Some(if project_path == Path::new("") {
workspace_root.to_path_buf()
} else {
workspace_root.join(project_path)
})
}
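A minimal usage sketch of the new helper (hypothetical call site; `project`, `project_path`, and `cx` are assumed to already exist in the surrounding code and are not part of this diff):

    // Resolve a ProjectPath to an absolute filesystem path; an empty relative
    // path resolves to the worktree root itself.
    if let Some(abs_path) = project.read(cx).absolute_path(&project_path, cx) {
        log::info!("resolved to {}", abs_path.display());
    }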
    // RPC message handlers
    async fn handle_unshare_project(
@ -5848,7 +5888,7 @@ impl Project {
        this.update(&mut cx, |this, cx| {
            let Some(guest_id) = envelope.original_sender_id else {
-                log::error!("missing original_sender_id on SynchronizeBuffers request");
+                error!("missing original_sender_id on SynchronizeBuffers request");
                return;
            };
@ -7072,7 +7112,7 @@ impl Item for Buffer {
    }
}

-async fn pump_loading_buffer_reciever(
+async fn wait_for_loading_buffer(
    mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
    loop {
@ -7085,26 +7125,3 @@ async fn pump_loading_buffer_reciever(
        receiver.next().await;
    }
}
fn update_diff_base(
project: &Project,
) -> impl Fn(Option<String>, ModelHandle<Buffer>, &mut AsyncAppContext) {
let remote_id = project.remote_id();
let client = project.client().clone();
move |diff_base, buffer, cx| {
let buffer_id = buffer.update(cx, |buffer, cx| {
buffer.set_diff_base(diff_base.clone(), cx);
buffer.remote_id()
});
if let Some(project_id) = remote_id {
client
.send(proto::UpdateDiffBase {
project_id,
buffer_id: buffer_id as u64,
diff_base,
})
.log_err();
}
}
}


@ -506,7 +506,9 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
-                                glob_pattern: "/the-root/*.{rs,c}".to_string(),
+                                glob_pattern: lsp::GlobPattern::String(
+                                    "/the-root/*.{rs,c}".to_string(),
+                                ),
                                kind: None,
                            }],
                        },
@ -1193,7 +1195,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
        .await;
}

-#[gpui::test]
+#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);
@ -1273,7 +1275,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
-    buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
@ -1352,6 +1354,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    });
    buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
@ -1444,6 +1447,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    });
    buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
@ -2524,29 +2528,21 @@ async fn test_rescan_and_remote_updates(
    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-    let remote = cx.update(|cx| {
-        Worktree::remote(
-            1,
-            1,
-            proto::WorktreeMetadata {
-                id: initial_snapshot.id().to_proto(),
-                root_name: initial_snapshot.root_name().into(),
-                abs_path: initial_snapshot
-                    .abs_path()
-                    .as_os_str()
-                    .to_string_lossy()
-                    .into(),
-                visible: true,
-            },
-            rpc.clone(),
-            cx,
-        )
-    });
-    remote.update(cx, |remote, _| {
-        let update = initial_snapshot.build_initial_update(1);
-        remote.as_remote_mut().unwrap().update_from_remote(update);
-    });
+    let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    tree.update(cx, |tree, cx| {
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
    deterministic.run_until_parked();
    cx.read(|cx| {
@ -2612,14 +2608,11 @@ async fn test_rescan_and_remote_updates(
    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
-    remote.update(cx, |remote, cx| {
-        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
-            &initial_snapshot,
-            1,
-            1,
-            true,
-        );
-        remote.as_remote_mut().unwrap().update_from_remote(update);
-    });
+    deterministic.run_until_parked();
+    remote.update(cx, |remote, _| {
+        for update in updates.lock().drain(..) {
+            remote.as_remote_mut().unwrap().update_from_remote(update);
+        }
+    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {

File diff suppressed because it is too large


@ -284,7 +284,7 @@ mod tests {
            symbol("uno", "/dir/test.rs"),
        ];
        let fake_server = fake_servers.next().await.unwrap();
-        fake_server.handle_request::<lsp::request::WorkspaceSymbol, _, _>(
+        fake_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(
            move |params: lsp::WorkspaceSymbolParams, cx| {
                let executor = cx.background();
                let fake_symbols = fake_symbols.clone();
@ -308,12 +308,12 @@ mod tests {
                    .await
                };
-                Ok(Some(
+                Ok(Some(lsp::WorkspaceSymbolResponse::Flat(
                    matches
                        .into_iter()
                        .map(|mat| fake_symbols[mat.candidate_id].clone())
                        .collect(),
-                ))
+                )))
            }
        },
    );


@ -44,11 +44,11 @@ struct ActiveSearches(HashMap<WeakModelHandle<Project>, WeakViewHandle<ProjectSe
pub fn init(cx: &mut AppContext) {
    cx.set_global(ActiveSearches::default());
    cx.add_action(ProjectSearchView::deploy);
+    cx.add_action(ProjectSearchView::move_focus_to_results);
    cx.add_action(ProjectSearchBar::search);
    cx.add_action(ProjectSearchBar::search_in_new);
    cx.add_action(ProjectSearchBar::select_next_match);
    cx.add_action(ProjectSearchBar::select_prev_match);
-    cx.add_action(ProjectSearchBar::move_focus_to_results);
    cx.capture_action(ProjectSearchBar::tab);
    cx.capture_action(ProjectSearchBar::tab_previous);
    add_toggle_option_action::<ToggleCaseSensitive>(SearchOption::CaseSensitive, cx);
@ -708,6 +708,23 @@ impl ProjectSearchView {
    pub fn has_matches(&self) -> bool {
        self.active_match_index.is_some()
    }
fn move_focus_to_results(pane: &mut Pane, _: &ToggleFocus, cx: &mut ViewContext<Pane>) {
if let Some(search_view) = pane
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
{
search_view.update(cx, |search_view, cx| {
if !search_view.results_editor.is_focused(cx)
&& !search_view.model.read(cx).match_ranges.is_empty()
{
return search_view.focus_results_editor(cx);
}
});
}
cx.propagate_action();
}
}

impl Default for ProjectSearchBar {
@ -785,23 +802,6 @@ impl ProjectSearchBar {
        }
    }
fn move_focus_to_results(pane: &mut Pane, _: &ToggleFocus, cx: &mut ViewContext<Pane>) {
if let Some(search_view) = pane
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
{
search_view.update(cx, |search_view, cx| {
if search_view.query_editor.is_focused(cx)
&& !search_view.model.read(cx).match_ranges.is_empty()
{
search_view.focus_results_editor(cx);
}
});
} else {
cx.propagate_action();
}
}
    fn tab(&mut self, _: &editor::Tab, cx: &mut ViewContext<Self>) {
        self.cycle_field(Direction::Next, cx);
    }
@ -1248,7 +1248,182 @@ pub mod tests {
        });
    }
#[gpui::test]
async fn test_project_search_focus(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
let active_item = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
});
assert!(
active_item.is_none(),
"Expected no search panel to be active, but got: {active_item:?}"
);
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
let Some(search_view) = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
}) else {
panic!("Search view expected to appear after new search event trigger")
};
let search_view_id = search_view.id();
cx.spawn(
|mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
)
.detach();
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert!(
search_view.query_editor.is_focused(cx),
"Empty search view should be focused after the toggle focus event: no results panel to focus on",
);
});
search_view.update(cx, |search_view, cx| {
let query_editor = &search_view.query_editor;
assert!(
query_editor.is_focused(cx),
"Search view should be focused after the new search view is activated",
);
let query_text = query_editor.read(cx).text(cx);
assert!(
query_text.is_empty(),
"New search query should be empty but got '{query_text}'",
);
let results_text = search_view
.results_editor
.update(cx, |editor, cx| editor.display_text(cx));
assert!(
results_text.is_empty(),
"Empty search view should have no results but got '{results_text}'"
);
});
search_view.update(cx, |search_view, cx| {
search_view.query_editor.update(cx, |query_editor, cx| {
query_editor.set_text("sOMETHINGtHATsURELYdOESnOTeXIST", cx)
});
search_view.search(cx);
});
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
let results_text = search_view
.results_editor
.update(cx, |editor, cx| editor.display_text(cx));
assert!(
results_text.is_empty(),
"Search view for mismatching query should have no results but got '{results_text}'"
);
assert!(
search_view.query_editor.is_focused(cx),
"Search view should be focused after mismatching query had been used in search",
);
});
cx.spawn(
|mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
)
.detach();
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert!(
search_view.query_editor.is_focused(cx),
"Search view with mismatching query should be focused after the toggle focus event: still no results panel to focus on",
);
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(cx);
});
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view
.results_editor
.update(cx, |editor, cx| editor.display_text(cx)),
"\n\nconst THREE: usize = one::ONE + two::TWO;\n\n\nconst TWO: usize = one::ONE + one::ONE;",
"Search view results should match the query"
);
assert!(
search_view.results_editor.is_focused(cx),
"Search view with mismatching query should be focused after search results are available",
);
});
cx.spawn(
|mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
)
.detach();
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert!(
search_view.results_editor.is_focused(cx),
"Search view with matching query should still have its results editor focused after the toggle focus event",
);
});
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "two", "Query should be updated to first search result after search view 2nd open in a row");
assert_eq!(
search_view
.results_editor
.update(cx, |editor, cx| editor.display_text(cx)),
"\n\nconst THREE: usize = one::ONE + two::TWO;\n\n\nconst TWO: usize = one::ONE + one::ONE;",
"Results should be unchanged after search view 2nd open in a row"
);
assert!(
search_view.query_editor.is_focused(cx),
"Focus should be moved into query editor again after search view 2nd open in a row"
);
});
cx.spawn(
|mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
)
.detach();
deterministic.run_until_parked();
search_view.update(cx, |search_view, cx| {
assert!(
search_view.results_editor.is_focused(cx),
"Search view with matching query should switch focus to the results editor after the toggle focus event",
);
});
}
    pub fn init_test(cx: &mut TestAppContext) {
+        cx.foreground().forbid_parking();
        let fonts = cx.font_cache();
        let mut theme = gpui::fonts::with_font_cache(fonts.clone(), theme::Theme::default);
        theme.search.match_background = Color::red();
@ -1266,9 +1441,10 @@ pub mod tests {
            language::init(cx);
            client::init_settings(cx);
-            editor::init_settings(cx);
+            editor::init(cx);
            workspace::init_settings(cx);
            Project::init_settings(cx);
+            super::init(cx);
        });
    }
}


@ -50,6 +50,7 @@ impl TerminalPanel {
        let window_id = cx.window_id();
        let mut pane = Pane::new(
            workspace.weak_handle(),
+            workspace.project().clone(),
            workspace.app_state().background_actions,
            Default::default(),
            cx,
@ -176,8 +177,9 @@ impl TerminalPanel {
                (panel, pane, items)
            })?;
+            let pane = pane.downgrade();
            let items = futures::future::join_all(items).await;
-            workspace.update(&mut cx, |workspace, cx| {
+            pane.update(&mut cx, |pane, cx| {
                let active_item_id = serialized_panel
                    .as_ref()
                    .and_then(|panel| panel.active_item_id);
@ -185,17 +187,15 @@ impl TerminalPanel {
                for item in items {
                    if let Some(item) = item.log_err() {
                        let item_id = item.id();
-                        Pane::add_item(workspace, &pane, Box::new(item), false, false, None, cx);
+                        pane.add_item(Box::new(item), false, false, None, cx);
                        if Some(item_id) == active_item_id {
-                            active_ix = Some(pane.read(cx).items_len() - 1);
+                            active_ix = Some(pane.items_len() - 1);
                        }
                    }
                }
                if let Some(active_ix) = active_ix {
-                    pane.update(cx, |pane, cx| {
-                        pane.activate_item(active_ix, false, false, cx)
-                    });
+                    pane.activate_item(active_ix, false, false, cx)
                }
            })?;
@ -240,8 +240,10 @@ impl TerminalPanel {
                    Box::new(cx.add_view(|cx| {
                        TerminalView::new(terminal, workspace.database_id(), cx)
                    }));
-                    let focus = pane.read(cx).has_focus();
-                    Pane::add_item(workspace, &pane, terminal, true, focus, None, cx);
+                    pane.update(cx, |pane, cx| {
+                        let focus = pane.has_focus();
+                        pane.add_item(terminal, true, focus, None, cx);
+                    });
                }
            })?;
            this.update(&mut cx, |this, cx| this.serialize(cx))?;

File diff suppressed because it is too large


@ -183,7 +183,7 @@ pub fn handle_dropped_item<V: View>(
            .zip(pane.upgrade(cx))
        {
            workspace.update(cx, |workspace, cx| {
-                Pane::move_item(workspace, from, to, item_id, index, cx);
+                workspace.move_item(from, to, item_id, index, cx);
            })
        }
    });


@ -1,5 +1,5 @@
use crate::{item::ItemHandle, ItemDeserializers, Member, Pane, PaneAxis, Workspace, WorkspaceId};
-use anyhow::{anyhow, Context, Result};
+use anyhow::{Context, Result};
use async_recursion::async_recursion;
use db::sqlez::{
    bindable::{Bind, Column, StaticColumnCount},
@ -230,7 +230,7 @@ impl SerializedPane {
    pub async fn deserialize_to(
        &self,
        project: &ModelHandle<Project>,
-        pane_handle: &WeakViewHandle<Pane>,
+        pane: &WeakViewHandle<Pane>,
        workspace_id: WorkspaceId,
        workspace: &WeakViewHandle<Workspace>,
        cx: &mut AsyncAppContext,
@ -239,7 +239,7 @@ impl SerializedPane {
        let mut active_item_index = None;
        for (index, item) in self.children.iter().enumerate() {
            let project = project.clone();
-            let item_handle = pane_handle
+            let item_handle = pane
                .update(cx, |_, cx| {
                    if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind) {
                        deserializer(project, workspace.clone(), workspace_id, item.item_id, cx)
@ -256,13 +256,9 @@ impl SerializedPane {
            items.push(item_handle.clone());
            if let Some(item_handle) = item_handle {
-                workspace.update(cx, |workspace, cx| {
-                    let pane_handle = pane_handle
-                        .upgrade(cx)
-                        .ok_or_else(|| anyhow!("pane was dropped"))?;
-                    Pane::add_item(workspace, &pane_handle, item_handle, true, true, None, cx);
-                    anyhow::Ok(())
-                })??;
+                pane.update(cx, |pane, cx| {
+                    pane.add_item(item_handle.clone(), true, true, None, cx);
+                })?;
            }
            if item.active {
@ -271,7 +267,7 @@ impl SerializedPane {
        }
        if let Some(active_item_index) = active_item_index {
-            pane_handle.update(cx, |pane, cx| {
+            pane.update(cx, |pane, cx| {
                pane.activate_item(active_item_index, false, false, cx);
            })?;
        }


@ -99,7 +99,7 @@ impl Item for SharedScreen {
        Some(format!("{}'s screen", self.user.github_login).into())
    }

    fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
-        if let Some(nav_history) = self.nav_history.as_ref() {
+        if let Some(nav_history) = self.nav_history.as_mut() {
            nav_history.push::<()>(None, cx);
        }
    }


@ -153,14 +153,13 @@ impl View for Toolbar {
                            let pane = pane.clone();
                            cx.window_context().defer(move |cx| {
                                workspace.update(cx, |workspace, cx| {
-                                    Pane::go_back(workspace, Some(pane.clone()), cx)
-                                        .detach_and_log_err(cx);
+                                    workspace.go_back(pane.clone(), cx).detach_and_log_err(cx);
                                });
                            })
                        }
                    }
                },
-                super::GoBack { pane: None },
+                super::GoBack,
                "Go Back",
                cx,
            ));
@ -182,14 +181,15 @@ impl View for Toolbar {
                            let pane = pane.clone();
                            cx.window_context().defer(move |cx| {
                                workspace.update(cx, |workspace, cx| {
-                                    Pane::go_forward(workspace, Some(pane.clone()), cx)
+                                    workspace
+                                        .go_forward(pane.clone(), cx)
                                        .detach_and_log_err(cx);
                                });
                            });
                        }
                    }
                },
-                super::GoForward { pane: None },
+                super::GoForward,
                "Go Forward",
                cx,
            ));


@ -278,6 +278,19 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
        workspace.toggle_dock(DockPosition::Bottom, action.focus, cx);
    });
    cx.add_action(Workspace::activate_pane_at_index);
cx.add_action(|workspace: &mut Workspace, _: &ReopenClosedItem, cx| {
workspace.reopen_closed_item(cx).detach();
});
cx.add_action(|workspace: &mut Workspace, _: &GoBack, cx| {
workspace
.go_back(workspace.active_pane().downgrade(), cx)
.detach();
});
cx.add_action(|workspace: &mut Workspace, _: &GoForward, cx| {
workspace
.go_forward(workspace.active_pane().downgrade(), cx)
.detach();
});
    cx.add_action(|_: &mut Workspace, _: &install_cli::Install, cx| {
        cx.spawn(|workspace, mut cx| async move {
@ -583,6 +596,7 @@ impl Workspace {
        let center_pane = cx.add_view(|cx| {
            Pane::new(
                weak_handle.clone(),
+                project.clone(),
                app_state.background_actions,
                pane_history_timestamp.clone(),
                cx,
@ -946,21 +960,30 @@ impl Workspace {
        &self,
        limit: Option<usize>,
        cx: &AppContext,
-    ) -> Vec<ProjectPath> {
-        let mut history: HashMap<ProjectPath, usize> = HashMap::default();
+    ) -> Vec<(ProjectPath, Option<PathBuf>)> {
+        let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
+        let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
        for pane in &self.panes {
            let pane = pane.read(cx);
            pane.nav_history()
-                .for_each_entry(cx, |entry, project_path| {
+                .for_each_entry(cx, |entry, (project_path, fs_path)| {
+                    if let Some(fs_path) = &fs_path {
+                        abs_paths_opened
+                            .entry(fs_path.clone())
+                            .or_default()
+                            .insert(project_path.clone());
+                    }
                    let timestamp = entry.timestamp;
                    match history.entry(project_path) {
                        hash_map::Entry::Occupied(mut entry) => {
-                            if &timestamp > entry.get() {
-                                entry.insert(timestamp);
+                            let (old_fs_path, old_timestamp) = entry.get();
+                            if &timestamp > old_timestamp {
+                                assert_eq!(&fs_path, old_fs_path, "Inconsistent nav history");
+                                entry.insert((fs_path, timestamp));
                            }
                        }
                        hash_map::Entry::Vacant(entry) => {
-                            entry.insert(timestamp);
+                            entry.insert((fs_path, timestamp));
                        }
                    }
                });
@ -968,13 +991,137 @@ impl Workspace {
        history
            .into_iter()
-            .sorted_by_key(|(_, timestamp)| *timestamp)
-            .map(|(project_path, _)| project_path)
+            .sorted_by_key(|(_, (_, timestamp))| *timestamp)
+            .map(|(project_path, (fs_path, _))| (project_path, fs_path))
            .rev()
+            .filter(|(history_path, abs_path)| {
+                let latest_project_path_opened = abs_path
+                    .as_ref()
+                    .and_then(|abs_path| abs_paths_opened.get(abs_path))
+                    .and_then(|project_paths| {
+                        project_paths
+                            .iter()
+                            .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
+                    });
+                match latest_project_path_opened {
+                    Some(latest_project_path_opened) => latest_project_path_opened == history_path,
+                    None => true,
+                }
+            })
            .take(limit.unwrap_or(usize::MAX))
            .collect()
    }
fn navigate_history(
&mut self,
pane: WeakViewHandle<Pane>,
mode: NavigationMode,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
let to_load = if let Some(pane) = pane.upgrade(cx) {
cx.focus(&pane);
pane.update(cx, |pane, cx| {
loop {
// Retrieve the weak item handle from the history.
let entry = pane.nav_history_mut().pop(mode, cx)?;
// If the item is still present in this pane, then activate it.
if let Some(index) = entry
.item
.upgrade(cx)
.and_then(|v| pane.index_for_item(v.as_ref()))
{
let prev_active_item_index = pane.active_item_index();
pane.nav_history_mut().set_mode(mode);
pane.activate_item(index, true, true, cx);
pane.nav_history_mut().set_mode(NavigationMode::Normal);
let mut navigated = prev_active_item_index != pane.active_item_index();
if let Some(data) = entry.data {
navigated |= pane.active_item()?.navigate(data, cx);
}
if navigated {
break None;
}
}
// If the item is no longer present in this pane, then retrieve its
// project path in order to reopen it.
else {
break pane
.nav_history()
.path_for_item(entry.item.id())
.map(|(project_path, _)| (project_path, entry));
}
}
})
} else {
None
};
if let Some((project_path, entry)) = to_load {
// If the item was no longer present, then load it again from its previous path.
let task = self.load_path(project_path, cx);
cx.spawn(|workspace, mut cx| async move {
let task = task.await;
let mut navigated = false;
if let Some((project_entry_id, build_item)) = task.log_err() {
let prev_active_item_id = pane.update(&mut cx, |pane, _| {
pane.nav_history_mut().set_mode(mode);
pane.active_item().map(|p| p.id())
})?;
pane.update(&mut cx, |pane, cx| {
let item = pane.open_item(project_entry_id, true, cx, build_item);
navigated |= Some(item.id()) != prev_active_item_id;
pane.nav_history_mut().set_mode(NavigationMode::Normal);
if let Some(data) = entry.data {
navigated |= item.navigate(data, cx);
}
})?;
}
if !navigated {
workspace
.update(&mut cx, |workspace, cx| {
Self::navigate_history(workspace, pane, mode, cx)
})?
.await?;
}
Ok(())
})
} else {
Task::ready(Ok(()))
}
}
pub fn go_back(
&mut self,
pane: WeakViewHandle<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
self.navigate_history(pane, NavigationMode::GoingBack, cx)
}
pub fn go_forward(
&mut self,
pane: WeakViewHandle<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
self.navigate_history(pane, NavigationMode::GoingForward, cx)
}
pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
self.navigate_history(
self.active_pane().downgrade(),
NavigationMode::ReopeningClosedItem,
cx,
)
}
    pub fn client(&self) -> &Client {
        &self.app_state.client
    }
@ -1548,6 +1695,7 @@ impl Workspace {
        let pane = cx.add_view(|cx| {
            Pane::new(
                self.weak_handle(),
+                self.project.clone(),
                self.app_state.background_actions,
                self.pane_history_timestamp.clone(),
                cx,
@ -1567,7 +1715,7 @@ impl Workspace {
    ) -> bool {
        if let Some(center_pane) = self.last_active_center_pane.clone() {
            if let Some(center_pane) = center_pane.upgrade(cx) {
-                Pane::add_item(self, &center_pane, item, true, true, None, cx);
+                center_pane.update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
                true
            } else {
                false
@ -1578,8 +1726,38 @@ impl Workspace {
    }

    pub fn add_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
-        let active_pane = self.active_pane().clone();
-        Pane::add_item(self, &active_pane, item, true, true, None, cx);
+        self.active_pane
+            .update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
    }
pub fn open_abs_path(
&mut self,
abs_path: PathBuf,
visible: bool,
cx: &mut ViewContext<Self>,
) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
cx.spawn(|workspace, mut cx| async move {
let open_paths_task_result = workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![abs_path.clone()], visible, cx)
})
.with_context(|| format!("open abs path {abs_path:?} task spawn"))?
.await;
anyhow::ensure!(
open_paths_task_result.len() == 1,
"open abs path {abs_path:?} task returned incorrect number of results"
);
match open_paths_task_result
.into_iter()
.next()
.expect("ensured single task result")
{
Some(open_result) => {
open_result.with_context(|| format!("open abs path {abs_path:?} task join"))
}
None => anyhow::bail!("open abs path {abs_path:?} task returned None"),
}
})
    }
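For illustration, a hedged sketch of driving the new `open_abs_path` helper from inside a workspace update; the path, the `visible` flag value, and the error handling here are assumptions, not part of this change:

    // Open a file by absolute path; `visible` controls whether its worktree is
    // added as a visible entry. The returned task yields the opened item handle.
    let open_task = workspace.open_abs_path(PathBuf::from("/tmp/example.rs"), false, cx);
    cx.spawn(|_, _| async move {
        open_task.await.log_err();
    })
    .detach();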
    pub fn open_path(
@ -1599,13 +1777,10 @@ impl Workspace {
        });
        let task = self.load_path(path.into(), cx);
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(|_, mut cx| async move {
            let (project_entry_id, build_item) = task.await?;
-            let pane = pane
-                .upgrade(&cx)
-                .ok_or_else(|| anyhow!("pane was closed"))?;
-            this.update(&mut cx, |this, cx| {
-                Pane::open_item(this, pane, project_entry_id, focus_item, cx, build_item)
+            pane.update(&mut cx, |pane, cx| {
+                pane.open_item(project_entry_id, focus_item, cx, build_item)
            })
        })
    }
@ -1662,8 +1837,9 @@ impl Workspace {
    pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) {
        if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) {
-            let pane = self.active_pane.clone();
-            Pane::add_item(self, &pane, Box::new(shared_screen), false, true, None, cx);
+            self.active_pane.update(cx, |pane, cx| {
+                pane.add_item(Box::new(shared_screen), false, true, None, cx)
+            });
        }
    }
@ -1750,6 +1926,7 @@ impl Workspace {
        cx: &mut ViewContext<Self>,
    ) {
        match event {
+            pane::Event::AddItem { item } => item.added_to_pane(self, pane, cx),
            pane::Event::Split(direction) => {
                self.split_pane(pane, *direction, cx);
            }
@ -1804,7 +1981,7 @@ impl Workspace {
        let item = pane.read(cx).active_item()?;
        let maybe_pane_handle = if let Some(clone) = item.clone_on_split(self.database_id(), cx) {
            let new_pane = self.add_pane(cx);
-            Pane::add_item(self, &new_pane, clone, true, true, None, cx);
+            new_pane.update(cx, |pane, cx| pane.add_item(clone, true, true, None, cx));
            self.center.split(&pane, &new_pane, direction).unwrap();
            Some(new_pane)
        } else {
@ -1826,7 +2003,7 @@ impl Workspace {
        let Some(from) = from.upgrade(cx) else { return; };
        let new_pane = self.add_pane(cx);
-        Pane::move_item(self, from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
+        self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
        self.center
            .split(&pane_to_split, &new_pane, split_direction)
            .unwrap();
@ -1854,6 +2031,41 @@ impl Workspace {
        }))
    }
pub fn move_item(
&mut self,
source: ViewHandle<Pane>,
destination: ViewHandle<Pane>,
item_id_to_move: usize,
destination_index: usize,
cx: &mut ViewContext<Self>,
) {
let item_to_move = source
.read(cx)
.items()
.enumerate()
.find(|(_, item_handle)| item_handle.id() == item_id_to_move);
if item_to_move.is_none() {
log::warn!("Tried to move item handle which was not in `from` pane. Maybe tab was closed during drop");
return;
}
let (item_ix, item_handle) = item_to_move.unwrap();
let item_handle = item_handle.clone();
if source != destination {
// Close item from previous pane
source.update(cx, |source, cx| {
source.remove_item(item_ix, false, cx);
});
}
// This automatically removes duplicate items in the pane
destination.update(cx, |destination, cx| {
destination.add_item(item_handle, true, true, Some(destination_index), cx);
cx.focus_self();
});
}
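A short sketch of how the new `Workspace::move_item` entry point might be called from a drop handler; `source_pane`, `target_pane`, and `dragged_item_id` are hypothetical names standing in for values the caller would already have:

    // Move the dragged item to the front of the target pane; add_item collapses
    // any duplicate entry already present in the destination.
    workspace.move_item(source_pane.clone(), target_pane.clone(), dragged_item_id, 0, cx);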
    fn remove_pane(&mut self, pane: ViewHandle<Pane>, cx: &mut ViewContext<Self>) {
        if self.center.remove(&pane).unwrap() {
            self.force_remove_pane(&pane, cx);
@ -2457,7 +2669,9 @@ impl Workspace {
        if let Some(index) = pane.update(cx, |pane, _| pane.index_for_item(item.as_ref())) {
            pane.update(cx, |pane, cx| pane.activate_item(index, false, false, cx));
        } else {
-            Pane::add_item(self, &pane, item.boxed_clone(), false, false, None, cx);
+            pane.update(cx, |pane, cx| {
+                pane.add_item(item.boxed_clone(), false, false, None, cx)
+            });
        }
        if pane_was_focused {
@ -3946,9 +4160,7 @@ mod tests {
        });
        workspace
-            .update(cx, |workspace, cx| {
-                Pane::go_back(workspace, Some(pane.downgrade()), cx)
-            })
+            .update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
            .await
            .unwrap();


@ -120,8 +120,8 @@ pub fn menus() -> Vec<Menu<'static>> {
        Menu {
            name: "Go",
            items: vec![
-                MenuItem::action("Back", workspace::GoBack { pane: None }),
-                MenuItem::action("Forward", workspace::GoForward { pane: None }),
+                MenuItem::action("Back", workspace::GoBack),
+                MenuItem::action("Forward", workspace::GoForward),
                MenuItem::separator(),
                MenuItem::action("Go to File", file_finder::Toggle),
                MenuItem::action("Go to Symbol in Project", project_symbols::Toggle),


@ -667,7 +667,7 @@ mod tests {
    use util::http::FakeHttpClient;
    use workspace::{
        item::{Item, ItemHandle},
-        open_new, open_paths, pane, NewFile, Pane, SplitDirection, WorkspaceHandle,
+        open_new, open_paths, pane, NewFile, SplitDirection, WorkspaceHandle,
    };

    #[gpui::test]
@ -1492,7 +1492,7 @@ mod tests {
        );
        workspace
-            .update(cx, |w, cx| Pane::go_back(w, None, cx))
+            .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
            .await
            .unwrap();
        assert_eq!(
@ -1501,7 +1501,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1510,7 +1510,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1519,7 +1519,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1529,7 +1529,7 @@ mod tests {
// Go back one more time and ensure we don't navigate past the first item in the history. // Go back one more time and ensure we don't navigate past the first item in the history.
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1538,7 +1538,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_forward(w, None, cx)) .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1547,7 +1547,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_forward(w, None, cx)) .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1565,7 +1565,7 @@ mod tests {
.await .await
.unwrap(); .unwrap();
workspace workspace
.update(cx, |w, cx| Pane::go_forward(w, None, cx)) .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1574,7 +1574,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_forward(w, None, cx)) .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1583,7 +1583,7 @@ mod tests {
); );
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1605,7 +1605,7 @@ mod tests {
.await .await
.unwrap(); .unwrap();
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1613,7 +1613,7 @@ mod tests {
(file1.clone(), DisplayPoint::new(10, 0), 0.) (file1.clone(), DisplayPoint::new(10, 0), 0.)
); );
workspace workspace
.update(cx, |w, cx| Pane::go_forward(w, None, cx)) .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1657,7 +1657,7 @@ mod tests {
}) })
}); });
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1665,7 +1665,7 @@ mod tests {
(file1.clone(), DisplayPoint::new(2, 0), 0.) (file1.clone(), DisplayPoint::new(2, 0), 0.)
); );
workspace workspace
.update(cx, |w, cx| Pane::go_back(w, None, cx)) .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await .await
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
@ -1770,81 +1770,97 @@ mod tests {
        // Reopen all the closed items, ensuring they are reopened in the same order
        // in which they were closed.
        workspace
-            .update(cx, Pane::reopen_closed_item)
+            .update(cx, Workspace::reopen_closed_item)
            .await
            .unwrap();
        assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace workspace
.update(cx, Pane::reopen_closed_item) .update(cx, Workspace::reopen_closed_item)
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone())); assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace workspace
.update(cx, Pane::reopen_closed_item) .update(cx, Workspace::reopen_closed_item)
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone())); assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace workspace
.update(cx, Pane::reopen_closed_item) .update(cx, Workspace::reopen_closed_item)
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone())); assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
// Reopening past the last closed item is a no-op. // Reopening past the last closed item is a no-op.
workspace workspace
.update(cx, Pane::reopen_closed_item) .update(cx, Workspace::reopen_closed_item)
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone())); assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
// Reopening closed items doesn't interfere with navigation history. // Reopening closed items doesn't interfere with navigation history.
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone())); assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone())); assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file3.clone())); assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone())); assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file3.clone())); assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone())); assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone())); assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
workspace workspace
.update(cx, |workspace, cx| Pane::go_back(workspace, None, cx)) .update(cx, |workspace, cx| {
workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await .await
.unwrap(); .unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone())); assert_eq!(active_path(&workspace, cx), Some(file1.clone()));