Enable manual worktree organization (#11504)

Release Notes:

- Preserve order of worktrees in project
([#10883](https://github.com/zed-industries/zed/issues/10883)).
- Enable drag-and-drop reordering for project worktrees

Note: worktree order is not synced during collaboration but guests can
reorder their own project panels.

![Reordering
worktrees](https://github.com/zed-industries/zed/assets/1347854/1c63d83c-5d4e-4b55-b840-bfbf32521b2a)

---------

Co-authored-by: Kirill Bulatov <kirill@zed.dev>
This commit is contained in:
Elliot Thomas 2024-05-24 10:15:48 +01:00 committed by GitHub
parent 1e5389a2be
commit b9697fb487
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 479 additions and 54 deletions

View file

@ -155,6 +155,7 @@ pub enum OpenedBufferEvent {
/// Can be either local (for the project opened on the same host) or remote (for collab projects, browsed by multiple remote users).
pub struct Project {
worktrees: Vec<WorktreeHandle>,
worktrees_reordered: bool,
active_entry: Option<ProjectEntryId>,
buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
pending_language_server_update: Option<BufferOrderedMessage>,
@ -312,6 +313,7 @@ pub enum Event {
ActiveEntryChanged(Option<ProjectEntryId>),
ActivateProjectPanel,
WorktreeAdded,
WorktreeOrderChanged,
WorktreeRemoved(WorktreeId),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
WorktreeUpdatedGitRepositories,
@ -692,6 +694,7 @@ impl Project {
Self {
worktrees: Vec::new(),
worktrees_reordered: false,
buffer_ordered_messages_tx: tx,
flush_language_server_update: None,
pending_language_server_update: None,
@ -825,6 +828,7 @@ impl Project {
.detach();
let mut this = Self {
worktrees: Vec::new(),
worktrees_reordered: false,
buffer_ordered_messages_tx: tx,
pending_language_server_update: None,
flush_language_server_update: None,
@ -1289,6 +1293,10 @@ impl Project {
self.collaborators.values().find(|c| c.replica_id == 0)
}
/// Marks this project's worktree order as manually managed.
///
/// Once set, newly added worktrees are appended to the end of the
/// worktree list instead of being inserted in path-sorted order.
pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
self.worktrees_reordered = worktrees_reordered;
}
/// Collect all worktrees, including ones that don't appear in the project panel
pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
self.worktrees
@ -1296,20 +1304,13 @@ impl Project {
.filter_map(move |worktree| worktree.upgrade())
}
/// Collect all user-visible worktrees, the ones that appear in the project panel
/// Collect all user-visible worktrees, the ones that appear in the project panel.
pub fn visible_worktrees<'a>(
&'a self,
cx: &'a AppContext,
) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
self.worktrees.iter().filter_map(|worktree| {
worktree.upgrade().and_then(|worktree| {
if worktree.read(cx).is_visible() {
Some(worktree)
} else {
None
}
})
})
self.worktrees()
.filter(|worktree| worktree.read(cx).is_visible())
}
pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
@ -1340,6 +1341,18 @@ impl Project {
.map(|worktree| worktree.read(cx).id())
}
/// Checks if the entry is the root of a worktree.
///
/// Returns `false` when the entry does not belong to any worktree, or
/// when its worktree has no root entry.
pub fn entry_is_worktree_root(&self, entry_id: ProjectEntryId, cx: &AppContext) -> bool {
    match self.worktree_for_entry(entry_id, cx) {
        Some(worktree) => worktree
            .read(cx)
            .root_entry()
            .map_or(false, |root| root.id == entry_id),
        None => false,
    }
}
pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
paths
.iter()
@ -7204,6 +7217,67 @@ impl Project {
})
}
/// Move a worktree to a new position in the worktree order.
///
/// The worktree will be moved to the opposite side of the destination worktree.
///
/// # Example
///
/// Given the worktree order `[11, 22, 33]` and a call to move worktree `22` to `33`,
/// worktree_order will be updated to produce the indexes `[11, 33, 22]`.
///
/// Given the worktree order `[11, 22, 33]` and a call to move worktree `22` to `11`,
/// worktree_order will be updated to produce the indexes `[22, 11, 33]`.
///
/// # Errors
///
/// An error will be returned if the worktree or destination worktree are not found.
pub fn move_worktree(
    &mut self,
    source: WorktreeId,
    destination: WorktreeId,
    cx: &mut ModelContext<'_, Self>,
) -> Result<()> {
    if source == destination {
        return Ok(());
    }

    // Locate both worktrees in a single pass over the handle list.
    let mut source_index = None;
    let mut destination_index = None;
    for (index, handle) in self.worktrees.iter().enumerate() {
        let Some(worktree) = handle.upgrade() else {
            continue;
        };
        let worktree_id = worktree.read(cx).id();
        if worktree_id == source {
            source_index = Some(index);
        } else if worktree_id == destination {
            destination_index = Some(index);
        }
        // Stop scanning as soon as both positions are known.
        if source_index.is_some() && destination_index.is_some() {
            break;
        }
    }

    let source_index =
        source_index.with_context(|| format!("Missing worktree for id {source}"))?;
    let destination_index =
        destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;

    if source_index == destination_index {
        return Ok(());
    }

    // Removing first and then inserting at the destination's index places
    // the moved worktree on the opposite side of the destination worktree.
    let worktree_to_move = self.worktrees.remove(source_index);
    self.worktrees.insert(destination_index, worktree_to_move);
    self.worktrees_reordered = true;
    cx.emit(Event::WorktreeOrderChanged);
    cx.notify();
    Ok(())
}
pub fn find_or_create_local_worktree(
&mut self,
abs_path: impl AsRef<Path>,
@ -7372,6 +7446,7 @@ impl Project {
false
}
});
self.metadata_changed(cx);
}
@ -7411,12 +7486,22 @@ impl Project {
let worktree = worktree.read(cx);
self.is_shared() || worktree.is_visible() || worktree.is_remote()
};
if push_strong_handle {
self.worktrees
.push(WorktreeHandle::Strong(worktree.clone()));
let handle = if push_strong_handle {
WorktreeHandle::Strong(worktree.clone())
} else {
self.worktrees
.push(WorktreeHandle::Weak(worktree.downgrade()));
WorktreeHandle::Weak(worktree.downgrade())
};
if self.worktrees_reordered {
self.worktrees.push(handle);
} else {
let i = match self
.worktrees
.binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
other.upgrade().map(|worktree| worktree.read(cx).abs_path())
}) {
Ok(i) | Err(i) => i,
};
self.worktrees.insert(i, handle);
}
let handle_id = worktree.entity_id();

View file

@ -2409,7 +2409,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
assert_eq!(
list_worktrees(&project, cx),
[("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
[("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
);
drop(definition);
@ -4909,6 +4909,204 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
);
}
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/dir",
json!({
"a.rs": "let a = 1;",
"b.rs": "let b = 2;",
"c.rs": "let c = 2;",
}),
)
.await;
let project = Project::test(
fs,
[
"/dir/a.rs".as_ref(),
"/dir/b.rs".as_ref(),
"/dir/c.rs".as_ref(),
],
cx,
)
.await;
// check the initial state and get the worktrees
let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let worktree_a = worktrees[0].read(cx);
let worktree_b = worktrees[1].read(cx);
let worktree_c = worktrees[2].read(cx);
// check they start in the right order
assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
(
worktrees[0].clone(),
worktrees[1].clone(),
worktrees[2].clone(),
)
});
// move first worktree to after the second
// [a, b, c] -> [b, a, c]
project
.update(cx, |project, cx| {
let first = worktree_a.read(cx);
let second = worktree_b.read(cx);
project.move_worktree(first.id(), second.id(), cx)
})
.expect("moving first after second");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
});
// move the second worktree to before the first
// [b, a, c] -> [a, b, c]
project
.update(cx, |project, cx| {
let second = worktree_a.read(cx);
let first = worktree_b.read(cx);
project.move_worktree(first.id(), second.id(), cx)
})
.expect("moving second before first");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
});
// move the second worktree to after the third
// [a, b, c] -> [a, c, b]
project
.update(cx, |project, cx| {
let second = worktree_b.read(cx);
let third = worktree_c.read(cx);
project.move_worktree(second.id(), third.id(), cx)
})
.expect("moving second after third");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
});
// move the third worktree to before the second
// [a, c, b] -> [a, b, c]
project
.update(cx, |project, cx| {
let third = worktree_c.read(cx);
let second = worktree_b.read(cx);
project.move_worktree(third.id(), second.id(), cx)
})
.expect("moving third before second");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
});
// move the first worktree to after the third
// [a, b, c] -> [b, c, a]
project
.update(cx, |project, cx| {
let first = worktree_a.read(cx);
let third = worktree_c.read(cx);
project.move_worktree(first.id(), third.id(), cx)
})
.expect("moving first after third");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
});
// move the third worktree to before the first
// [b, c, a] -> [a, b, c]
project
.update(cx, |project, cx| {
let third = worktree_a.read(cx);
let first = worktree_b.read(cx);
project.move_worktree(third.id(), first.id(), cx)
})
.expect("moving third before first");
// check the state after moving
project.update(cx, |project, cx| {
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 3);
let first = worktrees[0].read(cx);
let second = worktrees[1].read(cx);
let third = worktrees[2].read(cx);
// check they are now in the right order
assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
});
}
async fn search(
project: &Model<Project>,
query: SearchQuery,

View file

@ -210,7 +210,9 @@ impl ProjectPanel {
this.update_visible_entries(None, cx);
cx.notify();
}
project::Event::WorktreeUpdatedEntries(_, _) | project::Event::WorktreeAdded => {
project::Event::WorktreeUpdatedEntries(_, _)
| project::Event::WorktreeAdded
| project::Event::WorktreeOrderChanged => {
this.update_visible_entries(None, cx);
cx.notify();
}
@ -1239,6 +1241,48 @@ impl ProjectPanel {
destination: ProjectEntryId,
destination_is_file: bool,
cx: &mut ViewContext<Self>,
) {
if self
.project
.read(cx)
.entry_is_worktree_root(entry_to_move, cx)
{
self.move_worktree_root(entry_to_move, destination, cx)
} else {
self.move_worktree_entry(entry_to_move, destination, destination_is_file, cx)
}
}
/// Reorders the project's worktrees so that the worktree containing
/// `entry_to_move` takes the position of the worktree containing
/// `destination`.
///
/// Silently does nothing if either entry cannot be resolved to a
/// worktree; a failed move is logged rather than surfaced to the user.
fn move_worktree_root(
    &mut self,
    entry_to_move: ProjectEntryId,
    destination: ProjectEntryId,
    cx: &mut ViewContext<Self>,
) {
    self.project.update(cx, |project, cx| {
        let Some(worktree_to_move) = project.worktree_for_entry(entry_to_move, cx) else {
            return;
        };
        let Some(destination_worktree) = project.worktree_for_entry(destination, cx) else {
            return;
        };

        let worktree_id = worktree_to_move.read(cx).id();
        let destination_id = destination_worktree.read(cx).id();

        project
            .move_worktree(worktree_id, destination_id, cx)
            .log_err();
    });
}
fn move_worktree_entry(
&mut self,
entry_to_move: ProjectEntryId,
destination: ProjectEntryId,
destination_is_file: bool,
cx: &mut ViewContext<Self>,
) {
let destination_worktree = self.project.update(cx, |project, cx| {
let entry_path = project.path_for_entry(entry_to_move, cx)?;

View file

@ -231,7 +231,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.enumerate()
.map(|(id, (_, location))| {
let combined_string = match location {
SerializedWorkspaceLocation::Local(paths) => paths
SerializedWorkspaceLocation::Local(paths, _) => paths
.paths()
.iter()
.map(|path| path.compact().to_string_lossy().into_owned())
@ -291,7 +291,7 @@ impl PickerDelegate for RecentProjectsDelegate {
Task::ready(Ok(()))
} else {
match candidate_workspace_location {
SerializedWorkspaceLocation::Local(paths) => {
SerializedWorkspaceLocation::Local(paths, _) => {
let paths = paths.paths().as_ref().clone();
if replace_current_window {
cx.spawn(move |workspace, mut cx| async move {
@ -413,7 +413,7 @@ impl PickerDelegate for RecentProjectsDelegate {
let mut path_start_offset = 0;
let paths = match location {
SerializedWorkspaceLocation::Local(paths) => paths.paths(),
SerializedWorkspaceLocation::Local(paths, _) => paths.paths(),
SerializedWorkspaceLocation::DevServer(dev_server_project) => {
Arc::new(vec![PathBuf::from(format!(
"{}:{}",

View file

@ -22,7 +22,9 @@ use model::{
SerializedWorkspace,
};
use self::model::{DockStructure, SerializedDevServerProject, SerializedWorkspaceLocation};
use self::model::{
DockStructure, LocalPathsOrder, SerializedDevServerProject, SerializedWorkspaceLocation,
};
#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) struct SerializedAxis(pub(crate) gpui::Axis);
@ -176,6 +178,7 @@ define_connection! {
// workspaces(
// workspace_id: usize, // Primary key for workspaces
// local_paths: Bincode<Vec<PathBuf>>,
// local_paths_order: Bincode<Vec<usize>>,
// dock_visible: bool, // Deprecated
// dock_anchor: DockAnchor, // Deprecated
// dock_pane: Option<usize>, // Deprecated
@ -360,6 +363,9 @@ define_connection! {
ALTER TABLE workspaces DROP COLUMN remote_project_id;
ALTER TABLE workspaces ADD COLUMN dev_server_project_id INTEGER;
),
sql!(
ALTER TABLE workspaces ADD COLUMN local_paths_order BLOB;
),
];
}
@ -378,6 +384,7 @@ impl WorkspaceDb {
let (
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_bounds,
display,
@ -386,6 +393,7 @@ impl WorkspaceDb {
): (
WorkspaceId,
Option<LocalPaths>,
Option<LocalPathsOrder>,
Option<u64>,
Option<SerializedWindowBounds>,
Option<Uuid>,
@ -396,6 +404,7 @@ impl WorkspaceDb {
SELECT
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_state,
window_x,
@ -434,7 +443,13 @@ impl WorkspaceDb {
.flatten()?;
SerializedWorkspaceLocation::DevServer(dev_server_project)
} else if let Some(local_paths) = local_paths {
SerializedWorkspaceLocation::Local(local_paths)
match local_paths_order {
Some(order) => SerializedWorkspaceLocation::Local(local_paths, order),
None => {
let order = LocalPathsOrder::default_for_paths(&local_paths);
SerializedWorkspaceLocation::Local(local_paths, order)
}
}
} else {
return None;
};
@ -465,7 +480,7 @@ impl WorkspaceDb {
.context("Clearing old panes")?;
match workspace.location {
SerializedWorkspaceLocation::Local(local_paths) => {
SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => {
conn.exec_bound(sql!(
DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ?
))?((&local_paths, workspace.id))
@ -476,6 +491,7 @@ impl WorkspaceDb {
INSERT INTO workspaces(
workspace_id,
local_paths,
local_paths_order,
left_dock_visible,
left_dock_active_panel,
left_dock_zoom,
@ -487,21 +503,22 @@ impl WorkspaceDb {
bottom_dock_zoom,
timestamp
)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, CURRENT_TIMESTAMP)
ON CONFLICT DO
UPDATE SET
local_paths = ?2,
left_dock_visible = ?3,
left_dock_active_panel = ?4,
left_dock_zoom = ?5,
right_dock_visible = ?6,
right_dock_active_panel = ?7,
right_dock_zoom = ?8,
bottom_dock_visible = ?9,
bottom_dock_active_panel = ?10,
bottom_dock_zoom = ?11,
local_paths_order = ?3,
left_dock_visible = ?4,
left_dock_active_panel = ?5,
left_dock_zoom = ?6,
right_dock_visible = ?7,
right_dock_active_panel = ?8,
right_dock_zoom = ?9,
bottom_dock_visible = ?10,
bottom_dock_active_panel = ?11,
bottom_dock_zoom = ?12,
timestamp = CURRENT_TIMESTAMP
))?((workspace.id, &local_paths, workspace.docks))
))?((workspace.id, &local_paths, &local_paths_order, workspace.docks))
.context("Updating workspace")?;
}
SerializedWorkspaceLocation::DevServer(dev_server_project) => {
@ -676,7 +693,7 @@ impl WorkspaceDb {
.await?
.into_iter()
.filter_map(|(_, location)| match location {
SerializedWorkspaceLocation::Local(local_paths) => Some(local_paths),
SerializedWorkspaceLocation::Local(local_paths, _) => Some(local_paths),
SerializedWorkspaceLocation::DevServer(_) => None,
})
.next())
@ -1080,7 +1097,10 @@ mod tests {
let workspace = SerializedWorkspace {
id: WorkspaceId(5),
location: LocalPaths::new(["/tmp", "/tmp2"]).into(),
location: SerializedWorkspaceLocation::Local(
LocalPaths::new(["/tmp", "/tmp2"]),
LocalPathsOrder::new([1, 0]),
),
center_group,
window_bounds: Default::default(),
display: Default::default(),
@ -1089,8 +1109,8 @@ mod tests {
};
db.save_workspace(workspace.clone()).await;
let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
assert_eq!(workspace, round_trip_workspace.unwrap());
// Test guaranteed duplicate IDs
@ -1109,7 +1129,10 @@ mod tests {
let workspace_1 = SerializedWorkspace {
id: WorkspaceId(1),
location: LocalPaths::new(["/tmp", "/tmp2"]).into(),
location: SerializedWorkspaceLocation::Local(
LocalPaths::new(["/tmp", "/tmp2"]),
LocalPathsOrder::new([0, 1]),
),
center_group: Default::default(),
window_bounds: Default::default(),
display: Default::default(),
@ -1156,7 +1179,10 @@ mod tests {
// Test other mechanism for mutating
let mut workspace_3 = SerializedWorkspace {
id: WorkspaceId(3),
location: LocalPaths::new(&["/tmp", "/tmp2"]).into(),
location: SerializedWorkspaceLocation::Local(
LocalPaths::new(&["/tmp", "/tmp2"]),
LocalPathsOrder::new([1, 0]),
),
center_group: Default::default(),
window_bounds: Default::default(),
display: Default::default(),

View file

@ -33,6 +33,8 @@ impl LocalPaths {
.into_iter()
.map(|p| p.as_ref().to_path_buf())
.collect();
// Ensure all future `zed workspace1 workspace2` and `zed workspace2 workspace1` calls are using the same workspace.
// The actual workspace order is stored in the `LocalPathsOrder` struct.
paths.sort();
Self(Arc::new(paths))
}
@ -44,7 +46,8 @@ impl LocalPaths {
impl From<LocalPaths> for SerializedWorkspaceLocation {
fn from(local_paths: LocalPaths) -> Self {
Self::Local(local_paths)
let order = LocalPathsOrder::default_for_paths(&local_paths);
Self::Local(local_paths, order)
}
}
@ -68,6 +71,43 @@ impl Column for LocalPaths {
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct LocalPathsOrder(Vec<usize>);

impl LocalPathsOrder {
    /// Builds an order from an explicit sequence of indices into a
    /// `LocalPaths` list.
    pub fn new(order: impl IntoIterator<Item = usize>) -> Self {
        Self(order.into_iter().collect())
    }

    /// Returns the display order as indices into the associated `LocalPaths`.
    pub fn order(&self) -> &[usize] {
        &self.0
    }

    /// Returns the identity order `0..len`, used when no explicit order
    /// has been stored for `paths`.
    pub fn default_for_paths(paths: &LocalPaths) -> Self {
        Self::new(0..paths.0.len())
    }
}
// The order occupies a single SQL column: a bincode-serialized Vec<usize> blob.
impl StaticColumnCount for LocalPathsOrder {}
// Serializes the index list with bincode and binds it as a blob parameter.
impl Bind for &LocalPathsOrder {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
statement.bind(&bincode::serialize(&self.0)?, start_index)
}
}
// Reads the blob back from the statement. An empty blob (e.g. a NULL
// column on rows that predate the local_paths_order migration) decodes
// to an empty order rather than a deserialization error.
impl Column for LocalPathsOrder {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let order_blob = statement.column_blob(start_index)?;
let order = if order_blob.is_empty() {
Vec::new()
} else {
bincode::deserialize(order_blob).context("deserializing workspace root order")?
};
Ok((Self(order), start_index + 1))
}
}
impl From<SerializedDevServerProject> for SerializedWorkspaceLocation {
fn from(dev_server_project: SerializedDevServerProject) -> Self {
Self::DevServer(dev_server_project)
@ -101,7 +141,7 @@ impl Column for SerializedDevServerProject {
#[derive(Debug, PartialEq, Clone)]
pub enum SerializedWorkspaceLocation {
Local(LocalPaths),
Local(LocalPaths, LocalPathsOrder),
DevServer(SerializedDevServerProject),
}

View file

@ -90,11 +90,11 @@ pub use workspace_settings::{
AutosaveSetting, RestoreOnStartupBehaviour, TabBarSettings, WorkspaceSettings,
};
use crate::notifications::NotificationId;
use crate::persistence::{
model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup},
SerializedAxis,
};
use crate::{notifications::NotificationId, persistence::model::LocalPathsOrder};
lazy_static! {
static ref ZED_WINDOW_SIZE: Option<Size<DevicePixels>> = env::var("ZED_WINDOW_SIZE")
@ -904,13 +904,35 @@ impl Workspace {
let serialized_workspace: Option<SerializedWorkspace> =
persistence::DB.workspace_for_roots(abs_paths.as_slice());
let paths_to_open = Arc::new(abs_paths);
let mut paths_to_open = abs_paths;
let paths_order = serialized_workspace
.as_ref()
.map(|ws| &ws.location)
.and_then(|loc| match loc {
SerializedWorkspaceLocation::Local(_, order) => Some(order.order()),
_ => None,
});
if let Some(paths_order) = paths_order {
paths_to_open = paths_order
.iter()
.filter_map(|i| paths_to_open.get(*i).cloned())
.collect::<Vec<_>>();
if paths_order.iter().enumerate().any(|(i, &j)| i != j) {
project_handle
.update(&mut cx, |project, _| {
project.set_worktrees_reordered(true);
})
.log_err();
}
}
// Get project paths for all of the abs_paths
let mut worktree_roots: HashSet<Arc<Path>> = Default::default();
let mut project_paths: Vec<(PathBuf, Option<ProjectPath>)> =
Vec::with_capacity(paths_to_open.len());
for path in paths_to_open.iter().cloned() {
for path in paths_to_open.into_iter() {
if let Some((worktree, project_entry)) = cx
.update(|cx| {
Workspace::project_path_for_path(project_handle.clone(), &path, true, cx)
@ -3488,16 +3510,16 @@ impl Workspace {
self.database_id
}
fn local_paths(&self, cx: &AppContext) -> Option<LocalPaths> {
fn local_paths(&self, cx: &AppContext) -> Option<Vec<Arc<Path>>> {
let project = self.project().read(cx);
if project.is_local() {
Some(LocalPaths::new(
Some(
project
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).abs_path())
.collect::<Vec<_>>(),
))
)
} else {
None
}
@ -3641,8 +3663,17 @@ impl Workspace {
}
let location = if let Some(local_paths) = self.local_paths(cx) {
if !local_paths.paths().is_empty() {
Some(SerializedWorkspaceLocation::Local(local_paths))
if !local_paths.is_empty() {
let (order, paths): (Vec<_>, Vec<_>) = local_paths
.iter()
.enumerate()
.sorted_by(|a, b| a.1.cmp(b.1))
.unzip();
Some(SerializedWorkspaceLocation::Local(
LocalPaths::new(paths),
LocalPathsOrder::new(order),
))
} else {
None
}
@ -5320,7 +5351,7 @@ mod tests {
// Add a project folder
project
.update(cx, |project, cx| {
project.find_or_create_local_worktree("/root2", true, cx)
project.find_or_create_local_worktree("root2", true, cx)
})
.await
.unwrap();

View file

@ -75,10 +75,11 @@ const displayInfo = JSON.parse(
encoding: "utf8",
}),
);
const mainDisplayResolution =
displayInfo?.SPDisplaysDataType[0]?.spdisplays_ndrvs
?.find((entry) => entry.spdisplays_main === "spdisplays_yes")
?._spdisplays_resolution?.match(RESOLUTION_REGEX);
const mainDisplayResolution = displayInfo?.SPDisplaysDataType?.flatMap(
(display) => display?.spdisplays_ndrvs,
)
?.find((entry) => entry?.spdisplays_main === "spdisplays_yes")
?._spdisplays_resolution?.match(RESOLUTION_REGEX);
if (!mainDisplayResolution) {
throw new Error("Could not parse screen resolution");
}