Workspace persistence for SSH projects (#17996)

TODOs:

- [x] Add tests to `workspace/src/persistence.rs`
- [x] Add an icon for SSH projects
- [x] Fix all `TODO` comments
- [x] Use `port` if it's passed in the SSH connection options (see the sketch below)

In follow-up PRs:
- Make sure unsaved buffers are persisted/restored, along with other
items/layout
- Handle multiple paths/worktrees correctly
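
For orientation, here is a minimal, hypothetical sketch of how the two new `WorkspaceDb` entry points fit together. The methods `get_or_create_ssh_project` and `workspace_for_ssh_project` are the ones added in the diff below; the host/port/path/user values and the wrapper function are placeholders, not part of this PR.

```rust
// Sketch only: compose the new persistence entry points to restore an SSH workspace.
// Concrete values are placeholders; error handling is reduced to `Option`.
async fn restore_ssh_workspace(db: &WorkspaceDb) -> Option<SerializedWorkspace> {
    // Find or insert the row in the new `ssh_projects` table for this connection.
    let ssh_project = db
        .get_or_create_ssh_project(
            "example.com".to_string(),        // host
            Some(22),                         // port, used if passed in the SSH connection options
            "/home/user/project".to_string(), // path
            Some("user".to_string()),         // user
        )
        .await
        .ok()?;

    // Load the serialized workspace (docks, window bounds, pane layout)
    // keyed by `ssh_project_id` on the `workspaces` table.
    db.workspace_for_ssh_project(&ssh_project)
}
```

On save, the reverse path goes through `save_workspace`, which upserts the `workspaces` row with its `ssh_project_id` (see the `SerializedWorkspaceLocation::Ssh` branch further down in the diff).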


Release Notes:

- N/A

---------

Co-authored-by: Bennet Bo Fenner <bennet@zed.dev>
Thorsten Ball, 2024-09-19 17:51:28 +02:00, committed by GitHub
commit e9f2e72ff0 (parent 7d0a7541bf)
12 changed files with 592 additions and 141 deletions

workspace/src/persistence.rs

@@ -7,6 +7,7 @@ use client::DevServerProjectId;
use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId};
use remote::ssh_session::SshProjectId;
use sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
@@ -20,7 +21,7 @@ use crate::WorkspaceId;
use model::{
GroupId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
SerializedWorkspace,
SerializedSshProject, SerializedWorkspace,
};
use self::model::{
@@ -354,7 +355,17 @@ define_connection! {
),
sql!(
ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0;
)
),
sql!(
CREATE TABLE ssh_projects (
id INTEGER PRIMARY KEY,
host TEXT NOT NULL,
port INTEGER,
path TEXT NOT NULL,
user TEXT
);
ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE;
),
];
}
@@ -374,7 +385,6 @@ impl WorkspaceDb {
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_bounds,
display,
centered_layout,
@@ -384,7 +394,6 @@ impl WorkspaceDb {
WorkspaceId,
Option<LocalPaths>,
Option<LocalPathsOrder>,
Option<u64>,
Option<SerializedWindowBounds>,
Option<Uuid>,
Option<bool>,
@@ -396,7 +405,6 @@ impl WorkspaceDb {
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_state,
window_x,
window_y,
@@ -422,28 +430,13 @@ impl WorkspaceDb {
.warn_on_err()
.flatten()?;
let location = if let Some(dev_server_project_id) = dev_server_project_id {
let dev_server_project: SerializedDevServerProject = self
.select_row_bound(sql! {
SELECT id, path, dev_server_name
FROM dev_server_projects
WHERE id = ?
})
.and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id))
.context("No remote project found")
.warn_on_err()
.flatten()?;
SerializedWorkspaceLocation::DevServer(dev_server_project)
} else if let Some(local_paths) = local_paths {
match local_paths_order {
Some(order) => SerializedWorkspaceLocation::Local(local_paths, order),
None => {
let order = LocalPathsOrder::default_for_paths(&local_paths);
SerializedWorkspaceLocation::Local(local_paths, order)
}
let local_paths = local_paths?;
let location = match local_paths_order {
Some(order) => SerializedWorkspaceLocation::Local(local_paths, order),
None => {
let order = LocalPathsOrder::default_for_paths(&local_paths);
SerializedWorkspaceLocation::Local(local_paths, order)
}
} else {
return None;
};
Some(SerializedWorkspace {
@@ -470,8 +463,6 @@ impl WorkspaceDb {
// and we've grabbed the most recent workspace
let (
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_bounds,
display,
@@ -480,8 +471,6 @@ impl WorkspaceDb {
window_id,
): (
WorkspaceId,
Option<LocalPaths>,
Option<LocalPathsOrder>,
Option<u64>,
Option<SerializedWindowBounds>,
Option<Uuid>,
@@ -492,8 +481,6 @@ impl WorkspaceDb {
.select_row_bound(sql! {
SELECT
workspace_id,
local_paths,
local_paths_order,
dev_server_project_id,
window_state,
window_x,
@@ -520,29 +507,20 @@ impl WorkspaceDb {
.warn_on_err()
.flatten()?;
let location = if let Some(dev_server_project_id) = dev_server_project_id {
let dev_server_project: SerializedDevServerProject = self
.select_row_bound(sql! {
SELECT id, path, dev_server_name
FROM dev_server_projects
WHERE id = ?
})
.and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id))
.context("No remote project found")
.warn_on_err()
.flatten()?;
SerializedWorkspaceLocation::DevServer(dev_server_project)
} else if let Some(local_paths) = local_paths {
match local_paths_order {
Some(order) => SerializedWorkspaceLocation::Local(local_paths, order),
None => {
let order = LocalPathsOrder::default_for_paths(&local_paths);
SerializedWorkspaceLocation::Local(local_paths, order)
}
}
} else {
return None;
};
let dev_server_project_id = dev_server_project_id?;
let dev_server_project: SerializedDevServerProject = self
.select_row_bound(sql! {
SELECT id, path, dev_server_name
FROM dev_server_projects
WHERE id = ?
})
.and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id))
.context("No remote project found")
.warn_on_err()
.flatten()?;
let location = SerializedWorkspaceLocation::DevServer(dev_server_project);
Some(SerializedWorkspace {
id: workspace_id,
@@ -560,6 +538,62 @@ impl WorkspaceDb {
})
}
pub(crate) fn workspace_for_ssh_project(
&self,
ssh_project: &SerializedSshProject,
) -> Option<SerializedWorkspace> {
let (workspace_id, window_bounds, display, centered_layout, docks, window_id): (
WorkspaceId,
Option<SerializedWindowBounds>,
Option<Uuid>,
Option<bool>,
DockStructure,
Option<u64>,
) = self
.select_row_bound(sql! {
SELECT
workspace_id,
window_state,
window_x,
window_y,
window_width,
window_height,
display,
centered_layout,
left_dock_visible,
left_dock_active_panel,
left_dock_zoom,
right_dock_visible,
right_dock_active_panel,
right_dock_zoom,
bottom_dock_visible,
bottom_dock_active_panel,
bottom_dock_zoom,
window_id
FROM workspaces
WHERE ssh_project_id = ?
})
.and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0))
.context("No workspaces found")
.warn_on_err()
.flatten()?;
Some(SerializedWorkspace {
id: workspace_id,
location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()),
center_group: self
.get_center_pane_group(workspace_id)
.context("Getting center group")
.log_err()?,
window_bounds,
centered_layout: centered_layout.unwrap_or(false),
display,
docks,
session_id: None,
window_id,
})
}
/// Saves a workspace using the worktree roots. Will garbage collect any workspaces
/// that used this workspace previously
pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) {
@@ -674,6 +708,49 @@ impl WorkspaceDb {
workspace.docks,
))
.context("Updating workspace")?;
},
SerializedWorkspaceLocation::Ssh(ssh_project) => {
conn.exec_bound(sql!(
DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ?
))?((ssh_project.id.0, workspace.id))
.context("clearing out old locations")?;
// Upsert
conn.exec_bound(sql!(
INSERT INTO workspaces(
workspace_id,
ssh_project_id,
left_dock_visible,
left_dock_active_panel,
left_dock_zoom,
right_dock_visible,
right_dock_active_panel,
right_dock_zoom,
bottom_dock_visible,
bottom_dock_active_panel,
bottom_dock_zoom,
timestamp
)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
ON CONFLICT DO
UPDATE SET
ssh_project_id = ?2,
left_dock_visible = ?3,
left_dock_active_panel = ?4,
left_dock_zoom = ?5,
right_dock_visible = ?6,
right_dock_active_panel = ?7,
right_dock_zoom = ?8,
bottom_dock_visible = ?9,
bottom_dock_active_panel = ?10,
bottom_dock_zoom = ?11,
timestamp = CURRENT_TIMESTAMP
))?((
workspace.id,
ssh_project.id.0,
workspace.docks,
))
.context("Updating workspace")?;
}
}
@@ -688,6 +765,46 @@ impl WorkspaceDb {
.await;
}
pub(crate) async fn get_or_create_ssh_project(
&self,
host: String,
port: Option<u16>,
path: String,
user: Option<String>,
) -> Result<SerializedSshProject> {
if let Some(project) = self
.get_ssh_project(host.clone(), port, path.clone(), user.clone())
.await?
{
Ok(project)
} else {
self.insert_ssh_project(host, port, path, user)
.await?
.ok_or_else(|| anyhow!("failed to insert ssh project"))
}
}
query! {
async fn get_ssh_project(host: String, port: Option<u16>, path: String, user: Option<String>) -> Result<Option<SerializedSshProject>> {
SELECT id, host, port, path, user
FROM ssh_projects
WHERE host IS ? AND port IS ? AND path IS ? AND user IS ?
LIMIT 1
}
}
query! {
async fn insert_ssh_project(host: String, port: Option<u16>, path: String, user: Option<String>) -> Result<Option<SerializedSshProject>> {
INSERT INTO ssh_projects(
host,
port,
path,
user
) VALUES (?1, ?2, ?3, ?4)
RETURNING id, host, port, path, user
}
}
query! {
pub async fn next_id() -> Result<WorkspaceId> {
INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id
@@ -695,10 +812,12 @@ impl WorkspaceDb {
}
query! {
fn recent_workspaces() -> Result<Vec<(WorkspaceId, LocalPaths, LocalPathsOrder, Option<u64>)>> {
SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id
fn recent_workspaces() -> Result<Vec<(WorkspaceId, LocalPaths, LocalPathsOrder, Option<u64>, Option<u64>)>> {
SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id
FROM workspaces
WHERE local_paths IS NOT NULL OR dev_server_project_id IS NOT NULL
WHERE local_paths IS NOT NULL
OR dev_server_project_id IS NOT NULL
OR ssh_project_id IS NOT NULL
ORDER BY timestamp DESC
}
}
@@ -719,6 +838,13 @@ impl WorkspaceDb {
}
}
query! {
fn ssh_projects() -> Result<Vec<SerializedSshProject>> {
SELECT id, host, port, path, user
FROM ssh_projects
}
}
pub(crate) fn last_window(
&self,
) -> anyhow::Result<(Option<Uuid>, Option<SerializedWindowBounds>)> {
@@ -768,8 +894,11 @@ impl WorkspaceDb {
let mut result = Vec::new();
let mut delete_tasks = Vec::new();
let dev_server_projects = self.dev_server_projects()?;
let ssh_projects = self.ssh_projects()?;
for (id, location, order, dev_server_project_id) in self.recent_workspaces()? {
for (id, location, order, dev_server_project_id, ssh_project_id) in
self.recent_workspaces()?
{
if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) {
if let Some(dev_server_project) = dev_server_projects
.iter()
@@ -782,6 +911,15 @@ impl WorkspaceDb {
continue;
}
if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) {
if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) {
result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone())));
} else {
delete_tasks.push(self.delete_workspace_by_id(id));
}
continue;
}
if location.paths().iter().all(|path| path.exists())
&& location.paths().iter().any(|path| path.is_dir())
{
@@ -802,7 +940,9 @@ impl WorkspaceDb {
.into_iter()
.filter_map(|(_, location)| match location {
SerializedWorkspaceLocation::Local(local_paths, _) => Some(local_paths),
// Do not automatically reopen Dev Server and SSH workspaces
SerializedWorkspaceLocation::DevServer(_) => None,
SerializedWorkspaceLocation::Ssh(_) => None,
})
.next())
}
@@ -1512,6 +1652,122 @@ mod tests {
assert_eq!(have[3], LocalPaths::new([dir1.path().to_str().unwrap()]));
}
#[gpui::test]
async fn test_get_or_create_ssh_project() {
let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await);
let (host, port, path, user) = (
"example.com".to_string(),
Some(22_u16),
"/home/user".to_string(),
Some("user".to_string()),
);
let project = db
.get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone())
.await
.unwrap();
assert_eq!(project.host, host);
assert_eq!(project.path, path);
assert_eq!(project.user, user);
// Test that calling the function again with the same parameters returns the same project
let same_project = db
.get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone())
.await
.unwrap();
assert_eq!(project.id, same_project.id);
// Test with different parameters
let (host2, path2, user2) = (
"otherexample.com".to_string(),
"/home/otheruser".to_string(),
Some("otheruser".to_string()),
);
let different_project = db
.get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone())
.await
.unwrap();
assert_ne!(project.id, different_project.id);
assert_eq!(different_project.host, host2);
assert_eq!(different_project.path, path2);
assert_eq!(different_project.user, user2);
}
#[gpui::test]
async fn test_get_or_create_ssh_project_with_null_user() {
let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await);
let (host, port, path, user) = (
"example.com".to_string(),
None,
"/home/user".to_string(),
None,
);
let project = db
.get_or_create_ssh_project(host.clone(), port, path.clone(), None)
.await
.unwrap();
assert_eq!(project.host, host);
assert_eq!(project.path, path);
assert_eq!(project.user, None);
// Test that calling the function again with the same parameters returns the same project
let same_project = db
.get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone())
.await
.unwrap();
assert_eq!(project.id, same_project.id);
}
#[gpui::test]
async fn test_get_ssh_projects() {
let db = WorkspaceDb(open_test_db("test_get_ssh_projects").await);
let projects = vec![
(
"example.com".to_string(),
None,
"/home/user".to_string(),
None,
),
(
"anotherexample.com".to_string(),
Some(123_u16),
"/home/user2".to_string(),
Some("user2".to_string()),
),
(
"yetanother.com".to_string(),
Some(345_u16),
"/home/user3".to_string(),
None,
),
];
for (host, port, path, user) in projects.iter() {
let project = db
.get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone())
.await
.unwrap();
assert_eq!(&project.host, host);
assert_eq!(&project.port, port);
assert_eq!(&project.path, path);
assert_eq!(&project.user, user);
}
let stored_projects = db.ssh_projects().unwrap();
assert_eq!(stored_projects.len(), projects.len());
}
#[gpui::test]
async fn test_simple_split() {
env_logger::try_init().ok();