Add the ability to edit remote directories over SSH (#14530)

This is a first step towards allowing you to edit remote projects
directly over SSH. We'll start with a bare-bones feature set and add
further features incrementally.

### Todo

Distribution
* [x] Build nightly releases of `zed-remote-server` binaries
    * [x] Linux (ARM + x86)
    * [x] macOS (ARM + x86)
* [x] Build stable + preview releases of `zed-remote-server`
* [x] Download and cache remote server binaries as needed when opening an SSH project
* [x] Ensure the server has the latest version of the binary (version check sketched below)

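A possible shape for that version check: ask the installed server binary for its version (via the `version` subcommand that `remote_server` exposes, shown in `src/main.rs` below) and compare it to the client's own. This is a minimal sketch, not the actual implementation; `remote_binary_path` and the equality-based comparison are illustrative assumptions.

```rust
use std::process::Command;

/// Hypothetical helper: returns true if the binary already installed on the
/// remote host reports the same version this client was built with.
fn remote_server_is_current(host: &str, remote_binary_path: &str) -> anyhow::Result<bool> {
    // `remote_server version` prints the ZED_PKG_VERSION baked in at build time.
    let output = Command::new("ssh")
        .args([host, remote_binary_path, "version"])
        .output()?;
    if !output.status.success() {
        // Missing or broken binary on the server: upload a fresh one.
        return Ok(false);
    }
    let server_version = String::from_utf8(output.stdout)?;
    // Assumes the client embeds the same ZED_PKG_VERSION at build time.
    Ok(server_version.trim() == env!("ZED_PKG_VERSION"))
}
```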

Auth
* [x] Allow specifying the password on the command line
* [x] Auth via SSH keys (detection sketch below)
* [x] UI password prompt

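Key-based auth needs no extra plumbing: when the user's keys or agent are available, `ssh` succeeds non-interactively. One way to decide whether a password prompt is needed at all is to probe with `BatchMode=yes`, which makes `ssh` fail rather than prompt. A sketch under that assumption (not necessarily how Zed detects this):

```rust
use std::process::Command;

/// Hypothetical probe: can we reach `host` with non-interactive auth
/// (keys or agent)? If not, fall back to prompting for a password.
fn can_auth_without_password(host: &str) -> std::io::Result<bool> {
    let status = Command::new("ssh")
        .args(["-o", "BatchMode=yes", host, "true"])
        .status()?;
    // Note: a failing remote command would also report failure here;
    // this sketch treats any failure as "prompt for a password".
    Ok(status.success())
}
```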
Features
* [x] Upload the remote server binary to the server automatically
* [x] Opening directories
* [x] Tracking file system updates
* [x] Opening, editing, and saving buffers (message flow sketched below)
* [ ] File operations (rename, delete, create)
* [ ] Git diffs
* [ ] Project search

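The buffer-related features above map onto a small set of RPC messages that the headless server handles (see `src/headless_project.rs` below): `AddWorktree`, `OpenBufferByPath`, `UpdateBuffer`, and `SaveBuffer`. The real messages are protobufs in Zed's `rpc::proto` crate and are sent through the `Project` API; the toy types below only mirror their names to show the order of a typical session.

```rust
/// Illustrative stand-ins for the real `rpc::proto` messages.
#[derive(Debug)]
enum ClientMessage {
    /// Open a remote directory as a worktree.
    AddWorktree { path: String },
    /// Load a file's contents into a buffer.
    OpenBufferByPath { worktree_id: u64, path: String },
    /// Stream an edit to the server's copy of the buffer.
    UpdateBuffer { buffer_id: u64, new_text: String },
    /// Write the buffer back to the remote filesystem.
    SaveBuffer { buffer_id: u64 },
}

fn example_session() -> Vec<ClientMessage> {
    vec![
        ClientMessage::AddWorktree { path: "/code/project1".into() },
        ClientMessage::OpenBufferByPath { worktree_id: 0, path: "src/lib.rs".into() },
        ClientMessage::UpdateBuffer { buffer_id: 1, new_text: "fn one() -> usize { 100 }".into() },
        ClientMessage::SaveBuffer { buffer_id: 1 },
    ]
}
```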
Release Notes:

- N/A

---------

Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com>
Author: Max Brunsfeld, 2024-07-19 10:27:26 -07:00 (committed by GitHub)
Parent: 7733bf686b
Commit: b9a53ffa0b
50 changed files with 2194 additions and 250 deletions

crates/remote_server/Cargo.toml
@@ -0,0 +1,51 @@
[package]
name = "remote_server"
description = "Daemon used for remote editing"
edition = "2021"
version = "0.1.0"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/remote_server.rs"
doctest = false

[[bin]]
name = "remote_server"

[features]
default = []
test-support = ["fs/test-support"]

[dependencies]
anyhow.workspace = true
env_logger.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
log.workspace = true
project.workspace = true
remote.workspace = true
rpc.workspace = true
settings.workspace = true
smol.workspace = true
worktree.workspace = true

[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
clock = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
node_runtime = { workspace = true, features = ["test-support"] }
remote = { workspace = true, features = ["test-support"] }
serde_json.workspace = true

[build-dependencies]
cargo_toml.workspace = true
toml.workspace = true

crates/remote_server/LICENSE-GPL (symlink)
@@ -0,0 +1 @@
../../LICENSE-GPL

crates/remote_server/build.rs
@@ -0,0 +1,10 @@
// Embed Zed's version in this binary, so that the client can check whether
// the remote server is up to date.
const ZED_MANIFEST: &str = include_str!("../zed/Cargo.toml");

fn main() {
    let zed_cargo_toml: cargo_toml::Manifest =
        toml::from_str(ZED_MANIFEST).expect("failed to parse zed Cargo.toml");
    println!(
        "cargo:rustc-env=ZED_PKG_VERSION={}",
        zed_cargo_toml.package.unwrap().version.unwrap()
    );
}

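This build script reads Zed's own `Cargo.toml` and re-exports its version as the `ZED_PKG_VERSION` compile-time environment variable. `src/main.rs` below reads it with `env!` to implement the `version` subcommand, which is what lets the client check whether the server-side binary is up to date.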
crates/remote_server/src/headless_project.rs
@@ -0,0 +1,166 @@
use anyhow::{Context as _, Result};
use fs::Fs;
use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext};
use project::{buffer_store::BufferStore, ProjectPath, WorktreeId, WorktreeSettings};
use remote::SshSession;
use rpc::{
    proto::{self, AnyProtoClient, PeerId},
    TypedEnvelope,
};
use settings::{Settings as _, SettingsStore};
use std::{
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
};
use worktree::Worktree;

// A headless project serves a single client over a single SSH session, so
// fixed ids suffice.
const PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 };
const PROJECT_ID: u64 = 0;

pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktrees: Vec<Model<Worktree>>,
    pub buffer_store: Model<BufferStore>,
    pub next_entry_id: Arc<AtomicUsize>,
}

impl HeadlessProject {
    pub fn init(cx: &mut AppContext) {
        cx.set_global(SettingsStore::default());
        WorktreeSettings::register(cx);
    }

    pub fn new(session: Arc<SshSession>, fs: Arc<dyn Fs>, cx: &mut ModelContext<Self>) -> Self {
        let this = cx.weak_model();
        // Register one handler per RPC message that the client can send.
        session.add_request_handler(this.clone(), Self::handle_add_worktree);
        session.add_request_handler(this.clone(), Self::handle_open_buffer_by_path);
        session.add_request_handler(this.clone(), Self::handle_update_buffer);
        session.add_request_handler(this.clone(), Self::handle_save_buffer);

        HeadlessProject {
            session: session.into(),
            fs,
            worktrees: Vec::new(),
            buffer_store: cx.new_model(|_| BufferStore::new(true)),
            next_entry_id: Default::default(),
        }
    }

    fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees
            .iter()
            .find(|worktree| worktree.read(cx).id() == id)
            .cloned()
    }

    pub async fn handle_add_worktree(
        this: Model<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::AddWorktreeResponse> {
        let worktree = this
            .update(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Path::new(&message.payload.path),
                    true,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        this.update(&mut cx, |this, cx| {
            let session = this.session.clone();
            this.worktrees.push(worktree.clone());
            worktree.update(cx, |worktree, cx| {
                // Forward all subsequent worktree updates (files created,
                // renamed, deleted, etc.) to the client over the session.
                worktree.observe_updates(0, cx, move |update| {
                    session.send(update).ok();
                    futures::future::ready(true)
                });
                proto::AddWorktreeResponse {
                    worktree_id: worktree.id().to_proto(),
                }
            })
        })
    }

    pub async fn handle_update_buffer(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::UpdateBuffer>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.handle_update_buffer(envelope, false, cx)
            })
        })?
    }

    pub async fn handle_save_buffer(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let (buffer_store, worktree) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let worktree = if let Some(path) = &envelope.payload.new_path {
                Some(
                    this.worktree_for_id(WorktreeId::from_proto(path.worktree_id), cx)
                        .context("worktree does not exist")?,
                )
            } else {
                None
            };
            anyhow::Ok((buffer_store, worktree))
        })??;
        BufferStore::handle_save_buffer(buffer_store, PROJECT_ID, worktree, envelope, cx).await
    }

    pub async fn handle_open_buffer_by_path(
        this: Model<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer, session) = this.update(&mut cx, |this, cx| {
            let worktree = this
                .worktree_for_id(worktree_id, cx)
                .context("no such worktree")?;
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: PathBuf::from(message.payload.path).into(),
                    },
                    worktree,
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer, this.session.clone()))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;

        // Stream the buffer's state to the client in the background, so the
        // response to this request isn't blocked on the transfer.
        cx.spawn(|mut cx| async move {
            BufferStore::create_buffer_for_peer(
                buffer_store,
                PEER_ID,
                buffer_id,
                PROJECT_ID,
                session,
                &mut cx,
            )
            .await
        })
        .detach();

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
}

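Note the hardcoded `PEER_ID` and `PROJECT_ID` at the top of this file: unlike Zed's collaboration path, a headless project presumably only ever serves the one client on the other end of the SSH session, so there is no need to allocate real peer or project ids.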
crates/remote_server/src/main.rs
@@ -0,0 +1,78 @@
use fs::RealFs;
use futures::channel::mpsc;
use gpui::Context as _;
use remote::{
    protocol::{read_message, write_message},
    SshSession,
};
use remote_server::HeadlessProject;
use smol::{io::AsyncWriteExt, stream::StreamExt as _, Async};
use std::{env, io, mem, process, sync::Arc};

fn main() {
    env::set_var("RUST_BACKTRACE", "1");
    env::set_var("RUST_LOG", "remote=trace");

    let subcommand = std::env::args().nth(1);
    match subcommand.as_deref() {
        Some("run") => {}
        Some("version") => {
            // Report the Zed version this binary was built from, so the
            // client can decide whether to upload a newer one.
            println!("{}", env!("ZED_PKG_VERSION"));
            return;
        }
        _ => {
            eprintln!("usage: remote <run|version>");
            process::exit(1);
        }
    }

    env_logger::init();

    gpui::App::headless().run(move |cx| {
        HeadlessProject::init(cx);

        let (incoming_tx, incoming_rx) = mpsc::unbounded();
        let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded();
        let mut stdin = Async::new(io::stdin()).unwrap();
        let mut stdout = Async::new(io::stdout()).unwrap();

        let session = SshSession::server(incoming_rx, outgoing_tx, cx);
        let project = cx.new_model(|cx| {
            HeadlessProject::new(
                session.clone(),
                Arc::new(RealFs::new(Default::default(), None)),
                cx,
            )
        });

        // Forward outgoing messages from the session to stdout...
        cx.background_executor()
            .spawn(async move {
                let mut output_buffer = Vec::new();
                while let Some(message) = outgoing_rx.next().await {
                    write_message(&mut stdout, &mut output_buffer, message).await?;
                    stdout.flush().await?;
                }
                anyhow::Ok(())
            })
            .detach();

        // ...and feed incoming messages from stdin into the session.
        cx.background_executor()
            .spawn(async move {
                let mut input_buffer = Vec::new();
                loop {
                    let message = match read_message(&mut stdin, &mut input_buffer).await {
                        Ok(message) => message,
                        Err(error) => {
                            log::warn!("error reading message: {:?}", error);
                            process::exit(0);
                        }
                    };
                    incoming_tx.unbounded_send(message).ok();
                }
            })
            .detach();

        // Keep the project alive for the lifetime of the process.
        mem::forget(project);
    });
}

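The server talks to the client entirely over stdin and stdout, framing messages with `read_message`/`write_message` from `remote::protocol`. The wire format lives in the `remote` crate, which isn't shown in this diff; below is a minimal sketch of what such a framing layer typically looks like, assuming a 4-byte big-endian length prefix followed by the payload bytes (the actual format may differ).

```rust
use anyhow::Result;
use futures::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};

/// Write one length-prefixed frame (sketch of a `write_message`-style API).
async fn write_frame<W: AsyncWrite + Unpin>(out: &mut W, payload: &[u8]) -> Result<()> {
    out.write_all(&(payload.len() as u32).to_be_bytes()).await?;
    out.write_all(payload).await?;
    Ok(())
}

/// Read one length-prefixed frame, reusing `buffer` across calls to avoid
/// reallocating (sketch of a `read_message`-style API).
async fn read_frame<R: AsyncRead + Unpin>(input: &mut R, buffer: &mut Vec<u8>) -> Result<Vec<u8>> {
    let mut len_bytes = [0u8; 4];
    input.read_exact(&mut len_bytes).await?;
    buffer.resize(u32::from_be_bytes(len_bytes) as usize, 0);
    input.read_exact(buffer).await?;
    Ok(buffer.clone())
}
```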
crates/remote_server/src/remote_editing_tests.rs
@@ -0,0 +1,134 @@
use crate::headless_project::HeadlessProject;
use client::{Client, UserStore};
use clock::FakeSystemClock;
use fs::{FakeFs, Fs as _};
use gpui::{Context, Model, TestAppContext};
use http::FakeHttpClient;
use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
use project::Project;
use remote::SshSession;
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};

#[gpui::test]
async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
    let (client_ssh, server_ssh) = SshSession::fake(cx, server_cx);

    let fs = FakeFs::new(server_cx.executor());
    fs.insert_tree(
        "/code",
        json!({
            "project1": {
                "README.md": "# project 1",
                "src": {
                    "lib.rs": "fn one() -> usize { 1 }"
                }
            },
            "project2": {
                "README.md": "# project 2",
            },
        }),
    )
    .await;

    server_cx.update(HeadlessProject::init);
    let _headless_project =
        server_cx.new_model(|cx| HeadlessProject::new(server_ssh, fs.clone(), cx));

    let project = build_project(client_ssh, cx);
    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree("/code/project1", true, cx)
        })
        .await
        .unwrap();

    // The client sees the worktree's contents.
    cx.executor().run_until_parked();
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    worktree.update(cx, |worktree, _cx| {
        assert_eq!(
            worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
            vec![
                Path::new("README.md"),
                Path::new("src"),
                Path::new("src/lib.rs"),
            ]
        );
    });

    // The user opens a buffer in the remote worktree. The buffer's
    // contents are loaded from the remote filesystem.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "fn one() -> usize { 1 }");
        let ix = buffer.text().find('1').unwrap();
        buffer.edit([(ix..ix + 1, "100")], None, cx);
    });

    // The user saves the buffer. The new contents are written to the
    // remote filesystem.
    project
        .update(cx, |project, cx| project.save_buffer(buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/code/project1/src/lib.rs".as_ref()).await.unwrap(),
        "fn one() -> usize { 100 }"
    );

    // A new file is created in the remote filesystem. The user
    // sees the new file.
    fs.save(
        "/code/project1/src/main.rs".as_ref(),
        &"fn main() {}".into(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    worktree.update(cx, |worktree, _cx| {
        assert_eq!(
            worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
            vec![
                Path::new("README.md"),
                Path::new("src"),
                Path::new("src/lib.rs"),
                Path::new("src/main.rs"),
            ]
        );
    });
}

fn build_project(ssh: Arc<SshSession>, cx: &mut TestAppContext) -> Model<Project> {
    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
    });

    let client = cx.update(|cx| {
        Client::new(
            Arc::new(FakeSystemClock::default()),
            FakeHttpClient::with_404_response(),
            cx,
        )
    });

    let node = FakeNodeRuntime::new();
    let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
    let languages = Arc::new(LanguageRegistry::test(cx.executor()));
    let fs = FakeFs::new(cx.executor());

    cx.update(|cx| {
        Project::init(&client, cx);
        language::init(cx);
    });

    cx.update(|cx| Project::ssh(ssh, client, node, user_store, languages, fs, cx))
}

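`SshSession::fake` wires the client and server sessions directly together (no real SSH connection or separate server process involved), so this test exercises the full message protocol: worktree sync, buffer open, edit, and save.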
crates/remote_server/src/remote_server.rs
@@ -0,0 +1,6 @@
mod headless_project;

#[cfg(test)]
mod remote_editing_tests;

pub use headless_project::HeadlessProject;