collab: Fix project sharing between Windows and Unix (#23680)

Closes #14258

A Windows user (host) shares a project with a macOS guest, and the host follows the guest:


https://github.com/user-attachments/assets/ba306b6b-23f7-48b1-8ba8-fdc5992d8f00

A macOS user (host) shares a project with a Windows guest, and the host follows the guest:



https://github.com/user-attachments/assets/c5ee5e78-870d-49e5-907d-8565977a01ae

A macOS user edits files in a Windows project through collab:



https://github.com/user-attachments/assets/581057cf-e7df-4e56-a0ce-ced74339906a
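
The core of the change is the pair of `FromProto`/`ToProto` traits added to the rpc `proto` module (see the hunk introducing them below): paths always travel over the wire as `/`-separated strings and are rebuilt with the native separator on the receiving side. A minimal usage sketch, assuming those trait definitions; the sample path is illustrative only:

```rust
use std::path::{Path, PathBuf};

use rpc::proto::{FromProto, ToProto};

fn main() {
    // Sender side: a worktree-relative path is normalized to forward slashes
    // before it is placed in a proto message. On a Windows host the same call
    // would produce this string from `src\editor\mod.rs`.
    let wire_path: String = Path::new("src/editor/mod.rs").to_proto();
    assert_eq!(wire_path, "src/editor/mod.rs");

    // Receiver side: the guest rebuilds the path with its native separator,
    // so downstream code can treat it as an ordinary local path.
    let rebuilt: PathBuf = PathBuf::from_proto(wire_path);
    assert_eq!(rebuilt, PathBuf::from("src/editor/mod.rs"));
}
```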





Release Notes:

- N/A
张小白 2025-02-11 08:12:01 +08:00 committed by GitHub
parent 929c5e76b4
commit c1f162abc6
14 changed files with 226 additions and 117 deletions

View file

@@ -93,6 +93,8 @@ impl PickerDelegate for OpenPathDelegate {
         cx.notify();
     }
 
+    // todo(windows)
+    // Is this method working correctly on Windows? This method uses `/` for path separator.
     fn update_matches(
         &mut self,
         query: String,

View file

@@ -571,10 +571,6 @@ impl RepoPath {
         RepoPath(path.into())
     }
-
-    pub fn to_proto(&self) -> String {
-        self.0.to_string_lossy().to_string()
-    }
 }
 
 impl std::fmt::Display for RepoPath {

View file

@@ -23,7 +23,10 @@ use language::{
     },
     Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
 };
-use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope};
+use rpc::{
+    proto::{self, ToProto},
+    AnyProtoClient, ErrorExt as _, TypedEnvelope,
+};
 use serde::Deserialize;
 use smol::channel::Receiver;
 use std::{
@@ -580,13 +583,12 @@ impl RemoteBufferStore {
         let worktree_id = worktree.read(cx).id().to_proto();
         let project_id = self.project_id;
         let client = self.upstream_client.clone();
-        let path_string = path.clone().to_string_lossy().to_string();
         cx.spawn(move |this, mut cx| async move {
             let response = client
                 .request(proto::OpenBufferByPath {
                     project_id,
                     worktree_id,
-                    path: path_string,
+                    path: path.to_proto(),
                 })
                 .await?;
             let buffer_id = BufferId::new(response.buffer_id)?;

View file

@@ -13,6 +13,7 @@ use gpui::{
     App, AppContext, Context, Entity, EventEmitter, SharedString, Subscription, Task, WeakEntity,
 };
 use language::{Buffer, LanguageRegistry};
+use rpc::proto::ToProto;
 use rpc::{proto, AnyProtoClient};
 use settings::WorktreeId;
 use std::path::{Path, PathBuf};
@@ -222,7 +223,7 @@ impl GitState {
                     work_directory_id: work_directory_id.to_proto(),
                     paths: paths
                         .into_iter()
-                        .map(|repo_path| repo_path.to_proto())
+                        .map(|repo_path| repo_path.as_ref().to_proto())
                         .collect(),
                 })
                 .await
@@ -247,7 +248,7 @@ impl GitState {
                     work_directory_id: work_directory_id.to_proto(),
                     paths: paths
                         .into_iter()
-                        .map(|repo_path| repo_path.to_proto())
+                        .map(|repo_path| repo_path.as_ref().to_proto())
                        .collect(),
                 })
                 .await

View file

@@ -55,7 +55,10 @@ use parking_lot::Mutex;
 use postage::watch;
 use rand::prelude::*;
-use rpc::AnyProtoClient;
+use rpc::{
+    proto::{FromProto, ToProto},
+    AnyProtoClient,
+};
 use serde::Serialize;
 use settings::{Settings, SettingsLocation, SettingsStore};
 use sha2::{Digest, Sha256};
@@ -5360,7 +5363,7 @@ impl LspStore {
                     project_id: *project_id,
                     worktree_id: worktree_id.to_proto(),
                     summary: Some(proto::DiagnosticSummary {
-                        path: worktree_path.to_string_lossy().to_string(),
+                        path: worktree_path.to_proto(),
                         language_server_id: server_id.0 as u64,
                         error_count: new_summary.error_count as u32,
                         warning_count: new_summary.warning_count as u32,
@@ -5848,10 +5851,8 @@ impl LspStore {
             .ok_or_else(|| anyhow!("worktree not found"))?;
         let (old_abs_path, new_abs_path) = {
             let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?;
-            (
-                root_path.join(&old_path),
-                root_path.join(&envelope.payload.new_path),
-            )
+            let new_path = PathBuf::from_proto(envelope.payload.new_path.clone());
+            (root_path.join(&old_path), root_path.join(&new_path))
         };
 
         Self::will_rename_entry(
@@ -5881,7 +5882,7 @@ impl LspStore {
         if let Some(message) = envelope.payload.summary {
             let project_path = ProjectPath {
                 worktree_id,
-                path: Path::new(&message.path).into(),
+                path: Arc::<Path>::from_proto(message.path),
             };
             let path = project_path.path.clone();
             let server_id = LanguageServerId(message.language_server_id as usize);
@@ -5915,7 +5916,7 @@ impl LspStore {
                     project_id: *project_id,
                     worktree_id: worktree_id.to_proto(),
                     summary: Some(proto::DiagnosticSummary {
-                        path: project_path.path.to_string_lossy().to_string(),
+                        path: project_path.path.as_ref().to_proto(),
                         language_server_id: server_id.0 as u64,
                         error_count: summary.error_count as u32,
                         warning_count: summary.warning_count as u32,
@@ -7114,7 +7115,7 @@ impl LspStore {
                     project_id,
                     worktree_id: worktree_id.to_proto(),
                     summary: Some(proto::DiagnosticSummary {
-                        path: path.to_string_lossy().to_string(),
+                        path: path.as_ref().to_proto(),
                         language_server_id: server_id.0 as u64,
                         error_count: 0,
                         warning_count: 0,
@@ -7768,7 +7769,7 @@ impl LspStore {
             language_server_name: symbol.language_server_name.0.to_string(),
             source_worktree_id: symbol.source_worktree_id.to_proto(),
             worktree_id: symbol.path.worktree_id.to_proto(),
-            path: symbol.path.path.to_string_lossy().to_string(),
+            path: symbol.path.path.as_ref().to_proto(),
             name: symbol.name.clone(),
             kind: unsafe { mem::transmute::<lsp::SymbolKind, i32>(symbol.kind) },
             start: Some(proto::PointUtf16 {
@@ -7789,7 +7790,7 @@ impl LspStore {
     let kind = unsafe { mem::transmute::<i32, lsp::SymbolKind>(serialized_symbol.kind) };
     let path = ProjectPath {
         worktree_id,
-        path: PathBuf::from(serialized_symbol.path).into(),
+        path: Arc::<Path>::from_proto(serialized_symbol.path),
     };
 
     let start = serialized_symbol
@@ -8263,7 +8264,7 @@ impl DiagnosticSummary {
         path: &Path,
     ) -> proto::DiagnosticSummary {
         proto::DiagnosticSummary {
-            path: path.to_string_lossy().to_string(),
+            path: path.to_proto(),
             language_server_id: language_server_id.0 as u64,
             error_count: self.error_count as u32,
             warning_count: self.warning_count as u32,

View file

@@ -73,7 +73,7 @@ pub use prettier_store::PrettierStore;
 use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent};
 use remote::{SshConnectionOptions, SshRemoteClient};
 use rpc::{
-    proto::{LanguageServerPromptResponse, SSH_PROJECT_ID},
+    proto::{FromProto, LanguageServerPromptResponse, ToProto, SSH_PROJECT_ID},
     AnyProtoClient, ErrorCode,
 };
 use search::{SearchInputKind, SearchQuery, SearchResult};
@@ -297,14 +297,14 @@ impl ProjectPath {
     pub fn from_proto(p: proto::ProjectPath) -> Self {
         Self {
             worktree_id: WorktreeId::from_proto(p.worktree_id),
-            path: Arc::from(PathBuf::from(p.path)),
+            path: Arc::<Path>::from_proto(p.path),
         }
     }
 
     pub fn to_proto(&self) -> proto::ProjectPath {
         proto::ProjectPath {
             worktree_id: self.worktree_id.to_proto(),
-            path: self.path.to_string_lossy().to_string(),
+            path: self.path.as_ref().to_proto(),
         }
     }
@@ -3360,18 +3360,19 @@ impl Project {
                 })
             })
         } else if let Some(ssh_client) = self.ssh_client.as_ref() {
+            let request_path = Path::new(path);
             let request = ssh_client
                 .read(cx)
                 .proto_client()
                 .request(proto::GetPathMetadata {
                     project_id: SSH_PROJECT_ID,
-                    path: path.to_string(),
+                    path: request_path.to_proto(),
                 });
             cx.background_executor().spawn(async move {
                 let response = request.await.log_err()?;
                 if response.exists {
                     Some(ResolvedPath::AbsPath {
-                        path: PathBuf::from(response.path),
+                        path: PathBuf::from_proto(response.path),
                         is_dir: response.is_dir,
                     })
                 } else {
@@ -3441,9 +3442,10 @@ impl Project {
         if self.is_local() {
             DirectoryLister::Local(self.fs.clone()).list_directory(query, cx)
         } else if let Some(session) = self.ssh_client.as_ref() {
+            let path_buf = PathBuf::from(query);
             let request = proto::ListRemoteDirectory {
                 dev_server_id: SSH_PROJECT_ID,
-                path: query,
+                path: path_buf.to_proto(),
             };
 
             let response = session.read(cx).proto_client().request(request);
@@ -3994,7 +3996,7 @@ impl Project {
             this.open_buffer(
                 ProjectPath {
                     worktree_id,
-                    path: PathBuf::from(envelope.payload.path).into(),
+                    path: Arc::<Path>::from_proto(envelope.payload.path),
                 },
                 cx,
             )

View file

@@ -7,18 +7,17 @@ use paths::{
     local_settings_file_relative_path, local_tasks_file_relative_path,
     local_vscode_tasks_file_relative_path, EDITORCONFIG_NAME,
 };
-use rpc::{proto, AnyProtoClient, TypedEnvelope};
+use rpc::{
+    proto::{self, FromProto, ToProto},
+    AnyProtoClient, TypedEnvelope,
+};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{
     parse_json_with_comments, InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation,
     SettingsSources, SettingsStore,
 };
-use std::{
-    path::{Path, PathBuf},
-    sync::Arc,
-    time::Duration,
-};
+use std::{path::Path, sync::Arc, time::Duration};
 use task::{TaskTemplates, VsCodeTaskFile};
 use util::ResultExt;
 use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId};
@@ -292,7 +291,7 @@ impl SettingsObserver {
                     .send(proto::UpdateWorktreeSettings {
                         project_id,
                         worktree_id,
-                        path: path.to_string_lossy().into(),
+                        path: path.to_proto(),
                         content: Some(content),
                         kind: Some(
                             local_settings_kind_to_proto(LocalSettingsKind::Settings).into(),
@@ -305,7 +304,7 @@ impl SettingsObserver {
                     .send(proto::UpdateWorktreeSettings {
                         project_id,
                         worktree_id,
-                        path: path.to_string_lossy().into(),
+                        path: path.to_proto(),
                         content: Some(content),
                         kind: Some(
                             local_settings_kind_to_proto(LocalSettingsKind::Editorconfig).into(),
@@ -343,7 +342,7 @@ impl SettingsObserver {
                 this.update_settings(
                     worktree,
                     [(
-                        PathBuf::from(&envelope.payload.path).into(),
+                        Arc::<Path>::from_proto(envelope.payload.path.clone()),
                         local_settings_kind_from_proto(kind),
                         envelope.payload.content,
                     )],
@@ -551,7 +550,7 @@ impl SettingsObserver {
                 .send(proto::UpdateWorktreeSettings {
                     project_id: self.project_id,
                     worktree_id: remote_worktree_id.to_proto(),
-                    path: directory.to_string_lossy().into_owned(),
+                    path: directory.to_proto(),
                     content: file_content,
                     kind: Some(local_settings_kind_to_proto(kind).into()),
                 })

View file

@@ -1,4 +1,4 @@
-use std::{str::FromStr, sync::Arc};
+use std::{path::PathBuf, str::FromStr, sync::Arc};
 
 use anyhow::{bail, Result};
@@ -8,7 +8,10 @@ use gpui::{
     App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
 };
 use language::{LanguageName, LanguageRegistry, LanguageToolchainStore, Toolchain, ToolchainList};
-use rpc::{proto, AnyProtoClient, TypedEnvelope};
+use rpc::{
+    proto::{self, FromProto, ToProto},
+    AnyProtoClient, TypedEnvelope,
+};
 use settings::WorktreeId;
 use util::ResultExt as _;
@@ -120,7 +123,9 @@ impl ToolchainStore {
         };
         let toolchain = Toolchain {
             name: toolchain.name.into(),
-            path: toolchain.path.into(),
+            // todo(windows)
+            // Do we need to convert path to native string?
+            path: PathBuf::from(toolchain.path).to_proto().into(),
             as_json: serde_json::Value::from_str(&toolchain.raw_json)?,
             language_name,
         };
@@ -144,10 +149,13 @@ impl ToolchainStore {
             .await;
 
         Ok(proto::ActiveToolchainResponse {
-            toolchain: toolchain.map(|toolchain| proto::Toolchain {
-                name: toolchain.name.into(),
-                path: toolchain.path.into(),
-                raw_json: toolchain.as_json.to_string(),
+            toolchain: toolchain.map(|toolchain| {
+                let path = PathBuf::from(toolchain.path.to_string());
+                proto::Toolchain {
+                    name: toolchain.name.into(),
+                    path: path.to_proto(),
+                    raw_json: toolchain.as_json.to_string(),
+                }
             }),
         })
     }
@@ -183,10 +191,13 @@ impl ToolchainStore {
                     toolchains
                         .toolchains
                         .into_iter()
-                        .map(|toolchain| proto::Toolchain {
-                            name: toolchain.name.to_string(),
-                            path: toolchain.path.to_string(),
-                            raw_json: toolchain.as_json.to_string(),
+                        .map(|toolchain| {
+                            let path = PathBuf::from(toolchain.path.to_string());
+                            proto::Toolchain {
+                                name: toolchain.name.to_string(),
+                                path: path.to_proto(),
+                                raw_json: toolchain.as_json.to_string(),
+                            }
                         })
                         .collect::<Vec<_>>()
                 } else {
@@ -354,6 +365,7 @@ impl RemoteToolchainStore {
         let project_id = self.project_id;
         let client = self.client.clone();
         cx.spawn(move |_| async move {
+            let path = PathBuf::from(toolchain.path.to_string());
             let _ = client
                 .request(proto::ActivateToolchain {
                     project_id,
@@ -361,7 +373,7 @@ impl RemoteToolchainStore {
                     language_name: toolchain.language_name.into(),
                     toolchain: Some(proto::Toolchain {
                         name: toolchain.name.into(),
-                        path: toolchain.path.into(),
+                        path: path.to_proto(),
                         raw_json: toolchain.as_json.to_string(),
                     }),
                 })
@@ -398,7 +410,12 @@ impl RemoteToolchainStore {
                 Some(Toolchain {
                     language_name: language_name.clone(),
                     name: toolchain.name.into(),
-                    path: toolchain.path.into(),
+                    // todo(windows)
+                    // Do we need to convert path to native string?
+                    path: PathBuf::from_proto(toolchain.path)
+                        .to_string_lossy()
+                        .to_string()
+                        .into(),
                     as_json: serde_json::Value::from_str(&toolchain.raw_json).ok()?,
                 })
             })
@@ -439,7 +456,12 @@ impl RemoteToolchainStore {
                 Some(Toolchain {
                     language_name: language_name.clone(),
                    name: toolchain.name.into(),
-                    path: toolchain.path.into(),
+                    // todo(windows)
+                    // Do we need to convert path to native string?
+                    path: PathBuf::from_proto(toolchain.path)
+                        .to_string_lossy()
+                        .to_string()
+                        .into(),
                     as_json: serde_json::Value::from_str(&toolchain.raw_json).ok()?,
                 })
             })

View file

@@ -15,7 +15,7 @@ use futures::{
 use gpui::{App, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity};
 use postage::oneshot;
 use rpc::{
-    proto::{self, SSH_PROJECT_ID},
+    proto::{self, FromProto, ToProto, SSH_PROJECT_ID},
     AnyProtoClient, ErrorExt, TypedEnvelope,
 };
 use smol::{
@@ -268,10 +268,11 @@ impl WorktreeStore {
         cx.spawn(|this, mut cx| async move {
             let this = this.upgrade().context("Dropped worktree store")?;
 
+            let path = Path::new(abs_path.as_str());
             let response = client
                 .request(proto::AddWorktree {
                     project_id: SSH_PROJECT_ID,
-                    path: abs_path.clone(),
+                    path: path.to_proto(),
                     visible,
                 })
                 .await?;
@@ -282,10 +283,11 @@ impl WorktreeStore {
                 return Ok(existing_worktree);
             }
 
-            let root_name = PathBuf::from(&response.canonicalized_path)
+            let root_path_buf = PathBuf::from_proto(response.canonicalized_path.clone());
+            let root_name = root_path_buf
                 .file_name()
                 .map(|n| n.to_string_lossy().to_string())
-                .unwrap_or(response.canonicalized_path.to_string());
+                .unwrap_or(root_path_buf.to_string_lossy().to_string());
 
             let worktree = cx.update(|cx| {
                 Worktree::remote(
@@ -596,7 +598,7 @@ impl WorktreeStore {
                     id: worktree.id().to_proto(),
                     root_name: worktree.root_name().into(),
                     visible: worktree.is_visible(),
-                    abs_path: worktree.abs_path().to_string_lossy().into(),
+                    abs_path: worktree.abs_path().to_proto(),
                 }
             })
            .collect()
@@ -923,7 +925,7 @@ impl WorktreeStore {
             project_id: remote_worktree.project_id(),
             repository: Some(proto::ProjectPath {
                 worktree_id: project_path.worktree_id.to_proto(),
-                path: project_path.path.to_string_lossy().to_string(), // Root path
+                path: project_path.path.to_proto(), // Root path
             }),
         });
 
@@ -994,7 +996,7 @@ impl WorktreeStore {
             project_id: remote_worktree.project_id(),
             repository: Some(proto::ProjectPath {
                 worktree_id: repository.worktree_id.to_proto(),
-                path: repository.path.to_string_lossy().to_string(), // Root path
+                path: repository.path.to_proto(), // Root path
             }),
             branch_name: new_branch,
         });
@@ -1116,7 +1118,7 @@ impl WorktreeStore {
            .context("Invalid GitBranches call")?;
         let project_path = ProjectPath {
             worktree_id: WorktreeId::from_proto(project_path.worktree_id),
-            path: Path::new(&project_path.path).into(),
+            path: Arc::<Path>::from_proto(project_path.path),
         };
 
         let branches = this
@@ -1147,7 +1149,7 @@ impl WorktreeStore {
            .context("Invalid GitBranches call")?;
         let project_path = ProjectPath {
             worktree_id: WorktreeId::from_proto(project_path.worktree_id),
-            path: Path::new(&project_path.path).into(),
+            path: Arc::<Path>::from_proto(project_path.path),
         };
 
         let new_branch = update_branch.payload.branch_name;

View file

@@ -2489,8 +2489,8 @@ message RefreshLlmToken {}
 // Remote FS
 
 message AddWorktree {
-    uint64 project_id = 2;
     string path = 1;
+    uint64 project_id = 2;
     bool visible = 3;
 }
@@ -2625,6 +2625,7 @@ message UpdateGitBranch {
     string branch_name = 2;
     ProjectPath repository = 3;
 }
+
 message GetPanicFiles {
 }

View file

@@ -15,6 +15,8 @@ use std::{
     cmp,
     fmt::{self, Debug},
     iter, mem,
+    path::{Path, PathBuf},
+    sync::Arc,
     time::{Duration, SystemTime, UNIX_EPOCH},
 };
@@ -137,6 +139,62 @@ impl fmt::Display for PeerId {
     }
 }
 
+pub trait FromProto {
+    fn from_proto(proto: String) -> Self;
+}
+
+pub trait ToProto {
+    fn to_proto(self) -> String;
+}
+
+impl FromProto for PathBuf {
+    #[cfg(target_os = "windows")]
+    fn from_proto(proto: String) -> Self {
+        proto.split("/").collect()
+    }
+
+    #[cfg(not(target_os = "windows"))]
+    fn from_proto(proto: String) -> Self {
+        PathBuf::from(proto)
+    }
+}
+
+impl FromProto for Arc<Path> {
+    fn from_proto(proto: String) -> Self {
+        PathBuf::from_proto(proto).into()
+    }
+}
+
+impl ToProto for PathBuf {
+    #[cfg(target_os = "windows")]
+    fn to_proto(self) -> String {
+        self.components()
+            .map(|comp| comp.as_os_str().to_string_lossy().to_string())
+            .collect::<Vec<_>>()
+            .join("/")
+    }
+
+    #[cfg(not(target_os = "windows"))]
+    fn to_proto(self) -> String {
+        self.to_string_lossy().to_string()
+    }
+}
+
+impl ToProto for &Path {
+    #[cfg(target_os = "windows")]
+    fn to_proto(self) -> String {
+        self.components()
+            .map(|comp| comp.as_os_str().to_string_lossy().to_string())
+            .collect::<Vec<_>>()
+            .join("/")
+    }
+
+    #[cfg(not(target_os = "windows"))]
+    fn to_proto(self) -> String {
+        self.to_string_lossy().to_string()
+    }
+}
+
 messages!(
     (AcceptTermsOfService, Foreground),
     (AcceptTermsOfServiceResponse, Foreground),
@@ -757,4 +815,22 @@ mod tests {
         };
         assert_eq!(PeerId::from_u64(peer_id.as_u64()), peer_id);
     }
+
+    #[test]
+    #[cfg(target_os = "windows")]
+    fn test_proto() {
+        fn generate_proto_path(path: PathBuf) -> PathBuf {
+            let proto = path.to_proto();
+            PathBuf::from_proto(proto)
+        }
+
+        let path = PathBuf::from("C:\\foo\\bar");
+        assert_eq!(path, generate_proto_path(path.clone()));
+
+        let path = PathBuf::from("C:/foo/bar/");
+        assert_eq!(path, generate_proto_path(path.clone()));
+
+        let path = PathBuf::from("C:/foo\\bar\\");
+        assert_eq!(path, generate_proto_path(path.clone()));
+    }
 }

View file

@@ -1,3 +1,4 @@
+use ::proto::{FromProto, ToProto};
 use anyhow::{anyhow, Context as _, Result};
 use extension::ExtensionHostProxy;
 use extension_host::headless_host::HeadlessExtensionStore;
@@ -325,10 +326,8 @@ impl HeadlessProject {
         mut cx: AsyncApp,
     ) -> Result<proto::AddWorktreeResponse> {
         use client::ErrorCodeExt;
-        let path = shellexpand::tilde(&message.payload.path).to_string();
-
         let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
-        let path = PathBuf::from(path);
+        let path = PathBuf::from_proto(shellexpand::tilde(&message.payload.path).to_string());
 
         let canonicalized = match fs.canonicalize(&path).await {
             Ok(path) => path,
@@ -363,7 +362,7 @@ impl HeadlessProject {
         let response = this.update(&mut cx, |_, cx| {
             worktree.update(cx, |worktree, _| proto::AddWorktreeResponse {
                 worktree_id: worktree.id().to_proto(),
-                canonicalized_path: canonicalized.to_string_lossy().to_string(),
+                canonicalized_path: canonicalized.to_proto(),
             })
         })?;
 
@@ -418,7 +417,7 @@ impl HeadlessProject {
                 buffer_store.open_buffer(
                     ProjectPath {
                         worktree_id,
-                        path: PathBuf::from(message.payload.path).into(),
+                        path: Arc::<Path>::from_proto(message.payload.path),
                     },
                     cx,
                 )
@@ -559,11 +558,11 @@ impl HeadlessProject {
         envelope: TypedEnvelope<proto::ListRemoteDirectory>,
         cx: AsyncApp,
     ) -> Result<proto::ListRemoteDirectoryResponse> {
-        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
         let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
+        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());
 
         let mut entries = Vec::new();
-        let mut response = fs.read_dir(Path::new(&expanded)).await?;
+        let mut response = fs.read_dir(&expanded).await?;
         while let Some(path) = response.next().await {
             if let Some(file_name) = path?.file_name() {
                 entries.push(file_name.to_string_lossy().to_string());
@@ -578,15 +577,15 @@ impl HeadlessProject {
         cx: AsyncApp,
     ) -> Result<proto::GetPathMetadataResponse> {
         let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
-        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
-        let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
+        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());
+        let metadata = fs.metadata(&expanded).await?;
         let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);
 
         Ok(proto::GetPathMetadataResponse {
             exists: metadata.is_some(),
             is_dir,
-            path: expanded,
+            path: expanded.to_proto(),
         })
     }

View file

@@ -859,7 +859,7 @@ async fn test_remote_resolve_path_in_buffer(
 async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
     let fs = FakeFs::new(server_cx.executor());
     fs.insert_tree(
-        "/code",
+        path!("/code"),
         json!({
             "project1": {
                 ".git": {},
@@ -876,7 +876,7 @@ async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut T
     let path = project
         .update(cx, |project, cx| {
-            project.resolve_abs_path("/code/project1/README.md", cx)
+            project.resolve_abs_path(path!("/code/project1/README.md"), cx)
         })
         .await
         .unwrap();
@@ -884,12 +884,12 @@ async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut T
     assert!(path.is_file());
     assert_eq!(
         path.abs_path().unwrap().to_string_lossy(),
-        "/code/project1/README.md"
+        path!("/code/project1/README.md")
     );
 
     let path = project
         .update(cx, |project, cx| {
-            project.resolve_abs_path("/code/project1/src", cx)
+            project.resolve_abs_path(path!("/code/project1/src"), cx)
         })
         .await
         .unwrap();
@@ -897,12 +897,12 @@ async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut T
     assert!(path.is_dir());
     assert_eq!(
         path.abs_path().unwrap().to_string_lossy(),
-        "/code/project1/src"
+        path!("/code/project1/src")
     );
 
     let path = project
         .update(cx, |project, cx| {
-            project.resolve_abs_path("/code/project1/DOESNOTEXIST", cx)
+            project.resolve_abs_path(path!("/code/project1/DOESNOTEXIST"), cx)
         })
         .await;
     assert!(path.is_none());
@@ -958,7 +958,7 @@ async fn test_adding_then_removing_then_adding_worktrees(
 ) {
     let fs = FakeFs::new(server_cx.executor());
     fs.insert_tree(
-        "/code",
+        path!("/code"),
         json!({
             "project1": {
                 ".git": {},
@@ -977,14 +977,14 @@
     let (project, _headless) = init_test(&fs, cx, server_cx).await;
     let (_worktree, _) = project
         .update(cx, |project, cx| {
-            project.find_or_create_worktree("/code/project1", true, cx)
+            project.find_or_create_worktree(path!("/code/project1"), true, cx)
         })
         .await
        .unwrap();
 
     let (worktree_2, _) = project
         .update(cx, |project, cx| {
-            project.find_or_create_worktree("/code/project2", true, cx)
+            project.find_or_create_worktree(path!("/code/project2"), true, cx)
         })
         .await
         .unwrap();
@@ -994,7 +994,7 @@
     let (worktree_2, _) = project
         .update(cx, |project, cx| {
-            project.find_or_create_worktree("/code/project2", true, cx)
+            project.find_or_create_worktree(path!("/code/project2"), true, cx)
         })
         .await
         .unwrap();

View file

@@ -39,7 +39,7 @@ use postage::{
     watch,
 };
 use rpc::{
-    proto::{self, split_worktree_update},
+    proto::{self, split_worktree_update, FromProto, ToProto},
     AnyProtoClient,
 };
 pub use settings::WorktreeId;
@@ -283,13 +283,13 @@ impl RepositoryEntry {
                         current_new_entry = new_statuses.next();
                     }
                     Ordering::Greater => {
-                        removed_statuses.push(old_entry.repo_path.to_proto());
+                        removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                         current_old_entry = old_statuses.next();
                     }
                 }
            }
            (None, Some(old_entry)) => {
-                removed_statuses.push(old_entry.repo_path.to_proto());
+                removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                current_old_entry = old_statuses.next();
            }
            (Some(new_entry), None) => {
@@ -308,7 +308,7 @@ impl RepositoryEntry {
            current_merge_conflicts: self
                .current_merge_conflicts
                .iter()
-                .map(RepoPath::to_proto)
+                .map(|path| path.as_ref().to_proto())
                .collect(),
        }
    }
@@ -700,7 +700,7 @@ impl Worktree {
        let snapshot = Snapshot::new(
            worktree.id,
            worktree.root_name,
-            Arc::from(PathBuf::from(worktree.abs_path)),
+            Arc::<Path>::from_proto(worktree.abs_path),
        );
 
        let background_snapshot = Arc::new(Mutex::new((snapshot.clone(), Vec::new())));
@@ -849,7 +849,7 @@ impl Worktree {
            id: self.id().to_proto(),
            root_name: self.root_name().to_string(),
            visible: self.is_visible(),
-            abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
+            abs_path: self.abs_path().to_proto(),
        }
    }
@@ -1007,7 +1007,7 @@ impl Worktree {
        is_directory: bool,
        cx: &Context<Worktree>,
    ) -> Task<Result<CreatedEntry>> {
-        let path = path.into();
+        let path: Arc<Path> = path.into();
        let worktree_id = self.id();
        match self {
            Worktree::Local(this) => this.create_entry(path, is_directory, cx),
@@ -1016,7 +1016,7 @@ impl Worktree {
                let request = this.client.request(proto::CreateProjectEntry {
                    worktree_id: worktree_id.to_proto(),
                    project_id,
-                    path: path.to_string_lossy().into(),
+                    path: path.as_ref().to_proto(),
                    is_directory,
                });
                cx.spawn(move |this, mut cx| async move {
@@ -1101,21 +1101,19 @@ impl Worktree {
        new_path: impl Into<Arc<Path>>,
        cx: &Context<Self>,
    ) -> Task<Result<Option<Entry>>> {
-        let new_path = new_path.into();
+        let new_path: Arc<Path> = new_path.into();
        match self {
            Worktree::Local(this) => {
                this.copy_entry(entry_id, relative_worktree_source_path, new_path, cx)
            }
            Worktree::Remote(this) => {
-                let relative_worktree_source_path =
-                    relative_worktree_source_path.map(|relative_worktree_source_path| {
-                        relative_worktree_source_path.to_string_lossy().into()
-                    });
+                let relative_worktree_source_path = relative_worktree_source_path
+                    .map(|relative_worktree_source_path| relative_worktree_source_path.to_proto());
                let response = this.client.request(proto::CopyProjectEntry {
                    project_id: this.project_id,
                    entry_id: entry_id.to_proto(),
                    relative_worktree_source_path,
-                    new_path: new_path.to_string_lossy().into(),
+                    new_path: new_path.to_proto(),
                });
                cx.spawn(move |this, mut cx| async move {
                    let response = response.await?;
@@ -1214,7 +1212,11 @@ impl Worktree {
        let (scan_id, entry) = this.update(&mut cx, |this, cx| {
            (
                this.scan_id(),
-                this.create_entry(PathBuf::from(request.path), request.is_directory, cx),
+                this.create_entry(
+                    Arc::<Path>::from_proto(request.path),
+                    request.is_directory,
+                    cx,
+                ),
            )
        })?;
        Ok(proto::ProjectEntryResponse {
@@ -1288,7 +1290,7 @@ impl Worktree {
                this.scan_id(),
                this.rename_entry(
                    ProjectEntryId::from_proto(request.entry_id),
-                    PathBuf::from(request.new_path),
+                    Arc::<Path>::from_proto(request.new_path),
                    cx,
                ),
            )
@@ -1308,14 +1310,15 @@ impl Worktree {
        mut cx: AsyncApp,
    ) -> Result<proto::ProjectEntryResponse> {
        let (scan_id, task) = this.update(&mut cx, |this, cx| {
-            let relative_worktree_source_path =
-                request.relative_worktree_source_path.map(PathBuf::from);
+            let relative_worktree_source_path = request
+                .relative_worktree_source_path
+                .map(PathBuf::from_proto);
            (
                this.scan_id(),
                this.copy_entry(
                    ProjectEntryId::from_proto(request.entry_id),
                    relative_worktree_source_path,
-                    PathBuf::from(request.new_path),
+                    PathBuf::from_proto(request.new_path),
                    cx,
                ),
            )
@@ -2368,11 +2371,11 @@ impl RemoteWorktree {
        new_path: impl Into<Arc<Path>>,
        cx: &Context<Worktree>,
    ) -> Task<Result<CreatedEntry>> {
-        let new_path = new_path.into();
+        let new_path: Arc<Path> = new_path.into();
        let response = self.client.request(proto::RenameProjectEntry {
            project_id: self.project_id,
            entry_id: entry_id.to_proto(),
-            new_path: new_path.to_string_lossy().into(),
+            new_path: new_path.as_ref().to_proto(),
        });
        cx.spawn(move |this, mut cx| async move {
            let response = response.await?;
@@ -2454,7 +2457,7 @@ impl Snapshot {
        proto::UpdateWorktree {
            project_id,
            worktree_id,
-            abs_path: self.abs_path().to_string_lossy().into(),
+            abs_path: self.abs_path().to_proto(),
            root_name: self.root_name().to_string(),
            updated_entries,
            removed_entries: Vec::new(),
@@ -2555,7 +2558,7 @@ impl Snapshot {
            update.removed_entries.len()
        );
        self.update_abs_path(
-            SanitizedPath::from(PathBuf::from(update.abs_path)),
+            SanitizedPath::from(PathBuf::from_proto(update.abs_path)),
            update.root_name,
        );
 
@@ -2617,7 +2620,7 @@ impl Snapshot {
        let edits = repository
            .removed_statuses
            .into_iter()
-            .map(|path| Edit::Remove(PathKey(Path::new(&path).into())))
+            .map(|path| Edit::Remove(PathKey(FromProto::from_proto(path))))
            .chain(repository.updated_statuses.into_iter().filter_map(
                |updated_status| {
                    Some(Edit::Insert(updated_status.try_into().log_err()?))
@@ -2952,7 +2955,7 @@ impl LocalSnapshot {
        proto::UpdateWorktree {
            project_id,
            worktree_id,
-            abs_path: self.abs_path().to_string_lossy().into(),
+            abs_path: self.abs_path().to_proto(),
            root_name: self.root_name().to_string(),
            updated_entries,
            removed_entries,
@@ -3635,7 +3638,7 @@ impl language::File for File {
        rpc::proto::File {
            worktree_id: self.worktree.read(cx).id().to_proto(),
            entry_id: self.entry_id.map(|id| id.to_proto()),
-            path: self.path.to_string_lossy().into(),
+            path: self.path.as_ref().to_proto(),
            mtime: self.disk_state.mtime().map(|time| time.into()),
            is_deleted: self.disk_state == DiskState::Deleted,
        }
@@ -3716,7 +3719,7 @@ impl File {
 
        Ok(Self {
            worktree,
-            path: Path::new(&proto.path).into(),
+            path: Arc::<Path>::from_proto(proto.path),
            disk_state,
            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
            is_local: false,
@@ -3835,8 +3838,9 @@ impl StatusEntry {
                index_status
            }),
        };
+
        proto::StatusEntry {
-            repo_path: self.repo_path.to_proto(),
+            repo_path: self.repo_path.as_ref().to_proto(),
            simple_status,
            status: Some(status_to_proto(self.status)),
        }
@@ -3847,7 +3851,7 @@ impl TryFrom<proto::StatusEntry> for StatusEntry {
    type Error = anyhow::Error;
 
    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
-        let repo_path = RepoPath(Path::new(&value.repo_path).into());
+        let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
        let status = status_from_proto(value.simple_status, value.status)?;
        Ok(Self { repo_path, status })
    }
@@ -6231,7 +6235,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
        Self {
            id: entry.id.to_proto(),
            is_dir: entry.is_dir(),
-            path: entry.path.to_string_lossy().into(),
+            path: entry.path.as_ref().to_proto(),
            inode: entry.inode,
            mtime: entry.mtime.map(|time| time.into()),
            is_ignored: entry.is_ignored,
@@ -6241,7 +6245,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
            canonical_path: entry
                .canonical_path
                .as_ref()
-                .map(|path| path.to_string_lossy().to_string()),
+                .map(|path| path.as_ref().to_proto()),
        }
    }
 }
@@ -6257,20 +6261,22 @@ impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry {
        } else {
            EntryKind::File
        };
-        let path: Arc<Path> = PathBuf::from(entry.path).into();
+        let path = Arc::<Path>::from_proto(entry.path);
        let char_bag = char_bag_for_path(*root_char_bag, &path);
+        let is_always_included = always_included.is_match(path.as_ref());
+
        Ok(Entry {
            id: ProjectEntryId::from_proto(entry.id),
            kind,
-            path: path.clone(),
+            path,
            inode: entry.inode,
            mtime: entry.mtime.map(|time| time.into()),
            size: entry.size.unwrap_or(0),
            canonical_path: entry
                .canonical_path
-                .map(|path_string| Box::from(Path::new(&path_string))),
+                .map(|path_string| Box::from(PathBuf::from_proto(path_string))),
            is_ignored: entry.is_ignored,
-            is_always_included: always_included.is_match(path.as_ref()),
+            is_always_included,
            is_external: entry.is_external,
            is_private: false,
            char_bag,