Remove dependencies from the Worktree crate and make it more focused (#12747)

The `worktree` crate mainly provides an in-memory model of a directory
and its git repositories. But because it was originally extracted from
the `project` crate, it also contained lingering bits of code that fell
outside that scope:
* it had a little bit of logic related to buffers (though most buffer
management lives in `project`)
* it had a *little* bit of logic for storing diagnostics (though the
vast majority of LSP and diagnostic logic lives in `project`); the
state involved is sketched just below this list
* it had a little bit of logic for sending RPC messages (though the
*receiving* logic for those RPC messages lives in `project`)
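
For context, the diagnostics state that moves out of `worktree` amounts to roughly the following. This is a compacted reconstruction of the fields and types deleted in the `worktree.rs` diff below, not new API (the wrapper struct name here is only for illustration):

```rust
use collections::HashMap;
use language::{DiagnosticEntry, PointUtf16, Unclipped};
use lsp::LanguageServerId;
use std::{path::Path, sync::Arc};

// Matches the DiagnosticSummary type removed from worktree.rs.
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

// State that LocalWorktree used to carry, now owned by `project`:
// per-path diagnostic entries and summaries, keyed by language server.
// (The struct name is hypothetical; these were loose fields on LocalWorktree.)
struct RemovedDiagnosticsState {
    diagnostics: HashMap<
        Arc<Path>,
        Vec<(
            LanguageServerId,
            Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        )>,
    >,
    diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
}
```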

In this PR, I've moved those concerns entirely to the `project` crate
(where they were already handled for the most part), so that the
`worktree` crate can stay focused on its main job and carry fewer
dependencies.
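
Concretely, instead of calling the old `worktree.share(project_id, cx)` (which needed a `client` handle inside the worktree), the project side now wires the RPC itself through the worktree's generic update observer. A rough sketch of that caller, assuming a `client` and a local `worktree` handle are in scope on the project side (the project-side code is not part of this diff):

```rust
// Hypothetical project-side caller; the observer API itself
// (observe_updates / stop_observing_updates / has_update_observer)
// is what this diff leaves on LocalWorktree.
worktree.update(cx, |worktree, cx| {
    if let Some(local) = worktree.as_local_mut() {
        let client = client.clone();
        local.observe_updates(project_id, cx, move |update| {
            // `update` is a proto::UpdateWorktree; returning a future that
            // resolves to `true` keeps the stream of updates flowing.
            client.request(update).map(|result| result.is_ok())
        });
    }
});

// When sharing stops, the project simply drops the observer:
worktree.update(cx, |worktree, _| {
    if let Some(local) = worktree.as_local_mut() {
        local.stop_observing_updates();
    }
});
```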

Worktree no longer depends on `client` or `lsp`. It still depends on
`language`, but only because of `impl language::File for
worktree::File`.
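
That remaining tie looks roughly like this. A minimal sketch only: the method names below are illustrative assumptions, and the real `language::File` trait has a larger surface:

```rust
// Sketch only: the exact trait definition lives in the `language` crate;
// these two methods are assumptions chosen to show the shape of the impl.
impl language::File for worktree::File {
    fn path(&self) -> &Arc<Path> {
        &self.path
    }

    fn mtime(&self) -> Option<SystemTime> {
        self.mtime
    }

    // ...remaining trait methods elided...
}
```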

Release Notes:

- N/A
Max Brunsfeld authored 2024-06-06 11:16:58 -07:00; committed by GitHub
parent 00dfd217d8
commit 48581167b7
10 changed files with 554 additions and 689 deletions


@ -14,7 +14,6 @@ workspace = true
[features]
test-support = [
"client/test-support",
"language/test-support",
"settings/test-support",
"text/test-support",
@ -24,7 +23,6 @@ test-support = [
[dependencies]
anyhow.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
fs.workspace = true
@ -36,7 +34,6 @@ ignore.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
parking_lot.workspace = true
postage.workspace = true
rpc.workspace = true


@ -5,11 +5,9 @@ mod worktree_tests;
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result};
use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use fs::{copy_recursive, RemoveOptions};
use fs::{Fs, Watcher};
use fs::{copy_recursive, Fs, RemoveOptions, Watcher};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@ -21,9 +19,9 @@ use futures::{
FutureExt as _, Stream, StreamExt,
};
use fuzzy::CharBag;
use git::status::GitStatus;
use git::{
repository::{GitFileStatus, GitRepository, RepoPath},
status::GitStatus,
DOT_GIT, GITIGNORE,
};
use gpui::{
@ -32,21 +30,15 @@ use gpui::{
};
use ignore::IgnoreStack;
use itertools::Itertools;
use language::{
proto::{deserialize_version, serialize_line_ending, serialize_version},
Buffer, Capability, DiagnosticEntry, File as _, LineEnding, PointUtf16, Rope, Unclipped,
};
use lsp::{DiagnosticSeverity, LanguageServerId};
use parking_lot::Mutex;
use postage::{
barrier,
prelude::{Sink as _, Stream as _},
watch,
};
use serde::Serialize;
use rpc::proto;
use settings::{Settings, SettingsLocation, SettingsStore};
use smol::channel::{self, Sender};
use std::time::Instant;
use std::{
any::Any,
cmp::{self, Ordering},
@ -62,10 +54,10 @@ use std::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
time::{Duration, SystemTime},
time::{Duration, Instant, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use text::BufferId;
use text::{LineEnding, Rope};
use util::{
paths::{PathMatcher, HOME},
ResultExt,
@ -106,36 +98,16 @@ pub enum CreatedEntry {
Excluded { abs_path: PathBuf },
}
#[cfg(any(test, feature = "test-support"))]
impl CreatedEntry {
pub fn to_included(self) -> Option<Entry> {
match self {
CreatedEntry::Included(entry) => Some(entry),
CreatedEntry::Excluded { .. } => None,
}
}
}
pub struct LocalWorktree {
snapshot: LocalSnapshot,
scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>,
diagnostics: HashMap<
Arc<Path>,
Vec<(
LanguageServerId,
Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
)>,
>,
diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
client: Arc<Client>,
update_observer: Option<ShareState>,
fs: Arc<dyn Fs>,
fs_case_sensitive: bool,
visible: bool,
next_entry_id: Arc<AtomicUsize>,
}
@ -147,12 +119,9 @@ struct ScanRequest {
pub struct RemoteWorktree {
snapshot: Snapshot,
background_snapshot: Arc<Mutex<Snapshot>>,
project_id: u64,
client: Arc<Client>,
updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
replica_id: ReplicaId,
diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
visible: bool,
disconnected: bool,
}
@ -365,7 +334,6 @@ enum ScanState {
}
struct ShareState {
project_id: u64,
snapshots_tx:
mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
resume_updates: watch::Sender<()>,
@ -382,7 +350,6 @@ impl EventEmitter<Event> for Worktree {}
impl Worktree {
pub async fn local(
client: Arc<Client>,
path: impl Into<Arc<Path>>,
visible: bool,
fs: Arc<dyn Fs>,
@ -502,7 +469,7 @@ impl Worktree {
next_entry_id: Arc::clone(&next_entry_id),
snapshot,
is_scanning: watch::channel_with(true),
share: None,
update_observer: None,
scan_requests_tx,
path_prefixes_to_scan_tx,
_background_scanner_tasks: start_background_scan_tasks(
@ -514,9 +481,6 @@ impl Worktree {
Arc::clone(&fs),
cx,
),
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
client,
fs,
fs_case_sensitive,
visible,
@ -525,10 +489,8 @@ impl Worktree {
}
pub fn remote(
project_remote_id: u64,
replica_id: ReplicaId,
worktree: proto::WorktreeMetadata,
client: Arc<Client>,
cx: &mut AppContext,
) -> Model<Self> {
cx.new_model(|cx: &mut ModelContext<Self>| {
@ -590,14 +552,11 @@ impl Worktree {
.detach();
Worktree::Remote(RemoteWorktree {
project_id: project_remote_id,
replica_id,
snapshot: snapshot.clone(),
background_snapshot,
updates_tx: Some(updates_tx),
snapshot_subscriptions: Default::default(),
client: client.clone(),
diagnostic_summaries: Default::default(),
visible: worktree.visible,
disconnected: false,
})
@ -679,21 +638,6 @@ impl Worktree {
}
}
pub fn diagnostic_summaries(
&self,
) -> impl Iterator<Item = (Arc<Path>, LanguageServerId, DiagnosticSummary)> + '_ {
match self {
Worktree::Local(worktree) => &worktree.diagnostic_summaries,
Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
}
.iter()
.flat_map(|(path, summaries)| {
summaries
.iter()
.map(move |(&server_id, &summary)| (path.clone(), server_id, summary))
})
}
pub fn abs_path(&self) -> Arc<Path> {
match self {
Worktree::Local(worktree) => worktree.abs_path.clone(),
@ -807,168 +751,6 @@ impl LocalWorktree {
path.starts_with(&self.abs_path)
}
pub fn load_buffer(
&mut self,
path: &Path,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Model<Buffer>>> {
let path = Arc::from(path);
let reservation = cx.reserve_model();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
cx.spawn(move |this, mut cx| async move {
let (file, contents, diff_base) = this
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
.await?;
let text_buffer = cx
.background_executor()
.spawn(async move { text::Buffer::new(0, buffer_id, contents) })
.await;
cx.insert_model(reservation, |_| {
Buffer::build(
text_buffer,
diff_base,
Some(Arc::new(file)),
Capability::ReadWrite,
)
})
})
}
pub fn new_buffer(
&mut self,
path: Arc<Path>,
cx: &mut ModelContext<Worktree>,
) -> Model<Buffer> {
let worktree = cx.handle();
cx.new_model(|cx| {
let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
let text_buffer = text::Buffer::new(0, buffer_id, "".into());
Buffer::build(
text_buffer,
None,
Some(Arc::new(File {
worktree,
path,
mtime: None,
entry_id: None,
is_local: true,
is_deleted: false,
is_private: false,
})),
Capability::ReadWrite,
)
})
}
pub fn diagnostics_for_path(
&self,
path: &Path,
) -> Vec<(
LanguageServerId,
Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
)> {
self.diagnostics.get(path).cloned().unwrap_or_default()
}
pub fn clear_diagnostics_for_language_server(
&mut self,
server_id: LanguageServerId,
_: &mut ModelContext<Worktree>,
) {
let worktree_id = self.id().to_proto();
self.diagnostic_summaries
.retain(|path, summaries_by_server_id| {
if summaries_by_server_id.remove(&server_id).is_some() {
if let Some(share) = self.share.as_ref() {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id: share.project_id,
worktree_id,
summary: Some(proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: 0,
warning_count: 0,
}),
})
.log_err();
}
!summaries_by_server_id.is_empty()
} else {
true
}
});
self.diagnostics.retain(|_, diagnostics_by_server_id| {
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix);
!diagnostics_by_server_id.is_empty()
} else {
true
}
});
}
pub fn update_diagnostics(
&mut self,
server_id: LanguageServerId,
worktree_path: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut ModelContext<Worktree>,
) -> Result<bool> {
let summaries_by_server_id = self
.diagnostic_summaries
.entry(worktree_path.clone())
.or_default();
let old_summary = summaries_by_server_id
.remove(&server_id)
.unwrap_or_default();
let new_summary = DiagnosticSummary::new(&diagnostics);
if new_summary.is_empty() {
if let Some(diagnostics_by_server_id) = self.diagnostics.get_mut(&worktree_path) {
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix);
}
if diagnostics_by_server_id.is_empty() {
self.diagnostics.remove(&worktree_path);
}
}
} else {
summaries_by_server_id.insert(server_id, new_summary);
let diagnostics_by_server_id =
self.diagnostics.entry(worktree_path.clone()).or_default();
match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
Ok(ix) => {
diagnostics_by_server_id[ix] = (server_id, diagnostics);
}
Err(ix) => {
diagnostics_by_server_id.insert(ix, (server_id, diagnostics));
}
}
}
if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some(share) = self.share.as_ref() {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id: share.project_id,
worktree_id: self.id().to_proto(),
summary: Some(proto::DiagnosticSummary {
path: worktree_path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count as u32,
warning_count: new_summary.warning_count as u32,
}),
})
.log_err();
}
}
Ok(!old_summary.is_empty() || !new_summary.is_empty())
}
fn restart_background_scanners(&mut self, cx: &mut ModelContext<Worktree>) {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
@ -997,7 +779,7 @@ impl LocalWorktree {
new_snapshot.share_private_files = self.snapshot.share_private_files;
self.snapshot = new_snapshot;
if let Some(share) = self.share.as_mut() {
if let Some(share) = self.update_observer.as_mut() {
share
.snapshots_tx
.unbounded_send((
@ -1138,7 +920,7 @@ impl LocalWorktree {
}
}
fn load(
pub fn load_file(
&self,
path: &Path,
cx: &mut ModelContext<Worktree>,
@ -1232,97 +1014,6 @@ impl LocalWorktree {
})
}
pub fn save_buffer(
&self,
buffer_handle: Model<Buffer>,
path: Arc<Path>,
mut has_changed_file: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let rpc = self.client.clone();
let buffer_id: u64 = buffer.remote_id().into();
let project_id = self.share.as_ref().map(|share| share.project_id);
if buffer.file().is_some_and(|file| !file.is_created()) {
has_changed_file = true;
}
let text = buffer.as_rope().clone();
let version = buffer.version();
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
let is_private = self.snapshot.is_path_private(&path);
cx.spawn(move |this, mut cx| async move {
let entry = save.await?;
let abs_path = abs_path?;
let this = this.upgrade().context("worktree dropped")?;
let (entry_id, mtime, path, is_dotenv) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path, entry.is_private),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, Some(metadata.mtime), path, is_private)
}
};
if has_changed_file {
let new_file = Arc::new(File {
entry_id,
worktree: this,
path,
mtime,
is_local: true,
is_deleted: false,
is_private: is_dotenv,
});
if let Some(project_id) = project_id {
rpc.send(proto::UpdateBufferFile {
project_id,
buffer_id,
file: Some(new_file.to_proto()),
})
.log_err();
}
buffer_handle.update(&mut cx, |buffer, cx| {
if has_changed_file {
buffer.file_updated(new_file, cx);
}
})?;
}
if let Some(project_id) = project_id {
rpc.send(proto::BufferSaved {
project_id,
buffer_id,
version: serialize_version(&version),
mtime: mtime.map(|time| time.into()),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
/// Find the lowest path in the worktree's datastructures that is an ancestor
fn lowest_ancestor(&self, path: &Path) -> PathBuf {
let mut lowest_ancestor = None;
@ -1400,7 +1091,7 @@ impl LocalWorktree {
})
}
pub(crate) fn write_file(
pub fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
@ -1630,8 +1321,7 @@ impl LocalWorktree {
project_id: u64,
cx: &mut ModelContext<Worktree>,
callback: F,
) -> oneshot::Receiver<()>
where
) where
F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
Fut: Send + Future<Output = bool>,
{
@ -1640,12 +1330,9 @@ impl LocalWorktree {
#[cfg(not(any(test, feature = "test-support")))]
const MAX_CHUNK_SIZE: usize = 256;
let (share_tx, share_rx) = oneshot::channel();
if let Some(share) = self.share.as_mut() {
share_tx.send(()).ok();
*share.resume_updates.borrow_mut() = ();
return share_rx;
if let Some(observer) = self.update_observer.as_mut() {
*observer.resume_updates.borrow_mut() = ();
return;
}
let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
@ -1683,47 +1370,23 @@ impl LocalWorktree {
}
}
}
share_tx.send(()).ok();
Some(())
});
self.share = Some(ShareState {
project_id,
self.update_observer = Some(ShareState {
snapshots_tx,
resume_updates: resume_updates_tx,
_maintain_remote_snapshot,
});
share_rx
}
pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
let client = self.client.clone();
for (path, summaries) in &self.diagnostic_summaries {
for (&server_id, summary) in summaries {
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id: cx.entity_id().as_u64(),
summary: Some(summary.to_proto(server_id, path)),
}) {
return Task::ready(Err(e));
}
}
}
let rx = self.observe_updates(project_id, cx, move |update| {
client.request(update).map(|result| result.is_ok())
});
cx.background_executor()
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
pub fn stop_observing_updates(&mut self) {
self.update_observer.take();
}
pub fn unshare(&mut self) {
self.share.take();
}
pub fn is_shared(&self) -> bool {
self.share.is_some()
#[cfg(any(test, feature = "test-support"))]
pub fn has_update_observer(&self) -> bool {
self.update_observer.is_some()
}
pub fn share_private_files(&mut self, cx: &mut ModelContext<Worktree>) {
@ -1743,37 +1406,6 @@ impl RemoteWorktree {
self.disconnected = true;
}
pub fn save_buffer(
&self,
buffer_handle: Model<Buffer>,
new_path: Option<proto::ProjectPath>,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id().into();
let version = buffer.version();
let rpc = self.client.clone();
let project_id = self.project_id;
cx.spawn(move |_, mut cx| async move {
let response = rpc
.request(proto::SaveBuffer {
project_id,
buffer_id,
new_path,
version: serialize_version(&version),
})
.await?;
let version = deserialize_version(&response.version);
let mtime = response.mtime.map(|mtime| mtime.into());
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
if let Some(updates_tx) = &self.updates_tx {
updates_tx
@ -1807,32 +1439,6 @@ impl RemoteWorktree {
}
}
pub fn update_diagnostic_summary(
&mut self,
path: Arc<Path>,
summary: &proto::DiagnosticSummary,
) {
let server_id = LanguageServerId(summary.language_server_id as usize);
let summary = DiagnosticSummary {
error_count: summary.error_count as usize,
warning_count: summary.warning_count as usize,
};
if summary.is_empty() {
if let Some(summaries) = self.diagnostic_summaries.get_mut(&path) {
summaries.remove(&server_id);
if summaries.is_empty() {
self.diagnostic_summaries.remove(&path);
}
}
} else {
self.diagnostic_summaries
.entry(path)
.or_default()
.insert(server_id, summary);
}
}
pub fn insert_entry(
&mut self,
entry: proto::Entry,
@ -3023,29 +2629,6 @@ impl language::LocalFile for File {
cx.background_executor()
.spawn(async move { fs.load(&abs_path?).await })
}
fn buffer_reloaded(
&self,
buffer_id: BufferId,
version: &clock::Global,
line_ending: LineEnding,
mtime: Option<SystemTime>,
cx: &mut AppContext,
) {
let worktree = self.worktree.read(cx).as_local().unwrap();
if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
worktree
.client
.send(proto::BufferReloaded {
project_id,
buffer_id: buffer_id.into(),
version: serialize_version(version),
mtime: mtime.map(|time| time.into()),
line_ending: serialize_line_ending(line_ending) as i32,
})
.log_err();
}
}
}
impl File {
@ -5109,46 +4692,12 @@ impl ProjectEntryId {
}
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
pub error_count: usize,
pub warning_count: usize,
}
impl DiagnosticSummary {
fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
let mut this = Self {
error_count: 0,
warning_count: 0,
};
for entry in diagnostics {
if entry.diagnostic.is_primary {
match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => this.error_count += 1,
DiagnosticSeverity::WARNING => this.warning_count += 1,
_ => {}
}
}
}
this
}
pub fn is_empty(&self) -> bool {
self.error_count == 0 && self.warning_count == 0
}
pub fn to_proto(
&self,
language_server_id: LanguageServerId,
path: &Path,
) -> proto::DiagnosticSummary {
proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: language_server_id.0 as u64,
error_count: self.error_count as u32,
warning_count: self.warning_count as u32,
#[cfg(any(test, feature = "test-support"))]
impl CreatedEntry {
pub fn to_included(self) -> Option<Entry> {
match self {
CreatedEntry::Included(entry) => Some(entry),
CreatedEntry::Excluded { .. } => None,
}
}
}


@ -3,12 +3,9 @@ use crate::{
WorktreeModelHandle,
};
use anyhow::Result;
use client::Client;
use clock::FakeSystemClock;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::{repository::GitFileStatus, GITIGNORE};
use gpui::{BorrowAppContext, ModelContext, Task, TestAppContext};
use http::FakeHttpClient;
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
@ -35,7 +32,6 @@ async fn test_traversal(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs,
@ -100,7 +96,6 @@ async fn test_circular_symlinks(cx: &mut TestAppContext) {
.unwrap();
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -200,7 +195,6 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
.unwrap();
let tree = Worktree::local(
build_client(cx),
Path::new("/root/dir1"),
true,
fs.clone(),
@ -351,7 +345,6 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
temp_root.path(),
true,
fs.clone(),
@ -428,7 +421,6 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -461,16 +453,16 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
// Open a file that is nested inside of a gitignored directory that
// has not yet been expanded.
let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree
let (file, _, _) = tree
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.load_buffer("one/node_modules/b/b1.js".as_ref(), cx)
.load_file("one/node_modules/b/b1.js".as_ref(), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, cx| {
tree.read_with(cx, |tree, _| {
assert_eq!(
tree.entries(true)
.map(|entry| (entry.path.as_ref(), entry.is_ignored))
@ -491,10 +483,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
]
);
assert_eq!(
buffer.read(cx).file().unwrap().path().as_ref(),
Path::new("one/node_modules/b/b1.js")
);
assert_eq!(file.path.as_ref(), Path::new("one/node_modules/b/b1.js"));
// Only the newly-expanded directories are scanned.
assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
@ -503,16 +492,16 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
// Open another file in a different subdirectory of the same
// gitignored directory.
let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree
let (file, _, _) = tree
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.load_buffer("one/node_modules/a/a2.js".as_ref(), cx)
.load_file("one/node_modules/a/a2.js".as_ref(), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, cx| {
tree.read_with(cx, |tree, _| {
assert_eq!(
tree.entries(true)
.map(|entry| (entry.path.as_ref(), entry.is_ignored))
@ -535,10 +524,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
]
);
assert_eq!(
buffer.read(cx).file().unwrap().path().as_ref(),
Path::new("one/node_modules/a/a2.js")
);
assert_eq!(file.path.as_ref(), Path::new("one/node_modules/a/a2.js"));
// Only the newly-expanded directory is scanned.
assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
@ -591,7 +577,6 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -711,7 +696,6 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root/tree".as_ref(),
true,
fs.clone(),
@ -793,7 +777,6 @@ async fn test_update_gitignore(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root".as_ref(),
true,
fs.clone(),
@ -848,7 +831,6 @@ async fn test_write_file(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -928,7 +910,6 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -1032,7 +1013,6 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -1142,7 +1122,6 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
let dot_git_worktree_dir = dir.path().join(".git");
let tree = Worktree::local(
build_client(cx),
dot_git_worktree_dir.clone(),
true,
Arc::new(RealFs::default()),
@ -1181,7 +1160,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root".as_ref(),
true,
fs,
@ -1194,7 +1172,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
let snapshot1 = tree.update(cx, |tree, cx| {
let tree = tree.as_local_mut().unwrap();
let snapshot = Arc::new(Mutex::new(tree.snapshot()));
let _ = tree.observe_updates(0, cx, {
tree.observe_updates(0, cx, {
let snapshot = snapshot.clone();
move |update| {
snapshot.lock().apply_remote_update(update).unwrap();
@ -1232,13 +1210,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let client_fake = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
FakeHttpClient::with_404_response(),
cx,
)
});
let fs_fake = FakeFs::new(cx.background_executor.clone());
fs_fake
@ -1251,7 +1222,6 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.await;
let tree_fake = Worktree::local(
client_fake,
"/root".as_ref(),
true,
fs_fake,
@ -1280,21 +1250,12 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
});
let client_real = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
FakeHttpClient::with_404_response(),
cx,
)
});
let fs_real = Arc::new(RealFs::default());
let temp_root = temp_tree(json!({
"a": {}
}));
let tree_real = Worktree::local(
client_real,
temp_root.path(),
true,
fs_real,
@ -1385,7 +1346,6 @@ async fn test_random_worktree_operations_during_initial_scan(
log::info!("generated initial tree");
let worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1400,7 +1360,7 @@ async fn test_random_worktree_operations_during_initial_scan(
worktree.update(cx, |tree, cx| {
check_worktree_change_events(tree, cx);
let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
tree.as_local_mut().unwrap().observe_updates(0, cx, {
let updates = updates.clone();
move |update| {
updates.lock().push(update);
@ -1475,7 +1435,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
log::info!("generated initial tree");
let worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1489,7 +1448,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
worktree.update(cx, |tree, cx| {
check_worktree_change_events(tree, cx);
let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
tree.as_local_mut().unwrap().observe_updates(0, cx, {
let updates = updates.clone();
move |update| {
updates.lock().push(update);
@ -1548,7 +1507,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
{
let new_worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1892,7 +1850,6 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
let root_path = root.path();
let tree = Worktree::local(
build_client(cx),
root_path,
true,
Arc::new(RealFs::default()),
@ -1971,7 +1928,6 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
root.path(),
true,
Arc::new(RealFs::default()),
@ -2112,7 +2068,6 @@ async fn test_git_status(cx: &mut TestAppContext) {
git_commit("Initial commit", &repo);
let tree = Worktree::local(
build_client(cx),
root.path(),
true,
Arc::new(RealFs::default()),
@ -2294,7 +2249,6 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
// Open the worktree in subfolder
let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
let tree = Worktree::local(
build_client(cx),
root.path().join(project_root),
true,
Arc::new(RealFs::default()),
@ -2392,7 +2346,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
);
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -2471,12 +2424,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
}
}
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
let clock = Arc::new(FakeSystemClock::default());
let http_client = FakeHttpClient::with_404_response();
cx.update(|cx| Client::new(clock, http_client, cx))
}
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
git2::Repository::init(path).expect("Failed to initialize git repository")