Eagerly update worktree entries when saving
Don't use ModelHandles for storing Files.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
parent dc8e216fcb
commit c5e08b6548
6 changed files with 365 additions and 265 deletions
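The sketch below is a hedged illustration of the shape of this change, not Zed's actual code: `Entry`, `Snapshot`, `File`, and `Buffer` are simplified stand-ins, and a `BTreeMap` keyed by path stands in for the worktree's `SumTree<Entry>`. It shows the two ideas in the commit message: a buffer owns its `File` as a plain value rather than behind a `ModelHandle`, and saving eagerly inserts or replaces the saved path's entry in the snapshot instead of waiting for the next filesystem scan. In the real diff, the same upsert is performed by the new `SumTree::replace` inside `LocalWorktree::save`.

// Hedged sketch with simplified stand-in types; not Zed's API.
use std::{
    collections::BTreeMap,
    path::{Path, PathBuf},
    sync::{Arc, Mutex},
    time::SystemTime,
};

#[derive(Clone, Debug)]
struct Entry {
    path: Arc<Path>,
    mtime: SystemTime,
}

#[derive(Default)]
struct Snapshot {
    // Stand-in for the worktree's SumTree<Entry>, keyed by path.
    entries: BTreeMap<Arc<Path>, Entry>,
}

impl Snapshot {
    /// Insert or replace the entry with the same path; returns true if one already existed.
    fn replace(&mut self, entry: Entry) -> bool {
        self.entries.insert(entry.path.clone(), entry).is_some()
    }
}

/// Stand-in for `worktree::File`: a plain value, no model handle.
#[derive(Clone)]
struct File {
    snapshot: Arc<Mutex<Snapshot>>,
    path: Arc<Path>,
}

impl File {
    /// Write `text` to disk, then eagerly record the new entry in the snapshot
    /// so callers see the saved file before any background scan catches up.
    fn save(&self, text: &str) -> std::io::Result<()> {
        let abs_path: PathBuf = self.path.as_ref().into();
        std::fs::write(&abs_path, text)?;
        let mtime = std::fs::metadata(&abs_path)?.modified()?;
        self.snapshot.lock().unwrap().replace(Entry {
            path: self.path.clone(),
            mtime,
        });
        Ok(())
    }
}

struct Buffer {
    text: String,
    // Stored by value, as in this commit.
    file: Option<File>,
}

impl Buffer {
    fn save(&self) -> std::io::Result<()> {
        match &self.file {
            Some(file) => file.save(&self.text),
            None => Ok(()),
        }
    }
}

fn main() -> std::io::Result<()> {
    let snapshot = Arc::new(Mutex::new(Snapshot::default()));
    let path = std::env::temp_dir().join("eager-save-sketch.txt");
    let file = File {
        snapshot: snapshot.clone(),
        path: Arc::from(path.as_path()),
    };
    let buffer = Buffer {
        text: "hello".into(),
        file: Some(file),
    };
    buffer.save()?;
    // The entry is visible immediately, without waiting for an fs scan.
    assert_eq!(snapshot.lock().unwrap().entries.len(), 1);
    Ok(())
}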
@@ -2444,7 +2444,7 @@ impl View for Editor {
 impl workspace::Item for Buffer {
     type View = Editor;
 
-    fn file(&self) -> Option<&ModelHandle<File>> {
+    fn file(&self) -> Option<&File> {
         self.file()
     }
 
@@ -2474,7 +2474,7 @@ impl workspace::ItemView for Editor {
             .buffer
             .read(cx)
             .file()
-            .and_then(|file| file.read(cx).file_name(cx));
+            .and_then(|file| file.file_name(cx));
         if let Some(name) = filename {
             name.to_string_lossy().into()
         } else {
@@ -2483,10 +2483,7 @@ impl workspace::ItemView for Editor {
     }
 
     fn entry_id(&self, cx: &AppContext) -> Option<(usize, Arc<Path>)> {
-        self.buffer
-            .read(cx)
-            .file()
-            .map(|file| file.read(cx).entry_id())
+        self.buffer.read(cx).file().map(|file| file.entry_id())
     }
 
     fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
@@ -2498,11 +2495,7 @@ impl workspace::ItemView for Editor {
         Some(clone)
     }
 
-    fn save(
-        &mut self,
-        new_file: Option<ModelHandle<File>>,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>> {
+    fn save(&mut self, new_file: Option<File>, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
         self.buffer.update(cx, |b, cx| b.save(new_file, cx))
     }
 
@@ -22,7 +22,7 @@ use crate::{
     worktree::File,
 };
 use anyhow::{anyhow, Result};
-use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{AppContext, Entity, ModelContext, Task};
 use lazy_static::lazy_static;
 use std::{
     cell::RefCell,
@@ -30,6 +30,7 @@ use std::{
     hash::BuildHasher,
     iter::Iterator,
     ops::{Deref, DerefMut, Range},
+    path::Path,
     str,
     sync::Arc,
     time::{Duration, Instant, SystemTime, UNIX_EPOCH},
@@ -114,7 +115,7 @@ pub struct Buffer {
    last_edit: time::Local,
    undo_map: UndoMap,
    history: History,
-    file: Option<ModelHandle<File>>,
+    file: Option<File>,
    language: Option<Arc<Language>>,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    is_parsing: bool,
@@ -420,7 +421,7 @@ impl Buffer {
     pub fn from_history(
         replica_id: ReplicaId,
         history: History,
-        file: Option<ModelHandle<File>>,
+        file: Option<File>,
         language: Option<Arc<Language>>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
@@ -430,13 +431,13 @@ impl Buffer {
     fn build(
         replica_id: ReplicaId,
         history: History,
-        file: Option<ModelHandle<File>>,
+        file: Option<File>,
         language: Option<Arc<Language>>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
         let saved_mtime;
         if let Some(file) = file.as_ref() {
-            saved_mtime = file.read(cx).mtime(cx.as_ref());
+            saved_mtime = file.mtime(cx.as_ref());
         } else {
             saved_mtime = UNIX_EPOCH;
         }
@@ -492,13 +493,13 @@ impl Buffer {
         }
     }
 
-    pub fn file(&self) -> Option<&ModelHandle<File>> {
+    pub fn file(&self) -> Option<&File> {
         self.file.as_ref()
     }
 
     pub fn save(
         &mut self,
-        new_file: Option<ModelHandle<File>>,
+        new_file: Option<File>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let text = self.visible_text.clone();
@@ -507,7 +508,7 @@ impl Buffer {
 
         cx.spawn(|handle, mut cx| async move {
             if let Some(file) = new_file.as_ref().or(file.as_ref()) {
-                let result = file.read_with(&cx, |file, cx| file.save(text, cx)).await;
+                let result = cx.update(|cx| file.save(text, cx)).await;
                 if result.is_ok() {
                     handle.update(&mut cx, |me, cx| me.did_save(version, new_file, cx));
                 }
@@ -521,22 +522,36 @@ impl Buffer {
     fn did_save(
         &mut self,
         version: time::Global,
-        file: Option<ModelHandle<File>>,
+        new_file: Option<File>,
         cx: &mut ModelContext<Self>,
     ) {
-        if file.is_some() {
-            self.file = file;
+        if let Some(new_file) = new_file {
+            let buffer = cx.handle();
+            new_file.saved_buffer(buffer, cx.as_mut());
+            self.file = Some(new_file);
         }
         if let Some(file) = &self.file {
-            self.saved_mtime = file.read(cx).mtime(cx.as_ref());
+            self.saved_mtime = file.mtime(cx.as_ref());
         }
         self.saved_version = version;
         cx.emit(Event::Saved);
     }
 
+    pub fn file_was_moved(&mut self, new_path: Arc<Path>, cx: &mut ModelContext<Self>) {
+        self.file.as_mut().unwrap().path = new_path;
+        cx.emit(Event::FileHandleChanged);
+    }
+
+    pub fn file_was_added(&mut self, cx: &mut ModelContext<Self>) {
+        cx.emit(Event::FileHandleChanged);
+    }
+
     pub fn file_was_deleted(&mut self, cx: &mut ModelContext<Self>) {
         if self.version == self.saved_version {
             cx.emit(Event::Dirtied);
         }
         cx.emit(Event::FileHandleChanged);
     }
 
     pub fn file_was_modified(
         &mut self,
@@ -759,10 +774,7 @@ impl Buffer {
 
     pub fn is_dirty(&self, cx: &AppContext) -> bool {
         self.version > self.saved_version
-            || self
-                .file
-                .as_ref()
-                .map_or(false, |file| file.read(cx).is_deleted(cx))
+            || self.file.as_ref().map_or(false, |file| file.is_deleted(cx))
     }
 
     pub fn has_conflict(&self, cx: &AppContext) -> bool {
@@ -770,7 +782,7 @@ impl Buffer {
             && self
                 .file
                 .as_ref()
-                .map_or(false, |file| file.read(cx).mtime(cx) > self.saved_mtime)
+                .map_or(false, |file| file.mtime(cx) > self.saved_mtime)
     }
 
     pub fn version(&self) -> time::Global {
@@ -2208,6 +2220,7 @@ impl ToPoint for usize {
 mod tests {
     use super::*;
     use crate::{
+        language::LanguageRegistry,
         test::{build_app_state, temp_tree},
         util::RandomCharIter,
         worktree::{Worktree, WorktreeHandle},
@@ -2220,6 +2233,7 @@ mod tests {
         cmp::Ordering,
         env, fs,
         iter::FromIterator,
+        path::Path,
         rc::Rc,
         sync::atomic::{self, AtomicUsize},
     };
@@ -2676,20 +2690,24 @@ mod tests {
     #[test]
     fn test_is_dirty() {
         App::test_async((), |mut cx| async move {
+            let language_registry = Arc::new(LanguageRegistry::new());
+
             let dir = temp_tree(json!({
-                "file1": "",
-                "file2": "",
-                "file3": "",
+                "file1": "abc",
+                "file2": "def",
+                "file3": "ghi",
             }));
             let tree = cx.add_model(|cx| Worktree::local(dir.path(), cx));
             tree.flush_fs_events(&cx).await;
             cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
                 .await;
 
-            let file1 = cx.update(|cx| tree.file("file1", cx));
-            let buffer1 = cx.add_model(|cx| {
-                Buffer::from_history(0, History::new("abc".into()), Some(file1), None, cx)
-            });
+            let buffer1 = tree
+                .update(&mut cx, |tree, cx| {
+                    tree.open_buffer("file1", language_registry.clone(), cx)
+                })
+                .await
+                .unwrap();
             let events = Rc::new(RefCell::new(Vec::new()));
 
            // initially, the buffer isn't dirty.
@@ -2746,14 +2764,17 @@ mod tests {
 
             // When a file is deleted, the buffer is considered dirty.
             let events = Rc::new(RefCell::new(Vec::new()));
-            let file2 = cx.update(|cx| tree.file("file2", cx));
-            let buffer2 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
-                cx.subscribe(&cx.handle(), {
+            let buffer2 = tree
+                .update(&mut cx, |tree, cx| {
+                    tree.open_buffer("file2", language_registry.clone(), cx)
+                })
+                .await
+                .unwrap();
+            buffer2.update(&mut cx, |_, cx| {
+                cx.subscribe(&buffer2, {
                     let events = events.clone();
                     move |_, event, _| events.borrow_mut().push(event.clone())
                 });
-
-                Buffer::from_history(0, History::new("abc".into()), Some(file2), None, cx)
             });
 
             fs::remove_file(dir.path().join("file2")).unwrap();
@@ -2767,14 +2788,17 @@ mod tests {
 
             // When a file is already dirty when deleted, we don't emit a Dirtied event.
             let events = Rc::new(RefCell::new(Vec::new()));
-            let file3 = cx.update(|cx| tree.file("file3", cx));
-            let buffer3 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
-                cx.subscribe(&cx.handle(), {
+            let buffer3 = tree
+                .update(&mut cx, |tree, cx| {
+                    tree.open_buffer("file3", language_registry.clone(), cx)
+                })
+                .await
+                .unwrap();
+            buffer3.update(&mut cx, |_, cx| {
+                cx.subscribe(&buffer3, {
                    let events = events.clone();
                    move |_, event, _| events.borrow_mut().push(event.clone())
                });
-
-                Buffer::from_history(0, History::new("abc".into()), Some(file3), None, cx)
            });
 
            tree.flush_fs_events(&cx).await;
@@ -2800,16 +2824,12 @@ mod tests {
             .await;
 
         let abs_path = dir.path().join("the-file");
-        let file = cx.update(|cx| tree.file("the-file", cx));
-        let buffer = cx.add_model(|cx| {
-            Buffer::from_history(
-                0,
-                History::new(initial_contents.into()),
-                Some(file),
-                None,
-                cx,
-            )
-        });
+        let buffer = tree
+            .update(&mut cx, |tree, cx| {
+                tree.open_buffer(Path::new("the-file"), Arc::new(LanguageRegistry::new()), cx)
+            })
+            .await
+            .unwrap();
 
         // Add a cursor at the start of each row.
         let (selection_set_id, _) = buffer.update(&mut cx, |buffer, cx| {
@@ -419,6 +419,22 @@ impl<T: KeyedItem> SumTree<T> {
         };
     }
 
+    pub fn replace(&mut self, item: T, cx: &<T::Summary as Summary>::Context) -> bool {
+        let mut replaced = false;
+        *self = {
+            let mut cursor = self.cursor::<T::Key, ()>();
+            let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx);
+            if cursor.item().map_or(false, |item| item.key() == item.key()) {
+                cursor.next(cx);
+                replaced = true;
+            }
+            new_tree.push(item, cx);
+            new_tree.push_tree(cursor.suffix(cx), cx);
+            new_tree
+        };
+        replaced
+    }
+
     pub fn edit(
         &mut self,
         mut edits: Vec<Edit<T>>,
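For reference, a hedged stand-in for the insert-or-replace pattern the new `replace` method follows (slice up to the item's key, skip an existing item with that key, push the new item, then append the remaining suffix), written against a plain sorted `Vec` rather than the `SumTree`/`Cursor` API; the names here are illustrative only.

// Illustrative stand-in, not Zed's SumTree API.
fn replace_sorted<K: Ord + Clone, V: Clone>(items: &mut Vec<(K, V)>, key: K, value: V) -> bool {
    // "Slice" everything strictly before the key.
    let split = items.partition_point(|(k, _)| *k < key);
    let mut new_items = items[..split].to_vec();
    let mut replaced = false;
    let mut rest = split;
    if items.get(rest).map_or(false, |(k, _)| *k == key) {
        rest += 1; // skip the existing item with this key
        replaced = true;
    }
    new_items.push((key, value)); // push the new item
    new_items.extend_from_slice(&items[rest..]); // append the suffix
    *items = new_items;
    replaced
}

fn main() {
    let mut entries = vec![("a", 1), ("c", 3)];
    assert!(!replace_sorted(&mut entries, "b", 2)); // inserted
    assert!(replace_sorted(&mut entries, "c", 30)); // replaced
    assert_eq!(entries, vec![("a", 1), ("b", 2), ("c", 30)]);
}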
@@ -207,7 +207,7 @@ pub trait Item: Entity + Sized {
         cx: &mut ViewContext<Self::View>,
     ) -> Self::View;
 
-    fn file(&self) -> Option<&ModelHandle<File>>;
+    fn file(&self) -> Option<&File>;
 }
 
 pub trait ItemView: View {
@@ -225,11 +225,7 @@ pub trait ItemView: View {
     fn has_conflict(&self, _: &AppContext) -> bool {
         false
     }
-    fn save(
-        &mut self,
-        _: Option<ModelHandle<File>>,
-        _: &mut ViewContext<Self>,
-    ) -> Task<anyhow::Result<()>>;
+    fn save(&mut self, _: Option<File>, _: &mut ViewContext<Self>) -> Task<anyhow::Result<()>>;
     fn should_activate_item_on_event(_: &Self::Event) -> bool {
         false
     }
@@ -244,7 +240,7 @@ pub trait ItemHandle: Send + Sync {
 }
 
 pub trait WeakItemHandle: Send + Sync {
-    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>>;
+    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a File>;
     fn add_view(
         &self,
         window_id: usize,
@@ -264,11 +260,7 @@ pub trait ItemViewHandle: Send + Sync {
     fn to_any(&self) -> AnyViewHandle;
     fn is_dirty(&self, cx: &AppContext) -> bool;
     fn has_conflict(&self, cx: &AppContext) -> bool;
-    fn save(
-        &self,
-        file: Option<ModelHandle<File>>,
-        cx: &mut MutableAppContext,
-    ) -> Task<anyhow::Result<()>>;
+    fn save(&self, file: Option<File>, cx: &mut MutableAppContext) -> Task<anyhow::Result<()>>;
 }
 
 impl<T: Item> ItemHandle for ModelHandle<T> {
@@ -282,7 +274,7 @@ impl<T: Item> ItemHandle for ModelHandle<T> {
 }
 
 impl<T: Item> WeakItemHandle for WeakModelHandle<T> {
-    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>> {
+    fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a File> {
         self.upgrade(cx).and_then(|h| h.read(cx).file())
     }
 
@@ -342,11 +334,7 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
         })
     }
 
-    fn save(
-        &self,
-        file: Option<ModelHandle<File>>,
-        cx: &mut MutableAppContext,
-    ) -> Task<anyhow::Result<()>> {
+    fn save(&self, file: Option<File>, cx: &mut MutableAppContext) -> Task<anyhow::Result<()>> {
         self.update(cx, |item, cx| item.save(file, cx))
     }
 
@@ -481,9 +469,8 @@ impl Workspace {
         let is_file = bg.spawn(async move { abs_path.is_file() });
         cx.spawn(|this, mut cx| async move {
             if is_file.await {
-                return this.update(&mut cx, |this, cx| {
-                    this.open_entry(file.read(cx).entry_id(), cx)
-                });
+                return this
+                    .update(&mut cx, |this, cx| this.open_entry(file.entry_id(), cx));
             } else {
                 None
             }
@@ -499,18 +486,18 @@ impl Workspace {
         }
     }
 
-    fn file_for_path(&mut self, abs_path: &Path, cx: &mut ViewContext<Self>) -> ModelHandle<File> {
+    fn file_for_path(&mut self, abs_path: &Path, cx: &mut ViewContext<Self>) -> File {
         for tree in self.worktrees.iter() {
             if let Some(relative_path) = tree
                 .read(cx)
                 .as_local()
                 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
             {
-                return tree.file(relative_path, cx.as_mut());
+                return tree.file(relative_path);
             }
         }
         let worktree = self.add_worktree(&abs_path, cx);
-        worktree.file(Path::new(""), cx.as_mut())
+        worktree.file(Path::new(""))
     }
 
     pub fn add_worktree(
@@ -584,7 +571,7 @@ impl Workspace {
         if view_for_existing_item.is_none()
             && item
                 .file(cx.as_ref())
-                .map_or(false, |file| file.read(cx).entry_id() == entry)
+                .map_or(false, |file| file.entry_id() == entry)
         {
             view_for_existing_item = Some(
                 item.add_view(cx.window_id(), settings.clone(), cx.as_mut())
@@ -6,13 +6,13 @@ use self::{char_bag::CharBag, ignore::IgnoreStack};
 use crate::{
     editor::{Buffer, History, Rope},
     language::LanguageRegistry,
-    rpc::{self, proto, ConnectionId, PeerId},
+    rpc::{self, proto, ConnectionId},
     sum_tree::{self, Cursor, Edit, SumTree},
     time::ReplicaId,
     util::Bias,
 };
 use ::ignore::gitignore::Gitignore;
-use anyhow::{Context, Result};
+use anyhow::{anyhow, Context, Result};
 pub use fuzzy::{match_paths, PathMatch};
 use gpui::{
     scoped_pool, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
@@ -107,12 +107,12 @@ impl Worktree {
 
     pub fn open_buffer(
         &mut self,
-        path: &Path,
+        path: impl AsRef<Path>,
         language_registry: Arc<LanguageRegistry>,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<Buffer>>> {
+    ) -> impl Future<Output = Result<ModelHandle<Buffer>>> + 'static {
         match self {
-            Worktree::Local(worktree) => worktree.open_buffer(path, language_registry, cx),
+            Worktree::Local(worktree) => worktree.open_buffer(path.as_ref(), language_registry, cx),
             Worktree::Remote(_) => todo!(),
         }
     }
@@ -121,7 +121,7 @@ impl Worktree {
         &self,
         path: &Path,
         content: Rope,
-        cx: &AppContext,
+        cx: &mut ModelContext<Self>,
     ) -> impl Future<Output = Result<()>> {
         match self {
             Worktree::Local(worktree) => worktree.save(path, content, cx),
@@ -161,6 +161,7 @@ impl LocalWorktree {
             scan_id: 0,
             abs_path,
             root_name: Default::default(),
+            root_char_bag: Default::default(),
             ignores: Default::default(),
             entries: Default::default(),
         };
@@ -219,7 +220,7 @@ impl LocalWorktree {
         path: &Path,
         language_registry: Arc<LanguageRegistry>,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<ModelHandle<Buffer>>> {
+    ) -> impl Future<Output = Result<ModelHandle<Buffer>>> + 'static {
         let handle = cx.handle();
 
         // If there is already a buffer for the given path, then return it.
@@ -227,7 +228,6 @@ impl LocalWorktree {
         self.open_buffers.retain(|buffer| {
             if let Some(buffer) = buffer.upgrade(cx.as_ref()) {
                 if let Some(file) = buffer.read(cx.as_ref()).file() {
-                    let file = file.read(cx.as_ref());
                     if file.worktree_id() == handle.id() && file.path.as_ref() == path {
                         existing_buffer = Some(buffer);
                     }
@@ -238,12 +238,14 @@ impl LocalWorktree {
             }
         });
 
+        let mut new_buffer = None;
+        if existing_buffer.is_none() {
             let path = Arc::from(path);
             let contents = self.load(&path, cx.as_ref());
-        cx.spawn(|this, mut cx| async move {
+            new_buffer = Some(cx.spawn(|this, mut cx| async move {
                 let contents = contents.await?;
                 let language = language_registry.select_language(&path).cloned();
-            let file = cx.add_model(|cx| File::new(handle, path.into(), cx));
+                let file = File::new(handle, path.into());
                 let buffer = cx.add_model(|cx| {
                     Buffer::from_history(0, History::new(contents.into()), Some(file), language, cx)
                 });
@@ -252,7 +254,16 @@ impl LocalWorktree {
                     this.open_buffers.insert(buffer.downgrade());
                 });
                 Ok(buffer)
-        })
+            }));
+        }
+
+        async move {
+            if let Some(existing_buffer) = existing_buffer {
+                Ok(existing_buffer)
+            } else {
+                new_buffer.unwrap().await
+            }
+        }
     }
 
     pub fn scan_complete(&self) -> impl Future<Output = ()> {
@@ -274,24 +285,29 @@ impl LocalWorktree {
 
         self.scan_state.0.blocking_send(scan_state).ok();
         self.poll_snapshot(cx);
+
+        if let Some(diff) = diff {
+            self.observe_snapshot_diff(diff, cx);
+        }
     }
 
     fn observe_snapshot_diff(&mut self, diff: Diff, cx: &mut ModelContext<Worktree>) {
         let handle = cx.handle();
         self.open_buffers.retain(|buffer| {
             if let Some(buffer) = buffer.upgrade(cx.as_ref()) {
                 buffer.update(cx, |buffer, cx| {
                     let handle = handle.clone();
                     if let Some(file) = buffer.file() {
-                        let path = file.read(cx.as_ref()).path.clone();
-                        if diff.added.contains(&path) {
-                            cx.notify();
-                        }
-                        // Notify any buffers whose files were deleted.
-                        else if diff.removed.contains(&path) {
+                        let mut path = file.path.clone();
+                        if let Some(new_path) = diff.moved.get(&path) {
+                            buffer.file_was_moved(new_path.clone(), cx);
+                            path = new_path.clone();
+                        } else if diff.added.contains(&path) {
+                            buffer.file_was_added(cx);
+                        } else if diff.removed.contains(&path) {
+                            buffer.file_was_deleted(cx);
+                        }
-                        // Notify any buffers whose files were modified.
-                        else if diff.modified.contains(&path) {
+
+                        if diff.modified.contains(&path) {
                             cx.spawn(|buffer, mut cx| async move {
                                 let new_contents = handle
                                     .read_with(&cx, |this, cx| {
@@ -321,7 +337,6 @@ impl LocalWorktree {
         });
-        cx.emit(diff);
     }
 }
 
 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
     self.snapshot = self.background_snapshot.lock().clone();
@@ -379,9 +394,17 @@ impl LocalWorktree {
         })
     }
 
-    pub fn save(&self, path: &Path, content: Rope, cx: &AppContext) -> Task<Result<()>> {
-        let path = path.to_path_buf();
+    pub fn save(
+        &self,
+        path: impl Into<Arc<Path>>,
+        content: Rope,
+        cx: &mut ModelContext<Worktree>,
+    ) -> Task<Result<()>> {
+        let path = path.into();
         let abs_path = self.absolutize(&path);
+        let background_snapshot = self.background_snapshot.clone();
+        let save = {
+            let path = path.clone();
             cx.background_executor().spawn(async move {
                 let buffer_size = content.summary().bytes.min(10 * 1024);
                 let file = fs::File::create(&abs_path)?;
@@ -390,6 +413,29 @@ impl LocalWorktree {
                     writer.write(chunk.as_bytes())?;
                 }
                 writer.flush()?;
+
+                // Eagerly populate the snapshot with an updated entry for the saved file
+                let root_char_bag = background_snapshot.lock().root_char_bag;
+                let entry = fs_entry_for_path(root_char_bag, path, &abs_path)?
+                    .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
+                let added = background_snapshot.lock().entries.replace(entry, &());
+
+                Ok::<bool, anyhow::Error>(added)
+            })
+        };
+
+        cx.spawn(|worktree, mut cx| async move {
+            let added = save.await?;
+            worktree.update(&mut cx, |worktree, cx| {
+                let worktree = worktree.as_local_mut().unwrap();
+                worktree.poll_snapshot(cx);
+                let mut diff = Diff::default();
+                if added {
+                    diff.added.insert(path.clone());
+                }
+                diff.modified.insert(path);
+                worktree.observe_snapshot_diff(diff, cx)
+            });
             Ok(())
         })
     }
@@ -512,6 +558,7 @@ impl RemoteWorktree {
             scan_id: 0,
             abs_path: Path::new("").into(),
             root_name: worktree.root_name,
+            root_char_bag,
             ignores: Default::default(),
             entries,
         };
@@ -531,6 +578,7 @@ pub struct Snapshot {
     scan_id: usize,
     abs_path: Arc<Path>,
     root_name: String,
+    root_char_bag: CharBag,
     ignores: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
     entries: SumTree<Entry>,
 }
@@ -774,33 +822,20 @@ pub struct Diff {
 #[derive(Clone, PartialEq)]
 pub struct File {
     worktree: ModelHandle<Worktree>,
-    path: Arc<Path>,
-}
-
-impl Entity for File {
-    type Event = ();
+    pub path: Arc<Path>,
 }
 
 impl File {
-    pub fn new(
-        worktree: ModelHandle<Worktree>,
-        path: Arc<Path>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
-        cx.subscribe(&worktree, Self::handle_worktree_update);
+    pub fn new(worktree: ModelHandle<Worktree>, path: Arc<Path>) -> Self {
         Self { worktree, path }
     }
 
-    fn handle_worktree_update(&mut self, diff: &Diff, cx: &mut ModelContext<Self>) {
-        if let Some(new_path) = diff.moved.get(&self.path) {
-            self.path = new_path.clone();
-            cx.notify();
-        } else if diff.added.contains(&self.path)
-            || diff.removed.contains(&self.path)
-            || diff.modified.contains(&self.path)
-        {
-            cx.notify();
+    pub fn saved_buffer(&self, buffer: ModelHandle<Buffer>, cx: &mut MutableAppContext) {
+        self.worktree.update(cx, |worktree, _| {
+            if let Worktree::Local(worktree) = worktree {
+                worktree.open_buffers.insert(buffer.downgrade());
+            }
+        })
     }
 
     /// Returns this file's path relative to the root of its worktree.
@@ -833,9 +868,13 @@ impl File {
             .map_or(UNIX_EPOCH, |entry| entry.mtime)
     }
 
-    pub fn save(&self, content: Rope, cx: &AppContext) -> impl Future<Output = Result<()>> {
-        let worktree = self.worktree.read(cx);
-        worktree.save(&self.path(), content, cx)
+    pub fn save(
+        &self,
+        content: Rope,
+        cx: &mut MutableAppContext,
+    ) -> impl Future<Output = Result<()>> {
+        self.worktree
+            .update(cx, |worktree, cx| worktree.save(&self.path(), content, cx))
     }
 
     pub fn worktree_id(&self) -> usize {
@@ -1024,13 +1063,11 @@ struct BackgroundScanner {
     snapshot: Arc<Mutex<Snapshot>>,
     notify: Sender<ScanState>,
     thread_pool: scoped_pool::Pool,
-    root_char_bag: CharBag,
 }
 
 impl BackgroundScanner {
     fn new(snapshot: Arc<Mutex<Snapshot>>, notify: Sender<ScanState>, worktree_id: usize) -> Self {
         Self {
-            root_char_bag: Default::default(),
             snapshot,
             notify,
             thread_pool: scoped_pool::Pool::new(16, format!("worktree-{}-scanner", worktree_id)),
@@ -1098,8 +1135,13 @@ impl BackgroundScanner {
         if is_dir {
             root_name.push('/');
         }
-        self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
-        self.snapshot.lock().root_name = root_name;
+
+        let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
+        {
+            let mut snapshot = self.snapshot.lock();
+            snapshot.root_name = root_name;
+            snapshot.root_char_bag = root_char_bag;
+        }
 
         if is_dir {
             self.snapshot.lock().insert_entry(Entry {
@@ -1125,7 +1167,7 @@ impl BackgroundScanner {
             for _ in 0..self.thread_pool.thread_count() {
                 pool.execute(|| {
                     while let Ok(job) = rx.recv() {
-                        if let Err(err) = self.scan_dir(&job) {
+                        if let Err(err) = self.scan_dir(root_char_bag, &job) {
                             log::error!("error scanning {:?}: {}", job.abs_path, err);
                         }
                     }
@@ -1134,7 +1176,7 @@ impl BackgroundScanner {
             });
         } else {
             self.snapshot.lock().insert_entry(Entry {
-                kind: EntryKind::File(self.char_bag(&path)),
+                kind: EntryKind::File(char_bag_for_path(root_char_bag, &path)),
                 path,
                 inode,
                 mtime,
@@ -1146,7 +1188,7 @@ impl BackgroundScanner {
         Ok(())
    }
 
-    fn scan_dir(&self, job: &ScanJob) -> io::Result<()> {
+    fn scan_dir(&self, root_char_bag: CharBag, job: &ScanJob) -> io::Result<()> {
        let mut new_entries: Vec<Entry> = Vec::new();
        let mut new_jobs: Vec<ScanJob> = Vec::new();
        let mut ignore_stack = job.ignore_stack.clone();
@@ -1218,7 +1260,7 @@ impl BackgroundScanner {
            } else {
                let is_ignored = ignore_stack.is_path_ignored(&child_path, false);
                new_entries.push(Entry {
-                    kind: EntryKind::File(self.char_bag(&child_path)),
+                    kind: EntryKind::File(char_bag_for_path(root_char_bag, &child_path)),
                    path: child_path,
                    inode: child_inode,
                    mtime: child_mtime,
@@ -1247,6 +1289,7 @@ impl BackgroundScanner {
        } else {
            return false;
        };
+        let root_char_bag = snapshot.root_char_bag;
 
        events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
        let mut abs_paths = events.into_iter().map(|e| e.path).peekable();
@@ -1271,7 +1314,7 @@ impl BackgroundScanner {
 
            snapshot.remove_path(&path);
 
-            match self.fs_entry_for_path(path.clone(), &abs_path) {
+            match fs_entry_for_path(snapshot.root_char_bag, path.clone(), &abs_path) {
                Ok(Some(mut fs_entry)) => {
                    let is_dir = fs_entry.is_dir();
                    let ignore_stack = snapshot.ignore_stack_for_path(&path, is_dir);
@@ -1304,7 +1347,7 @@ impl BackgroundScanner {
            for _ in 0..self.thread_pool.thread_count() {
                pool.execute(|| {
                    while let Ok(job) = scan_queue_rx.recv() {
-                        if let Err(err) = self.scan_dir(&job) {
+                        if let Err(err) = self.scan_dir(root_char_bag, &job) {
                            log::error!("error scanning {:?}: {}", job.abs_path, err);
                        }
                    }
@@ -1401,8 +1444,13 @@ impl BackgroundScanner {
        }
        self.snapshot.lock().entries.edit(edits, &());
    }
 }
 
-    fn fs_entry_for_path(&self, path: Arc<Path>, abs_path: &Path) -> Result<Option<Entry>> {
+fn fs_entry_for_path(
+    root_char_bag: CharBag,
+    path: Arc<Path>,
+    abs_path: &Path,
+) -> Result<Option<Entry>> {
    let metadata = match fs::metadata(&abs_path) {
        Err(err) => {
            return match (err.kind(), err.raw_os_error()) {
@@ -1424,9 +1472,9 @@ impl BackgroundScanner {
        kind: if metadata.file_type().is_dir() {
            EntryKind::PendingDir
        } else {
-            EntryKind::File(self.char_bag(&path))
+            EntryKind::File(char_bag_for_path(root_char_bag, &path))
        },
-        path,
+        path: Arc::from(path),
        inode,
        mtime,
        is_symlink,
@@ -1434,17 +1482,16 @@ impl BackgroundScanner {
    };
 
    Ok(Some(entry))
 }
-}
 
-    fn char_bag(&self, path: &Path) -> CharBag {
-        let mut result = self.root_char_bag;
+fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
+    let mut result = root_char_bag;
    result.extend(
        path.to_string_lossy()
            .chars()
            .map(|c| c.to_ascii_lowercase()),
    );
    result
 }
-}
 
 struct ScanJob {
@@ -1461,7 +1508,7 @@ struct UpdateIgnoreStatusJob {
 }
 
 pub trait WorktreeHandle {
-    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File>;
+    fn file(&self, path: impl AsRef<Path>) -> File;
 
     #[cfg(test)]
     fn flush_fs_events<'a>(
@@ -1471,10 +1518,10 @@ pub trait WorktreeHandle {
 }
 
 impl WorktreeHandle for ModelHandle<Worktree> {
-    fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File> {
+    fn file(&self, path: impl AsRef<Path>) -> File {
        let path = Arc::from(path.as_ref());
        let handle = self.clone();
-        cx.add_model(|cx| File::new(handle, path, cx))
+        File::new(handle, path)
    }
 
    // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
@@ -1681,14 +1728,20 @@ mod tests {
        });
        assert_eq!(path.file_name().unwrap(), "file1");
 
-        let buffer = cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
-
+        let buffer =
+            cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
        tree.update(&mut cx, |tree, cx| {
            let text = buffer.read(cx).snapshot().text();
-            smol::block_on(tree.save(&path, text, cx.as_ref())).unwrap();
+            tree.save(&path, text, cx)
+        })
+        .await
+        .unwrap();
 
        let new_contents = fs::read_to_string(dir.path().join(path)).unwrap();
-        assert_eq!(new_contents, buffer.read(cx).text());
-        });
+        assert_eq!(
+            new_contents,
+            buffer.read_with(&cx, |buffer, _| buffer.text())
+        );
    }
 
    #[gpui::test]
@@ -1704,15 +1757,21 @@ mod tests {
        cx.read(|cx| assert_eq!(tree.read(cx).file_count(), 1));
 
        let buffer = cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
-        let file = cx.update(|cx| tree.file("", cx));
+        let file = tree.file("");
 
-        file.read_with(&cx, |file, cx| {
+        cx.update(|cx| {
            assert_eq!(file.path().file_name(), None);
            let text = buffer.read(cx).snapshot().text();
-            smol::block_on(file.save(text, cx.as_ref())).unwrap();
+            file.save(text, cx)
+        })
+        .await
+        .unwrap();
 
        let new_contents = fs::read_to_string(file_path).unwrap();
-        assert_eq!(new_contents, buffer.read(cx).text());
-        });
+        assert_eq!(
+            new_contents,
+            buffer.read_with(&cx, |buffer, _| buffer.text())
+        );
    }
 
    #[gpui::test]
@@ -1731,23 +1790,31 @@ mod tests {
            }
        }));
 
+        let language_registry = Arc::new(LanguageRegistry::new());
+
        let tree = cx.add_model(|cx| Worktree::local(dir.path(), cx));
-        let file2 = cx.update(|cx| tree.file("a/file2", cx));
-        let file3 = cx.update(|cx| tree.file("a/file3", cx));
-        let file4 = cx.update(|cx| tree.file("b/c/file4", cx));
-        let file5 = cx.update(|cx| tree.file("b/c/file5", cx));
-        let non_existent_file = cx.update(|cx| tree.file("a/file_x", cx));
+
+        let mut buffer_for_path = |path: &'static str| {
+            let buffer = tree.update(&mut cx, |tree, cx| {
+                tree.open_buffer(path, language_registry.clone(), cx)
+            });
+            async move { buffer.await.unwrap() }
+        };
+
+        let buffer2 = buffer_for_path("a/file2").await;
+        let buffer3 = buffer_for_path("a/file3").await;
+        let buffer4 = buffer_for_path("b/c/file4").await;
+        let buffer5 = buffer_for_path("b/c/file5").await;
 
        // After scanning, the worktree knows which files exist and which don't.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
 
        cx.read(|cx| {
-            assert!(!file2.read(cx).is_deleted(cx));
-            assert!(!file3.read(cx).is_deleted(cx));
-            assert!(!file4.read(cx).is_deleted(cx));
-            assert!(!file5.read(cx).is_deleted(cx));
-            assert!(non_existent_file.read(cx).is_deleted(cx));
+            assert!(!buffer2.read(cx).is_dirty(cx));
+            assert!(!buffer3.read(cx).is_dirty(cx));
+            assert!(!buffer4.read(cx).is_dirty(cx));
+            assert!(!buffer5.read(cx).is_dirty(cx));
        });
 
        tree.flush_fs_events(&cx).await;
@@ -1774,14 +1841,27 @@ mod tests {
            ]
        );
 
-            assert_eq!(file2.read(cx).path().as_ref(), Path::new("a/file2.new"));
-            assert_eq!(file3.read(cx).path().as_ref(), Path::new("d/file3"));
-            assert_eq!(file4.read(cx).path().as_ref(), Path::new("d/file4"));
-            assert_eq!(file5.read(cx).path().as_ref(), Path::new("b/c/file5"));
-            assert!(!file2.read(cx).is_deleted(cx));
-            assert!(!file3.read(cx).is_deleted(cx));
-            assert!(!file4.read(cx).is_deleted(cx));
-            assert!(file5.read(cx).is_deleted(cx));
+            assert_eq!(
+                buffer2.read(cx).file().unwrap().path().as_ref(),
+                Path::new("a/file2.new")
+            );
+            assert_eq!(
+                buffer3.read(cx).file().unwrap().path().as_ref(),
+                Path::new("d/file3")
+            );
+            assert_eq!(
+                buffer4.read(cx).file().unwrap().path().as_ref(),
+                Path::new("d/file4")
+            );
+            assert_eq!(
+                buffer5.read(cx).file().unwrap().path().as_ref(),
+                Path::new("b/c/file5")
+            );
+
+            assert!(!buffer2.read(cx).file().unwrap().is_deleted(cx));
+            assert!(!buffer3.read(cx).file().unwrap().is_deleted(cx));
+            assert!(!buffer4.read(cx).file().unwrap().is_deleted(cx));
+            assert!(buffer5.read(cx).file().unwrap().is_deleted(cx));
        });
    }
 
@@ -1833,6 +1913,7 @@ mod tests {
            entries: Default::default(),
            ignores: Default::default(),
            root_name: Default::default(),
+            root_char_bag: Default::default(),
        };
 
        snapshot.entries.edit(
@@ -1991,6 +2072,7 @@ mod tests {
                entries: Default::default(),
                ignores: Default::default(),
                root_name: Default::default(),
+                root_char_bag: Default::default(),
            })),
            notify_tx,
            0,
@@ -2025,6 +2107,7 @@ mod tests {
                entries: Default::default(),
                ignores: Default::default(),
                root_name: Default::default(),
+                root_char_bag: Default::default(),
            })),
            notify_tx,
            1,
@@ -619,6 +619,7 @@ mod tests {
             ignores: Default::default(),
             entries: Default::default(),
             root_name: Default::default(),
+            root_char_bag: Default::default(),
         },
         false,
         path_entries.into_iter(),