Eagerly update worktree entries when saving

Don't use ModelHandles for storing Files.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
Nathan Sobo 2021-06-22 16:58:50 -06:00 committed by Max Brunsfeld
parent dc8e216fcb
commit c5e08b6548
6 changed files with 365 additions and 265 deletions
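For context, here is a minimal standalone sketch of the idea behind this commit, assuming none of Zed's real types: after a buffer's text is written to disk, the worktree's in-memory snapshot is refreshed from the saved file's metadata right away, rather than waiting for a filesystem event to report the change. The `Snapshot`, `Entry`, `insert_or_replace`, and `save` names below are illustrative stand-ins only, not the API in this diff.

```rust
use std::collections::BTreeMap;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use std::time::SystemTime;

// Hypothetical, simplified snapshot entry: only the metadata this sketch cares about.
#[derive(Debug, Clone)]
struct Entry {
    mtime: SystemTime,
    len: u64,
}

// Hypothetical in-memory snapshot of a worktree, keyed by relative path.
#[derive(Default)]
struct Snapshot {
    entries: BTreeMap<PathBuf, Entry>,
}

impl Snapshot {
    // Insert or replace the entry for `path`; returns true if the path was newly added.
    fn insert_or_replace(&mut self, path: PathBuf, entry: Entry) -> bool {
        self.entries.insert(path, entry).is_none()
    }
}

// Write `contents` to disk, then eagerly record the saved file in the snapshot
// instead of waiting for a filesystem event to observe the change.
fn save(snapshot: &mut Snapshot, path: &Path, contents: &str) -> io::Result<bool> {
    fs::write(path, contents)?;
    let metadata = fs::metadata(path)?;
    let entry = Entry {
        mtime: metadata.modified()?,
        len: metadata.len(),
    };
    Ok(snapshot.insert_or_replace(path.to_path_buf(), entry))
}

fn main() -> io::Result<()> {
    let mut snapshot = Snapshot::default();
    let path = std::env::temp_dir().join("eager-save-demo.txt");
    let newly_added = save(&mut snapshot, &path, "hello")?;
    println!("newly added: {newly_added}, entry: {:?}", snapshot.entries.get(&path));
    Ok(())
}
```

In the actual change, this role is played by `SumTree::replace` on the background snapshot inside `LocalWorktree::save`, as shown in the worktree diff below.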

View file

@@ -2444,7 +2444,7 @@ impl View for Editor {
impl workspace::Item for Buffer {
type View = Editor;
-fn file(&self) -> Option<&ModelHandle<File>> {
+fn file(&self) -> Option<&File> {
self.file()
}
@@ -2474,7 +2474,7 @@ impl workspace::ItemView for Editor {
.buffer
.read(cx)
.file()
-.and_then(|file| file.read(cx).file_name(cx));
+.and_then(|file| file.file_name(cx));
if let Some(name) = filename {
name.to_string_lossy().into()
} else {
@@ -2483,10 +2483,7 @@ impl workspace::ItemView for Editor {
}
fn entry_id(&self, cx: &AppContext) -> Option<(usize, Arc<Path>)> {
-self.buffer
-.read(cx)
-.file()
-.map(|file| file.read(cx).entry_id())
+self.buffer.read(cx).file().map(|file| file.entry_id())
}
fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
@@ -2498,11 +2495,7 @@ impl workspace::ItemView for Editor {
Some(clone)
}
-fn save(
-&mut self,
-new_file: Option<ModelHandle<File>>,
-cx: &mut ViewContext<Self>,
-) -> Task<Result<()>> {
+fn save(&mut self, new_file: Option<File>, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
self.buffer.update(cx, |b, cx| b.save(new_file, cx))
}

View file

@@ -22,7 +22,7 @@ use crate::{
worktree::File,
};
use anyhow::{anyhow, Result};
-use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{AppContext, Entity, ModelContext, Task};
use lazy_static::lazy_static;
use std::{
cell::RefCell,
@@ -30,6 +30,7 @@ use std::{
hash::BuildHasher,
iter::Iterator,
ops::{Deref, DerefMut, Range},
+path::Path,
str,
sync::Arc,
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
@@ -114,7 +115,7 @@ pub struct Buffer {
last_edit: time::Local,
undo_map: UndoMap,
history: History,
-file: Option<ModelHandle<File>>,
+file: Option<File>,
language: Option<Arc<Language>>,
syntax_tree: Mutex<Option<SyntaxTree>>,
is_parsing: bool,
@@ -420,7 +421,7 @@ impl Buffer {
pub fn from_history(
replica_id: ReplicaId,
history: History,
-file: Option<ModelHandle<File>>,
+file: Option<File>,
language: Option<Arc<Language>>,
cx: &mut ModelContext<Self>,
) -> Self {
@@ -430,13 +431,13 @@
fn build(
replica_id: ReplicaId,
history: History,
-file: Option<ModelHandle<File>>,
+file: Option<File>,
language: Option<Arc<Language>>,
cx: &mut ModelContext<Self>,
) -> Self {
let saved_mtime;
if let Some(file) = file.as_ref() {
-saved_mtime = file.read(cx).mtime(cx.as_ref());
+saved_mtime = file.mtime(cx.as_ref());
} else {
saved_mtime = UNIX_EPOCH;
}
@@ -492,13 +493,13 @@ impl Buffer {
}
}
-pub fn file(&self) -> Option<&ModelHandle<File>> {
+pub fn file(&self) -> Option<&File> {
self.file.as_ref()
}
pub fn save(
&mut self,
-new_file: Option<ModelHandle<File>>,
+new_file: Option<File>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let text = self.visible_text.clone();
@@ -507,7 +508,7 @@ impl Buffer {
cx.spawn(|handle, mut cx| async move {
if let Some(file) = new_file.as_ref().or(file.as_ref()) {
-let result = file.read_with(&cx, |file, cx| file.save(text, cx)).await;
+let result = cx.update(|cx| file.save(text, cx)).await;
if result.is_ok() {
handle.update(&mut cx, |me, cx| me.did_save(version, new_file, cx));
}
@@ -521,22 +522,36 @@
fn did_save(
&mut self,
version: time::Global,
-file: Option<ModelHandle<File>>,
+new_file: Option<File>,
cx: &mut ModelContext<Self>,
) {
-if file.is_some() {
-self.file = file;
+if let Some(new_file) = new_file {
+let buffer = cx.handle();
+new_file.saved_buffer(buffer, cx.as_mut());
+self.file = Some(new_file);
}
if let Some(file) = &self.file {
-self.saved_mtime = file.read(cx).mtime(cx.as_ref());
+self.saved_mtime = file.mtime(cx.as_ref());
}
self.saved_version = version;
cx.emit(Event::Saved);
}
+pub fn file_was_moved(&mut self, new_path: Arc<Path>, cx: &mut ModelContext<Self>) {
+self.file.as_mut().unwrap().path = new_path;
+cx.emit(Event::FileHandleChanged);
+}
+pub fn file_was_added(&mut self, cx: &mut ModelContext<Self>) {
+cx.emit(Event::FileHandleChanged);
+}
pub fn file_was_deleted(&mut self, cx: &mut ModelContext<Self>) {
+if self.version == self.saved_version {
cx.emit(Event::Dirtied);
}
+cx.emit(Event::FileHandleChanged);
+}
pub fn file_was_modified(
&mut self,
@@ -759,10 +774,7 @@ impl Buffer {
pub fn is_dirty(&self, cx: &AppContext) -> bool {
self.version > self.saved_version
-|| self
-.file
-.as_ref()
-.map_or(false, |file| file.read(cx).is_deleted(cx))
+|| self.file.as_ref().map_or(false, |file| file.is_deleted(cx))
}
pub fn has_conflict(&self, cx: &AppContext) -> bool {
@@ -770,7 +782,7 @@
&& self
.file
.as_ref()
-.map_or(false, |file| file.read(cx).mtime(cx) > self.saved_mtime)
+.map_or(false, |file| file.mtime(cx) > self.saved_mtime)
}
pub fn version(&self) -> time::Global {
@@ -2208,6 +2220,7 @@ impl ToPoint for usize {
mod tests {
use super::*;
use crate::{
+language::LanguageRegistry,
test::{build_app_state, temp_tree},
util::RandomCharIter,
worktree::{Worktree, WorktreeHandle},
@@ -2220,6 +2233,7 @@ mod tests {
cmp::Ordering,
env, fs,
iter::FromIterator,
+path::Path,
rc::Rc,
sync::atomic::{self, AtomicUsize},
};
@@ -2676,20 +2690,24 @@ mod tests {
#[test]
fn test_is_dirty() {
App::test_async((), |mut cx| async move {
+let language_registry = Arc::new(LanguageRegistry::new());
let dir = temp_tree(json!({
-"file1": "",
-"file2": "",
-"file3": "",
+"file1": "abc",
+"file2": "def",
+"file3": "ghi",
}));
let tree = cx.add_model(|cx| Worktree::local(dir.path(), cx));
tree.flush_fs_events(&cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
-let file1 = cx.update(|cx| tree.file("file1", cx));
-let buffer1 = cx.add_model(|cx| {
-Buffer::from_history(0, History::new("abc".into()), Some(file1), None, cx)
-});
+let buffer1 = tree
+.update(&mut cx, |tree, cx| {
+tree.open_buffer("file1", language_registry.clone(), cx)
+})
+.await
+.unwrap();
let events = Rc::new(RefCell::new(Vec::new()));
// initially, the buffer isn't dirty.
@@ -2746,14 +2764,17 @@ mod tests {
// When a file is deleted, the buffer is considered dirty.
let events = Rc::new(RefCell::new(Vec::new()));
-let file2 = cx.update(|cx| tree.file("file2", cx));
-let buffer2 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
-cx.subscribe(&cx.handle(), {
+let buffer2 = tree
+.update(&mut cx, |tree, cx| {
+tree.open_buffer("file2", language_registry.clone(), cx)
+})
+.await
+.unwrap();
+buffer2.update(&mut cx, |_, cx| {
+cx.subscribe(&buffer2, {
let events = events.clone();
move |_, event, _| events.borrow_mut().push(event.clone())
});
-Buffer::from_history(0, History::new("abc".into()), Some(file2), None, cx)
});
fs::remove_file(dir.path().join("file2")).unwrap();
@@ -2767,14 +2788,17 @@
// When a file is already dirty when deleted, we don't emit a Dirtied event.
let events = Rc::new(RefCell::new(Vec::new()));
-let file3 = cx.update(|cx| tree.file("file3", cx));
-let buffer3 = cx.add_model(|cx: &mut ModelContext<Buffer>| {
-cx.subscribe(&cx.handle(), {
+let buffer3 = tree
+.update(&mut cx, |tree, cx| {
+tree.open_buffer("file3", language_registry.clone(), cx)
+})
+.await
+.unwrap();
+buffer3.update(&mut cx, |_, cx| {
+cx.subscribe(&buffer3, {
let events = events.clone();
move |_, event, _| events.borrow_mut().push(event.clone())
});
-Buffer::from_history(0, History::new("abc".into()), Some(file3), None, cx)
});
tree.flush_fs_events(&cx).await;
@@ -2800,16 +2824,12 @@
.await;
let abs_path = dir.path().join("the-file");
-let file = cx.update(|cx| tree.file("the-file", cx));
-let buffer = cx.add_model(|cx| {
-Buffer::from_history(
-0,
-History::new(initial_contents.into()),
-Some(file),
-None,
-cx,
-)
-});
+let buffer = tree
+.update(&mut cx, |tree, cx| {
+tree.open_buffer(Path::new("the-file"), Arc::new(LanguageRegistry::new()), cx)
+})
+.await
+.unwrap();
// Add a cursor at the start of each row.
let (selection_set_id, _) = buffer.update(&mut cx, |buffer, cx| {

View file

@@ -419,6 +419,22 @@ impl<T: KeyedItem> SumTree<T> {
};
}
+pub fn replace(&mut self, item: T, cx: &<T::Summary as Summary>::Context) -> bool {
+let mut replaced = false;
+*self = {
+let mut cursor = self.cursor::<T::Key, ()>();
+let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx);
+if cursor.item().map_or(false, |cursor_item| cursor_item.key() == item.key()) {
+cursor.next(cx);
+replaced = true;
+}
+new_tree.push(item, cx);
+new_tree.push_tree(cursor.suffix(cx), cx);
+new_tree
+};
+replaced
+}
pub fn edit(
&mut self,
mut edits: Vec<Edit<T>>,
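The `replace` method added above splices an item into the tree at its key position, overwriting any existing item with the same key and returning whether a replacement happened. As a rough standalone analogue using a sorted `Vec` instead of Zed's `SumTree` (the function name and types here are illustrative, not part of the codebase):

```rust
// Keyed replace on a sorted Vec: insert `item` at its key position, replacing
// any existing item with the same key. Returns true when an existing item was
// overwritten, mirroring the `replaced` flag in the SumTree method above.
fn replace_by_key<K: Ord, T, F: Fn(&T) -> K>(items: &mut Vec<T>, item: T, key: F) -> bool {
    match items.binary_search_by(|probe| key(probe).cmp(&key(&item))) {
        Ok(index) => {
            // An item with the same key exists: overwrite it in place.
            items[index] = item;
            true
        }
        Err(index) => {
            // No item with this key: insert, keeping the ordering intact.
            items.insert(index, item);
            false
        }
    }
}

fn main() {
    let mut entries = vec![(1, "a"), (3, "c")];
    let replaced = replace_by_key(&mut entries, (3, "c2"), |(k, _)| *k);
    assert!(replaced);
    let replaced = replace_by_key(&mut entries, (2, "b"), |(k, _)| *k);
    assert!(!replaced);
    assert_eq!(entries, vec![(1, "a"), (2, "b"), (3, "c2")]);
}
```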

View file

@@ -207,7 +207,7 @@ pub trait Item: Entity + Sized {
cx: &mut ViewContext<Self::View>,
) -> Self::View;
-fn file(&self) -> Option<&ModelHandle<File>>;
+fn file(&self) -> Option<&File>;
}
pub trait ItemView: View {
@@ -225,11 +225,7 @@ pub trait ItemView: View {
fn has_conflict(&self, _: &AppContext) -> bool {
false
}
-fn save(
-&mut self,
-_: Option<ModelHandle<File>>,
-_: &mut ViewContext<Self>,
-) -> Task<anyhow::Result<()>>;
+fn save(&mut self, _: Option<File>, _: &mut ViewContext<Self>) -> Task<anyhow::Result<()>>;
fn should_activate_item_on_event(_: &Self::Event) -> bool {
false
}
@@ -244,7 +240,7 @@ pub trait ItemHandle: Send + Sync {
}
pub trait WeakItemHandle: Send + Sync {
-fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>>;
+fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a File>;
fn add_view(
&self,
window_id: usize,
@@ -264,11 +260,7 @@ pub trait ItemViewHandle: Send + Sync {
fn to_any(&self) -> AnyViewHandle;
fn is_dirty(&self, cx: &AppContext) -> bool;
fn has_conflict(&self, cx: &AppContext) -> bool;
-fn save(
-&self,
-file: Option<ModelHandle<File>>,
-cx: &mut MutableAppContext,
-) -> Task<anyhow::Result<()>>;
+fn save(&self, file: Option<File>, cx: &mut MutableAppContext) -> Task<anyhow::Result<()>>;
}
impl<T: Item> ItemHandle for ModelHandle<T> {
@@ -282,7 +274,7 @@ impl<T: Item> ItemHandle for ModelHandle<T> {
}
impl<T: Item> WeakItemHandle for WeakModelHandle<T> {
-fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a ModelHandle<File>> {
+fn file<'a>(&'a self, cx: &'a AppContext) -> Option<&'a File> {
self.upgrade(cx).and_then(|h| h.read(cx).file())
}
@@ -342,11 +334,7 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
})
}
-fn save(
-&self,
-file: Option<ModelHandle<File>>,
-cx: &mut MutableAppContext,
-) -> Task<anyhow::Result<()>> {
+fn save(&self, file: Option<File>, cx: &mut MutableAppContext) -> Task<anyhow::Result<()>> {
self.update(cx, |item, cx| item.save(file, cx))
}
@@ -481,9 +469,8 @@ impl Workspace {
let is_file = bg.spawn(async move { abs_path.is_file() });
cx.spawn(|this, mut cx| async move {
if is_file.await {
-return this.update(&mut cx, |this, cx| {
-this.open_entry(file.read(cx).entry_id(), cx)
-});
+return this
+.update(&mut cx, |this, cx| this.open_entry(file.entry_id(), cx));
} else {
None
}
@@ -499,18 +486,18 @@ impl Workspace {
}
}
-fn file_for_path(&mut self, abs_path: &Path, cx: &mut ViewContext<Self>) -> ModelHandle<File> {
+fn file_for_path(&mut self, abs_path: &Path, cx: &mut ViewContext<Self>) -> File {
for tree in self.worktrees.iter() {
if let Some(relative_path) = tree
.read(cx)
.as_local()
.and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
{
-return tree.file(relative_path, cx.as_mut());
+return tree.file(relative_path);
}
}
let worktree = self.add_worktree(&abs_path, cx);
-worktree.file(Path::new(""), cx.as_mut())
+worktree.file(Path::new(""))
}
pub fn add_worktree(
@@ -584,7 +571,7 @@ impl Workspace {
if view_for_existing_item.is_none()
&& item
.file(cx.as_ref())
-.map_or(false, |file| file.read(cx).entry_id() == entry)
+.map_or(false, |file| file.entry_id() == entry)
{
view_for_existing_item = Some(
item.add_view(cx.window_id(), settings.clone(), cx.as_mut())

View file

@@ -6,13 +6,13 @@ use self::{char_bag::CharBag, ignore::IgnoreStack};
use crate::{
editor::{Buffer, History, Rope},
language::LanguageRegistry,
-rpc::{self, proto, ConnectionId, PeerId},
+rpc::{self, proto, ConnectionId},
sum_tree::{self, Cursor, Edit, SumTree},
time::ReplicaId,
util::Bias,
};
use ::ignore::gitignore::Gitignore;
-use anyhow::{Context, Result};
+use anyhow::{anyhow, Context, Result};
pub use fuzzy::{match_paths, PathMatch};
use gpui::{
scoped_pool, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
@@ -107,12 +107,12 @@ impl Worktree {
pub fn open_buffer(
&mut self,
-path: &Path,
+path: impl AsRef<Path>,
language_registry: Arc<LanguageRegistry>,
cx: &mut ModelContext<Self>,
-) -> Task<Result<ModelHandle<Buffer>>> {
+) -> impl Future<Output = Result<ModelHandle<Buffer>>> + 'static {
match self {
-Worktree::Local(worktree) => worktree.open_buffer(path, language_registry, cx),
+Worktree::Local(worktree) => worktree.open_buffer(path.as_ref(), language_registry, cx),
Worktree::Remote(_) => todo!(),
}
}
@@ -121,7 +121,7 @@
&self,
path: &Path,
content: Rope,
-cx: &AppContext,
+cx: &mut ModelContext<Self>,
) -> impl Future<Output = Result<()>> {
match self {
Worktree::Local(worktree) => worktree.save(path, content, cx),
@@ -161,6 +161,7 @@ impl LocalWorktree {
scan_id: 0,
abs_path,
root_name: Default::default(),
+root_char_bag: Default::default(),
ignores: Default::default(),
entries: Default::default(),
};
@@ -219,7 +220,7 @@
path: &Path,
language_registry: Arc<LanguageRegistry>,
cx: &mut ModelContext<Worktree>,
-) -> Task<Result<ModelHandle<Buffer>>> {
+) -> impl Future<Output = Result<ModelHandle<Buffer>>> + 'static {
let handle = cx.handle();
// If there is already a buffer for the given path, then return it.
@@ -227,7 +228,6 @@
self.open_buffers.retain(|buffer| {
if let Some(buffer) = buffer.upgrade(cx.as_ref()) {
if let Some(file) = buffer.read(cx.as_ref()).file() {
-let file = file.read(cx.as_ref());
if file.worktree_id() == handle.id() && file.path.as_ref() == path {
existing_buffer = Some(buffer);
}
@@ -238,12 +238,14 @@
}
});
+let mut new_buffer = None;
+if existing_buffer.is_none() {
let path = Arc::from(path);
let contents = self.load(&path, cx.as_ref());
-cx.spawn(|this, mut cx| async move {
+new_buffer = Some(cx.spawn(|this, mut cx| async move {
let contents = contents.await?;
let language = language_registry.select_language(&path).cloned();
-let file = cx.add_model(|cx| File::new(handle, path.into(), cx));
+let file = File::new(handle, path.into());
let buffer = cx.add_model(|cx| {
Buffer::from_history(0, History::new(contents.into()), Some(file), language, cx)
});
@@ -252,7 +254,16 @@
this.open_buffers.insert(buffer.downgrade());
});
Ok(buffer)
-})
+}));
+}
+async move {
+if let Some(existing_buffer) = existing_buffer {
+Ok(existing_buffer)
+} else {
+new_buffer.unwrap().await
+}
+}
}
pub fn scan_complete(&self) -> impl Future<Output = ()> {
@@ -274,24 +285,29 @@
self.scan_state.0.blocking_send(scan_state).ok();
self.poll_snapshot(cx);
if let Some(diff) = diff {
+self.observe_snapshot_diff(diff, cx);
+}
+}
+fn observe_snapshot_diff(&mut self, diff: Diff, cx: &mut ModelContext<Worktree>) {
let handle = cx.handle();
self.open_buffers.retain(|buffer| {
if let Some(buffer) = buffer.upgrade(cx.as_ref()) {
buffer.update(cx, |buffer, cx| {
let handle = handle.clone();
if let Some(file) = buffer.file() {
-let path = file.read(cx.as_ref()).path.clone();
-if diff.added.contains(&path) {
-cx.notify();
-}
-// Notify any buffers whose files were deleted.
-else if diff.removed.contains(&path) {
+let mut path = file.path.clone();
+if let Some(new_path) = diff.moved.get(&path) {
+buffer.file_was_moved(new_path.clone(), cx);
+path = new_path.clone();
+} else if diff.added.contains(&path) {
+buffer.file_was_added(cx);
+} else if diff.removed.contains(&path) {
buffer.file_was_deleted(cx);
}
-// Notify any buffers whose files were modified.
-else if diff.modified.contains(&path) {
+if diff.modified.contains(&path) {
cx.spawn(|buffer, mut cx| async move {
let new_contents = handle
.read_with(&cx, |this, cx| {
@@ -321,7 +337,6 @@ impl LocalWorktree {
});
cx.emit(diff);
}
-}
fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
self.snapshot = self.background_snapshot.lock().clone();
@@ -379,9 +394,17 @@ impl LocalWorktree {
})
}
-pub fn save(&self, path: &Path, content: Rope, cx: &AppContext) -> Task<Result<()>> {
-let path = path.to_path_buf();
+pub fn save(
+&self,
+path: impl Into<Arc<Path>>,
+content: Rope,
+cx: &mut ModelContext<Worktree>,
+) -> Task<Result<()>> {
+let path = path.into();
let abs_path = self.absolutize(&path);
+let background_snapshot = self.background_snapshot.clone();
+let save = {
+let path = path.clone();
cx.background_executor().spawn(async move {
let buffer_size = content.summary().bytes.min(10 * 1024);
let file = fs::File::create(&abs_path)?;
@@ -390,6 +413,29 @@ impl LocalWorktree {
writer.write(chunk.as_bytes())?;
}
writer.flush()?;
+// Eagerly populate the snapshot with an updated entry for the saved file
+let root_char_bag = background_snapshot.lock().root_char_bag;
+let entry = fs_entry_for_path(root_char_bag, path, &abs_path)?
+.ok_or_else(|| anyhow!("could not read saved file metadata"))?;
+let added = background_snapshot.lock().entries.replace(entry, &());
+Ok::<bool, anyhow::Error>(added)
+})
+};
+cx.spawn(|worktree, mut cx| async move {
+let added = save.await?;
+worktree.update(&mut cx, |worktree, cx| {
+let worktree = worktree.as_local_mut().unwrap();
+worktree.poll_snapshot(cx);
+let mut diff = Diff::default();
+if added {
+diff.added.insert(path.clone());
+}
+diff.modified.insert(path);
+worktree.observe_snapshot_diff(diff, cx)
+});
Ok(())
})
}
@@ -512,6 +558,7 @@ impl RemoteWorktree {
scan_id: 0,
abs_path: Path::new("").into(),
root_name: worktree.root_name,
+root_char_bag,
ignores: Default::default(),
entries,
};
@@ -531,6 +578,7 @@ pub struct Snapshot {
scan_id: usize,
abs_path: Arc<Path>,
root_name: String,
+root_char_bag: CharBag,
ignores: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
entries: SumTree<Entry>,
}
@@ -774,33 +822,20 @@ pub struct Diff {
#[derive(Clone, PartialEq)]
pub struct File {
worktree: ModelHandle<Worktree>,
-path: Arc<Path>,
+pub path: Arc<Path>,
-}
-impl Entity for File {
-type Event = ();
}
impl File {
-pub fn new(
-worktree: ModelHandle<Worktree>,
-path: Arc<Path>,
-cx: &mut ModelContext<Self>,
-) -> Self {
-cx.subscribe(&worktree, Self::handle_worktree_update);
+pub fn new(worktree: ModelHandle<Worktree>, path: Arc<Path>) -> Self {
Self { worktree, path }
}
-fn handle_worktree_update(&mut self, diff: &Diff, cx: &mut ModelContext<Self>) {
-if let Some(new_path) = diff.moved.get(&self.path) {
-self.path = new_path.clone();
-cx.notify();
-} else if diff.added.contains(&self.path)
-|| diff.removed.contains(&self.path)
-|| diff.modified.contains(&self.path)
-{
-cx.notify();
-}
+pub fn saved_buffer(&self, buffer: ModelHandle<Buffer>, cx: &mut MutableAppContext) {
+self.worktree.update(cx, |worktree, _| {
+if let Worktree::Local(worktree) = worktree {
+worktree.open_buffers.insert(buffer.downgrade());
+}
+})
}
/// Returns this file's path relative to the root of its worktree.
@@ -833,9 +868,13 @@ impl File {
.map_or(UNIX_EPOCH, |entry| entry.mtime)
}
-pub fn save(&self, content: Rope, cx: &AppContext) -> impl Future<Output = Result<()>> {
-let worktree = self.worktree.read(cx);
-worktree.save(&self.path(), content, cx)
+pub fn save(
+&self,
+content: Rope,
+cx: &mut MutableAppContext,
+) -> impl Future<Output = Result<()>> {
+self.worktree
+.update(cx, |worktree, cx| worktree.save(&self.path(), content, cx))
}
pub fn worktree_id(&self) -> usize {
@@ -1024,13 +1063,11 @@ struct BackgroundScanner {
snapshot: Arc<Mutex<Snapshot>>,
notify: Sender<ScanState>,
thread_pool: scoped_pool::Pool,
-root_char_bag: CharBag,
}
impl BackgroundScanner {
fn new(snapshot: Arc<Mutex<Snapshot>>, notify: Sender<ScanState>, worktree_id: usize) -> Self {
Self {
-root_char_bag: Default::default(),
snapshot,
notify,
thread_pool: scoped_pool::Pool::new(16, format!("worktree-{}-scanner", worktree_id)),
@@ -1098,8 +1135,13 @@ impl BackgroundScanner {
if is_dir {
root_name.push('/');
}
-self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
-self.snapshot.lock().root_name = root_name;
+let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
+{
+let mut snapshot = self.snapshot.lock();
+snapshot.root_name = root_name;
+snapshot.root_char_bag = root_char_bag;
+}
if is_dir {
self.snapshot.lock().insert_entry(Entry {
@@ -1125,7 +1167,7 @@
for _ in 0..self.thread_pool.thread_count() {
pool.execute(|| {
while let Ok(job) = rx.recv() {
-if let Err(err) = self.scan_dir(&job) {
+if let Err(err) = self.scan_dir(root_char_bag, &job) {
log::error!("error scanning {:?}: {}", job.abs_path, err);
}
}
@@ -1134,7 +1176,7 @@
});
} else {
self.snapshot.lock().insert_entry(Entry {
-kind: EntryKind::File(self.char_bag(&path)),
+kind: EntryKind::File(char_bag_for_path(root_char_bag, &path)),
path,
inode,
mtime,
@@ -1146,7 +1188,7 @@
Ok(())
}
-fn scan_dir(&self, job: &ScanJob) -> io::Result<()> {
+fn scan_dir(&self, root_char_bag: CharBag, job: &ScanJob) -> io::Result<()> {
let mut new_entries: Vec<Entry> = Vec::new();
let mut new_jobs: Vec<ScanJob> = Vec::new();
let mut ignore_stack = job.ignore_stack.clone();
@@ -1218,7 +1260,7 @@
} else {
let is_ignored = ignore_stack.is_path_ignored(&child_path, false);
new_entries.push(Entry {
-kind: EntryKind::File(self.char_bag(&child_path)),
+kind: EntryKind::File(char_bag_for_path(root_char_bag, &child_path)),
path: child_path,
inode: child_inode,
mtime: child_mtime,
@@ -1247,6 +1289,7 @@
} else {
return false;
};
+let root_char_bag = snapshot.root_char_bag;
events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
let mut abs_paths = events.into_iter().map(|e| e.path).peekable();
@@ -1271,7 +1314,7 @@
snapshot.remove_path(&path);
-match self.fs_entry_for_path(path.clone(), &abs_path) {
+match fs_entry_for_path(snapshot.root_char_bag, path.clone(), &abs_path) {
Ok(Some(mut fs_entry)) => {
let is_dir = fs_entry.is_dir();
let ignore_stack = snapshot.ignore_stack_for_path(&path, is_dir);
@@ -1304,7 +1347,7 @@
for _ in 0..self.thread_pool.thread_count() {
pool.execute(|| {
while let Ok(job) = scan_queue_rx.recv() {
-if let Err(err) = self.scan_dir(&job) {
+if let Err(err) = self.scan_dir(root_char_bag, &job) {
log::error!("error scanning {:?}: {}", job.abs_path, err);
}
}
@@ -1401,8 +1444,13 @@
}
self.snapshot.lock().entries.edit(edits, &());
}
+}
-fn fs_entry_for_path(&self, path: Arc<Path>, abs_path: &Path) -> Result<Option<Entry>> {
+fn fs_entry_for_path(
+root_char_bag: CharBag,
+path: Arc<Path>,
+abs_path: &Path,
+) -> Result<Option<Entry>> {
let metadata = match fs::metadata(&abs_path) {
Err(err) => {
return match (err.kind(), err.raw_os_error()) {
@@ -1424,9 +1472,9 @@
kind: if metadata.file_type().is_dir() {
EntryKind::PendingDir
} else {
-EntryKind::File(self.char_bag(&path))
+EntryKind::File(char_bag_for_path(root_char_bag, &path))
},
-path,
+path: Arc::from(path),
inode,
mtime,
is_symlink,
@@ -1434,17 +1482,16 @@
};
Ok(Some(entry))
}
-fn char_bag(&self, path: &Path) -> CharBag {
-let mut result = self.root_char_bag;
+fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
+let mut result = root_char_bag;
result.extend(
path.to_string_lossy()
.chars()
.map(|c| c.to_ascii_lowercase()),
);
result
-}
}
struct ScanJob {
@@ -1461,7 +1508,7 @@ struct UpdateIgnoreStatusJob {
}
pub trait WorktreeHandle {
-fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File>;
+fn file(&self, path: impl AsRef<Path>) -> File;
#[cfg(test)]
fn flush_fs_events<'a>(
@@ -1471,10 +1518,10 @@ pub trait WorktreeHandle {
}
impl WorktreeHandle for ModelHandle<Worktree> {
-fn file(&self, path: impl AsRef<Path>, cx: &mut MutableAppContext) -> ModelHandle<File> {
+fn file(&self, path: impl AsRef<Path>) -> File {
let path = Arc::from(path.as_ref());
let handle = self.clone();
-cx.add_model(|cx| File::new(handle, path, cx))
+File::new(handle, path)
}
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
@@ -1681,14 +1728,20 @@ mod tests {
});
assert_eq!(path.file_name().unwrap(), "file1");
+let buffer = cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
tree.update(&mut cx, |tree, cx| {
-let buffer =
-cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
let text = buffer.read(cx).snapshot().text();
-smol::block_on(tree.save(&path, text, cx.as_ref())).unwrap();
+tree.save(&path, text, cx)
+})
+.await
+.unwrap();
let new_contents = fs::read_to_string(dir.path().join(path)).unwrap();
-assert_eq!(new_contents, buffer.read(cx).text());
-});
+assert_eq!(
+new_contents,
+buffer.read_with(&cx, |buffer, _| buffer.text())
+);
}
#[gpui::test]
@@ -1704,15 +1757,21 @@ mod tests {
cx.read(|cx| assert_eq!(tree.read(cx).file_count(), 1));
let buffer = cx.add_model(|cx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), cx));
-let file = cx.update(|cx| tree.file("", cx));
-file.read_with(&cx, |file, cx| {
+let file = tree.file("");
+cx.update(|cx| {
assert_eq!(file.path().file_name(), None);
let text = buffer.read(cx).snapshot().text();
-smol::block_on(file.save(text, cx.as_ref())).unwrap();
+file.save(text, cx)
+})
+.await
+.unwrap();
let new_contents = fs::read_to_string(file_path).unwrap();
-assert_eq!(new_contents, buffer.read(cx).text());
-});
+assert_eq!(
+new_contents,
+buffer.read_with(&cx, |buffer, _| buffer.text())
+);
}
#[gpui::test]
@@ -1731,23 +1790,31 @@ mod tests {
}
}));
+let language_registry = Arc::new(LanguageRegistry::new());
let tree = cx.add_model(|cx| Worktree::local(dir.path(), cx));
-let file2 = cx.update(|cx| tree.file("a/file2", cx));
-let file3 = cx.update(|cx| tree.file("a/file3", cx));
-let file4 = cx.update(|cx| tree.file("b/c/file4", cx));
-let file5 = cx.update(|cx| tree.file("b/c/file5", cx));
-let non_existent_file = cx.update(|cx| tree.file("a/file_x", cx));
+let mut buffer_for_path = |path: &'static str| {
+let buffer = tree.update(&mut cx, |tree, cx| {
+tree.open_buffer(path, language_registry.clone(), cx)
+});
+async move { buffer.await.unwrap() }
+};
+let buffer2 = buffer_for_path("a/file2").await;
+let buffer3 = buffer_for_path("a/file3").await;
+let buffer4 = buffer_for_path("b/c/file4").await;
+let buffer5 = buffer_for_path("b/c/file5").await;
// After scanning, the worktree knows which files exist and which don't.
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
cx.read(|cx| {
-assert!(!file2.read(cx).is_deleted(cx));
-assert!(!file3.read(cx).is_deleted(cx));
-assert!(!file4.read(cx).is_deleted(cx));
-assert!(!file5.read(cx).is_deleted(cx));
-assert!(non_existent_file.read(cx).is_deleted(cx));
+assert!(!buffer2.read(cx).is_dirty(cx));
+assert!(!buffer3.read(cx).is_dirty(cx));
+assert!(!buffer4.read(cx).is_dirty(cx));
+assert!(!buffer5.read(cx).is_dirty(cx));
});
tree.flush_fs_events(&cx).await;
@@ -1774,14 +1841,27 @@
]
);
-assert_eq!(file2.read(cx).path().as_ref(), Path::new("a/file2.new"));
-assert_eq!(file3.read(cx).path().as_ref(), Path::new("d/file3"));
-assert_eq!(file4.read(cx).path().as_ref(), Path::new("d/file4"));
-assert_eq!(file5.read(cx).path().as_ref(), Path::new("b/c/file5"));
-assert!(!file2.read(cx).is_deleted(cx));
-assert!(!file3.read(cx).is_deleted(cx));
-assert!(!file4.read(cx).is_deleted(cx));
-assert!(file5.read(cx).is_deleted(cx));
+assert_eq!(
+buffer2.read(cx).file().unwrap().path().as_ref(),
+Path::new("a/file2.new")
+);
+assert_eq!(
+buffer3.read(cx).file().unwrap().path().as_ref(),
+Path::new("d/file3")
+);
+assert_eq!(
+buffer4.read(cx).file().unwrap().path().as_ref(),
+Path::new("d/file4")
+);
+assert_eq!(
+buffer5.read(cx).file().unwrap().path().as_ref(),
+Path::new("b/c/file5")
+);
+assert!(!buffer2.read(cx).file().unwrap().is_deleted(cx));
+assert!(!buffer3.read(cx).file().unwrap().is_deleted(cx));
+assert!(!buffer4.read(cx).file().unwrap().is_deleted(cx));
+assert!(buffer5.read(cx).file().unwrap().is_deleted(cx));
});
}
@@ -1833,6 +1913,7 @@ mod tests {
entries: Default::default(),
ignores: Default::default(),
root_name: Default::default(),
+root_char_bag: Default::default(),
};
snapshot.entries.edit(
@@ -1991,6 +2072,7 @@
entries: Default::default(),
ignores: Default::default(),
root_name: Default::default(),
+root_char_bag: Default::default(),
})),
notify_tx,
0,
@@ -2025,6 +2107,7 @@
entries: Default::default(),
ignores: Default::default(),
root_name: Default::default(),
+root_char_bag: Default::default(),
})),
notify_tx,
1,
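As a compact restatement of the dispatch in `observe_snapshot_diff` above: a move rewrites the buffer's recorded path, an add or remove changes its file-handle state, and a modification triggers a reload of the on-disk contents. The sketch below uses plain structs and an event list in place of gpui models and async reloads, so every name in it is an illustrative stand-in.

```rust
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;

// Simplified stand-in for the worktree's Diff: which paths changed and how.
#[derive(Default)]
struct Diff {
    added: BTreeSet<PathBuf>,
    removed: BTreeSet<PathBuf>,
    modified: BTreeSet<PathBuf>,
    moved: BTreeMap<PathBuf, PathBuf>,
}

#[derive(Debug, PartialEq)]
enum BufferEvent {
    FileHandleChanged,
    Dirtied,
    Reloaded,
}

// Simplified stand-in for an open buffer that tracks its file's path.
struct Buffer {
    path: PathBuf,
    events: Vec<BufferEvent>,
}

impl Buffer {
    fn observe_diff(&mut self, diff: &Diff) {
        let mut path = self.path.clone();
        if let Some(new_path) = diff.moved.get(&path) {
            // A move rewrites the recorded path and changes the file handle.
            self.path = new_path.clone();
            path = new_path.clone();
            self.events.push(BufferEvent::FileHandleChanged);
        } else if diff.added.contains(&path) {
            self.events.push(BufferEvent::FileHandleChanged);
        } else if diff.removed.contains(&path) {
            // A removal dirties the buffer (the real code only does this when
            // the buffer was clean) and changes the file handle.
            self.events.push(BufferEvent::Dirtied);
            self.events.push(BufferEvent::FileHandleChanged);
        }
        if diff.modified.contains(&path) {
            // A modification would reload the on-disk contents; recorded here as an event.
            self.events.push(BufferEvent::Reloaded);
        }
    }
}

fn main() {
    let mut diff = Diff::default();
    diff.moved.insert("a/old.rs".into(), "a/new.rs".into());
    diff.modified.insert("a/new.rs".into());

    let mut buffer = Buffer { path: "a/old.rs".into(), events: Vec::new() };
    buffer.observe_diff(&diff);
    assert_eq!(buffer.path, PathBuf::from("a/new.rs"));
    assert_eq!(
        buffer.events,
        vec![BufferEvent::FileHandleChanged, BufferEvent::Reloaded]
    );
    println!("events: {:?}", buffer.events);
}
```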

View file

@@ -619,6 +619,7 @@ mod tests {
ignores: Default::default(),
entries: Default::default(),
root_name: Default::default(),
+root_char_bag: Default::default(),
},
false,
path_entries.into_iter(),