Port to gpui2
parent e5616bce98
commit 16b5d4b35c

17 changed files with 2585 additions and 355 deletions
@@ -958,8 +958,6 @@ impl LocalWorktree {
         cx.spawn(|this, mut cx| async move {
             let text = fs.load(&abs_path).await?;
-            let entry = entry.await?;
-
             let mut index_task = None;
             let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
             if let Some(repo) = snapshot.repository_for_path(&path) {
@@ -982,18 +980,43 @@ impl LocalWorktree {
             let worktree = this
                 .upgrade()
                 .ok_or_else(|| anyhow!("worktree was dropped"))?;
-            Ok((
-                File {
-                    entry_id: entry.id,
-                    worktree,
-                    path: entry.path,
-                    mtime: entry.mtime,
-                    is_local: true,
-                    is_deleted: false,
-                },
-                text,
-                diff_base,
-            ))
+            match entry.await? {
+                Some(entry) => Ok((
+                    File {
+                        entry_id: Some(entry.id),
+                        worktree,
+                        path: entry.path,
+                        mtime: entry.mtime,
+                        is_local: true,
+                        is_deleted: false,
+                    },
+                    text,
+                    diff_base,
+                )),
+                None => {
+                    let metadata = fs
+                        .metadata(&abs_path)
+                        .await
+                        .with_context(|| {
+                            format!("Loading metadata for excluded file {abs_path:?}")
+                        })?
+                        .with_context(|| {
+                            format!("Excluded file {abs_path:?} got removed during loading")
+                        })?;
+                    Ok((
+                        File {
+                            entry_id: None,
+                            worktree,
+                            path,
+                            mtime: metadata.mtime,
+                            is_local: true,
+                            is_deleted: false,
+                        },
+                        text,
+                        diff_base,
+                    ))
+                }
+            }
         })
     }

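With the entry now resolved to an `Option`, `load` falls back to the filesystem for paths that `file_scan_exclusions` kept out of the worktree snapshot. Below is a minimal standalone sketch of that fallback shape using only `std`; the `WorktreeEntry`, `FileMeta`, and `lookup_entry` names are illustrative stand-ins, not Zed's actual types.

```rust
use std::path::Path;
use std::time::SystemTime;

// Illustrative stand-ins; the real types in worktree.rs are `Entry` and `File`.
struct WorktreeEntry {
    id: u64,
    mtime: SystemTime,
}

struct FileMeta {
    entry_id: Option<u64>,
    mtime: SystemTime,
}

// Pretend lookup into the scanned snapshot; excluded paths were never scanned,
// so they come back as `None`.
fn lookup_entry(_path: &Path) -> Option<WorktreeEntry> {
    None
}

fn load_meta(path: &Path) -> std::io::Result<FileMeta> {
    match lookup_entry(path) {
        // Scanned file: identity and mtime come from the worktree entry.
        Some(entry) => Ok(FileMeta {
            entry_id: Some(entry.id),
            mtime: entry.mtime,
        }),
        // Excluded file: there is no entry, so stat the file directly.
        None => {
            let metadata = std::fs::metadata(path)?;
            Ok(FileMeta {
                entry_id: None,
                mtime: metadata.modified()?,
            })
        }
    }
}

fn main() -> std::io::Result<()> {
    let meta = load_meta(Path::new("Cargo.toml"))?;
    println!("entry_id: {:?}, mtime: {:?}", meta.entry_id, meta.mtime);
    Ok(())
}
```

The same pattern (entry when scanned, `fs.metadata` fallback when excluded) repeats in the save path in the next hunk.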
@@ -1013,18 +1036,38 @@ impl LocalWorktree {
         let text = buffer.as_rope().clone();
         let fingerprint = text.fingerprint();
         let version = buffer.version();
-        let save = self.write_file(path, text, buffer.line_ending(), cx);
+        let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
+        let fs = Arc::clone(&self.fs);
+        let abs_path = self.absolutize(&path);

         cx.spawn(move |this, mut cx| async move {
             let entry = save.await?;
+            let this = this.upgrade().context("worktree dropped")?;
+
+            let (entry_id, mtime, path) = match entry {
+                Some(entry) => (Some(entry.id), entry.mtime, entry.path),
+                None => {
+                    let metadata = fs
+                        .metadata(&abs_path)
+                        .await
+                        .with_context(|| {
+                            format!(
+                                "Fetching metadata after saving the excluded buffer {abs_path:?}"
+                            )
+                        })?
+                        .with_context(|| {
+                            format!("Excluded buffer {path:?} got removed during saving")
+                        })?;
+                    (None, metadata.mtime, path)
+                }
+            };

             if has_changed_file {
                 let new_file = Arc::new(File {
-                    entry_id: entry.id,
+                    entry_id,
                     worktree: this,
-                    path: entry.path,
-                    mtime: entry.mtime,
+                    path,
+                    mtime,
                     is_local: true,
                     is_deleted: false,
                 });
@@ -1050,13 +1093,13 @@ impl LocalWorktree {
                     project_id,
                     buffer_id,
                     version: serialize_version(&version),
-                    mtime: Some(entry.mtime.into()),
+                    mtime: Some(mtime.into()),
                     fingerprint: serialize_fingerprint(fingerprint),
                 })?;
             }

             buffer_handle.update(&mut cx, |buffer, cx| {
-                buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
+                buffer.did_save(version.clone(), fingerprint, mtime, cx);
             })?;

             Ok(())
@@ -1081,7 +1124,7 @@ impl LocalWorktree {
         path: impl Into<Arc<Path>>,
         is_dir: bool,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<Entry>> {
+    ) -> Task<Result<Option<Entry>>> {
         let path = path.into();
         let lowest_ancestor = self.lowest_ancestor(&path);
         let abs_path = self.absolutize(&path);
@@ -1098,7 +1141,7 @@ impl LocalWorktree {
         cx.spawn(|this, mut cx| async move {
             write.await?;
             let (result, refreshes) = this.update(&mut cx, |this, cx| {
-                let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
+                let mut refreshes = Vec::new();
                 let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
                 for refresh_path in refresh_paths.ancestors() {
                     if refresh_path == Path::new("") {
@@ -1125,14 +1168,14 @@ impl LocalWorktree {
         })
     }

-    pub fn write_file(
+    pub(crate) fn write_file(
         &self,
         path: impl Into<Arc<Path>>,
         text: Rope,
         line_ending: LineEnding,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<Entry>> {
-        let path = path.into();
+    ) -> Task<Result<Option<Entry>>> {
+        let path: Arc<Path> = path.into();
         let abs_path = self.absolutize(&path);
         let fs = self.fs.clone();
         let write = cx
@@ -1191,8 +1234,11 @@ impl LocalWorktree {
         entry_id: ProjectEntryId,
         new_path: impl Into<Arc<Path>>,
         cx: &mut ModelContext<Worktree>,
-    ) -> Option<Task<Result<Entry>>> {
-        let old_path = self.entry_for_id(entry_id)?.path.clone();
+    ) -> Task<Result<Option<Entry>>> {
+        let old_path = match self.entry_for_id(entry_id) {
+            Some(entry) => entry.path.clone(),
+            None => return Task::ready(Ok(None)),
+        };
         let new_path = new_path.into();
         let abs_old_path = self.absolutize(&old_path);
         let abs_new_path = self.absolutize(&new_path);
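From this hunk on, `rename_entry` (and `copy_entry` further down) return `Task<Result<Option<Entry>>>` instead of `Option<Task<Result<Entry>>>`: a missing or excluded entry now surfaces through the task's result as `Ok(None)` (via `Task::ready`) rather than as an absent task. A rough standalone sketch of what that shape change means for a caller, with `Task` modeled as a boxed closure and both `rename_*` functions invented purely for illustration (they are not Zed's API):

```rust
// Not Zed's API: `Task`, `Entry`, and the two `rename_*` functions are
// simplified stand-ins that only demonstrate the change in return shape.
type Task<T> = Box<dyn FnOnce() -> T>;

#[derive(Debug)]
struct Entry {
    id: u64,
}

// Old shape: absence is signalled synchronously, so callers must branch twice.
fn rename_old(found: bool) -> Option<Task<Result<Entry, String>>> {
    if !found {
        return None;
    }
    Some(Box::new(|| Ok(Entry { id: 1 })))
}

// New shape: callers always get a task, and absence arrives as `Ok(None)`.
fn rename_new(found: bool) -> Task<Result<Option<Entry>, String>> {
    if !found {
        return Box::new(|| Ok(None)); // analogous to Task::ready(Ok(None))
    }
    Box::new(|| Ok(Some(Entry { id: 1 })))
}

fn main() -> Result<(), String> {
    // Old: handle the missing entry before the task even exists.
    if let Some(task) = rename_old(true) {
        println!("old: {:?}", task()?);
    }
    // New: run the task unconditionally, then match on the optional entry.
    match rename_new(false)()? {
        Some(entry) => println!("new: {:?}", entry),
        None => println!("new: entry missing or excluded"),
    }
    Ok(())
}
```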
@@ -1202,7 +1248,7 @@ impl LocalWorktree {
                 .await
         });

-        Some(cx.spawn(|this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             rename.await?;
             this.update(&mut cx, |this, cx| {
                 this.as_local_mut()
@@ -1210,7 +1256,7 @@ impl LocalWorktree {
                     .refresh_entry(new_path.clone(), Some(old_path), cx)
             })?
             .await
-        }))
+        })
     }

     pub fn copy_entry(
@@ -1218,8 +1264,11 @@ impl LocalWorktree {
         entry_id: ProjectEntryId,
         new_path: impl Into<Arc<Path>>,
         cx: &mut ModelContext<Worktree>,
-    ) -> Option<Task<Result<Entry>>> {
-        let old_path = self.entry_for_id(entry_id)?.path.clone();
+    ) -> Task<Result<Option<Entry>>> {
+        let old_path = match self.entry_for_id(entry_id) {
+            Some(entry) => entry.path.clone(),
+            None => return Task::ready(Ok(None)),
+        };
         let new_path = new_path.into();
         let abs_old_path = self.absolutize(&old_path);
         let abs_new_path = self.absolutize(&new_path);
@@ -1234,7 +1283,7 @@ impl LocalWorktree {
                 .await
         });

-        Some(cx.spawn(|this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             copy.await?;
             this.update(&mut cx, |this, cx| {
                 this.as_local_mut()
@@ -1242,7 +1291,7 @@ impl LocalWorktree {
                     .refresh_entry(new_path.clone(), None, cx)
             })?
             .await
-        }))
+        })
     }

     pub fn expand_entry(
@@ -1278,7 +1327,10 @@ impl LocalWorktree {
         path: Arc<Path>,
         old_path: Option<Arc<Path>>,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<Entry>> {
+    ) -> Task<Result<Option<Entry>>> {
+        if self.is_path_excluded(path.to_path_buf()) {
+            return Task::ready(Ok(None));
+        }
         let paths = if let Some(old_path) = old_path.as_ref() {
             vec![old_path.clone(), path.clone()]
         } else {
@@ -1287,11 +1339,12 @@ impl LocalWorktree {
         let mut refresh = self.refresh_entries_for_paths(paths);
         cx.spawn(move |this, mut cx| async move {
             refresh.recv().await;
-            this.update(&mut cx, |this, _| {
+            let new_entry = this.update(&mut cx, |this, _| {
                 this.entry_for_path(path)
                     .cloned()
                     .ok_or_else(|| anyhow!("failed to read path after update"))
-            })?
+            })??;
+            Ok(Some(new_entry))
         })
     }

@@ -2222,10 +2275,19 @@ impl LocalSnapshot {
         paths
     }

-    pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
-        self.file_scan_exclusions
-            .iter()
-            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
+        loop {
+            if self
+                .file_scan_exclusions
+                .iter()
+                .any(|exclude_matcher| exclude_matcher.is_match(&path))
+            {
+                return true;
+            }
+            if !path.pop() {
+                return false;
+            }
+        }
     }
 }

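The new `is_path_excluded` takes an owned `PathBuf` and pops components until either a matcher fires or the path is exhausted, so a path counts as excluded when it or any of its ancestors matches `file_scan_exclusions`. A self-contained sketch of the same loop, with the glob matchers replaced by a plain closure for illustration:

```rust
use std::path::PathBuf;

// Same ancestor-walking loop as above, generic over a single predicate
// standing in for Zed's list of `file_scan_exclusions` matchers.
fn is_path_excluded(mut path: PathBuf, is_match: impl Fn(&PathBuf) -> bool) -> bool {
    loop {
        // Excluded if the path itself, or any ancestor we pop down to, matches.
        if is_match(&path) {
            return true;
        }
        // `pop` returns false once the path is exhausted, ending the walk.
        if !path.pop() {
            return false;
        }
    }
}

fn main() {
    // Stand-in exclusion rule: any `node_modules` directory.
    let excluded = |p: &PathBuf| p.ends_with("node_modules");

    // A file deep inside an excluded directory is itself excluded...
    assert!(is_path_excluded(
        PathBuf::from("web/node_modules/lodash/index.js"),
        &excluded
    ));
    // ...while a path outside of it is not.
    assert!(!is_path_excluded(PathBuf::from("web/src/main.rs"), &excluded));
    println!("exclusion checks passed");
}
```

This is why the callers below can pass a single relative path instead of checking the path, its absolute form, and each parent separately.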
@@ -2455,8 +2517,7 @@ impl BackgroundScannerState {
             ids_to_preserve.insert(work_directory_id);
         } else {
             let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
-            let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
-                || snapshot.is_path_excluded(&git_dir_abs_path);
+            let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
             if git_dir_excluded
                 && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
             {
@@ -2663,7 +2724,7 @@ pub struct File {
     pub worktree: Model<Worktree>,
     pub path: Arc<Path>,
     pub mtime: SystemTime,
-    pub(crate) entry_id: ProjectEntryId,
+    pub(crate) entry_id: Option<ProjectEntryId>,
     pub(crate) is_local: bool,
     pub(crate) is_deleted: bool,
 }
@@ -2732,7 +2793,7 @@ impl language::File for File {
     fn to_proto(&self) -> rpc::proto::File {
         rpc::proto::File {
             worktree_id: self.worktree.entity_id().as_u64(),
-            entry_id: self.entry_id.to_proto(),
+            entry_id: self.entry_id.map(|id| id.to_proto()),
             path: self.path.to_string_lossy().into(),
             mtime: Some(self.mtime.into()),
             is_deleted: self.is_deleted,
@@ -2790,7 +2851,7 @@ impl File {
             worktree,
             path: entry.path.clone(),
             mtime: entry.mtime,
-            entry_id: entry.id,
+            entry_id: Some(entry.id),
             is_local: true,
             is_deleted: false,
         })
@@ -2815,7 +2876,7 @@ impl File {
             worktree,
             path: Path::new(&proto.path).into(),
             mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
-            entry_id: ProjectEntryId::from_proto(proto.entry_id),
+            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
             is_local: false,
             is_deleted: proto.is_deleted,
         })
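With `entry_id` now an `Option<ProjectEntryId>`, the proto conversions in the surrounding hunks simply `map` in both directions, which presumes the protobuf `entry_id` field is itself optional. A tiny round-trip sketch; the `ProjectEntryId` wrapper and the `u64` wire type below are simplified stand-ins rather than the actual rpc definitions:

```rust
// Illustrative stand-ins for ProjectEntryId and the proto File field.
#[derive(Debug, Clone, Copy, PartialEq)]
struct ProjectEntryId(usize);

impl ProjectEntryId {
    fn to_proto(&self) -> u64 {
        self.0 as u64
    }
    fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }
}

fn main() {
    // A scanned local file has an entry; an excluded file has none.
    let tracked: Option<ProjectEntryId> = Some(ProjectEntryId(42));
    let excluded: Option<ProjectEntryId> = None;

    // to_proto: Option<ProjectEntryId> -> Option<u64>
    let tracked_wire = tracked.map(|id| id.to_proto());
    let excluded_wire = excluded.map(|id| id.to_proto());

    // from_proto: Option<u64> -> Option<ProjectEntryId>
    assert_eq!(tracked_wire.map(ProjectEntryId::from_proto), tracked);
    assert_eq!(excluded_wire.map(ProjectEntryId::from_proto), excluded);
    println!("round trip preserved: {tracked_wire:?}, {excluded_wire:?}");
}
```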
@@ -2833,7 +2894,7 @@ impl File {
         if self.is_deleted {
             None
         } else {
-            Some(self.entry_id)
+            self.entry_id
         }
     }
 }
@@ -3329,16 +3390,7 @@ impl BackgroundScanner {
             return false;
         }

-        // FS events may come for files which parent directory is excluded, need to check ignore those.
-        let mut path_to_test = abs_path.clone();
-        let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
-            || snapshot.is_path_excluded(&relative_path);
-        while !excluded_file_event && path_to_test.pop() {
-            if snapshot.is_path_excluded(&path_to_test) {
-                excluded_file_event = true;
-            }
-        }
-        if excluded_file_event {
+        if snapshot.is_path_excluded(relative_path.to_path_buf()) {
             if !is_git_related {
                 log::debug!("ignoring FS event for excluded path {relative_path:?}");
             }
@@ -3522,7 +3574,7 @@ impl BackgroundScanner {
             let state = self.state.lock();
             let snapshot = &state.snapshot;
             root_abs_path = snapshot.abs_path().clone();
-            if snapshot.is_path_excluded(&job.abs_path) {
+            if snapshot.is_path_excluded(job.path.to_path_buf()) {
                 log::error!("skipping excluded directory {:?}", job.path);
                 return Ok(());
             }
@@ -3593,9 +3645,9 @@ impl BackgroundScanner {
             }

             {
-                let relative_path = job.path.join(child_name);
                 let mut state = self.state.lock();
-                if state.snapshot.is_path_excluded(&child_abs_path) {
+                let relative_path = job.path.join(child_name);
+                if state.snapshot.is_path_excluded(relative_path.clone()) {
                     log::debug!("skipping excluded child entry {relative_path:?}");
                     state.remove_path(&relative_path);
                     continue;