Merge pull request #1386 from zed-industries/symlink-cycles

Detect cycles when scanning a directory

Commit 59366a5c44, 4 changed files with 698 additions and 321 deletions
@@ -1,6 +1,6 @@
 use anyhow::{anyhow, Result};
 use fsevent::EventStream;
-use futures::{Stream, StreamExt};
+use futures::{future::BoxFuture, Stream, StreamExt};
 use language::LineEnding;
 use smol::io::{AsyncReadExt, AsyncWriteExt};
 use std::{
@@ -12,11 +12,18 @@ use std::{
 };
 use text::Rope;

+#[cfg(any(test, feature = "test-support"))]
+use collections::{btree_map, BTreeMap};
+#[cfg(any(test, feature = "test-support"))]
+use futures::lock::Mutex;
+#[cfg(any(test, feature = "test-support"))]
+use std::sync::{Arc, Weak};
+
 #[async_trait::async_trait]
 pub trait Fs: Send + Sync {
     async fn create_dir(&self, path: &Path) -> Result<()>;
     async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()>;
-    async fn copy(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()>;
+    async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()>;
     async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()>;
     async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>;
     async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>;
@@ -92,7 +99,7 @@ impl Fs for RealFs {
         Ok(())
     }

-    async fn copy(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()> {
+    async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()> {
         if !options.overwrite && smol::fs::metadata(target).await.is_ok() {
             if options.ignore_if_exists {
                 return Ok(());
@@ -101,23 +108,7 @@ impl Fs for RealFs {
             }
         }

-        let metadata = smol::fs::metadata(source).await?;
-        let _ = smol::fs::remove_dir_all(target).await;
-        if metadata.is_dir {
-            self.create_dir(target).await?;
-            let mut children = smol::fs::read_dir(source).await?;
-            while let Some(child) = children.next().await {
-                if let Ok(child) = child {
-                    let child_source_path = child.path();
-                    let child_target_path = target.join(child.file_name());
-                    self.copy(&child_source_path, &child_target_path, options)
-                        .await?;
-                }
-            }
-        } else {
-            smol::fs::copy(source, target).await?;
-        }
-
+        smol::fs::copy(source, target).await?;
         Ok(())
     }

@@ -252,35 +243,115 @@ impl Fs for RealFs {
 }

 #[cfg(any(test, feature = "test-support"))]
-#[derive(Clone, Debug)]
-struct FakeFsEntry {
-    metadata: Metadata,
-    content: Option<String>,
+pub struct FakeFs {
+    // Use an unfair lock to ensure tests are deterministic.
+    state: Mutex<FakeFsState>,
+    executor: Weak<gpui::executor::Background>,
 }

 #[cfg(any(test, feature = "test-support"))]
 struct FakeFsState {
-    entries: std::collections::BTreeMap<PathBuf, FakeFsEntry>,
+    root: Arc<Mutex<FakeFsEntry>>,
     next_inode: u64,
     event_txs: Vec<smol::channel::Sender<Vec<fsevent::Event>>>,
 }

+#[cfg(any(test, feature = "test-support"))]
+#[derive(Debug)]
+enum FakeFsEntry {
+    File {
+        inode: u64,
+        mtime: SystemTime,
+        content: String,
+    },
+    Dir {
+        inode: u64,
+        mtime: SystemTime,
+        entries: BTreeMap<String, Arc<Mutex<FakeFsEntry>>>,
+    },
+    Symlink {
+        target: PathBuf,
+    },
+}
+
 #[cfg(any(test, feature = "test-support"))]
 impl FakeFsState {
-    fn validate_path(&self, path: &Path) -> Result<()> {
-        if path.is_absolute()
-            && path
-                .parent()
-                .and_then(|path| self.entries.get(path))
-                .map_or(false, |e| e.metadata.is_dir)
-        {
-            Ok(())
-        } else {
-            Err(anyhow!("invalid path {:?}", path))
-        }
+    async fn read_path<'a>(&'a self, target: &Path) -> Result<Arc<Mutex<FakeFsEntry>>> {
+        Ok(self
+            .try_read_path(target)
+            .await
+            .ok_or_else(|| anyhow!("path does not exist: {}", target.display()))?
+            .0)
     }

-    async fn emit_event<I, T>(&mut self, paths: I)
+    async fn try_read_path<'a>(
+        &'a self,
+        target: &Path,
+    ) -> Option<(Arc<Mutex<FakeFsEntry>>, PathBuf)> {
+        let mut path = target.to_path_buf();
+        let mut real_path = PathBuf::new();
+        let mut entry_stack = Vec::new();
+        'outer: loop {
+            let mut path_components = path.components().collect::<collections::VecDeque<_>>();
+            while let Some(component) = path_components.pop_front() {
+                match component {
+                    Component::Prefix(_) => panic!("prefix paths aren't supported"),
+                    Component::RootDir => {
+                        entry_stack.clear();
+                        entry_stack.push(self.root.clone());
+                        real_path.clear();
+                        real_path.push("/");
+                    }
+                    Component::CurDir => {}
+                    Component::ParentDir => {
+                        entry_stack.pop()?;
+                        real_path.pop();
+                    }
+                    Component::Normal(name) => {
+                        let current_entry = entry_stack.last().cloned()?;
+                        let current_entry = current_entry.lock().await;
+                        if let FakeFsEntry::Dir { entries, .. } = &*current_entry {
+                            let entry = entries.get(name.to_str().unwrap()).cloned()?;
+                            let _entry = entry.lock().await;
+                            if let FakeFsEntry::Symlink { target, .. } = &*_entry {
+                                let mut target = target.clone();
+                                target.extend(path_components);
+                                path = target;
+                                continue 'outer;
+                            } else {
+                                entry_stack.push(entry.clone());
+                                real_path.push(name);
+                            }
+                        } else {
+                            return None;
+                        }
+                    }
+                }
+            }
+            break;
+        }
+        entry_stack.pop().map(|entry| (entry, real_path))
+    }
+
+    async fn write_path<Fn, T>(&self, path: &Path, callback: Fn) -> Result<T>
+    where
+        Fn: FnOnce(btree_map::Entry<String, Arc<Mutex<FakeFsEntry>>>) -> Result<T>,
+    {
+        let path = normalize_path(path);
+        let filename = path
+            .file_name()
+            .ok_or_else(|| anyhow!("cannot overwrite the root"))?;
+        let parent_path = path.parent().unwrap();
+
+        let parent = self.read_path(parent_path).await?;
+        let mut parent = parent.lock().await;
+        let new_entry = parent
+            .dir_entries(parent_path)?
+            .entry(filename.to_str().unwrap().into());
+        callback(new_entry)
+    }
+
+    fn emit_event<I, T>(&mut self, paths: I)
     where
         I: IntoIterator<Item = T>,
         T: Into<PathBuf>,
@@ -301,33 +372,17 @@ impl FakeFsState {
     }
 }

-#[cfg(any(test, feature = "test-support"))]
-pub struct FakeFs {
-    // Use an unfair lock to ensure tests are deterministic.
-    state: futures::lock::Mutex<FakeFsState>,
-    executor: std::sync::Weak<gpui::executor::Background>,
-}
-
 #[cfg(any(test, feature = "test-support"))]
 impl FakeFs {
-    pub fn new(executor: std::sync::Arc<gpui::executor::Background>) -> std::sync::Arc<Self> {
-        let mut entries = std::collections::BTreeMap::new();
-        entries.insert(
-            Path::new("/").to_path_buf(),
-            FakeFsEntry {
-                metadata: Metadata {
-                    inode: 0,
-                    mtime: SystemTime::now(),
-                    is_dir: true,
-                    is_symlink: false,
-                },
-                content: None,
-            },
-        );
-        std::sync::Arc::new(Self {
-            executor: std::sync::Arc::downgrade(&executor),
-            state: futures::lock::Mutex::new(FakeFsState {
-                entries,
+    pub fn new(executor: Arc<gpui::executor::Background>) -> Arc<Self> {
+        Arc::new(Self {
+            executor: Arc::downgrade(&executor),
+            state: Mutex::new(FakeFsState {
+                root: Arc::new(Mutex::new(FakeFsEntry::Dir {
+                    inode: 0,
+                    mtime: SystemTime::now(),
+                    entries: Default::default(),
+                })),
                 next_inode: 1,
                 event_txs: Default::default(),
             }),
@@ -337,23 +392,48 @@ impl FakeFs {
     pub async fn insert_file(&self, path: impl AsRef<Path>, content: String) {
         let mut state = self.state.lock().await;
         let path = path.as_ref();
-        state.validate_path(path).unwrap();
         let inode = state.next_inode;
         state.next_inode += 1;
-        state.entries.insert(
-            path.to_path_buf(),
-            FakeFsEntry {
-                metadata: Metadata {
-                    inode,
-                    mtime: SystemTime::now(),
-                    is_dir: false,
-                    is_symlink: false,
-                },
-                content: Some(content),
-            },
-        );
-        state.emit_event(&[path]).await;
+        let file = Arc::new(Mutex::new(FakeFsEntry::File {
+            inode,
+            mtime: SystemTime::now(),
+            content,
+        }));
+        state
+            .write_path(path, move |entry| {
+                match entry {
+                    btree_map::Entry::Vacant(e) => {
+                        e.insert(file);
+                    }
+                    btree_map::Entry::Occupied(mut e) => {
+                        *e.get_mut() = file;
+                    }
+                }
+                Ok(())
+            })
+            .await
+            .unwrap();
+        state.emit_event(&[path]);
+    }
+
+    pub async fn insert_symlink(&self, path: impl AsRef<Path>, target: PathBuf) {
+        let mut state = self.state.lock().await;
+        let path = path.as_ref();
+        let file = Arc::new(Mutex::new(FakeFsEntry::Symlink { target }));
+        state
+            .write_path(path.as_ref(), move |e| match e {
+                btree_map::Entry::Vacant(e) => {
+                    e.insert(file);
+                    Ok(())
+                }
+                btree_map::Entry::Occupied(mut e) => {
+                    *e.get_mut() = file;
+                    Ok(())
+                }
+            })
+            .await
+            .unwrap();
+        state.emit_event(&[path]);
     }

     #[must_use]
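As a usage illustration for the two helpers above, here is a minimal, hedged sketch of how a test might seed the fake filesystem and observe symlink resolution. The paths, the file content, and the surrounding #[gpui::test] harness are assumptions for the example and are not part of this change.

    // Hypothetical test body; assumes it runs inside a #[gpui::test] async fn
    // so that `cx.background()` supplies the executor FakeFs::new expects.
    let fs = FakeFs::new(cx.background());
    fs.create_dir(Path::new("/root")).await.unwrap();
    fs.insert_file("/root/data.txt", "hello".to_string()).await;
    fs.insert_symlink("/root/link.txt", "data.txt".into()).await;

    // Reads through the link resolve to the real path tracked by try_read_path.
    assert_eq!(
        fs.canonicalize("/root/link.txt".as_ref()).await.unwrap(),
        PathBuf::from("/root/data.txt"),
    );
    assert_eq!(fs.load("/root/link.txt".as_ref()).await.unwrap(), "hello");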
@@ -392,13 +472,22 @@ impl FakeFs {
     }

     pub async fn files(&self) -> Vec<PathBuf> {
-        self.state
-            .lock()
-            .await
-            .entries
-            .iter()
-            .filter_map(|(path, entry)| entry.content.as_ref().map(|_| path.clone()))
-            .collect()
+        let mut result = Vec::new();
+        let mut queue = collections::VecDeque::new();
+        queue.push_back((PathBuf::from("/"), self.state.lock().await.root.clone()));
+        while let Some((path, entry)) = queue.pop_front() {
+            let e = entry.lock().await;
+            match &*e {
+                FakeFsEntry::File { .. } => result.push(path),
+                FakeFsEntry::Dir { entries, .. } => {
+                    for (name, entry) in entries {
+                        queue.push_back((path.join(name), entry.clone()));
+                    }
+                }
+                FakeFsEntry::Symlink { .. } => {}
+            }
+        }
+        result
     }

     async fn simulate_random_delay(&self) {
@@ -410,182 +499,207 @@ impl FakeFs {
     }
 }

+#[cfg(any(test, feature = "test-support"))]
+impl FakeFsEntry {
+    fn is_file(&self) -> bool {
+        matches!(self, Self::File { .. })
+    }
+
+    fn file_content(&self, path: &Path) -> Result<&String> {
+        if let Self::File { content, .. } = self {
+            Ok(content)
+        } else {
+            Err(anyhow!("not a file: {}", path.display()))
+        }
+    }
+
+    fn set_file_content(&mut self, path: &Path, new_content: String) -> Result<()> {
+        if let Self::File { content, mtime, .. } = self {
+            *mtime = SystemTime::now();
+            *content = new_content;
+            Ok(())
+        } else {
+            Err(anyhow!("not a file: {}", path.display()))
+        }
+    }
+
+    fn dir_entries(
+        &mut self,
+        path: &Path,
+    ) -> Result<&mut BTreeMap<String, Arc<Mutex<FakeFsEntry>>>> {
+        if let Self::Dir { entries, .. } = self {
+            Ok(entries)
+        } else {
+            Err(anyhow!("not a directory: {}", path.display()))
+        }
+    }
+}
+
 #[cfg(any(test, feature = "test-support"))]
 #[async_trait::async_trait]
 impl Fs for FakeFs {
     async fn create_dir(&self, path: &Path) -> Result<()> {
         self.simulate_random_delay().await;
-        let state = &mut *self.state.lock().await;
-        let path = normalize_path(path);
-        let mut ancestor_path = PathBuf::new();
-        let mut created_dir_paths = Vec::new();
-        for component in path.components() {
-            ancestor_path.push(component);
-            let entry = state
-                .entries
-                .entry(ancestor_path.clone())
-                .or_insert_with(|| {
-                    let inode = state.next_inode;
-                    state.next_inode += 1;
-                    created_dir_paths.push(ancestor_path.clone());
-                    FakeFsEntry {
-                        metadata: Metadata {
-                            inode,
-                            mtime: SystemTime::now(),
-                            is_dir: true,
-                            is_symlink: false,
-                        },
-                        content: None,
-                    }
-                });
-            if !entry.metadata.is_dir {
-                return Err(anyhow!(
-                    "cannot create directory because {:?} is a file",
-                    ancestor_path
-                ));
-            }
-        }
-        state.emit_event(&created_dir_paths).await;
+        let mut state = self.state.lock().await;
+        let mut created_dirs = Vec::new();
+        let mut cur_path = PathBuf::new();
+        for component in path.components() {
+            cur_path.push(component);
+            if cur_path == Path::new("/") {
+                continue;
+            }
+
+            let inode = state.next_inode;
+            state.next_inode += 1;
+            state
+                .write_path(&cur_path, |entry| {
+                    entry.or_insert_with(|| {
+                        created_dirs.push(cur_path.clone());
+                        Arc::new(Mutex::new(FakeFsEntry::Dir {
+                            inode,
+                            mtime: SystemTime::now(),
+                            entries: Default::default(),
+                        }))
+                    });
+                    Ok(())
+                })
+                .await?;
+        }
+
+        state.emit_event(&created_dirs);
         Ok(())
     }

     async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> {
         self.simulate_random_delay().await;
         let mut state = self.state.lock().await;
-        let path = normalize_path(path);
-        state.validate_path(&path)?;
-        if let Some(entry) = state.entries.get_mut(&path) {
-            if entry.metadata.is_dir || entry.metadata.is_symlink {
-                return Err(anyhow!(
-                    "cannot create file because {:?} is a dir or a symlink",
-                    path
-                ));
-            }
-
-            if options.overwrite {
-                entry.metadata.mtime = SystemTime::now();
-                entry.content = Some(Default::default());
-            } else if !options.ignore_if_exists {
-                return Err(anyhow!(
-                    "cannot create file because {:?} already exists",
-                    &path
-                ));
-            }
-        } else {
-            let inode = state.next_inode;
-            state.next_inode += 1;
-            let entry = FakeFsEntry {
-                metadata: Metadata {
-                    inode,
-                    mtime: SystemTime::now(),
-                    is_dir: false,
-                    is_symlink: false,
-                },
-                content: Some(Default::default()),
-            };
-            state.entries.insert(path.to_path_buf(), entry);
-        }
-        state.emit_event(&[path]).await;
+        let inode = state.next_inode;
+        state.next_inode += 1;
+        let file = Arc::new(Mutex::new(FakeFsEntry::File {
+            inode,
+            mtime: SystemTime::now(),
+            content: String::new(),
+        }));
+        state
+            .write_path(path, |entry| {
+                match entry {
+                    btree_map::Entry::Occupied(mut e) => {
+                        if options.overwrite {
+                            *e.get_mut() = file;
+                        } else if !options.ignore_if_exists {
+                            return Err(anyhow!("path already exists: {}", path.display()));
+                        }
+                    }
+                    btree_map::Entry::Vacant(e) => {
+                        e.insert(file);
+                    }
+                }
+                Ok(())
+            })
+            .await?;
+        state.emit_event(&[path]);
         Ok(())
     }

-    async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> {
-        let source = normalize_path(source);
-        let target = normalize_path(target);
-
-        let mut state = self.state.lock().await;
-        state.validate_path(&source)?;
-        state.validate_path(&target)?;
-
-        if !options.overwrite && state.entries.contains_key(&target) {
-            if options.ignore_if_exists {
-                return Ok(());
-            } else {
-                return Err(anyhow!("{target:?} already exists"));
-            }
-        }
-
-        let mut removed = Vec::new();
-        state.entries.retain(|path, entry| {
-            if let Ok(relative_path) = path.strip_prefix(&source) {
-                removed.push((relative_path.to_path_buf(), entry.clone()));
-                false
-            } else {
-                true
-            }
-        });
-
-        for (relative_path, entry) in removed {
-            let new_path = normalize_path(&target.join(relative_path));
-            state.entries.insert(new_path, entry);
-        }
-
-        state.emit_event(&[source, target]).await;
+    async fn rename(&self, old_path: &Path, new_path: &Path, options: RenameOptions) -> Result<()> {
+        let old_path = normalize_path(old_path);
+        let new_path = normalize_path(new_path);
+        let mut state = self.state.lock().await;
+        let moved_entry = state
+            .write_path(&old_path, |e| {
+                if let btree_map::Entry::Occupied(e) = e {
+                    Ok(e.remove())
+                } else {
+                    Err(anyhow!("path does not exist: {}", &old_path.display()))
+                }
+            })
+            .await?;
+        state
+            .write_path(&new_path, |e| {
+                match e {
+                    btree_map::Entry::Occupied(mut e) => {
+                        if options.overwrite {
+                            *e.get_mut() = moved_entry;
+                        } else if !options.ignore_if_exists {
+                            return Err(anyhow!("path already exists: {}", new_path.display()));
+                        }
+                    }
+                    btree_map::Entry::Vacant(e) => {
+                        e.insert(moved_entry);
+                    }
+                }
+                Ok(())
+            })
+            .await?;
+        state.emit_event(&[old_path, new_path]);
+        Ok(())
+    }
+
+    async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()> {
+        let source = normalize_path(source);
+        let target = normalize_path(target);
+
+        let mut state = self.state.lock().await;
+        let source_entry = state.read_path(&source).await?;
+        let content = source_entry.lock().await.file_content(&source)?.clone();
+        let entry = state
+            .write_path(&target, |e| match e {
+                btree_map::Entry::Occupied(e) => {
+                    if options.overwrite {
+                        Ok(Some(e.get().clone()))
+                    } else if !options.ignore_if_exists {
+                        return Err(anyhow!("{target:?} already exists"));
+                    } else {
+                        Ok(None)
+                    }
+                }
+                btree_map::Entry::Vacant(e) => Ok(Some(
+                    e.insert(Arc::new(Mutex::new(FakeFsEntry::File {
+                        inode: 0,
+                        mtime: SystemTime::now(),
+                        content: String::new(),
+                    })))
+                    .clone(),
+                )),
+            })
+            .await?;
+        if let Some(entry) = entry {
+            entry.lock().await.set_file_content(&target, content)?;
+        }
+        state.emit_event(&[target]);
         Ok(())
     }

-    async fn copy(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()> {
-        let source = normalize_path(source);
-        let target = normalize_path(target);
-
-        let mut state = self.state.lock().await;
-        state.validate_path(&source)?;
-        state.validate_path(&target)?;
-
-        if !options.overwrite && state.entries.contains_key(&target) {
-            if options.ignore_if_exists {
-                return Ok(());
-            } else {
-                return Err(anyhow!("{target:?} already exists"));
-            }
-        }
-
-        let mut new_entries = Vec::new();
-        for (path, entry) in &state.entries {
-            if let Ok(relative_path) = path.strip_prefix(&source) {
-                new_entries.push((relative_path.to_path_buf(), entry.clone()));
-            }
-        }
-
-        let mut events = Vec::new();
-        for (relative_path, entry) in new_entries {
-            let new_path = normalize_path(&target.join(relative_path));
-            events.push(new_path.clone());
-            state.entries.insert(new_path, entry);
-        }
-
-        state.emit_event(&events).await;
-        Ok(())
-    }
-
-    async fn remove_dir(&self, dir_path: &Path, options: RemoveOptions) -> Result<()> {
-        let dir_path = normalize_path(dir_path);
-        let mut state = self.state.lock().await;
-        state.validate_path(&dir_path)?;
-        if let Some(entry) = state.entries.get(&dir_path) {
-            if !entry.metadata.is_dir {
-                return Err(anyhow!(
-                    "cannot remove {dir_path:?} because it is not a dir"
-                ));
-            }
-
-            if !options.recursive {
-                let descendants = state
-                    .entries
-                    .keys()
-                    .filter(|path| path.starts_with(path))
-                    .count();
-                if descendants > 1 {
-                    return Err(anyhow!("{dir_path:?} is not empty"));
-                }
-            }
-
-            state.entries.retain(|path, _| !path.starts_with(&dir_path));
-            state.emit_event(&[dir_path]).await;
-        } else if !options.ignore_if_not_exists {
-            return Err(anyhow!("{dir_path:?} does not exist"));
-        }
+    async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        let path = normalize_path(path);
+        let parent_path = path
+            .parent()
+            .ok_or_else(|| anyhow!("cannot remove the root"))?;
+        let base_name = path.file_name().unwrap();
+
+        let state = self.state.lock().await;
+        let parent_entry = state.read_path(parent_path).await?;
+        let mut parent_entry = parent_entry.lock().await;
+        let entry = parent_entry
+            .dir_entries(parent_path)?
+            .entry(base_name.to_str().unwrap().into());
+
+        match entry {
+            btree_map::Entry::Vacant(_) => {
+                if !options.ignore_if_not_exists {
+                    return Err(anyhow!("{path:?} does not exist"));
+                }
+            }
+            btree_map::Entry::Occupied(e) => {
+                {
+                    let mut entry = e.get().lock().await;
+                    let children = entry.dir_entries(&path)?;
+                    if !options.recursive && !children.is_empty() {
+                        return Err(anyhow!("{path:?} is not empty"));
+                    }
+                }
+                e.remove();
+            }
         }

         Ok(())
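The FakeFs methods above all funnel mutations through write_path, which hands the caller a btree_map::Entry for the target name so that each operation decides for itself how to treat an existing entry. A small standalone sketch of that Occupied/Vacant pattern on a plain std BTreeMap (the function and its names are illustrative only):

    use std::collections::{btree_map, BTreeMap};

    fn upsert(
        map: &mut BTreeMap<String, String>,
        name: &str,
        value: String,
        overwrite: bool,
    ) -> Result<(), String> {
        match map.entry(name.to_string()) {
            // No entry yet: always insert.
            btree_map::Entry::Vacant(e) => {
                e.insert(value);
            }
            // Existing entry: replace or report a conflict, depending on options.
            btree_map::Entry::Occupied(mut e) => {
                if overwrite {
                    *e.get_mut() = value;
                } else {
                    return Err(format!("{name} already exists"));
                }
            }
        }
        Ok(())
    }

create_file, rename, and copy_file above are variations on this shape, differing only in how they handle the Occupied case.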
@@ -593,18 +707,28 @@ impl Fs for FakeFs {

     async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
         let path = normalize_path(path);
+        let parent_path = path
+            .parent()
+            .ok_or_else(|| anyhow!("cannot remove the root"))?;
+        let base_name = path.file_name().unwrap();
         let mut state = self.state.lock().await;
-        state.validate_path(&path)?;
-        if let Some(entry) = state.entries.get(&path) {
-            if entry.metadata.is_dir {
-                return Err(anyhow!("cannot remove {path:?} because it is not a file"));
-            }
-
-            state.entries.remove(&path);
-            state.emit_event(&[path]).await;
-        } else if !options.ignore_if_not_exists {
-            return Err(anyhow!("{path:?} does not exist"));
-        }
+        let parent_entry = state.read_path(parent_path).await?;
+        let mut parent_entry = parent_entry.lock().await;
+        let entry = parent_entry
+            .dir_entries(parent_path)?
+            .entry(base_name.to_str().unwrap().into());
+        match entry {
+            btree_map::Entry::Vacant(_) => {
+                if !options.ignore_if_not_exists {
+                    return Err(anyhow!("{path:?} does not exist"));
+                }
+            }
+            btree_map::Entry::Occupied(e) => {
+                e.get().lock().await.file_content(&path)?;
+                e.remove();
+            }
+        }
+        state.emit_event(&[path]);
         Ok(())
     }

@@ -617,86 +741,84 @@ impl Fs for FakeFs {
         let path = normalize_path(path);
         self.simulate_random_delay().await;
         let state = self.state.lock().await;
-        let text = state
-            .entries
-            .get(&path)
-            .and_then(|e| e.content.as_ref())
-            .ok_or_else(|| anyhow!("file {:?} does not exist", path))?;
-        Ok(text.clone())
+        let entry = state.read_path(&path).await?;
+        let entry = entry.lock().await;
+        entry.file_content(&path).cloned()
     }

     async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> {
         self.simulate_random_delay().await;
-        let mut state = self.state.lock().await;
         let path = normalize_path(path);
-        state.validate_path(&path)?;
         let content = chunks(text, line_ending).collect();
-        if let Some(entry) = state.entries.get_mut(&path) {
-            if entry.metadata.is_dir {
-                Err(anyhow!("cannot overwrite a directory with a file"))
-            } else {
-                entry.content = Some(content);
-                entry.metadata.mtime = SystemTime::now();
-                state.emit_event(&[path]).await;
-                Ok(())
-            }
-        } else {
-            let inode = state.next_inode;
-            state.next_inode += 1;
-            let entry = FakeFsEntry {
-                metadata: Metadata {
-                    inode,
-                    mtime: SystemTime::now(),
-                    is_dir: false,
-                    is_symlink: false,
-                },
-                content: Some(content),
-            };
-            state.entries.insert(path.to_path_buf(), entry);
-            state.emit_event(&[path]).await;
-            Ok(())
-        }
+        self.insert_file(path, content).await;
+        Ok(())
     }

     async fn canonicalize(&self, path: &Path) -> Result<PathBuf> {
+        let path = normalize_path(path);
         self.simulate_random_delay().await;
-        Ok(normalize_path(path))
+        let state = self.state.lock().await;
+        if let Some((_, real_path)) = state.try_read_path(&path).await {
+            Ok(real_path)
+        } else {
+            Err(anyhow!("path does not exist: {}", path.display()))
+        }
     }

     async fn is_file(&self, path: &Path) -> bool {
         let path = normalize_path(path);
         self.simulate_random_delay().await;
         let state = self.state.lock().await;
-        state
-            .entries
-            .get(&path)
-            .map_or(false, |entry| !entry.metadata.is_dir)
+        if let Some((entry, _)) = state.try_read_path(&path).await {
+            entry.lock().await.is_file()
+        } else {
+            false
+        }
     }

     async fn metadata(&self, path: &Path) -> Result<Option<Metadata>> {
         self.simulate_random_delay().await;
-        let state = self.state.lock().await;
         let path = normalize_path(path);
-        Ok(state.entries.get(&path).map(|entry| entry.metadata.clone()))
+        let state = self.state.lock().await;
+        if let Some((entry, real_path)) = state.try_read_path(&path).await {
+            let entry = entry.lock().await;
+            let is_symlink = real_path != path;
+
+            Ok(Some(match &*entry {
+                FakeFsEntry::File { inode, mtime, .. } => Metadata {
+                    inode: *inode,
+                    mtime: *mtime,
+                    is_dir: false,
+                    is_symlink,
+                },
+                FakeFsEntry::Dir { inode, mtime, .. } => Metadata {
+                    inode: *inode,
+                    mtime: *mtime,
+                    is_dir: true,
+                    is_symlink,
+                },
+                FakeFsEntry::Symlink { .. } => unreachable!(),
+            }))
+        } else {
+            Ok(None)
+        }
     }

     async fn read_dir(
         &self,
-        abs_path: &Path,
+        path: &Path,
     ) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
-        use futures::{future, stream};
         self.simulate_random_delay().await;
+        let path = normalize_path(path);
         let state = self.state.lock().await;
-        let abs_path = normalize_path(abs_path);
-        Ok(Box::pin(stream::iter(state.entries.clone()).filter_map(
-            move |(child_path, _)| {
-                future::ready(if child_path.parent() == Some(&abs_path) {
-                    Some(Ok(child_path))
-                } else {
-                    None
-                })
-            },
-        )))
+        let entry = state.read_path(&path).await?;
+        let mut entry = entry.lock().await;
+        let children = entry.dir_entries(&path)?;
+        let paths = children
+            .keys()
+            .map(|file_name| Ok(path.join(file_name)))
+            .collect::<Vec<_>>();
+        Ok(Box::pin(futures::stream::iter(paths)))
     }

     async fn watch(
@@ -773,3 +895,112 @@ pub fn normalize_path(path: &Path) -> PathBuf {
     }
     ret
 }
+
+pub fn copy_recursive<'a>(
+    fs: &'a dyn Fs,
+    source: &'a Path,
+    target: &'a Path,
+    options: CopyOptions,
+) -> BoxFuture<'a, Result<()>> {
+    use futures::future::FutureExt;
+
+    async move {
+        let metadata = fs
+            .metadata(source)
+            .await?
+            .ok_or_else(|| anyhow!("path does not exist: {}", source.display()))?;
+        if metadata.is_dir {
+            if !options.overwrite && fs.metadata(target).await.is_ok() {
+                if options.ignore_if_exists {
+                    return Ok(());
+                } else {
+                    return Err(anyhow!("{target:?} already exists"));
+                }
+            }
+
+            let _ = fs
+                .remove_dir(
+                    target,
+                    RemoveOptions {
+                        recursive: true,
+                        ignore_if_not_exists: true,
+                    },
+                )
+                .await;
+            fs.create_dir(target).await?;
+            let mut children = fs.read_dir(source).await?;
+            while let Some(child_path) = children.next().await {
+                if let Ok(child_path) = child_path {
+                    if let Some(file_name) = child_path.file_name() {
+                        let child_target_path = target.join(file_name);
+                        copy_recursive(fs, &child_path, &child_target_path, options).await?;
+                    }
+                }
+            }
+
+            Ok(())
+        } else {
+            fs.copy_file(source, target, options).await
+        }
+    }
+    .boxed()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use gpui::TestAppContext;
+    use serde_json::json;
+
+    #[gpui::test]
+    async fn test_fake_fs(cx: &mut TestAppContext) {
+        let fs = FakeFs::new(cx.background());
+
+        fs.insert_tree(
+            "/root",
+            json!({
+                "dir1": {
+                    "a": "A",
+                    "b": "B"
+                },
+                "dir2": {
+                    "c": "C",
+                    "dir3": {
+                        "d": "D"
+                    }
+                }
+            }),
+        )
+        .await;
+
+        assert_eq!(
+            fs.files().await,
+            vec![
+                PathBuf::from("/root/dir1/a"),
+                PathBuf::from("/root/dir1/b"),
+                PathBuf::from("/root/dir2/c"),
+                PathBuf::from("/root/dir2/dir3/d"),
+            ]
+        );
+
+        fs.insert_symlink("/root/dir2/link-to-dir3", "./dir3".into())
+            .await;
+
+        assert_eq!(
+            fs.canonicalize("/root/dir2/link-to-dir3".as_ref())
+                .await
+                .unwrap(),
+            PathBuf::from("/root/dir2/dir3"),
+        );
+        assert_eq!(
+            fs.canonicalize("/root/dir2/link-to-dir3/d".as_ref())
+                .await
+                .unwrap(),
+            PathBuf::from("/root/dir2/dir3/d"),
+        );
+        assert_eq!(
+            fs.load("/root/dir2/link-to-dir3/d".as_ref()).await.unwrap(),
+            "D",
+        );
+    }
+}
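copy_recursive returns a BoxFuture and ends with .boxed() because a plain async fn cannot await itself recursively; the compiler could not size its future type. A minimal sketch of the same boxing pattern, independent of the Fs trait (the counting function is purely illustrative):

    use futures::future::{BoxFuture, FutureExt};

    // Recursive async work has to go through a boxed, dynamically dispatched
    // future, which is the same shape copy_recursive uses above.
    fn count_down(n: u32) -> BoxFuture<'static, u32> {
        async move {
            if n == 0 {
                0
            } else {
                1 + count_down(n - 1).await
            }
        }
        .boxed()
    }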
@@ -1,4 +1,4 @@
-use crate::{ProjectEntryId, RemoveOptions};
+use crate::{copy_recursive, ProjectEntryId, RemoveOptions};

 use super::{
     fs::{self, Fs},
@@ -47,7 +47,7 @@ use std::{
     task::Poll,
     time::{Duration, SystemTime},
 };
-use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap};
+use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
 use util::{ResultExt, TryFutureExt};

 lazy_static! {
@@ -731,7 +731,12 @@ impl LocalWorktree {
             let fs = self.fs.clone();
             let abs_new_path = abs_new_path.clone();
             async move {
-                fs.copy(&abs_old_path, &abs_new_path, Default::default())
-                    .await
+                copy_recursive(
+                    fs.as_ref(),
+                    &abs_old_path,
+                    &abs_new_path,
+                    Default::default(),
+                )
+                .await
             }
         });
@@ -1486,6 +1491,16 @@ impl LocalSnapshot {
         }
     }

+    fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
+        let mut inodes = TreeSet::default();
+        for ancestor in path.ancestors().skip(1) {
+            if let Some(entry) = self.entry_for_path(ancestor) {
+                inodes.insert(entry.inode);
+            }
+        }
+        inodes
+    }
+
     fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
         let mut new_ignores = Vec::new();
         for ancestor in abs_path.ancestors().skip(1) {
@@ -2048,14 +2063,16 @@ impl BackgroundScanner {
     async fn scan_dirs(&mut self) -> Result<()> {
         let root_char_bag;
         let root_abs_path;
-        let next_entry_id;
+        let root_inode;
         let is_dir;
+        let next_entry_id;
         {
             let snapshot = self.snapshot.lock();
             root_char_bag = snapshot.root_char_bag;
             root_abs_path = snapshot.abs_path.clone();
+            root_inode = snapshot.root_entry().map(|e| e.inode);
+            is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
             next_entry_id = snapshot.next_entry_id.clone();
-            is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir())
         };

         // Populate ignores above the root.
@@ -2083,12 +2100,18 @@ impl BackgroundScanner {

         if is_dir {
             let path: Arc<Path> = Arc::from(Path::new(""));
+            let mut ancestor_inodes = TreeSet::default();
+            if let Some(root_inode) = root_inode {
+                ancestor_inodes.insert(root_inode);
+            }
+
             let (tx, rx) = channel::unbounded();
             self.executor
                 .block(tx.send(ScanJob {
                     abs_path: root_abs_path.to_path_buf(),
                     path,
                     ignore_stack,
+                    ancestor_inodes,
                     scan_queue: tx.clone(),
                 }))
                 .unwrap();
@@ -2190,10 +2213,13 @@ impl BackgroundScanner {
                 root_char_bag,
             );

-            if child_metadata.is_dir {
+            if child_entry.is_dir() {
                 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
                 child_entry.is_ignored = is_ignored;
-                new_entries.push(child_entry);
+
+                if !job.ancestor_inodes.contains(&child_entry.inode) {
+                    let mut ancestor_inodes = job.ancestor_inodes.clone();
+                    ancestor_inodes.insert(child_entry.inode);
                     new_jobs.push(ScanJob {
                         abs_path: child_abs_path,
                         path: child_path,
@@ -2202,12 +2228,15 @@ impl BackgroundScanner {
                         } else {
                             ignore_stack.clone()
                         },
+                        ancestor_inodes,
                         scan_queue: job.scan_queue.clone(),
                     });
+                }
             } else {
                 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
+            }
+
             new_entries.push(child_entry);
-            };
         }

         self.snapshot
@@ -2286,12 +2315,16 @@ impl BackgroundScanner {
                     );
                     fs_entry.is_ignored = ignore_stack.is_all();
                     snapshot.insert_entry(fs_entry, self.fs.as_ref());
-                    if metadata.is_dir {
+
+                    let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
+                    if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
+                        ancestor_inodes.insert(metadata.inode);
                         self.executor
                             .block(scan_queue_tx.send(ScanJob {
                                 abs_path,
                                 path,
                                 ignore_stack,
+                                ancestor_inodes,
                                 scan_queue: scan_queue_tx.clone(),
                             }))
                             .unwrap();
@@ -2453,6 +2486,7 @@ struct ScanJob {
     path: Arc<Path>,
     ignore_stack: Arc<IgnoreStack>,
     scan_queue: Sender<ScanJob>,
+    ancestor_inodes: TreeSet<u64>,
 }

 struct UpdateIgnoreStatusJob {
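This field is the heart of the cycle detection in this change: every ScanJob carries the inodes of the directories above it, and the scanner (see the BackgroundScanner hunks above) only enqueues a child directory whose inode is not already on that chain, so a symlink pointing back up the tree is listed once but never descended into again. A hedged sketch of the idea using std collections in place of TreeSet; the inode-map model and the scan function are illustrative only:

    use std::collections::{HashMap, HashSet};

    // Toy model: a "filesystem" mapping each directory inode to the inodes of
    // its child directories. A symlink cycle appears as a child whose inode is
    // already one of its own ancestors.
    fn scan(
        fs: &HashMap<u64, Vec<u64>>,
        inode: u64,
        ancestor_inodes: &HashSet<u64>,
        visited: &mut Vec<u64>,
    ) {
        visited.push(inode);
        let mut ancestors = ancestor_inodes.clone();
        ancestors.insert(inode);
        for &child in fs.get(&inode).into_iter().flatten() {
            // Only descend into a child whose inode is not already an ancestor.
            if !ancestors.contains(&child) {
                scan(fs, child, &ancestors, visited);
            }
        }
    }

    // Example: if directory 1 contains 2 and 2 links back to 1, then
    // scan(&fs, 1, &HashSet::new(), &mut visited) records [1, 2] and stops.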
@@ -2739,7 +2773,7 @@ mod tests {
     use anyhow::Result;
     use client::test::FakeHttpClient;
     use fs::RealFs;
-    use gpui::TestAppContext;
+    use gpui::{executor::Deterministic, TestAppContext};
     use rand::prelude::*;
     use serde_json::json;
     use std::{
@@ -2807,6 +2841,87 @@ mod tests {
         })
     }

+    #[gpui::test(iterations = 10)]
+    async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+        let fs = FakeFs::new(cx.background());
+        fs.insert_tree(
+            "/root",
+            json!({
+                "lib": {
+                    "a": {
+                        "a.txt": ""
+                    },
+                    "b": {
+                        "b.txt": ""
+                    }
+                }
+            }),
+        )
+        .await;
+        fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+        fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+        let http_client = FakeHttpClient::with_404_response();
+        let client = Client::new(http_client);
+        let tree = Worktree::local(
+            client,
+            Arc::from(Path::new("/root")),
+            true,
+            fs.clone(),
+            Default::default(),
+            &mut cx.to_async(),
+        )
+        .await
+        .unwrap();
+
+        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+            .await;
+
+        tree.read_with(cx, |tree, _| {
+            assert_eq!(
+                tree.entries(false)
+                    .map(|entry| entry.path.as_ref())
+                    .collect::<Vec<_>>(),
+                vec![
+                    Path::new(""),
+                    Path::new("lib"),
+                    Path::new("lib/a"),
+                    Path::new("lib/a/a.txt"),
+                    Path::new("lib/a/lib"),
+                    Path::new("lib/b"),
+                    Path::new("lib/b/b.txt"),
+                    Path::new("lib/b/lib"),
+                ]
+            );
+        });
+
+        fs.rename(
+            Path::new("/root/lib/a/lib"),
+            Path::new("/root/lib/a/lib-2"),
+            Default::default(),
+        )
+        .await
+        .unwrap();
+        executor.run_until_parked();
+        tree.read_with(cx, |tree, _| {
+            assert_eq!(
+                tree.entries(false)
+                    .map(|entry| entry.path.as_ref())
+                    .collect::<Vec<_>>(),
+                vec![
+                    Path::new(""),
+                    Path::new("lib"),
+                    Path::new("lib/a"),
+                    Path::new("lib/a/a.txt"),
+                    Path::new("lib/a/lib-2"),
+                    Path::new("lib/b"),
+                    Path::new("lib/b/b.txt"),
+                    Path::new("lib/b/lib"),
+                ]
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
         let parent_dir = temp_tree(json!({
@@ -5,7 +5,7 @@ use arrayvec::ArrayVec;
 pub use cursor::{Cursor, FilterCursor, Iter};
 use std::marker::PhantomData;
 use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
-pub use tree_map::TreeMap;
+pub use tree_map::{TreeMap, TreeSet};

 #[cfg(test)]
 const TREE_BASE: usize = 2;
@@ -20,6 +20,11 @@ pub struct MapKey<K>(K);
 #[derive(Clone, Debug, Default)]
 pub struct MapKeyRef<'a, K>(Option<&'a K>);

+#[derive(Clone)]
+pub struct TreeSet<K>(TreeMap<K, ()>)
+where
+    K: Clone + Debug + Default + Ord;
+
 impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
     pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
         let tree = SumTree::from_iter(
@@ -136,6 +141,32 @@ where
     }
 }

+impl<K> Default for TreeSet<K>
+where
+    K: Clone + Debug + Default + Ord,
+{
+    fn default() -> Self {
+        Self(Default::default())
+    }
+}
+
+impl<K> TreeSet<K>
+where
+    K: Clone + Debug + Default + Ord,
+{
+    pub fn insert(&mut self, key: K) {
+        self.0.insert(key, ());
+    }
+
+    pub fn contains(&self, key: &K) -> bool {
+        self.0.get(key).is_some()
+    }
+
+    pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = &'a K> {
+        self.0.iter().map(|(k, _)| k)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
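For reference, a small hedged example of the TreeSet API added above; the key values are arbitrary and the snippet assumes it runs where sum_tree::TreeSet is in scope:

    let mut inodes = TreeSet::default();
    inodes.insert(3u64);
    inodes.insert(1);

    assert!(inodes.contains(&1));
    assert!(!inodes.contains(&2));
    assert_eq!(inodes.iter().count(), 2);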