Uncomment project2 tests (#3200)

Conrad Irwin 2023-11-01 20:27:53 +00:00 committed by GitHub
commit f415a37a3d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 3876 additions and 3745 deletions

View file

@@ -84,7 +84,7 @@ struct DeterministicState {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExecutorEvent {
PollRunnable { id: usize },
EnqueuRunnable { id: usize },
EnqueueRunnable { id: usize },
}
#[cfg(any(test, feature = "test-support"))]
@@ -199,7 +199,7 @@ impl Deterministic {
let unparker = self.parker.lock().unparker();
let (runnable, task) = async_task::spawn_local(future, move |runnable| {
let mut state = state.lock();
state.push_to_history(ExecutorEvent::EnqueuRunnable { id });
state.push_to_history(ExecutorEvent::EnqueueRunnable { id });
state
.scheduled_from_foreground
.entry(cx_id)
@@ -229,7 +229,7 @@ impl Deterministic {
let mut state = state.lock();
state
.poll_history
.push(ExecutorEvent::EnqueuRunnable { id });
.push(ExecutorEvent::EnqueueRunnable { id });
state
.scheduled_from_background
.push(BackgroundRunnable { id, runnable });
@@ -616,7 +616,7 @@ impl ExecutorEvent {
pub fn id(&self) -> usize {
match self {
ExecutorEvent::PollRunnable { id } => *id,
ExecutorEvent::EnqueuRunnable { id } => *id,
ExecutorEvent::EnqueueRunnable { id } => *id,
}
}
}

View file

@@ -376,7 +376,7 @@ impl AppContext {
self.observers.remove(&entity_id);
self.event_listeners.remove(&entity_id);
for mut release_callback in self.release_listeners.remove(&entity_id) {
release_callback(&mut entity, self);
release_callback(entity.as_mut(), self);
}
}
}
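Aside, not part of the diff: the switch from `&mut entity` to `entity.as_mut()` matters when `entity` is a boxed `dyn Any` value (assumed here; the AnyModel internals are not shown in this hunk). A mutable borrow of the Box coerces to a `&mut dyn Any` whose concrete type is the Box itself, so a downcast inside the release callback silently fails. A minimal, self-contained sketch of that footgun:

use std::any::Any;

fn main() {
    let mut entity: Box<dyn Any> = Box::new(42i32);

    // `&mut entity` is a `&mut Box<dyn Any>`; coerced to `&mut dyn Any`, its
    // concrete type is the Box, so downcasting to the payload fails.
    let via_box: &mut dyn Any = &mut entity;
    assert!(via_box.downcast_mut::<i32>().is_none());

    // `entity.as_mut()` borrows the payload behind the Box, so the downcast a
    // release callback would perform succeeds.
    assert!(entity.as_mut().downcast_mut::<i32>().is_some());
}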

View file

@@ -106,7 +106,12 @@ impl EntityMap {
dropped_entity_ids
.into_iter()
.map(|entity_id| {
ref_counts.counts.remove(entity_id);
let count = ref_counts.counts.remove(entity_id).unwrap();
debug_assert_eq!(
count.load(SeqCst),
0,
"dropped an entity that was referenced"
);
(entity_id, self.entities.remove(entity_id).unwrap())
})
.collect()
@@ -211,7 +216,7 @@ impl Drop for AnyModel {
let count = entity_map
.counts
.get(self.entity_id)
.expect("Detected over-release of a model.");
.expect("detected over-release of a handle.");
let prev_count = count.fetch_sub(1, SeqCst);
assert_ne!(prev_count, 0, "Detected over-release of a model.");
if prev_count == 1 {
@@ -395,12 +400,16 @@ impl AnyWeakModel {
}
pub fn upgrade(&self) -> Option<AnyModel> {
let entity_map = self.entity_ref_counts.upgrade()?;
entity_map
.read()
.counts
.get(self.entity_id)?
.fetch_add(1, SeqCst);
let ref_counts = &self.entity_ref_counts.upgrade()?;
let ref_counts = ref_counts.read();
let ref_count = ref_counts.counts.get(self.entity_id)?;
// A ref count of zero means this entity_id is already in dropped_entity_ids, awaiting take_dropped.
if ref_count.load(SeqCst) == 0 {
return None;
}
ref_count.fetch_add(1, SeqCst);
Some(AnyModel {
entity_id: self.entity_id,
entity_type: self.entity_type,
@@ -499,3 +508,60 @@ impl<T> PartialEq<Model<T>> for WeakModel<T> {
self.entity_id() == other.any_model.entity_id()
}
}
#[cfg(test)]
mod test {
use crate::EntityMap;
struct TestEntity {
pub i: i32,
}
#[test]
fn test_entity_map_slot_assignment_before_cleanup() {
// Tests that slots are not re-used before take_dropped.
let mut entity_map = EntityMap::new();
let slot = entity_map.reserve::<TestEntity>();
entity_map.insert(slot, TestEntity { i: 1 });
let slot = entity_map.reserve::<TestEntity>();
entity_map.insert(slot, TestEntity { i: 2 });
let dropped = entity_map.take_dropped();
assert_eq!(dropped.len(), 2);
assert_eq!(
dropped
.into_iter()
.map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
.collect::<Vec<i32>>(),
vec![1, 2],
);
}
#[test]
fn test_entity_map_weak_upgrade_before_cleanup() {
// Tests that weak handles are not upgraded before take_dropped
let mut entity_map = EntityMap::new();
let slot = entity_map.reserve::<TestEntity>();
let handle = entity_map.insert(slot, TestEntity { i: 1 });
let weak = handle.downgrade();
drop(handle);
let strong = weak.upgrade();
assert_eq!(strong, None);
let dropped = entity_map.take_dropped();
assert_eq!(dropped.len(), 1);
assert_eq!(
dropped
.into_iter()
.map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
.collect::<Vec<i32>>(),
vec![1],
);
}
}
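Aside, not part of the diff: the upgrade guard and the second test above mirror the contract of std's Arc/Weak, where a weak handle must refuse to resurrect a value whose strong count has already reached zero, even though the backing storage (here, the EntityMap slot waiting for take_dropped) still exists:

use std::sync::Arc;

fn main() {
    let strong = Arc::new("entity");
    let weak = Arc::downgrade(&strong);

    // Dropping the last strong handle drives the strong count to zero...
    drop(strong);

    // ...so upgrade() must return None, exactly as
    // test_entity_map_weak_upgrade_before_cleanup expects of WeakModel.
    assert!(weak.upgrade().is_none());
}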

View file

@@ -1,7 +1,8 @@
use crate::{
AnyWindowHandle, AppContext, AsyncAppContext, Context, Executor, MainThread, Model,
ModelContext, Result, Task, TestDispatcher, TestPlatform, WindowContext,
AnyWindowHandle, AppContext, AsyncAppContext, Context, EventEmitter, Executor, MainThread,
Model, ModelContext, Result, Task, TestDispatcher, TestPlatform, WindowContext,
};
use futures::SinkExt;
use parking_lot::Mutex;
use std::{future::Future, sync::Arc};
@@ -63,8 +64,8 @@ impl TestAppContext {
}
pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> R {
let mut lock = self.app.lock();
f(&mut *lock)
let mut cx = self.app.lock();
cx.update(f)
}
pub fn read_window<R>(
@@ -149,4 +150,22 @@ impl TestAppContext {
executor: self.executor.clone(),
}
}
pub fn subscribe<T: 'static + EventEmitter + Send>(
&mut self,
entity: &Model<T>,
) -> futures::channel::mpsc::UnboundedReceiver<T::Event>
where
T::Event: 'static + Send + Clone,
{
let (mut tx, rx) = futures::channel::mpsc::unbounded();
entity
.update(self, |_, cx: &mut ModelContext<T>| {
cx.subscribe(entity, move |_, _, event, cx| {
cx.executor().block(tx.send(event.clone())).unwrap();
})
})
.detach();
rx
}
}
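Aside, not part of the diff: the new subscribe helper is essentially "forward every emitted event into an unbounded channel so the test can await it as a stream." A self-contained sketch of that pattern using only the futures crate, with the gpui subscription replaced by a plain callback (a hypothetical stand-in):

use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt};

fn main() {
    // The helper hands the test the receiving half and keeps the sender
    // inside the subscription callback.
    let (mut tx, mut rx) = mpsc::unbounded::<String>();

    // Stand-in for the cx.subscribe(...) closure: called once per emitted event.
    let mut on_event = move |event: String| {
        // The real helper blocks on the executor to push the event; block_on
        // plays that role in this sketch.
        block_on(tx.send(event)).unwrap();
    };

    on_event("Saved".into());
    on_event("Closed".into());

    assert_eq!(block_on(rx.next()).as_deref(), Some("Saved"));
    assert_eq!(block_on(rx.next()).as_deref(), Some("Closed"));
}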

View file

@@ -6,7 +6,10 @@ use std::{
marker::PhantomData,
mem,
pin::Pin,
sync::Arc,
sync::{
atomic::{AtomicBool, Ordering::SeqCst},
Arc,
},
task::{Context, Poll},
time::Duration,
};
@@ -136,7 +139,11 @@ impl Executor {
pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
pin_mut!(future);
let (parker, unparker) = parking::pair();
let awoken = Arc::new(AtomicBool::new(false));
let awoken2 = awoken.clone();
let waker = waker_fn(move || {
awoken2.store(true, SeqCst);
unparker.unpark();
});
let mut cx = std::task::Context::from_waker(&waker);
@@ -146,9 +153,20 @@ impl Executor {
Poll::Ready(result) => return result,
Poll::Pending => {
if !self.dispatcher.poll() {
if awoken.swap(false, SeqCst) {
continue;
}
#[cfg(any(test, feature = "test-support"))]
if let Some(_) = self.dispatcher.as_test() {
panic!("blocked with nothing left to run")
if let Some(test) = self.dispatcher.as_test() {
if !test.parking_allowed() {
let mut backtrace_message = String::new();
if let Some(backtrace) = test.waiting_backtrace() {
backtrace_message =
format!("\nbacktrace of waiting future:\n{:?}", backtrace);
}
panic!("parked with nothing left to run\n{:?}", backtrace_message)
}
}
parker.park();
}
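Aside, not part of the diff: stripped of the gpui-specific dispatcher, the pattern this hunk converges on is "poll, and only park if the dispatcher is idle and the waker has not fired since the last poll," which is what the new awoken flag guarantees. A self-contained sketch using the same parking, waker-fn, and futures crates, with the dispatcher replaced by a closure (a hypothetical stand-in); it omits the test-only panic, which the later hunks let a test opt out of via allow_parking():

use std::{
    future::Future,
    sync::{
        atomic::{AtomicBool, Ordering::SeqCst},
        Arc,
    },
    task::{Context, Poll},
};

use futures::pin_mut;
use waker_fn::waker_fn;

fn block_on<R>(future: impl Future<Output = R>, mut poll_dispatcher: impl FnMut() -> bool) -> R {
    pin_mut!(future);
    let (parker, unparker) = parking::pair();
    let awoken = Arc::new(AtomicBool::new(false));
    let awoken2 = awoken.clone();
    let waker = waker_fn(move || {
        awoken2.store(true, SeqCst);
        unparker.unpark();
    });
    let mut cx = Context::from_waker(&waker);
    loop {
        match future.as_mut().poll(&mut cx) {
            Poll::Ready(result) => return result,
            Poll::Pending => {
                if !poll_dispatcher() {
                    // A wake-up may have landed between the poll and this check;
                    // re-poll instead of parking so it is not lost.
                    if awoken.swap(false, SeqCst) {
                        continue;
                    }
                    parker.park();
                }
            }
        }
    }
}

fn main() {
    // A future that is immediately ready returns before the dispatcher or
    // parker is ever consulted.
    assert_eq!(block_on(async { 6 * 7 }, || false), 42);
}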
@@ -206,12 +224,12 @@ impl Executor {
#[cfg(any(test, feature = "test-support"))]
pub fn start_waiting(&self) {
todo!("start_waiting")
self.dispatcher.as_test().unwrap().start_waiting();
}
#[cfg(any(test, feature = "test-support"))]
pub fn finish_waiting(&self) {
todo!("finish_waiting")
self.dispatcher.as_test().unwrap().finish_waiting();
}
#[cfg(any(test, feature = "test-support"))]
@@ -229,6 +247,11 @@ impl Executor {
self.dispatcher.as_test().unwrap().run_until_parked()
}
#[cfg(any(test, feature = "test-support"))]
pub fn allow_parking(&self) {
self.dispatcher.as_test().unwrap().allow_parking();
}
pub fn num_cpus(&self) -> usize {
num_cpus::get()
}

View file

@@ -1,5 +1,6 @@
use crate::PlatformDispatcher;
use async_task::Runnable;
use backtrace::Backtrace;
use collections::{HashMap, VecDeque};
use parking_lot::Mutex;
use rand::prelude::*;
@@ -28,6 +29,8 @@ struct TestDispatcherState {
time: Duration,
is_main_thread: bool,
next_id: TestDispatcherId,
allow_parking: bool,
waiting_backtrace: Option<Backtrace>,
}
impl TestDispatcher {
@@ -40,6 +43,8 @@ impl TestDispatcher {
time: Duration::ZERO,
is_main_thread: true,
next_id: TestDispatcherId(1),
allow_parking: false,
waiting_backtrace: None,
};
TestDispatcher {
@@ -66,7 +71,7 @@ impl TestDispatcher {
self.state.lock().time = new_now;
}
pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
pub struct YieldNow {
count: usize,
}
@@ -93,6 +98,29 @@ impl TestDispatcher {
pub fn run_until_parked(&self) {
while self.poll() {}
}
pub fn parking_allowed(&self) -> bool {
self.state.lock().allow_parking
}
pub fn allow_parking(&self) {
self.state.lock().allow_parking = true
}
pub fn start_waiting(&self) {
self.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
}
pub fn finish_waiting(&self) {
self.state.lock().waiting_backtrace.take();
}
pub fn waiting_backtrace(&self) -> Option<Backtrace> {
self.state.lock().waiting_backtrace.take().map(|mut b| {
b.resolve();
b
})
}
}
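Aside, not part of the diff: start_waiting captures with Backtrace::new_unresolved() rather than Backtrace::new(), so the cheap frame capture happens on every wait while the expensive symbol resolution only happens if the backtrace is actually reported from the panic path. A minimal sketch of that deferred-resolution pattern with the same backtrace crate:

use backtrace::Backtrace;

struct Waiting {
    backtrace: Option<Backtrace>,
}

impl Waiting {
    fn start_waiting(&mut self) {
        // Records raw frame addresses only; no symbolication yet.
        self.backtrace = Some(Backtrace::new_unresolved());
    }

    fn finish_waiting(&mut self) {
        self.backtrace.take();
    }

    fn waiting_backtrace(&mut self) -> Option<Backtrace> {
        self.backtrace.take().map(|mut b| {
            // Pay for symbol resolution only when the backtrace is reported.
            b.resolve();
            b
        })
    }
}

fn main() {
    let mut waiting = Waiting { backtrace: None };
    waiting.start_waiting();
    if let Some(backtrace) = waiting.waiting_backtrace() {
        println!("captured {} frames", backtrace.frames().len());
    }
    waiting.finish_waiting();
}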
impl Clone for TestDispatcher {

View file

@@ -2604,64 +2604,64 @@ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
init_test(cx);
// #[gpui::test]
// async fn test_save_as(cx: &mut gpui::TestAppContext) {
// init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree("/dir", json!({})).await;
// let fs = FakeFs::new(cx.background());
// fs.insert_tree("/dir", json!({})).await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let languages = project.read_with(cx, |project, _| project.languages().clone());
languages.register(
"/some/path",
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".into()],
..Default::default()
},
tree_sitter_rust::language(),
vec![],
|_| Default::default(),
);
// let languages = project.read_with(cx, |project, _| project.languages().clone());
// languages.register(
// "/some/path",
// LanguageConfig {
// name: "Rust".into(),
// path_suffixes: vec!["rs".into()],
// ..Default::default()
// },
// tree_sitter_rust::language(),
// vec![],
// |_| Default::default(),
// );
let buffer = project.update(cx, |project, cx| {
project.create_buffer("", None, cx).unwrap()
});
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, "abc")], None, cx);
assert!(buffer.is_dirty());
assert!(!buffer.has_conflict());
assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
});
project
.update(cx, |project, cx| {
project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
})
.await
.unwrap();
assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
// let buffer = project.update(cx, |project, cx| {
// project.create_buffer("", None, cx).unwrap()
// });
// buffer.update(cx, |buffer, cx| {
// buffer.edit([(0..0, "abc")], None, cx);
// assert!(buffer.is_dirty());
// assert!(!buffer.has_conflict());
// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
// });
// project
// .update(cx, |project, cx| {
// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
// })
// .await
// .unwrap();
// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
cx.foreground().run_until_parked();
buffer.read_with(cx, |buffer, cx| {
assert_eq!(
buffer.file().unwrap().full_path(cx),
Path::new("dir/file1.rs")
);
assert!(!buffer.is_dirty());
assert!(!buffer.has_conflict());
assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
});
// cx.foreground().run_until_parked();
// buffer.read_with(cx, |buffer, cx| {
// assert_eq!(
// buffer.file().unwrap().full_path(cx),
// Path::new("dir/file1.rs")
// );
// assert!(!buffer.is_dirty());
// assert!(!buffer.has_conflict());
// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
// });
let opened_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/dir/file1.rs", cx)
})
.await
.unwrap();
assert_eq!(opened_buffer, buffer);
}
// let opened_buffer = project
// .update(cx, |project, cx| {
// project.open_local_buffer("/dir/file1.rs", cx)
// })
// .await
// .unwrap();
// assert_eq!(opened_buffer, buffer);
// }
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(

View file

@@ -16,6 +16,7 @@ test-support = [
"settings2/test-support",
"text/test-support",
"prettier2/test-support",
"gpui2/test-support",
]
[dependencies]

View file

@@ -855,39 +855,39 @@ impl Project {
}
}
// #[cfg(any(test, feature = "test-support"))]
// pub async fn test(
// fs: Arc<dyn Fs>,
// root_paths: impl IntoIterator<Item = &Path>,
// cx: &mut gpui::TestAppContext,
// ) -> Handle<Project> {
// let mut languages = LanguageRegistry::test();
// languages.set_executor(cx.background());
// let http_client = util::http::FakeHttpClient::with_404_response();
// let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
// let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
// let project = cx.update(|cx| {
// Project::local(
// client,
// node_runtime::FakeNodeRuntime::new(),
// user_store,
// Arc::new(languages),
// fs,
// cx,
// )
// });
// for path in root_paths {
// let (tree, _) = project
// .update(cx, |project, cx| {
// project.find_or_create_local_worktree(path, true, cx)
// })
// .await
// .unwrap();
// tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
// .await;
// }
// project
// }
#[cfg(any(test, feature = "test-support"))]
pub async fn test(
fs: Arc<dyn Fs>,
root_paths: impl IntoIterator<Item = &Path>,
cx: &mut gpui2::TestAppContext,
) -> Model<Project> {
let mut languages = LanguageRegistry::test();
languages.set_executor(cx.executor().clone());
let http_client = util::http::FakeHttpClient::with_404_response();
let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
let project = cx.update(|cx| {
Project::local(
client,
node_runtime::FakeNodeRuntime::new(),
user_store,
Arc::new(languages),
fs,
cx,
)
});
for path in root_paths {
let (tree, _) = project
.update(cx, |project, cx| {
project.find_or_create_local_worktree(path, true, cx)
})
.await
.unwrap();
tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
}
project
}
fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
let mut language_servers_to_start = Vec::new();

File diff suppressed because it is too large.

View file

@@ -4030,53 +4030,52 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>,
}
// todo!("re-enable when we have tests")
// pub trait WorktreeModelHandle {
// #[cfg(any(test, feature = "test-support"))]
// fn flush_fs_events<'a>(
// &self,
// cx: &'a gpui::TestAppContext,
// ) -> futures::future::LocalBoxFuture<'a, ()>;
// }
pub trait WorktreeModelHandle {
#[cfg(any(test, feature = "test-support"))]
fn flush_fs_events<'a>(
&self,
cx: &'a mut gpui2::TestAppContext,
) -> futures::future::LocalBoxFuture<'a, ()>;
}
// impl WorktreeModelHandle for Handle<Worktree> {
// // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
// // occurred before the worktree was constructed. These events can cause the worktree to perform
// // extra directory scans, and emit extra scan-state notifications.
// //
// // This function mutates the worktree's directory and waits for those mutations to be picked up,
// // to ensure that all redundant FS events have already been processed.
// #[cfg(any(test, feature = "test-support"))]
// fn flush_fs_events<'a>(
// &self,
// cx: &'a gpui::TestAppContext,
// ) -> futures::future::LocalBoxFuture<'a, ()> {
// let filename = "fs-event-sentinel";
// let tree = self.clone();
// let (fs, root_path) = self.read_with(cx, |tree, _| {
// let tree = tree.as_local().unwrap();
// (tree.fs.clone(), tree.abs_path().clone())
// });
impl WorktreeModelHandle for Model<Worktree> {
// The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
// occurred before the worktree was constructed. These events can cause the worktree to perform
// extra directory scans, and emit extra scan-state notifications.
//
// This function mutates the worktree's directory and waits for those mutations to be picked up,
// to ensure that all redundant FS events have already been processed.
#[cfg(any(test, feature = "test-support"))]
fn flush_fs_events<'a>(
&self,
cx: &'a mut gpui2::TestAppContext,
) -> futures::future::LocalBoxFuture<'a, ()> {
let filename = "fs-event-sentinel";
let tree = self.clone();
let (fs, root_path) = self.update(cx, |tree, _| {
let tree = tree.as_local().unwrap();
(tree.fs.clone(), tree.abs_path().clone())
});
// async move {
// fs.create_file(&root_path.join(filename), Default::default())
// .await
// .unwrap();
// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
// .await;
async move {
fs.create_file(&root_path.join(filename), Default::default())
.await
.unwrap();
cx.executor().run_until_parked();
assert!(tree.update(cx, |tree, _| tree.entry_for_path(filename).is_some()));
// fs.remove_file(&root_path.join(filename), Default::default())
// .await
// .unwrap();
// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
// .await;
fs.remove_file(&root_path.join(filename), Default::default())
.await
.unwrap();
cx.executor().run_until_parked();
assert!(tree.update(cx, |tree, _| tree.entry_for_path(filename).is_none()));
// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
// .await;
// }
// .boxed_local()
// }
// }
cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
}
.boxed_local()
}
}
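Aside, not part of the diff: flush_fs_events is an instance of a general sentinel-flush technique: to know that every event queued before some point has been handled, inject a marker you control and wait until the consumer observes it. A channel-based sketch of the idea, with the worktree and FakeFs replaced by hypothetical stand-ins:

use std::sync::mpsc;

fn main() {
    let (events_tx, events_rx) = mpsc::channel::<String>();

    // Pretend these were emitted before we started caring about freshness,
    // like the "redundant" FS events described in the comment above.
    events_tx.send("stale: /dir/a".into()).unwrap();
    events_tx.send("stale: /dir/b".into()).unwrap();

    // Inject the sentinel (the real code creates a file named
    // "fs-event-sentinel"), then drain until it shows up: everything queued
    // earlier has necessarily been processed by then.
    events_tx.send("sentinel".into()).unwrap();
    for event in events_rx.iter() {
        if event == "sentinel" {
            break;
        }
    }

    // From this point on, only events caused by new mutations can arrive.
}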
#[derive(Clone, Debug)]
struct TraversalProgress<'a> {