Merge branch 'master' into file-changed-on-disk

This commit is contained in:
Max Brunsfeld 2021-05-12 16:20:03 -07:00
commit 4910bc50c6
14 changed files with 3718 additions and 4119 deletions

9
Cargo.lock generated
View file

@ -1180,6 +1180,7 @@ dependencies = [
"etagere", "etagere",
"font-kit", "font-kit",
"foreign-types", "foreign-types",
"gpui_macros",
"log", "log",
"metal", "metal",
"num_cpus", "num_cpus",
@ -1205,6 +1206,14 @@ dependencies = [
"usvg", "usvg",
] ]
[[package]]
name = "gpui_macros"
version = "0.1.0"
dependencies = [
"quote",
"syn",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.9.1" version = "0.9.1"

View file

@ -1,5 +1,5 @@
[workspace] [workspace]
members = ["zed", "gpui", "fsevent", "scoped_pool"] members = ["zed", "gpui", "gpui_macros", "fsevent", "scoped_pool"]
[patch.crates-io] [patch.crates-io]
async-task = {git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e"} async-task = {git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e"}

View file

@ -8,6 +8,7 @@ version = "0.1.0"
async-task = "4.0.3" async-task = "4.0.3"
ctor = "0.1" ctor = "0.1"
etagere = "0.2" etagere = "0.2"
gpui_macros = {path = "../gpui_macros"}
log = "0.4" log = "0.4"
num_cpus = "1.13" num_cpus = "1.13"
ordered-float = "2.1.1" ordered-float = "2.1.1"

File diff suppressed because it is too large Load diff

View file

@ -24,6 +24,7 @@ pub mod color;
pub mod json; pub mod json;
pub mod keymap; pub mod keymap;
mod platform; mod platform;
pub use gpui_macros::test;
pub use platform::{Event, PathPromptOptions, PromptLevel}; pub use platform::{Event, PathPromptOptions, PromptLevel};
pub use presenter::{ pub use presenter::{
AfterLayoutContext, Axis, DebugContext, EventContext, LayoutContext, PaintContext, AfterLayoutContext, Axis, DebugContext, EventContext, LayoutContext, PaintContext,

11
gpui_macros/Cargo.toml Normal file
View file

@ -0,0 +1,11 @@
[package]
name = "gpui_macros"
version = "0.1.0"
edition = "2018"
[lib]
proc-macro = true
[dependencies]
syn = "1.0"
quote = "1.0"

57
gpui_macros/src/lib.rs Normal file
View file

@ -0,0 +1,57 @@
use std::mem;
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{parse_macro_input, parse_quote, AttributeArgs, ItemFn, Meta, NestedMeta};
/// Attribute macro that turns a function taking a GPUI app context into a
/// standard `#[test]` function.
///
/// The annotated function is renamed with a leading underscore and wrapped in
/// an outer `#[test] fn` bearing the original name. The outer function runs
/// the inner one inside `gpui::App::test` (sync) or `gpui::App::test_async`
/// (if the inner function is `async`).
///
/// Accepts a single optional argument, `self`, which switches the namespace
/// used to reach `App` from `gpui` to `crate` — so gpui's own internal tests
/// can use this macro too. Any other argument yields a compile error at the
/// argument's span.
#[proc_macro_attribute]
pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
    // Path prefix used to reach `App` in the generated code; defaults to the
    // external crate name, overridden to `crate` by the `self` argument.
    let mut namespace = format_ident!("gpui");
    let args = syn::parse_macro_input!(args as AttributeArgs);
    for arg in args {
        match arg {
            // `#[gpui::test(self)]` — generate `crate::App::...` instead of
            // `gpui::App::...`.
            NestedMeta::Meta(Meta::Path(name))
                if name.get_ident().map_or(false, |n| n == "self") =>
            {
                namespace = format_ident!("crate");
            }
            // Anything else is rejected with an error pointing at the
            // offending argument tokens.
            other => {
                return TokenStream::from(
                    syn::Error::new_spanned(other, "invalid argument").into_compile_error(),
                )
            }
        }
    }

    let mut inner_fn = parse_macro_input!(function as ItemFn);
    // Strip the inner function's attributes so they can be re-applied to the
    // outer wrapper instead (e.g. `#[should_panic]` must sit on the actual
    // `#[test]` fn to take effect).
    let inner_fn_attributes = mem::take(&mut inner_fn.attrs);
    // Rename the inner function `foo` -> `_foo`; the outer wrapper takes the
    // original name so the test shows up under the name the author wrote.
    let inner_fn_name = format_ident!("_{}", inner_fn.sig.ident);
    let outer_fn_name = mem::replace(&mut inner_fn.sig.ident, inner_fn_name.clone());

    // Async inner functions are awaited inside `App::test_async`; sync ones
    // are called directly inside `App::test`. Both receive the app context
    // supplied by the harness closure.
    let mut outer_fn: ItemFn = if inner_fn.sig.asyncness.is_some() {
        parse_quote! {
            #[test]
            fn #outer_fn_name() {
                #inner_fn

                #namespace::App::test_async((), move |ctx| async {
                    #inner_fn_name(ctx).await;
                });
            }
        }
    } else {
        parse_quote! {
            #[test]
            fn #outer_fn_name() {
                #inner_fn

                #namespace::App::test((), |ctx| {
                    #inner_fn_name(ctx);
                });
            }
        }
    };
    // Re-apply the attributes captured from the inner function to the
    // generated outer test function.
    outer_fn.attrs.extend(inner_fn_attributes);

    TokenStream::from(quote!(#outer_fn))
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -671,176 +671,163 @@ mod tests {
use super::*; use super::*;
use crate::test::sample_text; use crate::test::sample_text;
use buffer::ToPoint; use buffer::ToPoint;
use gpui::App;
#[test] #[gpui::test]
fn test_basic_folds() { fn test_basic_folds(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx)); let mut map = FoldMap::new(buffer.clone(), app.as_ref());
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold( map.fold(
vec![ vec![
Point::new(0, 2)..Point::new(2, 2), Point::new(0, 2)..Point::new(2, 2),
Point::new(2, 4)..Point::new(4, 1), Point::new(2, 4)..Point::new(4, 1),
], ],
app.as_ref(), app.as_ref(),
) )
.unwrap(); .unwrap();
assert_eq!(map.text(app.as_ref()), "aa…cc…eeeee"); assert_eq!(map.text(app.as_ref()), "aa…cc…eeeee");
buffer.update(app, |buffer, ctx| { buffer.update(app, |buffer, ctx| {
buffer buffer
.edit( .edit(
vec![ vec![
Point::new(0, 0)..Point::new(0, 1), Point::new(0, 0)..Point::new(0, 1),
Point::new(2, 3)..Point::new(2, 3), Point::new(2, 3)..Point::new(2, 3),
], ],
"123", "123",
Some(ctx), Some(ctx),
) )
.unwrap();
});
assert_eq!(map.text(app.as_ref()), "123a…c123c…eeeee");
buffer.update(app, |buffer, ctx| {
let start_version = buffer.version.clone();
buffer
.edit(Some(Point::new(2, 6)..Point::new(4, 3)), "456", Some(ctx))
.unwrap();
buffer.edits_since(start_version).collect::<Vec<_>>()
});
assert_eq!(map.text(app.as_ref()), "123a…c123456eee");
map.unfold(Some(Point::new(0, 4)..Point::new(0, 5)), app.as_ref())
.unwrap(); .unwrap();
assert_eq!(map.text(app.as_ref()), "123aaaaa\nbbbbbb\nccc123456eee");
}); });
} assert_eq!(map.text(app.as_ref()), "123a…c123c…eeeee");
#[test] buffer.update(app, |buffer, ctx| {
fn test_adjacent_folds() { let start_version = buffer.version.clone();
App::test((), |app| { buffer
let buffer = app.add_model(|ctx| Buffer::new(0, "abcdefghijkl", ctx)); .edit(Some(Point::new(2, 6)..Point::new(4, 3)), "456", Some(ctx))
.unwrap();
{ buffer.edits_since(start_version).collect::<Vec<_>>()
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(vec![5..8], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "abcde…ijkl");
// Create an fold adjacent to the start of the first fold.
map.fold(vec![0..1, 2..5], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…b…ijkl");
// Create an fold adjacent to the end of the first fold.
map.fold(vec![11..11, 8..10], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…b…kl");
}
{
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
// Create two adjacent folds.
map.fold(vec![0..2, 2..5], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…fghijkl");
// Edit within one of the folds.
buffer.update(app, |buffer, ctx| {
let version = buffer.version();
buffer.edit(vec![0..1], "12345", Some(ctx)).unwrap();
buffer.edits_since(version).collect::<Vec<_>>()
});
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "12345…fghijkl");
}
}); });
} assert_eq!(map.text(app.as_ref()), "123a…c123456eee");
#[test] map.unfold(Some(Point::new(0, 4)..Point::new(0, 5)), app.as_ref())
fn test_overlapping_folds() {
App::test((), |app| {
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(
vec![
Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 4)..Point::new(1, 0),
Point::new(1, 2)..Point::new(3, 2),
Point::new(3, 1)..Point::new(4, 1),
],
app.as_ref(),
)
.unwrap(); .unwrap();
assert_eq!(map.text(app.as_ref()), "aa…eeeee"); assert_eq!(map.text(app.as_ref()), "123aaaaa\nbbbbbb\nccc123456eee");
})
} }
#[test] #[gpui::test]
fn test_merging_folds_via_edit() { fn test_adjacent_folds(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let buffer = app.add_model(|ctx| Buffer::new(0, "abcdefghijkl", ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
{
let mut map = FoldMap::new(buffer.clone(), app.as_ref()); let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold( map.fold(vec![5..8], app.as_ref()).unwrap();
vec![ map.check_invariants(app.as_ref());
Point::new(0, 2)..Point::new(2, 2), assert_eq!(map.text(app.as_ref()), "abcde…ijkl");
Point::new(3, 1)..Point::new(4, 1),
],
app.as_ref(),
)
.unwrap();
assert_eq!(map.text(app.as_ref()), "aa…cccc\nd…eeeee");
// Create an fold adjacent to the start of the first fold.
map.fold(vec![0..1, 2..5], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…b…ijkl");
// Create an fold adjacent to the end of the first fold.
map.fold(vec![11..11, 8..10], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…b…kl");
}
{
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
// Create two adjacent folds.
map.fold(vec![0..2, 2..5], app.as_ref()).unwrap();
map.check_invariants(app.as_ref());
assert_eq!(map.text(app.as_ref()), "…fghijkl");
// Edit within one of the folds.
buffer.update(app, |buffer, ctx| { buffer.update(app, |buffer, ctx| {
buffer let version = buffer.version();
.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", Some(ctx)) buffer.edit(vec![0..1], "12345", Some(ctx)).unwrap();
.unwrap(); buffer.edits_since(version).collect::<Vec<_>>()
}); });
assert_eq!(map.text(app.as_ref()), "aa…eeeee"); map.check_invariants(app.as_ref());
}); assert_eq!(map.text(app.as_ref()), "12345…fghijkl");
}
} }
#[test] #[gpui::test]
fn test_folds_in_range() { fn test_overlapping_folds(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx)); let mut map = FoldMap::new(buffer.clone(), app.as_ref());
let mut map = FoldMap::new(buffer.clone(), app.as_ref()); map.fold(
let buffer = buffer.read(app); vec![
Point::new(0, 2)..Point::new(2, 2),
map.fold( Point::new(0, 4)..Point::new(1, 0),
vec![ Point::new(1, 2)..Point::new(3, 2),
Point::new(0, 2)..Point::new(2, 2), Point::new(3, 1)..Point::new(4, 1),
Point::new(0, 4)..Point::new(1, 0), ],
Point::new(1, 2)..Point::new(3, 2), app.as_ref(),
Point::new(3, 1)..Point::new(4, 1), )
], .unwrap();
app.as_ref(), assert_eq!(map.text(app.as_ref()), "aa…eeeee");
)
.unwrap();
let fold_ranges = map
.folds_in_range(Point::new(1, 0)..Point::new(1, 3), app.as_ref())
.unwrap()
.map(|fold| {
fold.start.to_point(buffer).unwrap()..fold.end.to_point(buffer).unwrap()
})
.collect::<Vec<_>>();
assert_eq!(
fold_ranges,
vec![
Point::new(0, 2)..Point::new(2, 2),
Point::new(1, 2)..Point::new(3, 2)
]
);
});
} }
#[test] #[gpui::test]
fn test_random_folds() { fn test_merging_folds_via_edit(app: &mut gpui::MutableAppContext) {
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(
vec![
Point::new(0, 2)..Point::new(2, 2),
Point::new(3, 1)..Point::new(4, 1),
],
app.as_ref(),
)
.unwrap();
assert_eq!(map.text(app.as_ref()), "aa…cccc\nd…eeeee");
buffer.update(app, |buffer, ctx| {
buffer
.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", Some(ctx))
.unwrap();
});
assert_eq!(map.text(app.as_ref()), "aa…eeeee");
}
#[gpui::test]
fn test_folds_in_range(app: &mut gpui::MutableAppContext) {
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
let buffer = buffer.read(app);
map.fold(
vec![
Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 4)..Point::new(1, 0),
Point::new(1, 2)..Point::new(3, 2),
Point::new(3, 1)..Point::new(4, 1),
],
app.as_ref(),
)
.unwrap();
let fold_ranges = map
.folds_in_range(Point::new(1, 0)..Point::new(1, 3), app.as_ref())
.unwrap()
.map(|fold| fold.start.to_point(buffer).unwrap()..fold.end.to_point(buffer).unwrap())
.collect::<Vec<_>>();
assert_eq!(
fold_ranges,
vec![
Point::new(0, 2)..Point::new(2, 2),
Point::new(1, 2)..Point::new(3, 2)
]
);
}
#[gpui::test]
fn test_random_folds(app: &mut gpui::MutableAppContext) {
use crate::editor::ToPoint; use crate::editor::ToPoint;
use crate::util::RandomCharIter; use crate::util::RandomCharIter;
use rand::prelude::*; use rand::prelude::*;
@ -863,203 +850,197 @@ mod tests {
dbg!(seed); dbg!(seed);
let mut rng = StdRng::seed_from_u64(seed); let mut rng = StdRng::seed_from_u64(seed);
App::test((), |app| { let buffer = app.add_model(|ctx| {
let buffer = app.add_model(|ctx| { let len = rng.gen_range(0..10);
let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>(); Buffer::new(0, text, ctx)
Buffer::new(0, text, ctx) });
}); let mut map = FoldMap::new(buffer.clone(), app.as_ref());
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
for _ in 0..operations { for _ in 0..operations {
log::info!("text: {:?}", buffer.read(app).text()); log::info!("text: {:?}", buffer.read(app).text());
match rng.gen_range(0..=100) { match rng.gen_range(0..=100) {
0..=34 => { 0..=34 => {
let buffer = buffer.read(app); let buffer = buffer.read(app);
let mut to_fold = Vec::new(); let mut to_fold = Vec::new();
for _ in 0..rng.gen_range(1..=5) { for _ in 0..rng.gen_range(1..=5) {
let end = rng.gen_range(0..=buffer.len()); let end = rng.gen_range(0..=buffer.len());
let start = rng.gen_range(0..=end); let start = rng.gen_range(0..=end);
to_fold.push(start..end); to_fold.push(start..end);
}
log::info!("folding {:?}", to_fold);
map.fold(to_fold, app.as_ref()).unwrap();
} }
35..=59 if !map.folds.is_empty() => { log::info!("folding {:?}", to_fold);
let buffer = buffer.read(app); map.fold(to_fold, app.as_ref()).unwrap();
let mut to_unfold = Vec::new(); }
for _ in 0..rng.gen_range(1..=3) { 35..=59 if !map.folds.is_empty() => {
let end = rng.gen_range(0..=buffer.len()); let buffer = buffer.read(app);
let start = rng.gen_range(0..=end); let mut to_unfold = Vec::new();
to_unfold.push(start..end); for _ in 0..rng.gen_range(1..=3) {
} let end = rng.gen_range(0..=buffer.len());
log::info!("unfolding {:?}", to_unfold); let start = rng.gen_range(0..=end);
map.unfold(to_unfold, app.as_ref()).unwrap(); to_unfold.push(start..end);
}
_ => {
let edits = buffer.update(app, |buffer, ctx| {
let start_version = buffer.version.clone();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_edit(&mut rng, edit_count, Some(ctx));
buffer.edits_since(start_version).collect::<Vec<_>>()
});
log::info!("editing {:?}", edits);
} }
log::info!("unfolding {:?}", to_unfold);
map.unfold(to_unfold, app.as_ref()).unwrap();
} }
map.check_invariants(app.as_ref()); _ => {
let edits = buffer.update(app, |buffer, ctx| {
let buffer = map.buffer.read(app); let start_version = buffer.version.clone();
let mut expected_text = buffer.text(); let edit_count = rng.gen_range(1..=5);
let mut expected_buffer_rows = Vec::new(); buffer.randomly_edit(&mut rng, edit_count, Some(ctx));
let mut next_row = buffer.max_point().row; buffer.edits_since(start_version).collect::<Vec<_>>()
for fold_range in map.merged_fold_ranges(app.as_ref()).into_iter().rev() { });
let fold_start = buffer.point_for_offset(fold_range.start).unwrap(); log::info!("editing {:?}", edits);
let fold_end = buffer.point_for_offset(fold_range.end).unwrap();
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
next_row = fold_start.row;
expected_text.replace_range(fold_range.start..fold_range.end, "");
}
expected_buffer_rows.extend((0..=next_row).rev());
expected_buffer_rows.reverse();
assert_eq!(map.text(app.as_ref()), expected_text);
for (display_row, line) in expected_text.lines().enumerate() {
let line_len = map.line_len(display_row as u32, app.as_ref()).unwrap();
assert_eq!(line_len, line.chars().count() as u32);
}
let mut display_point = DisplayPoint::new(0, 0);
let mut display_offset = DisplayOffset(0);
for c in expected_text.chars() {
let buffer_point = map.to_buffer_point(display_point, app.as_ref());
let buffer_offset = buffer_point.to_offset(buffer).unwrap();
assert_eq!(
map.to_display_point(buffer_point, app.as_ref()),
display_point
);
assert_eq!(
map.to_buffer_offset(display_point, app.as_ref()).unwrap(),
buffer_offset
);
assert_eq!(
map.to_display_offset(display_point, app.as_ref()).unwrap(),
display_offset
);
if c == '\n' {
*display_point.row_mut() += 1;
*display_point.column_mut() = 0;
} else {
*display_point.column_mut() += 1;
}
display_offset.0 += 1;
}
for _ in 0..5 {
let row = rng.gen_range(0..=map.max_point(app.as_ref()).row());
let column = rng.gen_range(0..=map.line_len(row, app.as_ref()).unwrap());
let point = DisplayPoint::new(row, column);
let offset = map.to_display_offset(point, app.as_ref()).unwrap().0;
let len = rng.gen_range(0..=map.len(app.as_ref()) - offset);
assert_eq!(
map.snapshot(app.as_ref())
.chars_at(point, app.as_ref())
.unwrap()
.take(len)
.collect::<String>(),
expected_text
.chars()
.skip(offset)
.take(len)
.collect::<String>()
);
}
for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
let display_row = map
.to_display_point(Point::new(*buffer_row, 0), app.as_ref())
.row();
assert_eq!(
map.snapshot(app.as_ref())
.buffer_rows(display_row)
.unwrap()
.collect::<Vec<_>>(),
expected_buffer_rows[idx..],
);
}
for fold_range in map.merged_fold_ranges(app.as_ref()) {
let display_point = map.to_display_point(
fold_range.start.to_point(buffer).unwrap(),
app.as_ref(),
);
assert!(map.is_line_folded(display_point.row(), app.as_ref()));
}
for _ in 0..5 {
let end = rng.gen_range(0..=buffer.len());
let start = rng.gen_range(0..=end);
let expected_folds = map
.folds
.items()
.into_iter()
.filter(|fold| {
let start = buffer.anchor_before(start).unwrap();
let end = buffer.anchor_after(end).unwrap();
start.cmp(&fold.0.end, buffer).unwrap() == Ordering::Less
&& end.cmp(&fold.0.start, buffer).unwrap() == Ordering::Greater
})
.map(|fold| fold.0)
.collect::<Vec<_>>();
assert_eq!(
map.folds_in_range(start..end, app.as_ref())
.unwrap()
.cloned()
.collect::<Vec<_>>(),
expected_folds
);
} }
} }
}); map.check_invariants(app.as_ref());
let buffer = map.buffer.read(app);
let mut expected_text = buffer.text();
let mut expected_buffer_rows = Vec::new();
let mut next_row = buffer.max_point().row;
for fold_range in map.merged_fold_ranges(app.as_ref()).into_iter().rev() {
let fold_start = buffer.point_for_offset(fold_range.start).unwrap();
let fold_end = buffer.point_for_offset(fold_range.end).unwrap();
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
next_row = fold_start.row;
expected_text.replace_range(fold_range.start..fold_range.end, "");
}
expected_buffer_rows.extend((0..=next_row).rev());
expected_buffer_rows.reverse();
assert_eq!(map.text(app.as_ref()), expected_text);
for (display_row, line) in expected_text.lines().enumerate() {
let line_len = map.line_len(display_row as u32, app.as_ref()).unwrap();
assert_eq!(line_len, line.chars().count() as u32);
}
let mut display_point = DisplayPoint::new(0, 0);
let mut display_offset = DisplayOffset(0);
for c in expected_text.chars() {
let buffer_point = map.to_buffer_point(display_point, app.as_ref());
let buffer_offset = buffer_point.to_offset(buffer).unwrap();
assert_eq!(
map.to_display_point(buffer_point, app.as_ref()),
display_point
);
assert_eq!(
map.to_buffer_offset(display_point, app.as_ref()).unwrap(),
buffer_offset
);
assert_eq!(
map.to_display_offset(display_point, app.as_ref()).unwrap(),
display_offset
);
if c == '\n' {
*display_point.row_mut() += 1;
*display_point.column_mut() = 0;
} else {
*display_point.column_mut() += 1;
}
display_offset.0 += 1;
}
for _ in 0..5 {
let row = rng.gen_range(0..=map.max_point(app.as_ref()).row());
let column = rng.gen_range(0..=map.line_len(row, app.as_ref()).unwrap());
let point = DisplayPoint::new(row, column);
let offset = map.to_display_offset(point, app.as_ref()).unwrap().0;
let len = rng.gen_range(0..=map.len(app.as_ref()) - offset);
assert_eq!(
map.snapshot(app.as_ref())
.chars_at(point, app.as_ref())
.unwrap()
.take(len)
.collect::<String>(),
expected_text
.chars()
.skip(offset)
.take(len)
.collect::<String>()
);
}
for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
let display_row = map
.to_display_point(Point::new(*buffer_row, 0), app.as_ref())
.row();
assert_eq!(
map.snapshot(app.as_ref())
.buffer_rows(display_row)
.unwrap()
.collect::<Vec<_>>(),
expected_buffer_rows[idx..],
);
}
for fold_range in map.merged_fold_ranges(app.as_ref()) {
let display_point = map
.to_display_point(fold_range.start.to_point(buffer).unwrap(), app.as_ref());
assert!(map.is_line_folded(display_point.row(), app.as_ref()));
}
for _ in 0..5 {
let end = rng.gen_range(0..=buffer.len());
let start = rng.gen_range(0..=end);
let expected_folds = map
.folds
.items()
.into_iter()
.filter(|fold| {
let start = buffer.anchor_before(start).unwrap();
let end = buffer.anchor_after(end).unwrap();
start.cmp(&fold.0.end, buffer).unwrap() == Ordering::Less
&& end.cmp(&fold.0.start, buffer).unwrap() == Ordering::Greater
})
.map(|fold| fold.0)
.collect::<Vec<_>>();
assert_eq!(
map.folds_in_range(start..end, app.as_ref())
.unwrap()
.cloned()
.collect::<Vec<_>>(),
expected_folds
);
}
}
} }
} }
#[test] #[gpui::test]
fn test_buffer_rows() { fn test_buffer_rows(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let text = sample_text(6, 6) + "\n";
let text = sample_text(6, 6) + "\n"; let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref()); let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold( map.fold(
vec![ vec![
Point::new(0, 2)..Point::new(2, 2), Point::new(0, 2)..Point::new(2, 2),
Point::new(3, 1)..Point::new(4, 1), Point::new(3, 1)..Point::new(4, 1),
], ],
app.as_ref(), app.as_ref(),
) )
.unwrap(); .unwrap();
assert_eq!(map.text(app.as_ref()), "aa…cccc\nd…eeeee\nffffff\n"); assert_eq!(map.text(app.as_ref()), "aa…cccc\nd…eeeee\nffffff\n");
assert_eq!( assert_eq!(
map.snapshot(app.as_ref()) map.snapshot(app.as_ref())
.buffer_rows(0) .buffer_rows(0)
.unwrap() .unwrap()
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![0, 3, 5, 6] vec![0, 3, 5, 6]
); );
assert_eq!( assert_eq!(
map.snapshot(app.as_ref()) map.snapshot(app.as_ref())
.buffer_rows(3) .buffer_rows(3)
.unwrap() .unwrap()
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![6] vec![6]
); );
});
} }
impl FoldMap { impl FoldMap {

View file

@ -339,53 +339,50 @@ pub fn collapse_tabs(
mod tests { mod tests {
use super::*; use super::*;
use crate::test::*; use crate::test::*;
use gpui::App;
#[test] #[gpui::test]
fn test_chars_at() { fn test_chars_at(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let text = sample_text(6, 6);
let text = sample_text(6, 6); let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx)); let map = DisplayMap::new(buffer.clone(), 4, app.as_ref());
let map = DisplayMap::new(buffer.clone(), 4, app.as_ref()); buffer
buffer .update(app, |buffer, ctx| {
.update(app, |buffer, ctx| { buffer.edit(
buffer.edit( vec![
vec![ Point::new(1, 0)..Point::new(1, 0),
Point::new(1, 0)..Point::new(1, 0), Point::new(1, 1)..Point::new(1, 1),
Point::new(1, 1)..Point::new(1, 1), Point::new(2, 1)..Point::new(2, 1),
Point::new(2, 1)..Point::new(2, 1), ],
], "\t",
"\t", Some(ctx),
Some(ctx), )
) })
}) .unwrap();
.unwrap();
assert_eq!( assert_eq!(
map.snapshot(app.as_ref()) map.snapshot(app.as_ref())
.chars_at(DisplayPoint::new(1, 0), app.as_ref()) .chars_at(DisplayPoint::new(1, 0), app.as_ref())
.unwrap() .unwrap()
.take(10) .take(10)
.collect::<String>(), .collect::<String>(),
" b bb" " b bb"
); );
assert_eq!( assert_eq!(
map.snapshot(app.as_ref()) map.snapshot(app.as_ref())
.chars_at(DisplayPoint::new(1, 2), app.as_ref()) .chars_at(DisplayPoint::new(1, 2), app.as_ref())
.unwrap() .unwrap()
.take(10) .take(10)
.collect::<String>(), .collect::<String>(),
" b bbbb" " b bbbb"
); );
assert_eq!( assert_eq!(
map.snapshot(app.as_ref()) map.snapshot(app.as_ref())
.chars_at(DisplayPoint::new(1, 6), app.as_ref()) .chars_at(DisplayPoint::new(1, 6), app.as_ref())
.unwrap() .unwrap()
.take(13) .take(13)
.collect::<String>(), .collect::<String>(),
" bbbbb\nc c" " bbbbb\nc c"
); );
});
} }
#[test] #[test]
@ -411,12 +408,10 @@ mod tests {
assert_eq!(collapse_tabs("\ta".chars(), 5, Bias::Right, 4), (2, 0)); assert_eq!(collapse_tabs("\ta".chars(), 5, Bias::Right, 4), (2, 0));
} }
#[test] #[gpui::test]
fn test_max_point() { fn test_max_point(app: &mut gpui::MutableAppContext) {
App::test((), |app| { let buffer = app.add_model(|ctx| Buffer::new(0, "aaa\n\t\tbbb", ctx));
let buffer = app.add_model(|ctx| Buffer::new(0, "aaa\n\t\tbbb", ctx)); let map = DisplayMap::new(buffer.clone(), 4, app.as_ref());
let map = DisplayMap::new(buffer.clone(), 4, app.as_ref()); assert_eq!(map.max_point(app.as_ref()), DisplayPoint::new(1, 11))
assert_eq!(map.max_point(app.as_ref()), DisplayPoint::new(1, 11))
});
} }
} }

View file

@ -399,7 +399,7 @@ impl FileFinder {
self.cancel_flag.store(true, atomic::Ordering::Relaxed); self.cancel_flag.store(true, atomic::Ordering::Relaxed);
self.cancel_flag = Arc::new(AtomicBool::new(false)); self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone(); let cancel_flag = self.cancel_flag.clone();
let task = ctx.background_executor().spawn(async move { let background_task = ctx.background_executor().spawn(async move {
let include_root_name = snapshots.len() > 1; let include_root_name = snapshots.len() > 1;
let matches = match_paths( let matches = match_paths(
snapshots.iter(), snapshots.iter(),
@ -415,7 +415,11 @@ impl FileFinder {
(search_id, did_cancel, query, matches) (search_id, did_cancel, query, matches)
}); });
ctx.spawn(task, Self::update_matches).detach(); ctx.spawn(|this, mut ctx| async move {
let matches = background_task.await;
this.update(&mut ctx, |this, ctx| this.update_matches(matches, ctx));
})
.detach();
Some(()) Some(())
} }
@ -453,220 +457,208 @@ impl FileFinder {
mod tests { mod tests {
use super::*; use super::*;
use crate::{editor, settings, test::temp_tree, workspace::Workspace}; use crate::{editor, settings, test::temp_tree, workspace::Workspace};
use gpui::App;
use serde_json::json; use serde_json::json;
use std::fs; use std::fs;
use tempdir::TempDir; use tempdir::TempDir;
#[test] #[gpui::test]
fn test_matching_paths() { async fn test_matching_paths(mut app: gpui::TestAppContext) {
App::test_async((), |mut app| async move { let tmp_dir = TempDir::new("example").unwrap();
let tmp_dir = TempDir::new("example").unwrap(); fs::create_dir(tmp_dir.path().join("a")).unwrap();
fs::create_dir(tmp_dir.path().join("a")).unwrap(); fs::write(tmp_dir.path().join("a/banana"), "banana").unwrap();
fs::write(tmp_dir.path().join("a/banana"), "banana").unwrap(); fs::write(tmp_dir.path().join("a/bandana"), "bandana").unwrap();
fs::write(tmp_dir.path().join("a/bandana"), "bandana").unwrap(); app.update(|ctx| {
app.update(|ctx| { super::init(ctx);
super::init(ctx); editor::init(ctx);
editor::init(ctx);
});
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (window_id, workspace) = app.add_window(|ctx| {
let mut workspace = Workspace::new(0, settings, ctx);
workspace.add_worktree(tmp_dir.path(), ctx);
workspace
});
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
app.dispatch_action(
window_id,
vec![workspace.id()],
"file_finder:toggle".into(),
(),
);
let finder = app.read(|ctx| {
workspace
.read(ctx)
.modal()
.cloned()
.unwrap()
.downcast::<FileFinder>()
.unwrap()
});
let query_buffer = app.read(|ctx| finder.read(ctx).query_buffer.clone());
let chain = vec![finder.id(), query_buffer.id()];
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "b".to_string());
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "n".to_string());
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "a".to_string());
finder
.condition(&app, |finder, _| finder.matches.len() == 2)
.await;
let active_pane = app.read(|ctx| workspace.read(ctx).active_pane().clone());
app.dispatch_action(
window_id,
vec![workspace.id(), finder.id()],
"menu:select_next",
(),
);
app.dispatch_action(
window_id,
vec![workspace.id(), finder.id()],
"file_finder:confirm",
(),
);
active_pane
.condition(&app, |pane, _| pane.active_item().is_some())
.await;
app.read(|ctx| {
let active_item = active_pane.read(ctx).active_item().unwrap();
assert_eq!(active_item.title(ctx), "bandana");
});
}); });
}
#[test]
fn test_matching_cancellation() {
App::test_async((), |mut app| async move {
let tmp_dir = temp_tree(json!({
"hello": "",
"goodbye": "",
"halogen-light": "",
"happiness": "",
"height": "",
"hi": "",
"hiccup": "",
}));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, workspace) = app.add_window(|ctx| {
let mut workspace = Workspace::new(0, settings.clone(), ctx);
workspace.add_worktree(tmp_dir.path(), ctx);
workspace
});
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
let (_, finder) =
app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx));
let query = "hi".to_string();
finder.update(&mut app, |f, ctx| f.spawn_search(query.clone(), ctx));
finder.condition(&app, |f, _| f.matches.len() == 5).await;
finder.update(&mut app, |finder, ctx| {
let matches = finder.matches.clone();
// Simulate a search being cancelled after the time limit,
// returning only a subset of the matches that would have been found.
finder.spawn_search(query.clone(), ctx);
finder.update_matches(
(
finder.latest_search_id,
true, // did-cancel
query.clone(),
vec![matches[1].clone(), matches[3].clone()],
),
ctx,
);
// Simulate another cancellation.
finder.spawn_search(query.clone(), ctx);
finder.update_matches(
(
finder.latest_search_id,
true, // did-cancel
query.clone(),
vec![matches[0].clone(), matches[2].clone(), matches[3].clone()],
),
ctx,
);
assert_eq!(finder.matches, matches[0..4])
});
});
}
#[test]
fn test_single_file_worktrees() {
App::test_async((), |mut app| async move {
let temp_dir = TempDir::new("test-single-file-worktrees").unwrap();
let dir_path = temp_dir.path().join("the-parent-dir");
let file_path = dir_path.join("the-file");
fs::create_dir(&dir_path).unwrap();
fs::write(&file_path, "").unwrap();
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, workspace) = app.add_window(|ctx| {
let mut workspace = Workspace::new(0, settings.clone(), ctx);
workspace.add_worktree(&file_path, ctx);
workspace
});
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
let (_, finder) =
app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx));
// Even though there is only one worktree, that worktree's filename
// is included in the matching, because the worktree is a single file.
finder.update(&mut app, |f, ctx| f.spawn_search("thf".into(), ctx));
finder.condition(&app, |f, _| f.matches.len() == 1).await;
app.read(|ctx| {
let finder = finder.read(ctx);
let (file_name, file_name_positions, full_path, full_path_positions) =
finder.labels_for_match(&finder.matches[0], ctx).unwrap();
assert_eq!(file_name, "the-file");
assert_eq!(file_name_positions, &[0, 1, 4]);
assert_eq!(full_path, "the-file");
assert_eq!(full_path_positions, &[0, 1, 4]);
});
// Since the worktree root is a file, searching for its name followed by a slash does
// not match anything.
finder.update(&mut app, |f, ctx| f.spawn_search("thf/".into(), ctx));
finder.condition(&app, |f, _| f.matches.len() == 0).await;
});
}
#[test]
fn test_multiple_matches_with_same_relative_path() {
App::test_async((), |mut app| async move {
let tmp_dir = temp_tree(json!({
"dir1": { "a.txt": "" },
"dir2": { "a.txt": "" }
}));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, workspace) = app.add_window(|ctx| Workspace::new(0, settings.clone(), ctx));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (window_id, workspace) = app.add_window(|ctx| {
let mut workspace = Workspace::new(0, settings, ctx);
workspace.add_worktree(tmp_dir.path(), ctx);
workspace workspace
.update(&mut app, |workspace, ctx| { });
workspace.open_paths( app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
&[tmp_dir.path().join("dir1"), tmp_dir.path().join("dir2")], .await;
ctx, app.dispatch_action(
) window_id,
}) vec![workspace.id()],
.await; "file_finder:toggle".into(),
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx)) (),
.await; );
let (_, finder) = let finder = app.read(|ctx| {
app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx)); workspace
.read(ctx)
.modal()
.cloned()
.unwrap()
.downcast::<FileFinder>()
.unwrap()
});
let query_buffer = app.read(|ctx| finder.read(ctx).query_buffer.clone());
// Run a search that matches two files with the same relative path. let chain = vec![finder.id(), query_buffer.id()];
finder.update(&mut app, |f, ctx| f.spawn_search("a.t".into(), ctx)); app.dispatch_action(window_id, chain.clone(), "buffer:insert", "b".to_string());
finder.condition(&app, |f, _| f.matches.len() == 2).await; app.dispatch_action(window_id, chain.clone(), "buffer:insert", "n".to_string());
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "a".to_string());
finder
.condition(&app, |finder, _| finder.matches.len() == 2)
.await;
// Can switch between different matches with the same relative path. let active_pane = app.read(|ctx| workspace.read(ctx).active_pane().clone());
finder.update(&mut app, |f, ctx| { app.dispatch_action(
assert_eq!(f.selected_index(), 0); window_id,
f.select_next(&(), ctx); vec![workspace.id(), finder.id()],
assert_eq!(f.selected_index(), 1); "menu:select_next",
f.select_prev(&(), ctx); (),
assert_eq!(f.selected_index(), 0); );
}); app.dispatch_action(
window_id,
vec![workspace.id(), finder.id()],
"file_finder:confirm",
(),
);
active_pane
.condition(&app, |pane, _| pane.active_item().is_some())
.await;
app.read(|ctx| {
let active_item = active_pane.read(ctx).active_item().unwrap();
assert_eq!(active_item.title(ctx), "bandana");
});
}
#[gpui::test]
async fn test_matching_cancellation(mut app: gpui::TestAppContext) {
    // Seven entries; five of them fuzzy-match the query "hi" below.
    let tmp_dir = temp_tree(json!({
        "hello": "",
        "goodbye": "",
        "halogen-light": "",
        "happiness": "",
        "height": "",
        "hi": "",
        "hiccup": "",
    }));
    let settings = settings::channel(&app.font_cache()).unwrap().1;
    let (_, workspace) = app.add_window(|ctx| {
        let mut workspace = Workspace::new(0, settings.clone(), ctx);
        workspace.add_worktree(tmp_dir.path(), ctx);
        workspace
    });
    app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
        .await;
    let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx));

    // Establish the full result set for the query first.
    let query = "hi".to_string();
    finder.update(&mut app, |finder, ctx| finder.spawn_search(query.clone(), ctx));
    finder
        .condition(&app, |finder, _| finder.matches.len() == 5)
        .await;

    finder.update(&mut app, |finder, ctx| {
        let matches = finder.matches.clone();

        // Simulate two consecutive searches being cancelled after the time
        // limit, each returning only a subset of the matches that would
        // have been found.
        for subset in vec![
            vec![matches[1].clone(), matches[3].clone()],
            vec![matches[0].clone(), matches[2].clone(), matches[3].clone()],
        ] {
            finder.spawn_search(query.clone(), ctx);
            finder.update_matches(
                (
                    finder.latest_search_id,
                    true, // did-cancel
                    query.clone(),
                    subset,
                ),
                ctx,
            );
        }

        // The partial result sets are merged rather than replacing each other.
        assert_eq!(finder.matches, matches[0..4])
    });
}
// A worktree whose root is a single file should match queries against the
// file's own name, but never match a query with a trailing slash.
#[gpui::test]
async fn test_single_file_worktrees(mut app: gpui::TestAppContext) {
    // Build a directory containing exactly one file and open a worktree
    // rooted at that file (not at its parent directory).
    let tmp = TempDir::new("test-single-file-worktrees").unwrap();
    let parent = tmp.path().join("the-parent-dir");
    let file = parent.join("the-file");
    fs::create_dir(&parent).unwrap();
    fs::write(&file, "").unwrap();

    let settings = settings::channel(&app.font_cache()).unwrap().1;
    let (_, workspace) = app.add_window(|ctx| {
        let mut workspace = Workspace::new(0, settings.clone(), ctx);
        workspace.add_worktree(&file, ctx);
        workspace
    });
    app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
        .await;
    let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx));

    // Even though there is only one worktree, that worktree's filename
    // is included in the matching, because the worktree is a single file.
    finder.update(&mut app, |this, ctx| this.spawn_search("thf".into(), ctx));
    finder
        .condition(&app, |this, _| this.matches.len() == 1)
        .await;
    app.read(|ctx| {
        let finder = finder.read(ctx);
        let (file_name, file_name_positions, full_path, full_path_positions) =
            finder.labels_for_match(&finder.matches[0], ctx).unwrap();
        assert_eq!(file_name, "the-file");
        assert_eq!(file_name_positions, &[0, 1, 4]);
        assert_eq!(full_path, "the-file");
        assert_eq!(full_path_positions, &[0, 1, 4]);
    });

    // Since the worktree root is a file, searching for its name followed by a slash does
    // not match anything.
    finder.update(&mut app, |this, ctx| this.spawn_search("thf/".into(), ctx));
    finder
        .condition(&app, |this, _| this.matches.len() == 0)
        .await;
}
// Two worktrees each containing a file at the same relative path ("a.txt"):
// both should appear as distinct matches, and selection should be able to
// move between them.
// NOTE(review): the closing braces of this function fall in a mangled region
// of the diff below — confirm against the post-merge file.
#[gpui::test]
async fn test_multiple_matches_with_same_relative_path(mut app: gpui::TestAppContext) {
let tmp_dir = temp_tree(json!({
"dir1": { "a.txt": "" },
"dir2": { "a.txt": "" }
}));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, workspace) = app.add_window(|ctx| Workspace::new(0, settings.clone(), ctx));
// Open both directories as separate worktrees in the same workspace.
workspace
.update(&mut app, |workspace, ctx| {
workspace.open_paths(
&[tmp_dir.path().join("dir1"), tmp_dir.path().join("dir2")],
ctx,
)
})
.await;
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx));
// Run a search that matches two files with the same relative path.
finder.update(&mut app, |f, ctx| f.spawn_search("a.t".into(), ctx));
finder.condition(&app, |f, _| f.matches.len() == 2).await;
// Can switch between different matches with the same relative path.
finder.update(&mut app, |f, ctx| {
assert_eq!(f.selected_index(), 0);
f.select_next(&(), ctx);
assert_eq!(f.selected_index(), 1);
f.select_prev(&(), ctx);
assert_eq!(f.selected_index(), 0);
}); });
} }
} }

File diff suppressed because it is too large Load diff

View file

@ -16,7 +16,7 @@ use postage::{
prelude::{Sink, Stream}, prelude::{Sink, Stream},
watch, watch,
}; };
use smol::{channel::Sender, Timer}; use smol::channel::Sender;
use std::{ use std::{
cmp, cmp,
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
@ -99,8 +99,27 @@ impl Worktree {
scanner.run(event_stream) scanner.run(event_stream)
}); });
ctx.spawn_stream(scan_state_rx, Self::observe_scan_state, |_, _| {}) ctx.spawn(|this, mut ctx| {
.detach(); let this = this.downgrade();
async move {
while let Ok(scan_state) = scan_state_rx.recv().await {
let alive = ctx.update(|ctx| {
if let Some(handle) = this.upgrade(&ctx) {
handle
.update(ctx, |this, ctx| this.observe_scan_state(scan_state, ctx));
true
} else {
false
}
});
if !alive {
break;
}
}
}
})
.detach();
tree tree
} }
@ -117,15 +136,16 @@ impl Worktree {
pub fn next_scan_complete(&self, ctx: &mut ModelContext<Self>) -> impl Future<Output = ()> { pub fn next_scan_complete(&self, ctx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
let scan_id = self.snapshot.scan_id; let scan_id = self.snapshot.scan_id;
ctx.spawn_stream( let mut scan_state = self.scan_state.1.clone();
self.scan_state.1.clone(), ctx.spawn(|this, ctx| async move {
move |this, scan_state, ctx| { while let Some(scan_state) = scan_state.recv().await {
if matches!(scan_state, ScanState::Idle) && this.snapshot.scan_id > scan_id { if this.read_with(&ctx, |this, _| {
ctx.halt_stream(); matches!(scan_state, ScanState::Idle) && this.snapshot.scan_id > scan_id
}) {
break;
} }
}, }
|_, _| {}, })
)
} }
fn observe_scan_state(&mut self, scan_state: ScanState, ctx: &mut ModelContext<Self>) { fn observe_scan_state(&mut self, scan_state: ScanState, ctx: &mut ModelContext<Self>) {
@ -138,9 +158,11 @@ impl Worktree {
ctx.notify(); ctx.notify();
if self.is_scanning() && !self.poll_scheduled { if self.is_scanning() && !self.poll_scheduled {
ctx.spawn(Timer::after(Duration::from_millis(100)), |this, _, ctx| { ctx.spawn(|this, mut ctx| async move {
this.poll_scheduled = false; this.update(&mut ctx, |this, ctx| {
this.poll_entries(ctx); this.poll_scheduled = false;
this.poll_entries(ctx);
})
}) })
.detach(); .detach();
self.poll_scheduled = true; self.poll_scheduled = true;
@ -1394,7 +1416,6 @@ mod tests {
use crate::editor::Buffer; use crate::editor::Buffer;
use crate::test::*; use crate::test::*;
use anyhow::Result; use anyhow::Result;
use gpui::App;
use rand::prelude::*; use rand::prelude::*;
use serde_json::json; use serde_json::json;
use std::env; use std::env;
@ -1402,248 +1423,237 @@ mod tests {
use std::os::unix; use std::os::unix;
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
#[test] #[gpui::test]
fn test_populate_and_search() { async fn test_populate_and_search(mut app: gpui::TestAppContext) {
App::test_async((), |mut app| async move { let dir = temp_tree(json!({
let dir = temp_tree(json!({ "root": {
"root": { "apple": "",
"apple": "", "banana": {
"banana": { "carrot": {
"carrot": { "date": "",
"date": "", "endive": "",
"endive": "",
}
},
"fennel": {
"grape": "",
} }
},
"fennel": {
"grape": "",
} }
})); }
}));
let root_link_path = dir.path().join("root_link"); let root_link_path = dir.path().join("root_link");
unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap(); unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
unix::fs::symlink( unix::fs::symlink(
&dir.path().join("root/fennel"), &dir.path().join("root/fennel"),
&dir.path().join("root/finnochio"), &dir.path().join("root/finnochio"),
)
.unwrap();
let tree = app.add_model(|ctx| Worktree::new(root_link_path, ctx));
app.read(|ctx| tree.read(ctx).scan_complete()).await;
app.read(|ctx| {
let tree = tree.read(ctx);
assert_eq!(tree.file_count(), 5);
assert_eq!(
tree.inode_for_path("fennel/grape"),
tree.inode_for_path("finnochio/grape")
);
let results = match_paths(
Some(tree.snapshot()).iter(),
"bna",
false,
false,
false,
10,
Default::default(),
ctx.thread_pool().clone(),
) )
.into_iter()
.map(|result| result.path)
.collect::<Vec<Arc<Path>>>();
assert_eq!(
results,
vec![
PathBuf::from("banana/carrot/date").into(),
PathBuf::from("banana/carrot/endive").into(),
]
);
})
}
#[gpui::test]
async fn test_save_file(mut app: gpui::TestAppContext) {
let dir = temp_tree(json!({
"file1": "the old contents",
}));
let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
app.read(|ctx| tree.read(ctx).scan_complete()).await;
app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 1));
let buffer =
app.add_model(|ctx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), ctx));
let path = tree.update(&mut app, |tree, ctx| {
let path = tree.files(0).next().unwrap().path().clone();
assert_eq!(path.file_name().unwrap(), "file1");
smol::block_on(tree.save(&path, buffer.read(ctx).snapshot(), ctx.as_ref())).unwrap();
path
});
let history = app
.read(|ctx| tree.read(ctx).load_history(&path, ctx))
.await
.unwrap(); .unwrap();
app.read(|ctx| {
let tree = app.add_model(|ctx| Worktree::new(root_link_path, ctx)); assert_eq!(history.base_text.as_ref(), buffer.read(ctx).text());
app.read(|ctx| tree.read(ctx).scan_complete()).await;
app.read(|ctx| {
let tree = tree.read(ctx);
assert_eq!(tree.file_count(), 5);
assert_eq!(
tree.inode_for_path("fennel/grape"),
tree.inode_for_path("finnochio/grape")
);
let results = match_paths(
Some(tree.snapshot()).iter(),
"bna",
false,
false,
false,
10,
Default::default(),
ctx.thread_pool().clone(),
)
.into_iter()
.map(|result| result.path)
.collect::<Vec<Arc<Path>>>();
assert_eq!(
results,
vec![
PathBuf::from("banana/carrot/date").into(),
PathBuf::from("banana/carrot/endive").into(),
]
);
})
}); });
} }
#[test] #[gpui::test]
fn test_save_file() { async fn test_save_in_single_file_worktree(mut app: gpui::TestAppContext) {
App::test_async((), |mut app| async move { let dir = temp_tree(json!({
let dir = temp_tree(json!({ "file1": "the old contents",
"file1": "the old contents", }));
}));
let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx)); let tree = app.add_model(|ctx| Worktree::new(dir.path().join("file1"), ctx));
app.read(|ctx| tree.read(ctx).scan_complete()).await; app.read(|ctx| tree.read(ctx).scan_complete()).await;
app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 1)); app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 1));
let buffer = let buffer =
app.add_model(|ctx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), ctx)); app.add_model(|ctx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), ctx));
let path = tree.update(&mut app, |tree, ctx| { let file = app.read(|ctx| tree.file("", ctx));
let path = tree.files(0).next().unwrap().path().clone(); app.update(|ctx| {
assert_eq!(path.file_name().unwrap(), "file1"); assert_eq!(file.path().file_name(), None);
smol::block_on(tree.save(&path, buffer.read(ctx).snapshot(), ctx.as_ref())) smol::block_on(file.save(buffer.read(ctx).snapshot(), ctx.as_ref())).unwrap();
.unwrap();
path
});
let history = app
.read(|ctx| tree.read(ctx).load_history(&path, ctx))
.await
.unwrap();
app.read(|ctx| {
assert_eq!(history.base_text.as_ref(), buffer.read(ctx).text());
});
}); });
let history = app.read(|ctx| file.load_history(ctx)).await.unwrap();
app.read(|ctx| assert_eq!(history.base_text.as_ref(), buffer.read(ctx).text()));
} }
#[test] #[gpui::test]
fn test_save_in_single_file_worktree() { async fn test_rescan_simple(mut app: gpui::TestAppContext) {
App::test_async((), |mut app| async move { let dir = temp_tree(json!({
let dir = temp_tree(json!({ "a": {
"file1": "the old contents", "file1": "",
})); "file2": "",
"file3": "",
let tree = app.add_model(|ctx| Worktree::new(dir.path().join("file1"), ctx)); },
app.read(|ctx| tree.read(ctx).scan_complete()).await; "b": {
app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 1)); "c": {
"file4": "",
let buffer = "file5": "",
app.add_model(|ctx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), ctx));
let file = app.read(|ctx| tree.file("", ctx));
app.update(|ctx| {
assert_eq!(file.path().file_name(), None);
smol::block_on(file.save(buffer.read(ctx).snapshot(), ctx.as_ref())).unwrap();
});
let history = app.read(|ctx| file.load_history(ctx)).await.unwrap();
app.read(|ctx| assert_eq!(history.base_text.as_ref(), buffer.read(ctx).text()));
});
}
#[test]
fn test_rescan_simple() {
App::test_async((), |mut app| async move {
let dir = temp_tree(json!({
"a": {
"file1": "",
"file2": "",
"file3": "",
},
"b": {
"c": {
"file4": "",
"file5": "",
}
} }
})); }
}));
let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx)); let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
let (file2, file3, file4, file5, non_existent_file) = app.read(|ctx| { let (file2, file3, file4, file5, non_existent_file) = app.read(|ctx| {
( (
tree.file("a/file2", ctx), tree.file("a/file2", ctx),
tree.file("a/file3", ctx), tree.file("a/file3", ctx),
tree.file("b/c/file4", ctx), tree.file("b/c/file4", ctx),
tree.file("b/c/file5", ctx), tree.file("b/c/file5", ctx),
tree.file("a/filex", ctx), tree.file("a/filex", ctx),
) )
}); });
// The worktree hasn't scanned the directories containing these paths, // The worktree hasn't scanned the directories containing these paths,
// so it can't determine that the paths are deleted. // so it can't determine that the paths are deleted.
assert!(!file2.is_deleted());
assert!(!file3.is_deleted());
assert!(!file4.is_deleted());
assert!(!file5.is_deleted());
assert!(!non_existent_file.is_deleted());
// After scanning, the worktree knows which files exist and which don't.
app.read(|ctx| tree.read(ctx).scan_complete()).await;
assert!(!file2.is_deleted());
assert!(!file3.is_deleted());
assert!(!file4.is_deleted());
assert!(!file5.is_deleted());
assert!(non_existent_file.is_deleted());
tree.flush_fs_events(&app).await;
std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
tree.update(&mut app, |tree, ctx| tree.next_scan_complete(ctx))
.await;
app.read(|ctx| {
assert_eq!(
tree.read(ctx)
.paths()
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
vec![
"a",
"a/file1",
"a/file2.new",
"b",
"d",
"d/file3",
"d/file4"
]
);
assert_eq!(file2.path().to_str().unwrap(), "a/file2.new");
assert_eq!(file4.path().as_ref(), Path::new("d/file4"));
assert_eq!(file5.path().as_ref(), Path::new("d/file5"));
assert!(!file2.is_deleted()); assert!(!file2.is_deleted());
assert!(!file3.is_deleted());
assert!(!file4.is_deleted()); assert!(!file4.is_deleted());
assert!(!file5.is_deleted()); assert!(file5.is_deleted());
assert!(!non_existent_file.is_deleted());
// After scanning, the worktree knows which files exist and which don't. // Right now, this rename isn't detected because the target path
app.read(|ctx| tree.read(ctx).scan_complete()).await; // no longer exists on the file system by the time we process the
assert!(!file2.is_deleted()); // rename event.
assert!(!file3.is_deleted()); assert_eq!(file3.path().as_ref(), Path::new("a/file3"));
assert!(!file4.is_deleted()); assert!(file3.is_deleted());
assert!(!file5.is_deleted());
assert!(non_existent_file.is_deleted());
tree.flush_fs_events(&app).await;
fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
fs::remove_file(dir.path().join("b/c/file5")).unwrap();
fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
tree.update(&mut app, |tree, ctx| tree.next_scan_complete(ctx))
.await;
app.read(|ctx| {
assert_eq!(
tree.read(ctx)
.paths()
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
vec![
"a",
"a/file1",
"a/file2.new",
"b",
"d",
"d/file3",
"d/file4"
]
);
assert_eq!(file2.path().to_str().unwrap(), "a/file2.new");
assert_eq!(file4.path().as_ref(), Path::new("d/file4"));
assert_eq!(file5.path().as_ref(), Path::new("d/file5"));
assert!(!file2.is_deleted());
assert!(!file4.is_deleted());
assert!(file5.is_deleted());
// Right now, this rename isn't detected because the target path
// no longer exists on the file system by the time we process the
// rename event.
assert_eq!(file3.path().as_ref(), Path::new("a/file3"));
assert!(file3.is_deleted());
});
}); });
} }
#[test] #[gpui::test]
fn test_rescan_with_gitignore() { async fn test_rescan_with_gitignore(mut app: gpui::TestAppContext) {
App::test_async((), |mut app| async move { let dir = temp_tree(json!({
let dir = temp_tree(json!({ ".git": {},
".git": {}, ".gitignore": "ignored-dir\n",
".gitignore": "ignored-dir\n", "tracked-dir": {
"tracked-dir": { "tracked-file1": "tracked contents",
"tracked-file1": "tracked contents", },
}, "ignored-dir": {
"ignored-dir": { "ignored-file1": "ignored contents",
"ignored-file1": "ignored contents", }
} }));
}));
let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx)); let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
app.read(|ctx| tree.read(ctx).scan_complete()).await; app.read(|ctx| tree.read(ctx).scan_complete()).await;
tree.flush_fs_events(&app).await; tree.flush_fs_events(&app).await;
app.read(|ctx| { app.read(|ctx| {
let tree = tree.read(ctx); let tree = tree.read(ctx);
let tracked = tree.entry_for_path("tracked-dir/tracked-file1").unwrap(); let tracked = tree.entry_for_path("tracked-dir/tracked-file1").unwrap();
let ignored = tree.entry_for_path("ignored-dir/ignored-file1").unwrap(); let ignored = tree.entry_for_path("ignored-dir/ignored-file1").unwrap();
assert_eq!(tracked.is_ignored(), false); assert_eq!(tracked.is_ignored(), false);
assert_eq!(ignored.is_ignored(), true); assert_eq!(ignored.is_ignored(), true);
}); });
fs::write(dir.path().join("tracked-dir/tracked-file2"), "").unwrap(); fs::write(dir.path().join("tracked-dir/tracked-file2"), "").unwrap();
fs::write(dir.path().join("ignored-dir/ignored-file2"), "").unwrap(); fs::write(dir.path().join("ignored-dir/ignored-file2"), "").unwrap();
tree.update(&mut app, |tree, ctx| tree.next_scan_complete(ctx)) tree.update(&mut app, |tree, ctx| tree.next_scan_complete(ctx))
.await; .await;
app.read(|ctx| { app.read(|ctx| {
let tree = tree.read(ctx); let tree = tree.read(ctx);
let dot_git = tree.entry_for_path(".git").unwrap(); let dot_git = tree.entry_for_path(".git").unwrap();
let tracked = tree.entry_for_path("tracked-dir/tracked-file2").unwrap(); let tracked = tree.entry_for_path("tracked-dir/tracked-file2").unwrap();
let ignored = tree.entry_for_path("ignored-dir/ignored-file2").unwrap(); let ignored = tree.entry_for_path("ignored-dir/ignored-file2").unwrap();
assert_eq!(tracked.is_ignored(), false); assert_eq!(tracked.is_ignored(), false);
assert_eq!(ignored.is_ignored(), true); assert_eq!(ignored.is_ignored(), true);
assert_eq!(dot_git.is_ignored(), true); assert_eq!(dot_git.is_ignored(), true);
});
}); });
} }