Store worktree changes in a sorted arc slice, not a HashMap
We don't need to look up change types by an arbitrary key, but we do need to iterate over the changes. It would also be useful to be able to cheaply clone them, to use them in a background task.
This commit is contained in:
parent
59bfd40679
commit
6628c4df28
2 changed files with 35 additions and 26 deletions
|
@ -4847,7 +4847,7 @@ impl Project {
|
||||||
if worktree.read(cx).is_local() {
|
if worktree.read(cx).is_local() {
|
||||||
cx.subscribe(worktree, |this, worktree, event, cx| match event {
|
cx.subscribe(worktree, |this, worktree, event, cx| match event {
|
||||||
worktree::Event::UpdatedEntries(changes) => {
|
worktree::Event::UpdatedEntries(changes) => {
|
||||||
this.update_local_worktree_buffers(&worktree, &changes, cx);
|
this.update_local_worktree_buffers(&worktree, changes, cx);
|
||||||
this.update_local_worktree_language_servers(&worktree, changes, cx);
|
this.update_local_worktree_language_servers(&worktree, changes, cx);
|
||||||
}
|
}
|
||||||
worktree::Event::UpdatedGitRepositories(updated_repos) => {
|
worktree::Event::UpdatedGitRepositories(updated_repos) => {
|
||||||
|
@ -4881,13 +4881,13 @@ impl Project {
|
||||||
fn update_local_worktree_buffers(
|
fn update_local_worktree_buffers(
|
||||||
&mut self,
|
&mut self,
|
||||||
worktree_handle: &ModelHandle<Worktree>,
|
worktree_handle: &ModelHandle<Worktree>,
|
||||||
changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
|
changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) {
|
) {
|
||||||
let snapshot = worktree_handle.read(cx).snapshot();
|
let snapshot = worktree_handle.read(cx).snapshot();
|
||||||
|
|
||||||
let mut renamed_buffers = Vec::new();
|
let mut renamed_buffers = Vec::new();
|
||||||
for (path, entry_id) in changes.keys() {
|
for (path, entry_id, _) in changes {
|
||||||
let worktree_id = worktree_handle.read(cx).id();
|
let worktree_id = worktree_handle.read(cx).id();
|
||||||
let project_path = ProjectPath {
|
let project_path = ProjectPath {
|
||||||
worktree_id,
|
worktree_id,
|
||||||
|
@ -4993,7 +4993,7 @@ impl Project {
|
||||||
fn update_local_worktree_language_servers(
|
fn update_local_worktree_language_servers(
|
||||||
&mut self,
|
&mut self,
|
||||||
worktree_handle: &ModelHandle<Worktree>,
|
worktree_handle: &ModelHandle<Worktree>,
|
||||||
changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
|
changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) {
|
) {
|
||||||
if changes.is_empty() {
|
if changes.is_empty() {
|
||||||
|
@ -5024,7 +5024,7 @@ impl Project {
|
||||||
let params = lsp::DidChangeWatchedFilesParams {
|
let params = lsp::DidChangeWatchedFilesParams {
|
||||||
changes: changes
|
changes: changes
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|((path, _), change)| {
|
.filter_map(|(path, _, change)| {
|
||||||
if !watched_paths.is_match(&path) {
|
if !watched_paths.is_match(&path) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
|
@ -363,7 +363,7 @@ enum ScanState {
|
||||||
Started,
|
Started,
|
||||||
Updated {
|
Updated {
|
||||||
snapshot: LocalSnapshot,
|
snapshot: LocalSnapshot,
|
||||||
changes: HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
|
changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
|
||||||
barrier: Option<barrier::Sender>,
|
barrier: Option<barrier::Sender>,
|
||||||
scanning: bool,
|
scanning: bool,
|
||||||
},
|
},
|
||||||
|
@ -377,7 +377,7 @@ struct ShareState {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum Event {
|
pub enum Event {
|
||||||
UpdatedEntries(HashMap<(Arc<Path>, ProjectEntryId), PathChange>),
|
UpdatedEntries(Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>),
|
||||||
UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
|
UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -548,7 +548,7 @@ impl Worktree {
|
||||||
this.update(&mut cx, |this, cx| {
|
this.update(&mut cx, |this, cx| {
|
||||||
let this = this.as_remote_mut().unwrap();
|
let this = this.as_remote_mut().unwrap();
|
||||||
this.snapshot = this.background_snapshot.lock().clone();
|
this.snapshot = this.background_snapshot.lock().clone();
|
||||||
cx.emit(Event::UpdatedEntries(Default::default()));
|
cx.emit(Event::UpdatedEntries(Arc::from([])));
|
||||||
cx.notify();
|
cx.notify();
|
||||||
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
|
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
|
||||||
if this.observed_snapshot(*scan_id) {
|
if this.observed_snapshot(*scan_id) {
|
||||||
|
@ -3396,18 +3396,24 @@ impl BackgroundScanner {
|
||||||
old_snapshot: &Snapshot,
|
old_snapshot: &Snapshot,
|
||||||
new_snapshot: &Snapshot,
|
new_snapshot: &Snapshot,
|
||||||
event_paths: &[Arc<Path>],
|
event_paths: &[Arc<Path>],
|
||||||
) -> HashMap<(Arc<Path>, ProjectEntryId), PathChange> {
|
) -> Arc<[(Arc<Path>, ProjectEntryId, PathChange)]> {
|
||||||
use BackgroundScannerPhase::*;
|
use BackgroundScannerPhase::*;
|
||||||
use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated};
|
use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated};
|
||||||
|
|
||||||
let mut changes = HashMap::default();
|
|
||||||
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
|
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
|
||||||
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
|
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
|
||||||
|
old_paths.next(&());
|
||||||
|
new_paths.next(&());
|
||||||
|
|
||||||
|
let mut changes = Vec::new();
|
||||||
for path in event_paths {
|
for path in event_paths {
|
||||||
let path = PathKey(path.clone());
|
let path = PathKey(path.clone());
|
||||||
old_paths.seek(&path, Bias::Left, &());
|
if old_paths.item().map_or(false, |e| e.path < path.0) {
|
||||||
new_paths.seek(&path, Bias::Left, &());
|
old_paths.seek_forward(&path, Bias::Left, &());
|
||||||
|
}
|
||||||
|
if new_paths.item().map_or(false, |e| e.path < path.0) {
|
||||||
|
new_paths.seek_forward(&path, Bias::Left, &());
|
||||||
|
}
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match (old_paths.item(), new_paths.item()) {
|
match (old_paths.item(), new_paths.item()) {
|
||||||
|
@ -3422,7 +3428,7 @@ impl BackgroundScanner {
|
||||||
|
|
||||||
match Ord::cmp(&old_entry.path, &new_entry.path) {
|
match Ord::cmp(&old_entry.path, &new_entry.path) {
|
||||||
Ordering::Less => {
|
Ordering::Less => {
|
||||||
changes.insert((old_entry.path.clone(), old_entry.id), Removed);
|
changes.push((old_entry.path.clone(), old_entry.id, Removed));
|
||||||
old_paths.next(&());
|
old_paths.next(&());
|
||||||
}
|
}
|
||||||
Ordering::Equal => {
|
Ordering::Equal => {
|
||||||
|
@ -3430,42 +3436,45 @@ impl BackgroundScanner {
|
||||||
// If the worktree was not fully initialized when this event was generated,
|
// If the worktree was not fully initialized when this event was generated,
|
||||||
// we can't know whether this entry was added during the scan or whether
|
// we can't know whether this entry was added during the scan or whether
|
||||||
// it was merely updated.
|
// it was merely updated.
|
||||||
changes.insert(
|
changes.push((
|
||||||
(new_entry.path.clone(), new_entry.id),
|
new_entry.path.clone(),
|
||||||
|
new_entry.id,
|
||||||
AddedOrUpdated,
|
AddedOrUpdated,
|
||||||
);
|
));
|
||||||
} else if old_entry.mtime != new_entry.mtime {
|
} else if old_entry.mtime != new_entry.mtime {
|
||||||
changes.insert((new_entry.path.clone(), new_entry.id), Updated);
|
changes.push((new_entry.path.clone(), new_entry.id, Updated));
|
||||||
}
|
}
|
||||||
old_paths.next(&());
|
old_paths.next(&());
|
||||||
new_paths.next(&());
|
new_paths.next(&());
|
||||||
}
|
}
|
||||||
Ordering::Greater => {
|
Ordering::Greater => {
|
||||||
changes.insert(
|
changes.push((
|
||||||
(new_entry.path.clone(), new_entry.id),
|
new_entry.path.clone(),
|
||||||
|
new_entry.id,
|
||||||
if self.phase == InitialScan {
|
if self.phase == InitialScan {
|
||||||
Loaded
|
Loaded
|
||||||
} else {
|
} else {
|
||||||
Added
|
Added
|
||||||
},
|
},
|
||||||
);
|
));
|
||||||
new_paths.next(&());
|
new_paths.next(&());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
(Some(old_entry), None) => {
|
(Some(old_entry), None) => {
|
||||||
changes.insert((old_entry.path.clone(), old_entry.id), Removed);
|
changes.push((old_entry.path.clone(), old_entry.id, Removed));
|
||||||
old_paths.next(&());
|
old_paths.next(&());
|
||||||
}
|
}
|
||||||
(None, Some(new_entry)) => {
|
(None, Some(new_entry)) => {
|
||||||
changes.insert(
|
changes.push((
|
||||||
(new_entry.path.clone(), new_entry.id),
|
new_entry.path.clone(),
|
||||||
|
new_entry.id,
|
||||||
if self.phase == InitialScan {
|
if self.phase == InitialScan {
|
||||||
Loaded
|
Loaded
|
||||||
} else {
|
} else {
|
||||||
Added
|
Added
|
||||||
},
|
},
|
||||||
);
|
));
|
||||||
new_paths.next(&());
|
new_paths.next(&());
|
||||||
}
|
}
|
||||||
(None, None) => break,
|
(None, None) => break,
|
||||||
|
@ -3473,7 +3482,7 @@ impl BackgroundScanner {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
changes
|
changes.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn progress_timer(&self, running: bool) {
|
async fn progress_timer(&self, running: bool) {
|
||||||
|
@ -4337,7 +4346,7 @@ mod tests {
|
||||||
|
|
||||||
cx.subscribe(&worktree, move |tree, _, event, _| {
|
cx.subscribe(&worktree, move |tree, _, event, _| {
|
||||||
if let Event::UpdatedEntries(changes) = event {
|
if let Event::UpdatedEntries(changes) = event {
|
||||||
for ((path, _), change_type) in changes.iter() {
|
for (path, _, change_type) in changes.iter() {
|
||||||
let mtime = tree.entry_for_path(&path).map(|e| e.mtime);
|
let mtime = tree.entry_for_path(&path).map(|e| e.mtime);
|
||||||
let path = path.clone();
|
let path = path.clone();
|
||||||
let ix = match paths.binary_search_by_key(&&path, |e| &e.0) {
|
let ix = match paths.binary_search_by_key(&&path, |e| &e.0) {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue