diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs
index 7f56239f83..f399a53412 100644
--- a/crates/editor/src/multi_buffer.rs
+++ b/crates/editor/src/multi_buffer.rs
@@ -2454,33 +2454,32 @@ mod tests {
         assert_eq!(snapshot_1.text(), "abcd123");

         // Replace the buffer 1 excerpt with new excerpts from buffer 2.
-        let (excerpt_id_2, excerpt_id_3, excerpt_id_4) =
-            multibuffer.update(cx, |multibuffer, cx| {
-                multibuffer.remove_excerpts([&excerpt_id_1], cx);
-                (
-                    multibuffer.push_excerpt(
-                        ExcerptProperties {
-                            buffer: &buffer_2,
-                            range: 0..4,
-                        },
-                        cx,
-                    ),
-                    multibuffer.push_excerpt(
-                        ExcerptProperties {
-                            buffer: &buffer_2,
-                            range: 6..10,
-                        },
-                        cx,
-                    ),
-                    multibuffer.push_excerpt(
-                        ExcerptProperties {
-                            buffer: &buffer_2,
-                            range: 12..16,
-                        },
-                        cx,
-                    ),
-                )
-            });
+        let (excerpt_id_2, excerpt_id_3, _) = multibuffer.update(cx, |multibuffer, cx| {
+            multibuffer.remove_excerpts([&excerpt_id_1], cx);
+            (
+                multibuffer.push_excerpt(
+                    ExcerptProperties {
+                        buffer: &buffer_2,
+                        range: 0..4,
+                    },
+                    cx,
+                ),
+                multibuffer.push_excerpt(
+                    ExcerptProperties {
+                        buffer: &buffer_2,
+                        range: 6..10,
+                    },
+                    cx,
+                ),
+                multibuffer.push_excerpt(
+                    ExcerptProperties {
+                        buffer: &buffer_2,
+                        range: 12..16,
+                    },
+                    cx,
+                ),
+            )
+        });

         let snapshot_2 = multibuffer.read(cx).snapshot(cx);
         assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP");
diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs
index a0ed7b811d..325ac682eb 100644
--- a/crates/project/src/project.rs
+++ b/crates/project/src/project.rs
@@ -101,6 +101,16 @@ impl DiagnosticSummary {

         this
     }
+
+    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
+        proto::DiagnosticSummary {
+            path: path.to_string_lossy().to_string(),
+            error_count: self.error_count as u32,
+            warning_count: self.warning_count as u32,
+            info_count: self.info_count as u32,
+            hint_count: self.hint_count as u32,
+        }
+    }
 }

 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs
index 3e76151fdf..3533714c04 100644
--- a/crates/project/src/worktree.rs
+++ b/crates/project/src/worktree.rs
@@ -7,8 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
 use clock::ReplicaId;
-use collections::{hash_map, HashMap};
-use collections::{BTreeMap, HashSet};
+use collections::{hash_map, HashMap, HashSet};
 use futures::{Stream, StreamExt};
 use fuzzy::CharBag;
 use gpui::{
@@ -44,7 +43,7 @@ use std::{
     },
     time::{Duration, SystemTime},
 };
-use sum_tree::Bias;
+use sum_tree::{Bias, TreeMap};
 use sum_tree::{Edit, SeekTarget, SumTree};
 use util::{post_inc, ResultExt, TryFutureExt};

@@ -142,7 +141,7 @@ impl Worktree {
             .map(|c| c.to_ascii_lowercase())
             .collect();
         let root_name = worktree.root_name.clone();
-        let (entries_by_path, entries_by_id) = cx
+        let (entries_by_path, entries_by_id, diagnostic_summaries) = cx
             .background()
             .spawn(async move {
                 let mut entries_by_path_edits = Vec::new();
@@ -166,7 +165,22 @@ impl Worktree {
                 let mut entries_by_id = SumTree::new();
                 entries_by_path.edit(entries_by_path_edits, &());
                 entries_by_id.edit(entries_by_id_edits, &());
-                (entries_by_path, entries_by_id)
+
+                let diagnostic_summaries = TreeMap::from_ordered_entries(
+                    worktree.diagnostic_summaries.into_iter().map(|summary| {
+                        (
+                            PathKey(PathBuf::from(summary.path).into()),
+                            DiagnosticSummary {
+                                error_count: summary.error_count as usize,
+                                warning_count: summary.warning_count as usize,
+                                info_count: summary.info_count as usize,
+                                hint_count: summary.hint_count as usize,
+                            },
+                        )
+                    }),
+                );
+
+                (entries_by_path, entries_by_id, diagnostic_summaries)
             })
             .await;

@@ -183,6 +197,7 @@ impl Worktree {
             entries_by_id,
             removed_entry_ids: Default::default(),
             next_entry_id: Default::default(),
+            diagnostic_summaries,
         };

         let (updates_tx, mut updates_rx) = postage::mpsc::channel(64);
@@ -223,7 +238,6 @@ impl Worktree {
                client: client.clone(),
                loading_buffers: Default::default(),
                open_buffers: Default::default(),
-               diagnostic_summaries: Default::default(),
                queued_operations: Default::default(),
                languages,
                user_store,
@@ -351,7 +365,7 @@ impl Worktree {
            Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
        }
        .iter()
-       .map(|(path, summary)| (path.clone(), summary.clone()))
+       .map(|(path, summary)| (path.0.clone(), summary.clone()))
    }

    pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers {
@@ -778,8 +792,9 @@ impl Worktree {
         let this = self.as_local_mut().unwrap();

         let summary = DiagnosticSummary::new(&diagnostics);
-        this.diagnostic_summaries
-            .insert(worktree_path.clone(), summary.clone());
+        this.snapshot
+            .diagnostic_summaries
+            .insert(PathKey(worktree_path.clone()), summary.clone());
         this.diagnostics.insert(worktree_path.clone(), diagnostics);

         cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
@@ -796,11 +811,13 @@ impl Worktree {
                    .send(proto::UpdateDiagnosticSummary {
                        project_id,
                        worktree_id,
-                       path,
-                       error_count: summary.error_count as u32,
-                       warning_count: summary.warning_count as u32,
-                       info_count: summary.info_count as u32,
-                       hint_count: summary.hint_count as u32,
+                       summary: Some(proto::DiagnosticSummary {
+                           path,
+                           error_count: summary.error_count as u32,
+                           warning_count: summary.warning_count as u32,
+                           info_count: summary.info_count as u32,
+                           hint_count: summary.hint_count as u32,
+                       }),
                    })
                    .await
                    .log_err()
@@ -890,6 +907,7 @@ pub struct Snapshot {
     entries_by_id: SumTree<PathEntry>,
     removed_entry_ids: HashMap<u64, usize>,
     next_entry_id: Arc<AtomicUsize>,
+    diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
 }

 pub struct LocalWorktree {
@@ -904,7 +922,6 @@ pub struct LocalWorktree {
     open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
     shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
     diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
-    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
     queued_operations: Vec<(u64, Operation)>,
     language_registry: Arc<LanguageRegistry>,
     client: Arc<Client>,
@@ -928,7 +945,6 @@ pub struct RemoteWorktree {
     replica_id: ReplicaId,
     loading_buffers: LoadingBuffers,
     open_buffers: HashMap<usize, RemoteBuffer>,
-    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
     languages: Arc<LanguageRegistry>,
     user_store: ModelHandle<UserStore>,
     queued_operations: Vec<(u64, Operation)>,
@@ -986,6 +1002,7 @@ impl LocalWorktree {
            entries_by_id: Default::default(),
            removed_entry_ids: Default::default(),
            next_entry_id: Arc::new(next_entry_id),
+           diagnostic_summaries: Default::default(),
        };
        if let Some(metadata) = metadata {
            snapshot.insert_entry(
@@ -1011,7 +1028,6 @@ impl LocalWorktree {
                open_buffers: Default::default(),
                shared_buffers: Default::default(),
                diagnostics: Default::default(),
-               diagnostic_summaries: Default::default(),
                queued_operations: Default::default(),
                language_registry: languages,
                client,
@@ -1626,17 +1642,19 @@ impl RemoteWorktree {
         envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
         cx: &mut ModelContext<Worktree>,
     ) {
-        let path: Arc<Path> = Path::new(&envelope.payload.path).into();
-        self.diagnostic_summaries.insert(
-            path.clone(),
-            DiagnosticSummary {
-                error_count: envelope.payload.error_count as usize,
-                warning_count: envelope.payload.warning_count as usize,
-                info_count: envelope.payload.info_count as usize,
-                hint_count: envelope.payload.hint_count as usize,
-            },
-        );
-        cx.emit(Event::DiagnosticsUpdated(path));
+        if let Some(summary) = envelope.payload.summary {
+            let path: Arc<Path> = Path::new(&summary.path).into();
+            self.snapshot.diagnostic_summaries.insert(
+                PathKey(path.clone()),
+                DiagnosticSummary {
+                    error_count: summary.error_count as usize,
+                    warning_count: summary.warning_count as usize,
+                    info_count: summary.info_count as usize,
+                    hint_count: summary.hint_count as usize,
+                },
+            );
+            cx.emit(Event::DiagnosticsUpdated(path));
+        }
     }

     pub fn disk_based_diagnostics_updated(&self, cx: &mut ModelContext<Worktree>) {
@@ -1679,10 +1697,15 @@ impl Snapshot {
             root_name,
             entries: self
                 .entries_by_path
-                .cursor::<()>()
+                .iter()
                 .filter(|e| !e.is_ignored)
                 .map(Into::into)
                 .collect(),
+            diagnostic_summaries: self
+                .diagnostic_summaries
+                .iter()
+                .map(|(path, summary)| summary.to_proto(path.0.clone()))
+                .collect(),
         }
     }

@@ -4160,6 +4183,7 @@ mod tests {
            root_name: Default::default(),
            root_char_bag: Default::default(),
            next_entry_id: next_entry_id.clone(),
+           diagnostic_summaries: Default::default(),
        };
        initial_snapshot.insert_entry(
            Entry::new(
diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto
index 71ceb4d9ac..669ffdbd7e 100644
--- a/crates/rpc/proto/zed.proto
+++ b/crates/rpc/proto/zed.proto
@@ -170,6 +170,10 @@ message BufferSaved {
 message UpdateDiagnosticSummary {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
+    DiagnosticSummary summary = 3;
+}
+
+message DiagnosticSummary {
     string path = 3;
     uint32 error_count = 4;
     uint32 warning_count = 5;
@@ -256,6 +260,7 @@ message Worktree {
     uint64 id = 1;
     string root_name = 2;
     repeated Entry entries = 3;
+    repeated DiagnosticSummary diagnostic_summaries = 4;
 }

 message Entry {
diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs
index 0d84a89e14..19792caace 100644
--- a/crates/server/src/rpc.rs
+++ b/crates/server/src/rpc.rs
@@ -17,7 +17,7 @@ use rpc::{
     Connection, ConnectionId, Peer, TypedEnvelope,
 };
 use sha1::{Digest as _, Sha1};
-use std::{any::TypeId, future::Future, mem, sync::Arc, time::Instant};
+use std::{any::TypeId, future::Future, mem, path::PathBuf, sync::Arc, time::Instant};
 use store::{Store, Worktree};
 use surf::StatusCode;
 use tide::log;
@@ -302,6 +302,11 @@ impl Server {
                        id: *id,
                        root_name: worktree.root_name.clone(),
                        entries: share.entries.values().cloned().collect(),
+                       diagnostic_summaries: share
+                           .diagnostic_summaries
+                           .values()
+                           .cloned()
+                           .collect(),
                    })
                })
                .collect();
@@ -473,11 +478,17 @@ impl Server {
            .map(|entry| (entry.id, entry))
            .collect();

+        let diagnostic_summaries = mem::take(&mut worktree.diagnostic_summaries)
+            .into_iter()
+            .map(|summary| (PathBuf::from(summary.path.clone()), summary))
+            .collect();
+
         let contact_user_ids = self.state_mut().share_worktree(
             request.payload.project_id,
             worktree.id,
             request.sender_id,
             entries,
+            diagnostic_summaries,
         );
         if let Some(contact_user_ids) = contact_user_ids {
             self.peer.respond(request.receipt(), proto::Ack {}).await?;
@@ -520,13 +531,23 @@ impl Server {
     }

     async fn update_diagnostic_summary(
-        self: Arc<Self>,
+        mut self: Arc<Self>,
         request: TypedEnvelope<proto::UpdateDiagnosticSummary>,
     ) -> tide::Result<()> {
-        let receiver_ids = self
-            .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
+        let receiver_ids = request
+            .payload
+            .summary
+            .clone()
+            .and_then(|summary| {
+                self.state_mut().update_diagnostic_summary(
+                    request.payload.project_id,
+                    request.payload.worktree_id,
+                    request.sender_id,
+                    summary,
+                )
+            })
             .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -1816,6 +1837,39 @@ mod tests {
         .await
         .unwrap();

+        // Simulate a language server reporting errors for a file.
+        fake_language_server
+            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+                uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
+                version: None,
+                diagnostics: vec![lsp::Diagnostic {
+                    severity: Some(lsp::DiagnosticSeverity::ERROR),
+                    range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
+                    message: "message 1".to_string(),
+                    ..Default::default()
+                }],
+            })
+            .await;
+
+        // Wait for server to see the diagnostics update.
+        server
+            .condition(|store| {
+                let worktree = store
+                    .project(project_id)
+                    .unwrap()
+                    .worktrees
+                    .get(&worktree_id.to_proto())
+                    .unwrap();
+
+                !worktree
+                    .share
+                    .as_ref()
+                    .unwrap()
+                    .diagnostic_summaries
+                    .is_empty()
+            })
+            .await;
+
         // Join the worktree as client B.
         let project_b = Project::remote(
             project_id,
@@ -1828,7 +1882,24 @@ mod tests {
         .await
         .unwrap();

-        // Simulate a language server reporting errors for a file.
+        project_b.read_with(&cx_b, |project, cx| {
+            assert_eq!(
+                project.diagnostic_summaries(cx).collect::<Vec<_>>(),
+                &[(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("a.rs")),
+                    },
+                    DiagnosticSummary {
+                        error_count: 1,
+                        warning_count: 0,
+                        ..Default::default()
+                    },
+                )]
+            )
+        });
+
+        // Simulate a language server reporting more errors for a file.
         fake_language_server
             .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                 uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
@@ -1853,6 +1924,7 @@ mod tests {
             })
             .await;

+        // Client b gets the updated summaries
         project_b
             .condition(&cx_b, |project, cx| {
                 project.diagnostic_summaries(cx).collect::<Vec<_>>()
@@ -1870,7 +1942,7 @@ mod tests {
             })
             .await;

-        // Open the file with the errors.
+        // Open the file with the errors on client B. They should be present.
         let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
         let buffer_b = cx_b
             .background()
diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs
index e4d740629f..7e8523b06c 100644
--- a/crates/server/src/rpc/store.rs
+++ b/crates/server/src/rpc/store.rs
@@ -1,8 +1,8 @@
 use crate::db::{ChannelId, UserId};
 use anyhow::anyhow;
-use collections::{HashMap, HashSet};
+use collections::{BTreeMap, HashMap, HashSet};
 use rpc::{proto, ConnectionId};
-use std::collections::hash_map;
+use std::{collections::hash_map, path::PathBuf};

 #[derive(Default)]
 pub struct Store {
@@ -41,6 +41,7 @@ pub struct ProjectShare {

 pub struct WorktreeShare {
     pub entries: HashMap<u64, proto::Entry>,
+    pub diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
 }

 #[derive(Default)]
@@ -385,17 +386,42 @@ impl Store {
         worktree_id: u64,
         connection_id: ConnectionId,
         entries: HashMap<u64, proto::Entry>,
+        diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
     ) -> Option<Vec<UserId>> {
         let project = self.projects.get_mut(&project_id)?;
         let worktree = project.worktrees.get_mut(&worktree_id)?;
         if project.host_connection_id == connection_id && project.share.is_some() {
-            worktree.share = Some(WorktreeShare { entries });
+            worktree.share = Some(WorktreeShare {
+                entries,
+                diagnostic_summaries,
+            });
             Some(project.authorized_user_ids())
         } else {
             None
         }
     }

+    pub fn update_diagnostic_summary(
+        &mut self,
+        project_id: u64,
+        worktree_id: u64,
+        connection_id: ConnectionId,
+        summary: proto::DiagnosticSummary,
+    ) -> Option<Vec<ConnectionId>> {
+        let project = self.projects.get_mut(&project_id)?;
+        let worktree = project.worktrees.get_mut(&worktree_id)?;
+        if project.host_connection_id == connection_id {
+            if let Some(share) = worktree.share.as_mut() {
+                share
+                    .diagnostic_summaries
+                    .insert(summary.path.clone().into(), summary);
+                return Some(project.connection_ids());
+            }
+        }
+
+        None
+    }
+
     pub fn join_project(
         &mut self,
         connection_id: ConnectionId,
@@ -497,6 +523,11 @@ impl Store {
         Some(self.channels.get(&channel_id)?.connection_ids())
     }

+    #[cfg(test)]
+    pub fn project(&self, project_id: u64) -> Option<&Project> {
+        self.projects.get(&project_id)
+    }
+
     pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
         let project = self.projects.get(&project_id)?;
         if project.host_connection_id == connection_id
diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs
index f50c233d05..1de6b2f589 100644
--- a/crates/sum_tree/src/tree_map.rs
+++ b/crates/sum_tree/src/tree_map.rs
@@ -21,6 +21,16 @@ pub struct MapKey<K>(K);
 pub struct MapKeyRef<'a, K>(Option<&'a K>);

 impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
+    pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
+        let tree = SumTree::from_iter(
+            entries
+                .into_iter()
+                .map(|(key, value)| MapEntry { key, value }),
+            &(),
+        );
+        Self(tree)
+    }
+
     pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
         let mut cursor = self.0.cursor::<MapKeyRef<'a, K>>();
         cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());