Send diagnostic summaries to guests when they join the project
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
parent d7a78e14ac
commit 2dbee1d914
7 changed files with 217 additions and 66 deletions
@@ -2454,8 +2454,7 @@ mod tests {
         assert_eq!(snapshot_1.text(), "abcd123");

         // Replace the buffer 1 excerpt with new excerpts from buffer 2.
-        let (excerpt_id_2, excerpt_id_3, excerpt_id_4) =
-            multibuffer.update(cx, |multibuffer, cx| {
+        let (excerpt_id_2, excerpt_id_3, _) = multibuffer.update(cx, |multibuffer, cx| {
            multibuffer.remove_excerpts([&excerpt_id_1], cx);
            (
                multibuffer.push_excerpt(
@@ -101,6 +101,16 @@ impl DiagnosticSummary {

         this
     }
+
+    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
+        proto::DiagnosticSummary {
+            path: path.to_string_lossy().to_string(),
+            error_count: self.error_count as u32,
+            warning_count: self.warning_count as u32,
+            info_count: self.info_count as u32,
+            hint_count: self.hint_count as u32,
+        }
+    }
 }

 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
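The reverse mapping, from proto::DiagnosticSummary back into the in-memory counts, is done inline where worktrees deserialize the payload (see the worktree hunks below). A free-standing helper is not part of this commit, but a minimal sketch of that direction, assuming the field layout shown above, would look roughly like:

// Hypothetical helper, not in this commit; it mirrors the inline conversion
// performed by the worktree code when it receives a proto::DiagnosticSummary.
fn summary_from_proto(summary: &proto::DiagnosticSummary) -> DiagnosticSummary {
    DiagnosticSummary {
        error_count: summary.error_count as usize,
        warning_count: summary.warning_count as usize,
        info_count: summary.info_count as usize,
        hint_count: summary.hint_count as usize,
    }
}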
@@ -7,8 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
 use clock::ReplicaId;
-use collections::{hash_map, HashMap};
-use collections::{BTreeMap, HashSet};
+use collections::{hash_map, HashMap, HashSet};
 use futures::{Stream, StreamExt};
 use fuzzy::CharBag;
 use gpui::{
@@ -44,7 +43,7 @@ use std::{
     },
     time::{Duration, SystemTime},
 };
-use sum_tree::Bias;
+use sum_tree::{Bias, TreeMap};
 use sum_tree::{Edit, SeekTarget, SumTree};
 use util::{post_inc, ResultExt, TryFutureExt};
 
@@ -142,7 +141,7 @@ impl Worktree {
             .map(|c| c.to_ascii_lowercase())
             .collect();
         let root_name = worktree.root_name.clone();
-        let (entries_by_path, entries_by_id) = cx
+        let (entries_by_path, entries_by_id, diagnostic_summaries) = cx
             .background()
             .spawn(async move {
                 let mut entries_by_path_edits = Vec::new();
@@ -166,7 +165,22 @@ impl Worktree {
                 let mut entries_by_id = SumTree::new();
                 entries_by_path.edit(entries_by_path_edits, &());
                 entries_by_id.edit(entries_by_id_edits, &());
-                (entries_by_path, entries_by_id)
+
+                let diagnostic_summaries = TreeMap::from_ordered_entries(
+                    worktree.diagnostic_summaries.into_iter().map(|summary| {
+                        (
+                            PathKey(PathBuf::from(summary.path).into()),
+                            DiagnosticSummary {
+                                error_count: summary.error_count as usize,
+                                warning_count: summary.warning_count as usize,
+                                info_count: summary.info_count as usize,
+                                hint_count: summary.hint_count as usize,
+                            },
+                        )
+                    }),
+                );
+
+                (entries_by_path, entries_by_id, diagnostic_summaries)
             })
             .await;
 
@@ -183,6 +197,7 @@ impl Worktree {
             entries_by_id,
             removed_entry_ids: Default::default(),
             next_entry_id: Default::default(),
+            diagnostic_summaries,
         };
 
         let (updates_tx, mut updates_rx) = postage::mpsc::channel(64);
@@ -223,7 +238,6 @@ impl Worktree {
             client: client.clone(),
             loading_buffers: Default::default(),
             open_buffers: Default::default(),
-            diagnostic_summaries: Default::default(),
             queued_operations: Default::default(),
             languages,
             user_store,
@@ -351,7 +365,7 @@ impl Worktree {
             Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
         }
         .iter()
-        .map(|(path, summary)| (path.clone(), summary.clone()))
+        .map(|(path, summary)| (path.0.clone(), summary.clone()))
     }
 
     pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers {
@@ -778,8 +792,9 @@ impl Worktree {
 
         let this = self.as_local_mut().unwrap();
         let summary = DiagnosticSummary::new(&diagnostics);
-        this.diagnostic_summaries
-            .insert(worktree_path.clone(), summary.clone());
+        this.snapshot
+            .diagnostic_summaries
+            .insert(PathKey(worktree_path.clone()), summary.clone());
         this.diagnostics.insert(worktree_path.clone(), diagnostics);
 
         cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
@@ -796,11 +811,13 @@ impl Worktree {
                 .send(proto::UpdateDiagnosticSummary {
                     project_id,
                     worktree_id,
+                    summary: Some(proto::DiagnosticSummary {
                         path,
                         error_count: summary.error_count as u32,
                         warning_count: summary.warning_count as u32,
                         info_count: summary.info_count as u32,
                         hint_count: summary.hint_count as u32,
+                    }),
                 })
                 .await
                 .log_err()
@@ -890,6 +907,7 @@ pub struct Snapshot {
     entries_by_id: SumTree<PathEntry>,
     removed_entry_ids: HashMap<u64, usize>,
     next_entry_id: Arc<AtomicUsize>,
+    diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
 }
 
 pub struct LocalWorktree {
@@ -904,7 +922,6 @@ pub struct LocalWorktree {
     open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
     shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
     diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
-    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
     queued_operations: Vec<(u64, Operation)>,
     language_registry: Arc<LanguageRegistry>,
     client: Arc<Client>,
@@ -928,7 +945,6 @@ pub struct RemoteWorktree {
     replica_id: ReplicaId,
     loading_buffers: LoadingBuffers,
     open_buffers: HashMap<usize, RemoteBuffer>,
-    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
     languages: Arc<LanguageRegistry>,
     user_store: ModelHandle<UserStore>,
    queued_operations: Vec<(u64, Operation)>,
@@ -986,6 +1002,7 @@ impl LocalWorktree {
             entries_by_id: Default::default(),
             removed_entry_ids: Default::default(),
             next_entry_id: Arc::new(next_entry_id),
+            diagnostic_summaries: Default::default(),
         };
         if let Some(metadata) = metadata {
             snapshot.insert_entry(
@@ -1011,7 +1028,6 @@ impl LocalWorktree {
             open_buffers: Default::default(),
             shared_buffers: Default::default(),
             diagnostics: Default::default(),
-            diagnostic_summaries: Default::default(),
             queued_operations: Default::default(),
             language_registry: languages,
             client,
@@ -1626,18 +1642,20 @@ impl RemoteWorktree {
         envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
         cx: &mut ModelContext<Worktree>,
     ) {
-        let path: Arc<Path> = Path::new(&envelope.payload.path).into();
-        self.diagnostic_summaries.insert(
-            path.clone(),
+        if let Some(summary) = envelope.payload.summary {
+            let path: Arc<Path> = Path::new(&summary.path).into();
+            self.snapshot.diagnostic_summaries.insert(
+                PathKey(path.clone()),
                 DiagnosticSummary {
-                error_count: envelope.payload.error_count as usize,
-                warning_count: envelope.payload.warning_count as usize,
-                info_count: envelope.payload.info_count as usize,
-                hint_count: envelope.payload.hint_count as usize,
+                    error_count: summary.error_count as usize,
+                    warning_count: summary.warning_count as usize,
+                    info_count: summary.info_count as usize,
+                    hint_count: summary.hint_count as usize,
                 },
             );
             cx.emit(Event::DiagnosticsUpdated(path));
         }
+    }
 
     pub fn disk_based_diagnostics_updated(&self, cx: &mut ModelContext<Worktree>) {
         cx.emit(Event::DiskBasedDiagnosticsUpdated);
@@ -1679,10 +1697,15 @@ impl Snapshot {
             root_name,
             entries: self
                 .entries_by_path
-                .cursor::<()>()
+                .iter()
                 .filter(|e| !e.is_ignored)
                 .map(Into::into)
                 .collect(),
+            diagnostic_summaries: self
+                .diagnostic_summaries
+                .iter()
+                .map(|(path, summary)| summary.to_proto(path.0.clone()))
+                .collect(),
         }
     }
 
@@ -4160,6 +4183,7 @@ mod tests {
             root_name: Default::default(),
             root_char_bag: Default::default(),
             next_entry_id: next_entry_id.clone(),
+            diagnostic_summaries: Default::default(),
         };
         initial_snapshot.insert_entry(
             Entry::new(
@@ -170,6 +170,10 @@ message BufferSaved {
 message UpdateDiagnosticSummary {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
+    DiagnosticSummary summary = 3;
+}
+
+message DiagnosticSummary {
     string path = 3;
     uint32 error_count = 4;
     uint32 warning_count = 5;
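With the counts split into their own message, the host now nests them under the optional summary field of UpdateDiagnosticSummary, as the worktree hunk above shows on the Rust side. A rough construction sketch, with placeholder values for the ids and counts:

// Sketch of building the reshaped message; all values are placeholders.
let message = proto::UpdateDiagnosticSummary {
    project_id: 42,
    worktree_id: 7,
    summary: Some(proto::DiagnosticSummary {
        path: "a.rs".to_string(),
        error_count: 1,
        warning_count: 0,
        info_count: 0,
        hint_count: 0,
    }),
};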
@@ -256,6 +260,7 @@ message Worktree {
     uint64 id = 1;
     string root_name = 2;
     repeated Entry entries = 3;
+    repeated DiagnosticSummary diagnostic_summaries = 4;
 }
 
 message Entry {
@@ -17,7 +17,7 @@ use rpc::{
     Connection, ConnectionId, Peer, TypedEnvelope,
 };
 use sha1::{Digest as _, Sha1};
-use std::{any::TypeId, future::Future, mem, sync::Arc, time::Instant};
+use std::{any::TypeId, future::Future, mem, path::PathBuf, sync::Arc, time::Instant};
 use store::{Store, Worktree};
 use surf::StatusCode;
 use tide::log;
@@ -302,6 +302,11 @@ impl Server {
                         id: *id,
                         root_name: worktree.root_name.clone(),
                         entries: share.entries.values().cloned().collect(),
+                        diagnostic_summaries: share
+                            .diagnostic_summaries
+                            .values()
+                            .cloned()
+                            .collect(),
                     })
                 })
                 .collect();
@@ -473,11 +478,17 @@ impl Server {
             .map(|entry| (entry.id, entry))
             .collect();
 
+        let diagnostic_summaries = mem::take(&mut worktree.diagnostic_summaries)
+            .into_iter()
+            .map(|summary| (PathBuf::from(summary.path.clone()), summary))
+            .collect();
+
         let contact_user_ids = self.state_mut().share_worktree(
             request.payload.project_id,
             worktree.id,
             request.sender_id,
             entries,
+            diagnostic_summaries,
         );
         if let Some(contact_user_ids) = contact_user_ids {
             self.peer.respond(request.receipt(), proto::Ack {}).await?;
@@ -520,13 +531,23 @@ impl Server {
     }
 
     async fn update_diagnostic_summary(
-        self: Arc<Server>,
+        mut self: Arc<Server>,
         request: TypedEnvelope<proto::UpdateDiagnosticSummary>,
     ) -> tide::Result<()> {
-        let receiver_ids = self
-            .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
+        let receiver_ids = request
+            .payload
+            .summary
+            .clone()
+            .and_then(|summary| {
+                self.state_mut().update_diagnostic_summary(
+                    request.payload.project_id,
+                    request.payload.worktree_id,
+                    request.sender_id,
+                    summary,
+                )
+            })
             .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
 
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -1816,6 +1837,39 @@ mod tests {
         .await
         .unwrap();
 
+        // Simulate a language server reporting errors for a file.
+        fake_language_server
+            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+                uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
+                version: None,
+                diagnostics: vec![lsp::Diagnostic {
+                    severity: Some(lsp::DiagnosticSeverity::ERROR),
+                    range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
+                    message: "message 1".to_string(),
+                    ..Default::default()
+                }],
+            })
+            .await;
+
+        // Wait for server to see the diagnostics update.
+        server
+            .condition(|store| {
+                let worktree = store
+                    .project(project_id)
+                    .unwrap()
+                    .worktrees
+                    .get(&worktree_id.to_proto())
+                    .unwrap();
+
+                !worktree
+                    .share
+                    .as_ref()
+                    .unwrap()
+                    .diagnostic_summaries
+                    .is_empty()
+            })
+            .await;
+
         // Join the worktree as client B.
         let project_b = Project::remote(
             project_id,
@@ -1828,7 +1882,24 @@
         .await
         .unwrap();
 
-        // Simulate a language server reporting errors for a file.
+        project_b.read_with(&cx_b, |project, cx| {
+            assert_eq!(
+                project.diagnostic_summaries(cx).collect::<Vec<_>>(),
+                &[(
+                    ProjectPath {
+                        worktree_id,
+                        path: Arc::from(Path::new("a.rs")),
+                    },
+                    DiagnosticSummary {
+                        error_count: 1,
+                        warning_count: 0,
+                        ..Default::default()
+                    },
+                )]
+            )
+        });
+
+        // Simulate a language server reporting more errors for a file.
         fake_language_server
             .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                 uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
@@ -1853,6 +1924,7 @@
             })
             .await;
 
+        // Client b gets the updated summaries
         project_b
             .condition(&cx_b, |project, cx| {
                 project.diagnostic_summaries(cx).collect::<Vec<_>>()
@@ -1870,7 +1942,7 @@
             })
             .await;
 
-        // Open the file with the errors.
+        // Open the file with the errors on client B. They should be present.
         let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
         let buffer_b = cx_b
             .background()
@@ -1,8 +1,8 @@
 use crate::db::{ChannelId, UserId};
 use anyhow::anyhow;
-use collections::{HashMap, HashSet};
+use collections::{BTreeMap, HashMap, HashSet};
 use rpc::{proto, ConnectionId};
-use std::collections::hash_map;
+use std::{collections::hash_map, path::PathBuf};
 
 #[derive(Default)]
 pub struct Store {
@@ -41,6 +41,7 @@ pub struct ProjectShare {
 
 pub struct WorktreeShare {
     pub entries: HashMap<u64, proto::Entry>,
+    pub diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
 }
 
 #[derive(Default)]
@@ -385,17 +386,42 @@ impl Store {
         worktree_id: u64,
         connection_id: ConnectionId,
         entries: HashMap<u64, proto::Entry>,
+        diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
     ) -> Option<Vec<UserId>> {
         let project = self.projects.get_mut(&project_id)?;
         let worktree = project.worktrees.get_mut(&worktree_id)?;
         if project.host_connection_id == connection_id && project.share.is_some() {
-            worktree.share = Some(WorktreeShare { entries });
+            worktree.share = Some(WorktreeShare {
+                entries,
+                diagnostic_summaries,
+            });
             Some(project.authorized_user_ids())
         } else {
             None
         }
     }
 
+    pub fn update_diagnostic_summary(
+        &mut self,
+        project_id: u64,
+        worktree_id: u64,
+        connection_id: ConnectionId,
+        summary: proto::DiagnosticSummary,
+    ) -> Option<Vec<ConnectionId>> {
+        let project = self.projects.get_mut(&project_id)?;
+        let worktree = project.worktrees.get_mut(&worktree_id)?;
+        if project.host_connection_id == connection_id {
+            if let Some(share) = worktree.share.as_mut() {
+                share
+                    .diagnostic_summaries
+                    .insert(summary.path.clone().into(), summary);
+                return Some(project.connection_ids());
+            }
+        }
+
+        None
+    }
+
     pub fn join_project(
         &mut self,
         connection_id: ConnectionId,
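The rpc handler shown earlier feeds the embedded summary through this new Store::update_diagnostic_summary method and then broadcasts the original payload to the returned connection ids. A stripped-down sketch of that call site, assuming the handler's request binding and a mutable store reference:

// Call-site sketch; `store`, `request`, `anyhow!`, and NO_SUCH_PROJECT are
// assumed from the rpc handler hunk earlier in this diff.
let receiver_ids = request
    .payload
    .summary
    .clone()
    .and_then(|summary| {
        store.update_diagnostic_summary(
            request.payload.project_id,
            request.payload.worktree_id,
            request.sender_id,
            summary,
        )
    })
    .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;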
@@ -497,6 +523,11 @@ impl Store {
         Some(self.channels.get(&channel_id)?.connection_ids())
     }
 
+    #[cfg(test)]
+    pub fn project(&self, project_id: u64) -> Option<&Project> {
+        self.projects.get(&project_id)
+    }
+
     pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
         let project = self.projects.get(&project_id)?;
         if project.host_connection_id == connection_id
@@ -21,6 +21,16 @@ pub struct MapKey<K>(K);
 pub struct MapKeyRef<'a, K>(Option<&'a K>);
 
 impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
+    pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
+        let tree = SumTree::from_iter(
+            entries
+                .into_iter()
+                .map(|(key, value)| MapEntry { key, value }),
+            &(),
+        );
+        Self(tree)
+    }
+
     pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
         let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
         cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
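from_ordered_entries builds the underlying SumTree straight from the iterator, so it assumes the caller supplies entries already sorted by key; the worktree hunk satisfies this because the server keeps the summaries in a BTreeMap keyed by path and therefore sends them in path order. A small usage sketch, assuming this crate's TreeMap:

// Minimal usage sketch; the keys must already be in ascending order.
let map = TreeMap::from_ordered_entries([
    ("a.rs".to_string(), 1),
    ("b.rs".to_string(), 2),
]);
assert_eq!(map.get(&"b.rs".to_string()), Some(&2));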