Merge pull request #2409 from zed-industries/stale-excerpts

Fix stale project diagnostic excerpts for guests
Max Brunsfeld 2023-04-24 14:22:04 -07:00 committed by GitHub
commit b76194db97
2 changed files with 293 additions and 105 deletions
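
Previously, language server status updates (such as "disk-based diagnostics finished") were broadcast to guests directly over the client, while buffer operations were batched on a separate ordered channel; a guest could therefore handle the "finished" notification before the diagnostic operations it refers to, leaving its project-diagnostics excerpts stale. The diff below routes both kinds of messages through a single ordered channel and flushes pending buffer operations before forwarding any language server update. The following is a minimal standalone sketch of that ordering idea only, using simplified stand-in types and std channels (the real code uses gpui tasks, an unbounded futures channel, and the proto messages shown in the diff):

// Standalone sketch of the ordering fix; simplified types, not the actual Zed code.
use std::sync::mpsc;
use std::thread;

// Mirrors the new `BufferOrderedMessage` enum: buffer operations and language
// server updates travel through one channel, so their relative order is preserved.
enum BufferOrderedMessage {
    Operation { buffer_id: u64, operation: String },
    LanguageServerUpdate { message: String },
}

fn main() {
    let (tx, rx) = mpsc::channel();

    // Producer: diagnostic edits are enqueued before the "finished" update.
    let producer = thread::spawn(move || {
        tx.send(BufferOrderedMessage::Operation {
            buffer_id: 1,
            operation: "diagnostics edit for one.rs".into(),
        })
        .unwrap();
        tx.send(BufferOrderedMessage::LanguageServerUpdate {
            message: "disk-based diagnostics finished".into(),
        })
        .unwrap();
    });

    // Consumer: batches operations, but flushes them before forwarding any
    // language server update, so a guest never sees "finished" ahead of the
    // operations it depends on.
    let mut pending_operations: Vec<(u64, String)> = Vec::new();
    for message in rx {
        match message {
            BufferOrderedMessage::Operation { buffer_id, operation } => {
                pending_operations.push((buffer_id, operation));
            }
            BufferOrderedMessage::LanguageServerUpdate { message } => {
                // Flush buffered operations first, mirroring what `flush_operations`
                // does in `send_buffer_ordered_messages` below.
                for (buffer_id, operation) in pending_operations.drain(..) {
                    println!("UpdateBuffer {{ buffer_id: {buffer_id} }}: {operation}");
                }
                println!("UpdateLanguageServer: {message}");
            }
        }
    }

    producer.join().unwrap();
}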


@@ -32,7 +32,10 @@ use std::{
     env, future, mem,
     path::{Path, PathBuf},
     rc::Rc,
-    sync::Arc,
+    sync::{
+        atomic::{AtomicBool, Ordering::SeqCst},
+        Arc,
+    },
 };
 use unindent::Unindent as _;
 use workspace::{
@@ -3636,6 +3639,141 @@ async fn test_collaborating_with_diagnostics(
     });
 }
+#[gpui::test(iterations = 10)]
+async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
+    deterministic: Arc<Deterministic>,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+) {
+    deterministic.forbid_parking();
+    let mut server = TestServer::start(&deterministic).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+        .await;
+
+    // Set up a fake language server.
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_language_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_progress_token: Some("the-disk-based-token".into()),
+            disk_based_diagnostics_sources: vec!["the-disk-based-diagnostics-source".into()],
+            ..Default::default()
+        }))
+        .await;
+    client_a.language_registry.add(Arc::new(language));
+
+    let file_names = &["one.rs", "two.rs", "three.rs", "four.rs", "five.rs"];
+    client_a
+        .fs
+        .insert_tree(
+            "/test",
+            json!({
+                "one.rs": "const ONE: usize = 1;",
+                "two.rs": "const TWO: usize = 2;",
+                "three.rs": "const THREE: usize = 3;",
+                "four.rs": "const FOUR: usize = 3;",
+                "five.rs": "const FIVE: usize = 3;",
+            }),
+        )
+        .await;
+    let (project_a, worktree_id) = client_a.build_local_project("/test", cx_a).await;
+
+    // Share a project as client A
+    let active_call_a = cx_a.read(ActiveCall::global);
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+
+    // Join the project as client B and open all of the files.
+    let project_b = client_b.build_remote_project(project_id, cx_b).await;
+    let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| {
+        project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx))
+    }))
+    .await
+    .unwrap();
+
+    // Simulate a language server reporting errors for a file.
+    let fake_language_server = fake_language_servers.next().await.unwrap();
+    fake_language_server
+        .request::<lsp::request::WorkDoneProgressCreate>(lsp::WorkDoneProgressCreateParams {
+            token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
+        })
+        .await
+        .unwrap();
+    fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
+        token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
+        value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
+            lsp::WorkDoneProgressBegin {
+                title: "Progress Began".into(),
+                ..Default::default()
+            },
+        )),
+    });
+    for file_name in file_names {
+        fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
+            lsp::PublishDiagnosticsParams {
+                uri: lsp::Url::from_file_path(Path::new("/test").join(file_name)).unwrap(),
+                version: None,
+                diagnostics: vec![lsp::Diagnostic {
+                    severity: Some(lsp::DiagnosticSeverity::WARNING),
+                    source: Some("the-disk-based-diagnostics-source".into()),
+                    range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
+                    message: "message one".to_string(),
+                    ..Default::default()
+                }],
+            },
+        );
+    }
+    fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
+        token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
+        value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
+            lsp::WorkDoneProgressEnd { message: None },
+        )),
+    });
+
+    // When the "disk-based diagnostics finished" message is received, the buffers'
+    // diagnostics are expected to be present.
+    let disk_based_diagnostics_finished = Arc::new(AtomicBool::new(false));
+    project_b.update(cx_b, {
+        let project_b = project_b.clone();
+        let disk_based_diagnostics_finished = disk_based_diagnostics_finished.clone();
+        move |_, cx| {
+            cx.subscribe(&project_b, move |_, _, event, cx| {
+                if let project::Event::DiskBasedDiagnosticsFinished { .. } = event {
+                    disk_based_diagnostics_finished.store(true, SeqCst);
+                    for buffer in &guest_buffers {
+                        assert_eq!(
+                            buffer
+                                .read(cx)
+                                .snapshot()
+                                .diagnostics_in_range::<_, usize>(0..5, false)
+                                .count(),
+                            1,
+                            "expected a diagnostic for buffer {:?}",
+                            buffer.read(cx).file().unwrap().path(),
+                        );
+                    }
+                }
+            })
+            .detach();
+        }
+    });
+
+    deterministic.run_until_parked();
+    assert!(disk_based_diagnostics_finished.load(SeqCst));
+}
+
 #[gpui::test(iterations = 10)]
 async fn test_collaborating_with_completion(
     deterministic: Arc<Deterministic>,


@@ -93,7 +93,7 @@ pub trait Item {
 pub struct Project {
     worktrees: Vec<WorktreeHandle>,
     active_entry: Option<ProjectEntryId>,
-    buffer_changes_tx: mpsc::UnboundedSender<BufferMessage>,
+    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
     languages: Arc<LanguageRegistry>,
     language_servers: HashMap<LanguageServerId, LanguageServerState>,
     language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
@@ -137,11 +137,16 @@ struct LspBufferSnapshot {
     snapshot: TextBufferSnapshot,
 }
 
-enum BufferMessage {
+/// Message ordered with respect to buffer operations
+enum BufferOrderedMessage {
     Operation {
         buffer_id: u64,
         operation: proto::Operation,
     },
+    LanguageServerUpdate {
+        language_server_id: LanguageServerId,
+        message: proto::update_language_server::Variant,
+    },
     Resync,
 }
@@ -443,11 +448,11 @@ impl Project {
     ) -> ModelHandle<Self> {
         cx.add_model(|cx: &mut ModelContext<Self>| {
             let (tx, rx) = mpsc::unbounded();
-            cx.spawn_weak(|this, cx| Self::send_buffer_messages(this, rx, cx))
+            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                 .detach();
             Self {
                 worktrees: Default::default(),
-                buffer_changes_tx: tx,
+                buffer_ordered_messages_tx: tx,
                 collaborators: Default::default(),
                 opened_buffers: Default::default(),
                 shared_buffers: Default::default(),
@@ -511,11 +516,11 @@ impl Project {
             }
 
             let (tx, rx) = mpsc::unbounded();
-            cx.spawn_weak(|this, cx| Self::send_buffer_messages(this, rx, cx))
+            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                 .detach();
             let mut this = Self {
                 worktrees: Vec::new(),
-                buffer_changes_tx: tx,
+                buffer_ordered_messages_tx: tx,
                 loading_buffers_by_path: Default::default(),
                 opened_buffer: watch::channel(),
                 shared_buffers: Default::default(),
@@ -1168,8 +1173,8 @@ impl Project {
                 )
             })
             .collect();
-        self.buffer_changes_tx
-            .unbounded_send(BufferMessage::Resync)
+        self.buffer_ordered_messages_tx
+            .unbounded_send(BufferOrderedMessage::Resync)
             .unwrap();
         cx.notify();
         Ok(())
@@ -1784,23 +1789,49 @@ impl Project {
         }
     }
 
-    async fn send_buffer_messages(
+    async fn send_buffer_ordered_messages(
         this: WeakModelHandle<Self>,
-        rx: UnboundedReceiver<BufferMessage>,
+        rx: UnboundedReceiver<BufferOrderedMessage>,
         mut cx: AsyncAppContext,
     ) -> Option<()> {
         const MAX_BATCH_SIZE: usize = 128;
-        let mut needs_resync_with_host = false;
         let mut operations_by_buffer_id = HashMap::default();
 
+        async fn flush_operations(
+            this: &ModelHandle<Project>,
+            operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
+            needs_resync_with_host: &mut bool,
+            is_local: bool,
+            cx: &AsyncAppContext,
+        ) {
+            for (buffer_id, operations) in operations_by_buffer_id.drain() {
+                let request = this.read_with(cx, |this, _| {
+                    let project_id = this.remote_id()?;
+                    Some(this.client.request(proto::UpdateBuffer {
+                        buffer_id,
+                        project_id,
+                        operations,
+                    }))
+                });
+                if let Some(request) = request {
+                    if request.await.is_err() && !is_local {
+                        *needs_resync_with_host = true;
+                        break;
+                    }
+                }
+            }
+        }
+
+        let mut needs_resync_with_host = false;
         let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
         while let Some(changes) = changes.next().await {
             let this = this.upgrade(&mut cx)?;
             let is_local = this.read_with(&cx, |this, _| this.is_local());
+
             for change in changes {
                 match change {
-                    BufferMessage::Operation {
+                    BufferOrderedMessage::Operation {
                         buffer_id,
                         operation,
                     } => {
@@ -1813,7 +1844,8 @@ impl Project {
                             .or_insert(Vec::new())
                             .push(operation);
                     }
-                    BufferMessage::Resync => {
+
+                    BufferOrderedMessage::Resync => {
                         operations_by_buffer_id.clear();
                         if this
                             .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
@@ -1823,25 +1855,43 @@ impl Project {
                             needs_resync_with_host = false;
                         }
                     }
+                    BufferOrderedMessage::LanguageServerUpdate {
+                        language_server_id,
+                        message,
+                    } => {
+                        flush_operations(
+                            &this,
+                            &mut operations_by_buffer_id,
+                            &mut needs_resync_with_host,
+                            is_local,
+                            &cx,
+                        )
+                        .await;
+
+                        this.read_with(&cx, |this, _| {
+                            if let Some(project_id) = this.remote_id() {
+                                this.client
+                                    .send(proto::UpdateLanguageServer {
+                                        project_id,
+                                        language_server_id: language_server_id.0 as u64,
+                                        variant: Some(message),
+                                    })
+                                    .log_err();
+                            }
+                        });
+                    }
                 }
             }
 
-            for (buffer_id, operations) in operations_by_buffer_id.drain() {
-                let request = this.read_with(&cx, |this, _| {
-                    let project_id = this.remote_id()?;
-                    Some(this.client.request(proto::UpdateBuffer {
-                        buffer_id,
-                        project_id,
-                        operations,
-                    }))
-                });
-                if let Some(request) = request {
-                    if request.await.is_err() && !is_local {
-                        needs_resync_with_host = true;
-                        break;
-                    }
-                }
-            }
+            flush_operations(
+                &this,
+                &mut operations_by_buffer_id,
+                &mut needs_resync_with_host,
+                is_local,
+                &cx,
+            )
+            .await;
         }
 
         None
@@ -1855,8 +1905,8 @@ impl Project {
     ) -> Option<()> {
         match event {
             BufferEvent::Operation(operation) => {
-                self.buffer_changes_tx
-                    .unbounded_send(BufferMessage::Operation {
+                self.buffer_ordered_messages_tx
+                    .unbounded_send(BufferOrderedMessage::Operation {
                         buffer_id: buffer.read(cx).remote_id(),
                         operation: language::proto::serialize_operation(operation),
                     })
@@ -1964,14 +2014,19 @@ impl Project {
         let task = cx.spawn_weak(|this, mut cx| async move {
             cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
             if let Some(this) = this.upgrade(&cx) {
-                this.update(&mut cx, |this, cx | {
-                    this.disk_based_diagnostics_finished(language_server_id, cx);
-                    this.broadcast_language_server_update(
-                        language_server_id,
-                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
-                            proto::LspDiskBasedDiagnosticsUpdated {},
-                        ),
-                    );
+                this.update(&mut cx, |this, cx| {
+                    this.disk_based_diagnostics_finished(
+                        language_server_id,
+                        cx,
+                    );
+                    this.buffer_ordered_messages_tx
+                        .unbounded_send(
+                            BufferOrderedMessage::LanguageServerUpdate {
+                                language_server_id,
+                                message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
+                            },
+                        )
+                        .ok();
                 });
             }
         });
@@ -2609,7 +2664,7 @@ impl Project {
     fn on_lsp_progress(
         &mut self,
         progress: lsp::ProgressParams,
-        server_id: LanguageServerId,
+        language_server_id: LanguageServerId,
         disk_based_diagnostics_progress_token: Option<String>,
         cx: &mut ModelContext<Self>,
     ) {
@@ -2622,7 +2677,7 @@ impl Project {
         };
         let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
         let language_server_status =
-            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
+            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                 status
             } else {
                 return;
@@ -2642,16 +2697,16 @@ impl Project {
             lsp::WorkDoneProgress::Begin(report) => {
                 if is_disk_based_diagnostics_progress {
                     language_server_status.has_pending_diagnostic_updates = true;
-                    self.disk_based_diagnostics_started(server_id, cx);
-                    self.broadcast_language_server_update(
-                        server_id,
-                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
-                            proto::LspDiskBasedDiagnosticsUpdating {},
-                        ),
-                    );
+                    self.disk_based_diagnostics_started(language_server_id, cx);
+                    self.buffer_ordered_messages_tx
+                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+                            language_server_id,
+                            message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
+                        })
+                        .ok();
                 } else {
                     self.on_lsp_work_start(
-                        server_id,
+                        language_server_id,
                         token.clone(),
                         LanguageServerProgress {
                             message: report.message.clone(),
@@ -2660,20 +2715,24 @@ impl Project {
                         },
                         cx,
                     );
-                    self.broadcast_language_server_update(
-                        server_id,
-                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
-                            token,
-                            message: report.message,
-                            percentage: report.percentage.map(|p| p as u32),
-                        }),
-                    );
+                    self.buffer_ordered_messages_tx
+                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+                            language_server_id,
+                            message: proto::update_language_server::Variant::WorkStart(
+                                proto::LspWorkStart {
+                                    token,
+                                    message: report.message,
+                                    percentage: report.percentage.map(|p| p as u32),
+                                },
+                            ),
+                        })
+                        .ok();
                 }
             }
             lsp::WorkDoneProgress::Report(report) => {
                 if !is_disk_based_diagnostics_progress {
                     self.on_lsp_work_progress(
-                        server_id,
+                        language_server_id,
                         token.clone(),
                         LanguageServerProgress {
                             message: report.message.clone(),
@@ -2682,16 +2741,18 @@ impl Project {
                         },
                         cx,
                     );
-                    self.broadcast_language_server_update(
-                        server_id,
-                        proto::update_language_server::Variant::WorkProgress(
-                            proto::LspWorkProgress {
-                                token,
-                                message: report.message,
-                                percentage: report.percentage.map(|p| p as u32),
-                            },
-                        ),
-                    );
+                    self.buffer_ordered_messages_tx
+                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+                            language_server_id,
+                            message: proto::update_language_server::Variant::WorkProgress(
+                                proto::LspWorkProgress {
+                                    token,
+                                    message: report.message,
+                                    percentage: report.percentage.map(|p| p as u32),
+                                },
+                            ),
+                        })
+                        .ok();
                 }
             }
             lsp::WorkDoneProgress::End(_) => {
@@ -2699,21 +2760,26 @@ impl Project {
                 if is_disk_based_diagnostics_progress {
                     language_server_status.has_pending_diagnostic_updates = false;
-                    self.disk_based_diagnostics_finished(server_id, cx);
-                    self.broadcast_language_server_update(
-                        server_id,
-                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
-                            proto::LspDiskBasedDiagnosticsUpdated {},
-                        ),
-                    );
+                    self.disk_based_diagnostics_finished(language_server_id, cx);
+                    self.buffer_ordered_messages_tx
+                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+                            language_server_id,
+                            message:
+                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
+                                    Default::default(),
+                                ),
+                        })
+                        .ok();
                 } else {
-                    self.on_lsp_work_end(server_id, token.clone(), cx);
-                    self.broadcast_language_server_update(
-                        server_id,
-                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
-                            token,
-                        }),
-                    );
+                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
+                    self.buffer_ordered_messages_tx
+                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+                            language_server_id,
+                            message: proto::update_language_server::Variant::WorkEnd(
+                                proto::LspWorkEnd { token },
+                            ),
+                        })
+                        .ok();
                 }
             }
         }
@@ -2822,22 +2888,6 @@ impl Project {
         })
     }
 
-    fn broadcast_language_server_update(
-        &self,
-        language_server_id: LanguageServerId,
-        event: proto::update_language_server::Variant,
-    ) {
-        if let Some(project_id) = self.remote_id() {
-            self.client
-                .send(proto::UpdateLanguageServer {
-                    project_id,
-                    language_server_id: language_server_id.0 as u64,
-                    variant: Some(event),
-                })
-                .log_err();
-        }
-    }
-
     pub fn language_server_statuses(
         &self,
     ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
@@ -4866,8 +4916,8 @@ impl Project {
             if is_host {
                 this.opened_buffers
                     .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
-                this.buffer_changes_tx
-                    .unbounded_send(BufferMessage::Resync)
+                this.buffer_ordered_messages_tx
+                    .unbounded_send(BufferOrderedMessage::Resync)
                     .unwrap();
             }