Track just the local dirty state

Piotr Osiewicz 2024-03-05 18:15:43 +01:00
parent e4c2d86a80
commit c2025f1327
5 changed files with 49 additions and 17 deletions
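
In rough terms, the change works like this: the protocol no longer ships the `saved_undo_top` transaction with `BufferSaved`/`BufferReloaded`. Instead, each replica computes only its own local dirty bit (`Buffer::has_local_changes`, a comparison of the top of the undo stack against `saved_undo_top`), sends it as a plain `bool` on `UpdateBuffer` and `AckBufferOperation`, and records the flags it receives, keyed by the sending connection, in a new `peer_has_changes` map. A buffer then counts as dirty when the local replica or any peer reports changes, and a save clears the recorded peer flags. The sketch below illustrates that bookkeeping with standalone stand-in types (the `ConnectionId` alias and `DirtyState` struct are illustrative only, not Zed's real `rpc::ConnectionId` or `language::Buffer`):

use std::collections::BTreeMap;

// Illustrative stand-in for rpc::ConnectionId.
type ConnectionId = u32;

// Illustrative stand-in for the dirty-tracking part of language::Buffer.
#[derive(Default)]
struct DirtyState {
    // Whether this replica has unsaved edits of its own
    // (in Zed: top of the undo stack differs from saved_undo_top).
    local_changes: bool,
    // Last `is_dirty` flag received from each peer, keyed by connection.
    peer_has_changes: BTreeMap<ConnectionId, bool>,
}

impl DirtyState {
    // Mirrors Buffer::mark_dirty: record the flag a peer sent alongside its operations.
    fn mark_dirty(&mut self, peer: ConnectionId, dirty: bool) {
        *self.peer_has_changes.entry(peer).or_default() = dirty;
    }

    // Mirrors Buffer::content_differs: dirty if we or any peer has unsaved changes.
    fn content_differs(&self) -> bool {
        self.local_changes || self.peer_has_changes.values().any(|d| *d)
    }

    // Mirrors the save path: saving clears every recorded peer flag.
    fn did_save(&mut self) {
        self.local_changes = false;
        self.peer_has_changes.values_mut().for_each(|d| *d = false);
    }
}

fn main() {
    let mut state = DirtyState::default();
    state.mark_dirty(1, true); // peer on connection 1 reports unsaved edits
    assert!(state.content_differs());
    state.did_save();
    assert!(!state.content_differs());
}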

View file

@@ -205,6 +205,7 @@ impl ChannelBuffer
     pub fn acknowledge_buffer_version(&mut self, cx: &mut ModelContext<'_, ChannelBuffer>) {
         let buffer = self.buffer.read(cx);
+        let is_dirty = buffer.has_local_changes();
         let version = buffer.version();
         let buffer_id = buffer.remote_id().into();
         let client = self.client.clone();
@@ -219,6 +220,7 @@ impl ChannelBuffer
                     buffer_id,
                     epoch,
                     version: serialize_version(&version),
+                    is_dirty,
                 })
                 .ok();
             Ok(())

View file

@@ -22,6 +22,7 @@ use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLab
 use lazy_static::lazy_static;
 use lsp::LanguageServerId;
 use parking_lot::Mutex;
+use rpc::ConnectionId;
 use similar::{ChangeTag, TextDiff};
 use smallvec::SmallVec;
 use smol::future::yield_now;
@@ -90,6 +91,8 @@ pub struct Buffer {
     /// or saved to disk.
     saved_version: clock::Global,
     saved_undo_top: Option<Transaction>,
+    /// True if a peer with given id has any local changes to the buffer.
+    peer_has_changes: BTreeMap<ConnectionId, bool>,
     transaction_depth: usize,
     was_dirty_before_starting_transaction: Option<bool>,
     reload_task: Option<Task<Result<()>>>,
@@ -703,6 +706,7 @@ impl Buffer {
             completion_triggers_timestamp: Default::default(),
             deferred_ops: OperationQueue::new(),
             saved_undo_top: None,
+            peer_has_changes: Default::default(),
         }
     }
@@ -792,6 +796,9 @@ impl Buffer {
             .peek_undo_stack()
             .map(|entry| entry.transaction())
             .cloned();
+        self.peer_has_changes
+            .values_mut()
+            .for_each(|has_changed| *has_changed = false);
         cx.emit(Event::Saved);
         cx.notify();
     }
@@ -1515,9 +1522,16 @@ impl Buffer {
         self.end_transaction(cx)
     }
-    fn content_differs(&self) -> bool {
+    pub fn has_local_changes(&self) -> bool {
         self.peek_undo_stack().map(|entry| entry.transaction()) != self.saved_undo_top.as_ref()
     }
+    pub fn content_differs(&self) -> bool {
+        self.has_local_changes()
+            || self
+                .peer_has_changes
+                .values()
+                .any(|has_changes| *has_changes)
+    }

     /// Checks if the buffer has unsaved changes.
     pub fn is_dirty(&self) -> bool {
         self.content_differs() || self.file.as_ref().map_or(false, |file| file.is_deleted())
@@ -1829,6 +1843,10 @@ impl Buffer {
         cx.notify();
     }

+    pub fn mark_dirty(&mut self, peer_id: ConnectionId, dirty: bool) {
+        *self.peer_has_changes.entry(peer_id).or_default() = dirty;
+    }
+
     /// Applies the given remote operations to the buffer.
     pub fn apply_ops<I: IntoIterator<Item = Operation>>(
         &mut self,
@@ -1980,6 +1998,7 @@ impl Buffer {
     /// Removes the selections for a given peer.
     pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
         self.remote_selections.remove(&replica_id);
+        //self.peer_has_changes.remove(&replica_id);
         cx.notify();
     }

View file

@@ -185,6 +185,8 @@ enum BufferOrderedMessage {
     Operation {
         buffer_id: BufferId,
         operation: proto::Operation,
+        is_dirty: bool,
+        is_local_dirty: bool,
     },
     LanguageServerUpdate {
         language_server_id: LanguageServerId,
@@ -2210,18 +2212,19 @@ impl Project {
         let mut operations_by_buffer_id = HashMap::default();
         async fn flush_operations(
             this: &WeakModel<Project>,
-            operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
+            operations_by_buffer_id: &mut HashMap<BufferId, (Vec<proto::Operation>, bool)>,
             needs_resync_with_host: &mut bool,
             is_local: bool,
             cx: &mut AsyncAppContext,
         ) -> Result<()> {
-            for (buffer_id, operations) in operations_by_buffer_id.drain() {
+            for (buffer_id, (operations, is_dirty)) in operations_by_buffer_id.drain() {
                 let request = this.update(cx, |this, _| {
                     let project_id = this.remote_id()?;
                     Some(this.client.request(proto::UpdateBuffer {
                         buffer_id: buffer_id.into(),
                         project_id,
                         operations,
+                        is_dirty,
                     }))
                 })?;
                 if let Some(request) = request {
@@ -2239,20 +2242,22 @@ impl Project {
         while let Some(changes) = changes.next().await {
             let is_local = this.update(&mut cx, |this, _| this.is_local())?;
             for change in changes {
                 match change {
                     BufferOrderedMessage::Operation {
                         buffer_id,
                         operation,
+                        is_dirty,
+                        is_local_dirty,
                     } => {
                         if needs_resync_with_host {
                             continue;
                         }
+                        let is_dirty = if is_local { is_dirty } else { is_local_dirty };
                         operations_by_buffer_id
                             .entry(buffer_id)
-                            .or_insert(Vec::new())
+                            .or_insert((Vec::new(), is_dirty))
+                            .0
                             .push(operation);
                     }
@@ -2323,10 +2328,13 @@ impl Project {
         match event {
             BufferEvent::Operation(operation) => {
+                let buffer = buffer.read(cx);
                 self.buffer_ordered_messages_tx
                     .unbounded_send(BufferOrderedMessage::Operation {
-                        buffer_id: buffer.read(cx).remote_id(),
+                        buffer_id: buffer.remote_id(),
                         operation: language::proto::serialize_operation(operation),
+                        is_dirty: buffer.is_dirty(),
+                        is_local_dirty: buffer.has_local_changes(),
                     })
                     .ok();
             }
@@ -7733,6 +7741,7 @@ impl Project {
         this.update(&mut cx, |this, cx| {
             let payload = envelope.payload.clone();
             let buffer_id = BufferId::new(payload.buffer_id)?;
+            dbg!(payload.is_dirty);
             let ops = payload
                 .operations
                 .into_iter()
@@ -7742,7 +7751,10 @@ impl Project {
             match this.opened_buffers.entry(buffer_id) {
                 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
                     OpenBuffer::Strong(buffer) => {
-                        buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
+                        buffer.update(cx, |buffer, cx| {
+                            buffer.mark_dirty(envelope.sender_id, payload.is_dirty);
+                            buffer.apply_ops(ops, cx)
+                        })?;
                     }
                     OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
                     OpenBuffer::Weak(_) => {}
@@ -7920,8 +7932,6 @@ impl Project {
                 buffer_id: buffer_id.into(),
                 version: serialize_version(buffer.saved_version()),
                 mtime: Some(buffer.saved_mtime().into()),
-                saved_undo_top,
             })?)
         }
@@ -8020,10 +8030,9 @@ impl Project {
                         line_ending: language::proto::serialize_line_ending(
                             buffer.line_ending(),
                         ) as i32,
-                        saved_undo_top,
                     })
                     .log_err();
+                let is_dirty = buffer.is_dirty();
                 cx.background_executor()
                     .spawn(
                         async move {
@@ -8034,6 +8043,7 @@ impl Project {
                                 project_id,
                                 buffer_id: buffer_id.into(),
                                 operations: chunk,
+                                is_dirty,
                             })
                             .await?;
                         }
@@ -8662,9 +8672,10 @@ impl Project {
             .iter()
             .filter_map(|(id, buffer)| {
                 let buffer = buffer.upgrade()?;
+                let buffer = buffer.read(cx);
                 Some(proto::BufferVersion {
                     id: (*id).into(),
-                    version: language::proto::serialize_version(&buffer.read(cx).version),
+                    version: language::proto::serialize_version(&buffer.version),
                 })
             })
             .collect();
@@ -8699,6 +8710,7 @@ impl Project {
             if let Some(buffer) = this.buffer_for_id(buffer_id) {
                 let operations =
                     buffer.read(cx).serialize_ops(Some(remote_version), cx);
+                let is_dirty = buffer.read(cx).is_dirty();
                 cx.background_executor().spawn(async move {
                     let operations = operations.await;
                     for chunk in split_operations(operations) {
@@ -8707,6 +8719,7 @@ impl Project {
                             project_id,
                             buffer_id: buffer_id.into(),
                             operations: chunk,
+                            is_dirty,
                         })
                         .await?;
                     }
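
One subtlety in the Project changes above: a buffer edit queues both flags, and which one goes over the wire appears to depend on which side of the collaboration this project is. The host already aggregates every peer's flag inside `Buffer::is_dirty()`, so it can forward the combined value to guests; a guest sends only its own `has_local_changes()`, since sending the aggregated flag would just echo the host's dirty state back at it. A minimal sketch of that selection, as a free function with assumed names rather than the real method (the parameters mirror the fields on `BufferOrderedMessage::Operation` in the diff above):

/// Which dirty flag should accompany an outgoing UpdateBuffer message.
/// `is_local_project` means "this side is the host of the shared project".
fn dirty_flag_for_update(is_local_project: bool, is_dirty: bool, is_local_dirty: bool) -> bool {
    if is_local_project {
        // Host -> guests: send the aggregated state (local edits OR any peer's edits).
        is_dirty
    } else {
        // Guest -> host: send only this guest's own unsaved edits.
        is_local_dirty
    }
}

fn main() {
    // A guest with no edits of its own reports clean, even if the host is dirty.
    assert!(!dirty_flag_for_update(false, true, false));
    // The host reports dirty as soon as anyone has unsaved edits.
    assert!(dirty_flag_for_update(true, true, false));
}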

View file

@@ -1171,7 +1171,6 @@ impl LocalWorktree {
                     buffer_id,
                     version: serialize_version(&version),
                     mtime: Some(mtime.into()),
-                    saved_undo_top,
                 })?;
             }
@@ -2958,7 +2957,6 @@ impl language::LocalFile for File {
                 mtime: Some(mtime.into()),
                 line_ending: serialize_line_ending(line_ending) as i32,
-                saved_undo_top: saved_undo_top.map(serialize_transaction),
             })
             .log_err();
     }

View file

@@ -640,6 +640,7 @@ message UpdateBuffer {
    uint64 project_id = 1;
    uint64 buffer_id = 2;
    repeated Operation operations = 3;
+   bool is_dirty = 4;
 }

 message UpdateChannelBuffer {
@@ -665,7 +666,6 @@ message BufferSaved {
    repeated VectorClockEntry version = 3;
    Timestamp mtime = 4;
    reserved 5;
-   optional Transaction saved_undo_top = 6;
 }

 message BufferReloaded {
@@ -675,7 +675,6 @@ message BufferReloaded {
    Timestamp mtime = 4;
    reserved 5;
    LineEnding line_ending = 6;
-   optional Transaction saved_undo_top = 7;
 }

 message ReloadBuffers {
@@ -1241,6 +1240,7 @@ message AckBufferOperation {
    uint64 buffer_id = 1;
    uint64 epoch = 2;
    repeated VectorClockEntry version = 3;
+   bool is_dirty = 4;
 }

 message JoinChannelBufferResponse {