Merge branch 'main' into drag-and-drop

K Simmons 2022-08-25 16:32:11 -07:00
commit 8591c3f46d
12 changed files with 367 additions and 219 deletions

View file

@@ -2006,7 +2006,7 @@ impl HighlightedRange {
     }
 }
 
-fn scale_vertical_mouse_autoscroll_delta(delta: f32) -> f32 {
+pub fn scale_vertical_mouse_autoscroll_delta(delta: f32) -> f32 {
     delta.powf(1.5) / 100.0
 }

View file

@@ -3304,8 +3304,19 @@ mod tests {
     fn test_remote_multibuffer(cx: &mut MutableAppContext) {
         let host_buffer = cx.add_model(|cx| Buffer::new(0, "a", cx));
         let guest_buffer = cx.add_model(|cx| {
-            let message = host_buffer.read(cx).to_proto();
-            Buffer::from_proto(1, message, None, cx).unwrap()
+            let state = host_buffer.read(cx).to_proto();
+            let ops = cx
+                .background()
+                .block(host_buffer.read(cx).serialize_ops(cx));
+            let mut buffer = Buffer::from_proto(1, state, None).unwrap();
+            buffer
+                .apply_ops(
+                    ops.into_iter()
+                        .map(|op| language::proto::deserialize_operation(op).unwrap()),
+                    cx,
+                )
+                .unwrap();
+            buffer
         });
         let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx));
         let snapshot = multibuffer.read(cx).snapshot(cx);

View file

@@ -70,6 +70,7 @@ pub struct Buffer {
     diagnostics_timestamp: clock::Lamport,
     file_update_count: usize,
     completion_triggers: Vec<String>,
+    completion_triggers_timestamp: clock::Lamport,
     deferred_ops: OperationQueue<Operation>,
 }
@@ -358,9 +359,8 @@ impl Buffer {
     pub fn from_proto(
         replica_id: ReplicaId,
-        message: proto::Buffer,
+        message: proto::BufferState,
         file: Option<Arc<dyn File>>,
-        cx: &mut ModelContext<Self>,
     ) -> Result<Self> {
         let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
         let mut this = Self::build(buffer, file);
@@ -368,74 +368,51 @@ impl Buffer {
             proto::LineEnding::from_i32(message.line_ending)
                 .ok_or_else(|| anyhow!("missing line_ending"))?,
         ));
-        let ops = message
-            .operations
-            .into_iter()
-            .map(proto::deserialize_operation)
-            .collect::<Result<Vec<_>>>()?;
-        this.apply_ops(ops, cx)?;
-        for selection_set in message.selections {
-            let lamport_timestamp = clock::Lamport {
-                replica_id: selection_set.replica_id as ReplicaId,
-                value: selection_set.lamport_timestamp,
-            };
-            this.remote_selections.insert(
-                selection_set.replica_id as ReplicaId,
-                SelectionSet {
-                    line_mode: selection_set.line_mode,
-                    selections: proto::deserialize_selections(selection_set.selections),
-                    lamport_timestamp,
-                },
-            );
-            this.text.lamport_clock.observe(lamport_timestamp);
-        }
-        let snapshot = this.snapshot();
-        let entries = proto::deserialize_diagnostics(message.diagnostics);
-        this.apply_diagnostic_update(
-            DiagnosticSet::from_sorted_entries(entries.iter().cloned(), &snapshot),
-            clock::Lamport {
-                replica_id: 0,
-                value: message.diagnostics_timestamp,
-            },
-            cx,
-        );
-        this.completion_triggers = message.completion_triggers;
         Ok(this)
     }
 
-    pub fn to_proto(&self) -> proto::Buffer {
-        let mut operations = self
-            .text
-            .history()
-            .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone())))
-            .chain(self.deferred_ops.iter().map(proto::serialize_operation))
-            .collect::<Vec<_>>();
-        operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
-        proto::Buffer {
+    pub fn to_proto(&self) -> proto::BufferState {
+        proto::BufferState {
             id: self.remote_id(),
             file: self.file.as_ref().map(|f| f.to_proto()),
             base_text: self.base_text().to_string(),
-            operations,
-            selections: self
-                .remote_selections
-                .iter()
-                .map(|(replica_id, set)| proto::SelectionSet {
-                    replica_id: *replica_id as u32,
-                    selections: proto::serialize_selections(&set.selections),
-                    lamport_timestamp: set.lamport_timestamp.value,
-                    line_mode: set.line_mode,
-                })
-                .collect(),
-            diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
-            diagnostics_timestamp: self.diagnostics_timestamp.value,
-            completion_triggers: self.completion_triggers.clone(),
             line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
         }
     }
 
+    pub fn serialize_ops(&self, cx: &AppContext) -> Task<Vec<proto::Operation>> {
+        let mut operations = Vec::new();
+        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
+        operations.extend(self.remote_selections.iter().map(|(_, set)| {
+            proto::serialize_operation(&Operation::UpdateSelections {
+                selections: set.selections.clone(),
+                lamport_timestamp: set.lamport_timestamp,
+                line_mode: set.line_mode,
+            })
+        }));
+        operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
+            diagnostics: self.diagnostics.iter().cloned().collect(),
+            lamport_timestamp: self.diagnostics_timestamp,
+        }));
+        operations.push(proto::serialize_operation(
+            &Operation::UpdateCompletionTriggers {
+                triggers: self.completion_triggers.clone(),
+                lamport_timestamp: self.completion_triggers_timestamp,
+            },
+        ));
+
+        let text_operations = self.text.operations().clone();
+        cx.background().spawn(async move {
+            operations.extend(
+                text_operations
+                    .iter()
+                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
+            );
+            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
+            operations
+        })
+    }
+
     pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
         self.set_language(Some(language), cx);
         self
@@ -470,6 +447,7 @@ impl Buffer {
             diagnostics_timestamp: Default::default(),
             file_update_count: 0,
             completion_triggers: Default::default(),
+            completion_triggers_timestamp: Default::default(),
             deferred_ops: OperationQueue::new(),
         }
     }
@@ -1517,11 +1495,11 @@ impl Buffer {
     pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
         self.completion_triggers = triggers.clone();
-        let lamport_timestamp = self.text.lamport_clock.tick();
+        self.completion_triggers_timestamp = self.text.lamport_clock.tick();
         self.send_operation(
             Operation::UpdateCompletionTriggers {
                 triggers,
-                lamport_timestamp,
+                lamport_timestamp: self.completion_triggers_timestamp,
             },
             cx,
         );
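
Note on the buffer.rs changes above: replicating a buffer is now a two-step exchange. `to_proto` captures only the lightweight `BufferState` (id, file, base text, line ending), while `serialize_ops` produces the full operation log on a background task. The sketch below simply restates, outside the diff, the receiving pattern used by the updated tests in this commit; the `cx.background().block(...)` and `cx.add_model(...)` plumbing is gpui's, exactly as in those tests.

    // Host: snapshot the cheap state, then serialize the operation log off the main thread.
    let state = host_buffer.read(cx).to_proto();
    let ops = cx.background().block(host_buffer.read(cx).serialize_ops(cx));

    // Guest: build the replica from the state (from_proto no longer needs a ModelContext),
    // then replay the serialized operations onto it.
    let guest_buffer = cx.add_model(|cx| {
        let mut buffer = Buffer::from_proto(1, state, None).unwrap();
        buffer
            .apply_ops(
                ops.into_iter()
                    .map(|op| language::proto::deserialize_operation(op).unwrap()),
                cx,
            )
            .unwrap();
        buffer
    });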

View file

@@ -1,6 +1,5 @@
 use crate::{
     diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, Diagnostic, Language,
-    Operation,
 };
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
@@ -9,7 +8,7 @@ use rpc::proto;
 use std::{ops::Range, sync::Arc};
 use text::*;
 
-pub use proto::{Buffer, LineEnding, SelectionSet};
+pub use proto::{BufferState, LineEnding, Operation, SelectionSet};
 
 pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
     match message {
@@ -25,13 +24,13 @@ pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
     }
 }
 
-pub fn serialize_operation(operation: &Operation) -> proto::Operation {
+pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
     proto::Operation {
         variant: Some(match operation {
-            Operation::Buffer(text::Operation::Edit(edit)) => {
+            crate::Operation::Buffer(text::Operation::Edit(edit)) => {
                 proto::operation::Variant::Edit(serialize_edit_operation(edit))
             }
-            Operation::Buffer(text::Operation::Undo {
+            crate::Operation::Buffer(text::Operation::Undo {
                 undo,
                 lamport_timestamp,
             }) => proto::operation::Variant::Undo(proto::operation::Undo {
@@ -49,7 +48,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 })
                 .collect(),
             }),
-            Operation::UpdateSelections {
+            crate::Operation::UpdateSelections {
                 selections,
                 line_mode,
                 lamport_timestamp,
@@ -59,7 +58,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 selections: serialize_selections(selections),
                 line_mode: *line_mode,
             }),
-            Operation::UpdateDiagnostics {
+            crate::Operation::UpdateDiagnostics {
                 diagnostics,
                 lamport_timestamp,
             } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
@@ -67,7 +66,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 lamport_timestamp: lamport_timestamp.value,
                 diagnostics: serialize_diagnostics(diagnostics.iter()),
             }),
-            Operation::UpdateCompletionTriggers {
+            crate::Operation::UpdateCompletionTriggers {
                 triggers,
                 lamport_timestamp,
             } => proto::operation::Variant::UpdateCompletionTriggers(
@@ -165,41 +164,43 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
     }
 }
 
-pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
+pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
     Ok(
         match message
             .variant
             .ok_or_else(|| anyhow!("missing operation variant"))?
         {
             proto::operation::Variant::Edit(edit) => {
-                Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
+                crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
             }
-            proto::operation::Variant::Undo(undo) => Operation::Buffer(text::Operation::Undo {
-                lamport_timestamp: clock::Lamport {
-                    replica_id: undo.replica_id as ReplicaId,
-                    value: undo.lamport_timestamp,
-                },
-                undo: UndoOperation {
-                    id: clock::Local {
-                        replica_id: undo.replica_id as ReplicaId,
-                        value: undo.local_timestamp,
-                    },
-                    version: deserialize_version(undo.version),
-                    counts: undo
-                        .counts
-                        .into_iter()
-                        .map(|c| {
-                            (
-                                clock::Local {
-                                    replica_id: c.replica_id as ReplicaId,
-                                    value: c.local_timestamp,
-                                },
-                                c.count,
-                            )
-                        })
-                        .collect(),
-                },
-            }),
+            proto::operation::Variant::Undo(undo) => {
+                crate::Operation::Buffer(text::Operation::Undo {
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: undo.replica_id as ReplicaId,
+                        value: undo.lamport_timestamp,
+                    },
+                    undo: UndoOperation {
+                        id: clock::Local {
+                            replica_id: undo.replica_id as ReplicaId,
+                            value: undo.local_timestamp,
+                        },
+                        version: deserialize_version(undo.version),
+                        counts: undo
+                            .counts
+                            .into_iter()
+                            .map(|c| {
+                                (
+                                    clock::Local {
+                                        replica_id: c.replica_id as ReplicaId,
+                                        value: c.local_timestamp,
+                                    },
+                                    c.count,
+                                )
+                            })
+                            .collect(),
+                    },
+                })
+            }
             proto::operation::Variant::UpdateSelections(message) => {
                 let selections = message
                     .selections
@@ -215,7 +216,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
                     })
                     .collect::<Vec<_>>();
 
-                Operation::UpdateSelections {
+                crate::Operation::UpdateSelections {
                     lamport_timestamp: clock::Lamport {
                         replica_id: message.replica_id as ReplicaId,
                         value: message.lamport_timestamp,
@@ -224,15 +225,17 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
                     line_mode: message.line_mode,
                 }
             }
-            proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
-                diagnostics: deserialize_diagnostics(message.diagnostics),
-                lamport_timestamp: clock::Lamport {
-                    replica_id: message.replica_id as ReplicaId,
-                    value: message.lamport_timestamp,
-                },
-            },
+            proto::operation::Variant::UpdateDiagnostics(message) => {
+                crate::Operation::UpdateDiagnostics {
+                    diagnostics: deserialize_diagnostics(message.diagnostics),
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.lamport_timestamp,
+                    },
+                }
+            }
             proto::operation::Variant::UpdateCompletionTriggers(message) => {
-                Operation::UpdateCompletionTriggers {
+                crate::Operation::UpdateCompletionTriggers {
                     triggers: message.triggers,
                     lamport_timestamp: clock::Lamport {
                         replica_id: message.replica_id as ReplicaId,

View file

@@ -2,6 +2,7 @@ use super::*;
 use clock::ReplicaId;
 use collections::BTreeMap;
 use gpui::{ModelHandle, MutableAppContext};
+use proto::deserialize_operation;
 use rand::prelude::*;
 use settings::Settings;
 use std::{
@@ -1047,8 +1048,19 @@ fn test_serialization(cx: &mut gpui::MutableAppContext) {
     });
     assert_eq!(buffer1.read(cx).text(), "abcDF");
 
-    let message = buffer1.read(cx).to_proto();
-    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
+    let state = buffer1.read(cx).to_proto();
+    let ops = cx.background().block(buffer1.read(cx).serialize_ops(cx));
+    let buffer2 = cx.add_model(|cx| {
+        let mut buffer = Buffer::from_proto(1, state, None).unwrap();
+        buffer
+            .apply_ops(
+                ops.into_iter()
+                    .map(|op| proto::deserialize_operation(op).unwrap()),
+                cx,
+            )
+            .unwrap();
+        buffer
+    });
     assert_eq!(buffer2.read(cx).text(), "abcDF");
 }
@@ -1075,9 +1087,18 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
     for i in 0..rng.gen_range(min_peers..=max_peers) {
         let buffer = cx.add_model(|cx| {
-            let mut buffer =
-                Buffer::from_proto(i as ReplicaId, base_buffer.read(cx).to_proto(), None, cx)
-                    .unwrap();
+            let state = base_buffer.read(cx).to_proto();
+            let ops = cx
+                .background()
+                .block(base_buffer.read(cx).serialize_ops(cx));
+            let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
+            buffer
+                .apply_ops(
+                    ops.into_iter()
+                        .map(|op| proto::deserialize_operation(op).unwrap()),
+                    cx,
+                )
+                .unwrap();
             buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
             let network = network.clone();
             cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
@@ -1164,7 +1185,8 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
                 mutation_count -= 1;
             }
             50..=59 if replica_ids.len() < max_peers => {
-                let old_buffer = buffer.read(cx).to_proto();
+                let old_buffer_state = buffer.read(cx).to_proto();
+                let old_buffer_ops = cx.background().block(buffer.read(cx).serialize_ops(cx));
                 let new_replica_id = (0..=replica_ids.len() as ReplicaId)
                     .filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
                     .choose(&mut rng)
@@ -1176,7 +1198,15 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
                 );
                 new_buffer = Some(cx.add_model(|cx| {
                     let mut new_buffer =
-                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
+                        Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
+                    new_buffer
+                        .apply_ops(
+                            old_buffer_ops
+                                .into_iter()
+                                .map(|op| deserialize_operation(op).unwrap()),
+                            cx,
+                        )
+                        .unwrap();
                     log::info!(
                         "New replica {} text: {:?}",
                         new_buffer.replica_id(),

View file

@@ -123,6 +123,7 @@ pub struct Project {
     loading_local_worktrees:
         HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
     opened_buffers: HashMap<u64, OpenBuffer>,
+    incomplete_buffers: HashMap<u64, ModelHandle<Buffer>>,
     buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
     nonce: u128,
     initialized_persistent_state: bool,
@@ -144,7 +145,7 @@ pub enum JoinProjectError {
 enum OpenBuffer {
     Strong(ModelHandle<Buffer>),
     Weak(WeakModelHandle<Buffer>),
-    Loading(Vec<Operation>),
+    Operations(Vec<Operation>),
 }
 
 enum WorktreeHandle {
@@ -461,6 +462,7 @@ impl Project {
             collaborators: Default::default(),
             opened_buffers: Default::default(),
             shared_buffers: Default::default(),
+            incomplete_buffers: Default::default(),
             loading_buffers: Default::default(),
             loading_local_worktrees: Default::default(),
             buffer_snapshots: Default::default(),
@@ -550,6 +552,7 @@ impl Project {
             loading_buffers: Default::default(),
             opened_buffer: watch::channel(),
             shared_buffers: Default::default(),
+            incomplete_buffers: Default::default(),
             loading_local_worktrees: Default::default(),
             active_entry: None,
             collaborators: Default::default(),
@@ -1331,7 +1334,7 @@ impl Project {
                     *open_buffer = OpenBuffer::Strong(buffer);
                 }
             }
-            OpenBuffer::Loading(_) => unreachable!(),
+            OpenBuffer::Operations(_) => unreachable!(),
         }
     }
@@ -1456,6 +1459,10 @@ impl Project {
             }
             cx.emit(Event::DisconnectedFromHost);
             cx.notify();
+
+            // Wake up all futures currently waiting on a buffer to get opened,
+            // to give them a chance to fail now that we've disconnected.
+            *self.opened_buffer.0.borrow_mut() = ();
         }
     }
@@ -1757,7 +1764,7 @@ impl Project {
         match self.opened_buffers.insert(remote_id, open_buffer) {
             None => {}
-            Some(OpenBuffer::Loading(operations)) => {
+            Some(OpenBuffer::Operations(operations)) => {
                 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
             }
             Some(OpenBuffer::Weak(existing_handle)) => {
@@ -5107,7 +5114,7 @@ impl Project {
                 OpenBuffer::Strong(buffer) => {
                     buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
                 }
-                OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
+                OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
                 OpenBuffer::Weak(_) => {}
             },
             hash_map::Entry::Vacant(e) => {
@@ -5116,7 +5123,7 @@ impl Project {
                     "received buffer update from {:?}",
                     envelope.original_sender_id
                 );
-                e.insert(OpenBuffer::Loading(ops));
+                e.insert(OpenBuffer::Operations(ops));
             }
         }
         Ok(())
@@ -5130,24 +5137,52 @@ impl Project {
         mut cx: AsyncAppContext,
     ) -> Result<()> {
         this.update(&mut cx, |this, cx| {
-            let mut buffer = envelope
-                .payload
-                .buffer
-                .ok_or_else(|| anyhow!("invalid buffer"))?;
-            let mut buffer_file = None;
-            if let Some(file) = buffer.file.take() {
-                let worktree_id = WorktreeId::from_proto(file.worktree_id);
-                let worktree = this
-                    .worktree_for_id(worktree_id, cx)
-                    .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
-                buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
-                    as Arc<dyn language::File>);
-            }
-
-            let buffer = cx.add_model(|cx| {
-                Buffer::from_proto(this.replica_id(), buffer, buffer_file, cx).unwrap()
-            });
-            this.register_buffer(&buffer, cx)?;
+            match envelope
+                .payload
+                .variant
+                .ok_or_else(|| anyhow!("missing variant"))?
+            {
+                proto::create_buffer_for_peer::Variant::State(mut state) => {
+                    let mut buffer_file = None;
+                    if let Some(file) = state.file.take() {
+                        let worktree_id = WorktreeId::from_proto(file.worktree_id);
+                        let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
+                            anyhow!("no worktree found for id {}", file.worktree_id)
+                        })?;
+                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
+                            as Arc<dyn language::File>);
+                    }
+
+                    let buffer_id = state.id;
+                    let buffer = cx.add_model(|_| {
+                        Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
+                    });
+                    this.incomplete_buffers.insert(buffer_id, buffer);
+                }
+                proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
+                    let buffer = this
+                        .incomplete_buffers
+                        .get(&chunk.buffer_id)
+                        .ok_or_else(|| {
+                            anyhow!(
+                                "received chunk for buffer {} without initial state",
+                                chunk.buffer_id
+                            )
+                        })?
+                        .clone();
+                    let operations = chunk
+                        .operations
+                        .into_iter()
+                        .map(language::proto::deserialize_operation)
+                        .collect::<Result<Vec<_>>>()?;
+                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+                    if chunk.is_last {
+                        this.incomplete_buffers.remove(&chunk.buffer_id);
+                        this.register_buffer(&buffer, cx)?;
+                    }
+                }
+            }
 
             Ok(())
         })
@@ -5658,13 +5693,54 @@ impl Project {
         if let Some(project_id) = self.remote_id() {
             let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
             if shared_buffers.insert(buffer_id) {
-                self.client
-                    .send(proto::CreateBufferForPeer {
-                        project_id,
-                        peer_id: peer_id.0,
-                        buffer: Some(buffer.read(cx).to_proto()),
-                    })
-                    .log_err();
+                let buffer = buffer.read(cx);
+                let state = buffer.to_proto();
+                let operations = buffer.serialize_ops(cx);
+                let client = self.client.clone();
+                cx.background()
+                    .spawn(
+                        async move {
+                            let mut operations = operations.await;
+
+                            client.send(proto::CreateBufferForPeer {
+                                project_id,
+                                peer_id: peer_id.0,
+                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
+                            })?;
+
+                            loop {
+                                #[cfg(any(test, feature = "test-support"))]
+                                const CHUNK_SIZE: usize = 5;
+
+                                #[cfg(not(any(test, feature = "test-support")))]
+                                const CHUNK_SIZE: usize = 100;
+
+                                let chunk = operations
+                                    .drain(..cmp::min(CHUNK_SIZE, operations.len()))
+                                    .collect();
+                                let is_last = operations.is_empty();
+                                client.send(proto::CreateBufferForPeer {
+                                    project_id,
+                                    peer_id: peer_id.0,
+                                    variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
+                                        proto::BufferChunk {
+                                            buffer_id,
+                                            operations: chunk,
+                                            is_last,
+                                        },
+                                    )),
+                                })?;
+
+                                if is_last {
+                                    break;
+                                }
+                            }
+
+                            Ok(())
+                        }
+                        .log_err(),
+                    )
+                    .detach();
             }
         }
@@ -5686,7 +5762,10 @@ impl Project {
             });
 
             if let Some(buffer) = buffer {
                 break buffer;
+            } else if this.read_with(&cx, |this, _| this.is_read_only()) {
+                return Err(anyhow!("disconnected before buffer {} could be opened", id));
             }
+
             opened_buffer_rx
                 .next()
                 .await
@@ -6026,7 +6105,7 @@ impl OpenBuffer {
         match self {
             OpenBuffer::Strong(handle) => Some(handle.clone()),
             OpenBuffer::Weak(handle) => handle.upgrade(cx),
-            OpenBuffer::Loading(_) => None,
+            OpenBuffer::Operations(_) => None,
         }
     }
 }

View file

@@ -370,7 +370,10 @@ message OpenBufferResponse {
 message CreateBufferForPeer {
     uint64 project_id = 1;
     uint32 peer_id = 2;
-    Buffer buffer = 3;
+    oneof variant {
+        BufferState state = 3;
+        BufferChunk chunk = 4;
+    }
 }
 
 message UpdateBuffer {
@@ -808,16 +811,17 @@ message Entry {
     bool is_ignored = 7;
 }
 
-message Buffer {
+message BufferState {
     uint64 id = 1;
     optional File file = 2;
     string base_text = 3;
-    repeated Operation operations = 4;
-    repeated SelectionSet selections = 5;
-    repeated Diagnostic diagnostics = 6;
-    uint32 diagnostics_timestamp = 7;
-    repeated string completion_triggers = 8;
-    LineEnding line_ending = 9;
+    LineEnding line_ending = 4;
+}
+
+message BufferChunk {
+    uint64 buffer_id = 1;
+    repeated Operation operations = 2;
+    bool is_last = 3;
 }
 
 enum LineEnding {
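
Note on the message changes above: the old all-in-one Buffer message is replaced by a small BufferState plus a stream of BufferChunk messages, with is_last marking the final chunk. A hedged sketch of the chunking, condensed from the send loop in project.rs above; chunk_buffer_ops is illustrative only (not a function added by this commit), and the field names follow the prost-generated types for the messages defined here.

    use rpc::proto;
    use std::cmp;

    // Split a buffer's serialized operations into CreateBufferForPeer chunks.
    // An empty operation list still yields one chunk with is_last = true, so
    // the receiver always sees a terminator and can register the buffer.
    fn chunk_buffer_ops(
        project_id: u64,
        peer_id: u32,
        buffer_id: u64,
        mut operations: Vec<proto::Operation>,
        chunk_size: usize,
    ) -> Vec<proto::CreateBufferForPeer> {
        let mut messages = Vec::new();
        loop {
            let chunk: Vec<_> = operations
                .drain(..cmp::min(chunk_size, operations.len()))
                .collect();
            let is_last = operations.is_empty();
            messages.push(proto::CreateBufferForPeer {
                project_id,
                peer_id,
                variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
                    proto::BufferChunk {
                        buffer_id,
                        operations: chunk,
                        is_last,
                    },
                )),
            });
            if is_last {
                break;
            }
        }
        messages
    }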

View file

@@ -6,4 +6,4 @@ pub use conn::Connection;
 pub use peer::*;
 mod macros;
 
-pub const PROTOCOL_VERSION: u32 = 30;
+pub const PROTOCOL_VERSION: u32 = 31;

View file

@@ -33,12 +33,11 @@ impl Modifiers {
         }
     }
 
-    //TODO: Determine if I should add modifiers into the ScrollWheelEvent type
-    fn from_scroll() -> Self {
+    fn from_scroll(scroll: &ScrollWheelEvent) -> Self {
         Modifiers {
-            ctrl: false,
-            shift: false,
-            alt: false,
+            ctrl: scroll.ctrl,
+            shift: scroll.shift,
+            alt: scroll.alt,
         }
     }
 }
@@ -123,7 +122,7 @@ pub fn scroll_report(
         point,
         MouseButton::from_scroll(e),
         true,
-        Modifiers::from_scroll(),
+        Modifiers::from_scroll(e),
         MouseFormat::from_mode(mode),
     )
     .map(|report| repeat(report).take(max(scroll_lines, 1) as usize))

View file

@@ -18,7 +18,6 @@ use alacritty_terminal::{
     Term,
 };
 use anyhow::{bail, Result};
 use futures::{
     channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender},
     FutureExt,
@@ -29,7 +28,14 @@ use mappings::mouse::{
 };
 use modal::deploy_modal;
 use settings::{AlternateScroll, Settings, Shell, TerminalBlink};
-use std::{collections::HashMap, fmt::Display, ops::Sub, path::PathBuf, sync::Arc, time::Duration};
+use std::{
+    collections::{HashMap, VecDeque},
+    fmt::Display,
+    ops::Sub,
+    path::PathBuf,
+    sync::Arc,
+    time::Duration,
+};
 use thiserror::Error;
 
 use gpui::{
@@ -83,7 +89,7 @@ enum InternalEvent {
     Clear,
     Scroll(Scroll),
     SetSelection(Option<Selection>),
-    UpdateSelection((Point, Direction)),
+    UpdateSelection(Vector2F),
     Copy,
 }
@@ -311,11 +317,8 @@ impl TerminalBuilder {
             term.set_mode(alacritty_terminal::ansi::Mode::BlinkingCursor)
         }
 
-        //Start alternate_scroll if we need to
-        if let AlternateScroll::On = alternate_scroll {
-            term.set_mode(alacritty_terminal::ansi::Mode::AlternateScroll)
-        } else {
-            //Alacritty turns it on by default, so we need to turn it off.
+        //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
+        if let AlternateScroll::Off = alternate_scroll {
             term.unset_mode(alacritty_terminal::ansi::Mode::AlternateScroll)
         }
@@ -363,13 +366,14 @@ impl TerminalBuilder {
         let terminal = Terminal {
             pty_tx: Notifier(pty_tx),
             term,
-            events: vec![],
+            events: VecDeque::with_capacity(10), //Should never get this high.
             title: shell_txt.clone(),
             default_title: shell_txt,
             last_mode: TermMode::NONE,
             cur_size: initial_size,
             last_mouse: None,
             last_offset: 0,
+            current_selection: false,
         };
 
         Ok(TerminalBuilder {
@@ -434,13 +438,14 @@ impl TerminalBuilder {
 pub struct Terminal {
     pty_tx: Notifier,
     term: Arc<FairMutex<Term<ZedListener>>>,
-    events: Vec<InternalEvent>,
+    events: VecDeque<InternalEvent>,
     default_title: String,
     title: String,
     cur_size: TerminalSize,
     last_mode: TermMode,
     last_offset: usize,
     last_mouse: Option<(Point, Direction)>,
+    current_selection: bool,
 }
 
 impl Terminal {
@@ -480,9 +485,9 @@ impl Terminal {
                 cx.emit(Event::Wakeup);
                 cx.notify();
             }
-            AlacTermEvent::ColorRequest(_, _) => {
-                self.events.push(InternalEvent::TermEvent(event.clone()))
-            }
+            AlacTermEvent::ColorRequest(_, _) => self
+                .events
+                .push_back(InternalEvent::TermEvent(event.clone())),
         }
     }
@@ -514,11 +519,16 @@ impl Terminal {
                 self.write_to_pty("\x0c".to_string());
                 term.clear_screen(ClearMode::Saved);
             }
-            InternalEvent::Scroll(scroll) => term.scroll_display(*scroll),
+            InternalEvent::Scroll(scroll) => {
+                term.scroll_display(*scroll);
+            }
             InternalEvent::SetSelection(sel) => term.selection = sel.clone(),
-            InternalEvent::UpdateSelection((point, side)) => {
+            InternalEvent::UpdateSelection(position) => {
                 if let Some(mut selection) = term.selection.take() {
-                    selection.update(*point, *side);
+                    let point = mouse_point(*position, self.cur_size, term.grid().display_offset());
+                    let side = mouse_side(*position, self.cur_size);
+                    selection.update(point, side);
                     term.selection = Some(selection);
                 }
             }
@@ -531,10 +541,37 @@ impl Terminal {
         }
     }
 
-    pub fn input(&mut self, input: String) {
-        self.events.push(InternalEvent::Scroll(Scroll::Bottom));
-        self.events.push(InternalEvent::SetSelection(None));
-        self.write_to_pty(input);
+    fn begin_select(&mut self, sel: Selection) {
+        self.current_selection = true;
+        self.events
+            .push_back(InternalEvent::SetSelection(Some(sel)));
+    }
+
+    fn continue_selection(&mut self, location: Vector2F) {
+        self.events
+            .push_back(InternalEvent::UpdateSelection(location))
+    }
+
+    fn end_select(&mut self) {
+        self.current_selection = false;
+        self.events.push_back(InternalEvent::SetSelection(None));
+    }
+
+    fn scroll(&mut self, scroll: Scroll) {
+        self.events.push_back(InternalEvent::Scroll(scroll));
+    }
+
+    pub fn copy(&mut self) {
+        self.events.push_back(InternalEvent::Copy);
+    }
+
+    pub fn clear(&mut self) {
+        self.events.push_back(InternalEvent::Clear)
+    }
+
+    ///Resize the terminal and the PTY.
+    pub fn set_size(&mut self, new_size: TerminalSize) {
+        self.events.push_back(InternalEvent::Resize(new_size))
     }
 
     ///Write the Input payload to the tty.
@@ -542,13 +579,10 @@ impl Terminal {
         self.pty_tx.notify(input.into_bytes());
     }
 
-    ///Resize the terminal and the PTY.
-    pub fn set_size(&mut self, new_size: TerminalSize) {
-        self.events.push(InternalEvent::Resize(new_size))
-    }
-
-    pub fn clear(&mut self) {
-        self.events.push(InternalEvent::Clear)
+    pub fn input(&mut self, input: String) {
+        self.scroll(Scroll::Bottom);
+        self.end_select();
+        self.write_to_pty(input);
     }
 
     pub fn try_keystroke(&mut self, keystroke: &Keystroke) -> bool {
@@ -571,10 +605,6 @@ impl Terminal {
         self.input(paste_text)
     }
 
-    pub fn copy(&mut self) {
-        self.events.push(InternalEvent::Copy);
-    }
-
     pub fn render_lock<F, T>(&mut self, cx: &mut ModelContext<Self>, f: F) -> T
     where
         F: FnOnce(RenderableContent, char) -> T,
@@ -582,7 +612,8 @@ impl Terminal {
         let m = self.term.clone(); //Arc clone
         let mut term = m.lock();
 
-        while let Some(e) = self.events.pop() {
+        //Note that this ordering matters for
+        while let Some(e) = self.events.pop_front() {
             self.process_terminal_event(&e, &mut term, cx)
         }
@@ -647,11 +678,28 @@ impl Terminal {
         let position = e.position.sub(origin);
 
         if !self.mouse_mode(e.shift) {
-            let point = mouse_point(position, self.cur_size, self.last_offset);
-            let side = mouse_side(position, self.cur_size);
-
-            self.events
-                .push(InternalEvent::UpdateSelection((point, side)));
+            // Alacritty has the same ordering, of first updating the selection
+            // then scrolling 15ms later
+            self.continue_selection(position);
+
+            // Doesn't make sense to scroll the alt screen
+            if !self.last_mode.contains(TermMode::ALT_SCREEN) {
+                //TODO: Why do these need to be doubled?
+                let top = e.region.origin_y() + (self.cur_size.line_height * 2.);
+                let bottom = e.region.lower_left().y() - (self.cur_size.line_height * 2.);
+
+                let scroll_delta = if e.position.y() < top {
+                    (top - e.position.y()).powf(1.1)
+                } else if e.position.y() > bottom {
+                    -((e.position.y() - bottom).powf(1.1))
+                } else {
+                    return; //Nothing to do
+                };
+
+                let scroll_lines = (scroll_delta / self.cur_size.line_height) as i32;
+                self.scroll(Scroll::Delta(scroll_lines));
+                self.continue_selection(position)
+            }
         }
     }
@@ -665,12 +713,7 @@ impl Terminal {
                 self.pty_tx.notify(bytes);
             }
         } else if e.button == MouseButton::Left {
-            self.events
-                .push(InternalEvent::SetSelection(Some(Selection::new(
-                    SelectionType::Simple,
-                    point,
-                    side,
-                ))));
+            self.begin_select(Selection::new(SelectionType::Simple, point, side));
         }
     }
@@ -692,7 +735,9 @@ impl Terminal {
         let selection =
             selection_type.map(|selection_type| Selection::new(selection_type, point, side));
 
-        self.events.push(InternalEvent::SetSelection(selection));
+        if let Some(sel) = selection {
+            self.begin_select(sel);
+        }
     }
@@ -712,17 +757,16 @@ impl Terminal {
     }
 
     ///Scroll the terminal
-    pub fn scroll(&mut self, scroll: &ScrollWheelEvent, origin: Vector2F) {
-        if self.mouse_mode(scroll.shift) {
+    pub fn scroll_wheel(&mut self, e: &ScrollWheelEvent, origin: Vector2F) {
+        if self.mouse_mode(e.shift) {
             //TODO: Currently this only sends the current scroll reports as they come in. Alacritty
             //Sends the *entire* scroll delta on *every* scroll event, only resetting it when
             //The scroll enters 'TouchPhase::Started'. Do I need to replicate this?
             //This would be consistent with a scroll model based on 'distance from origin'...
-            let scroll_lines = (scroll.delta.y() / self.cur_size.line_height) as i32;
-            let point = mouse_point(scroll.position.sub(origin), self.cur_size, self.last_offset);
+            let scroll_lines = (e.delta.y() / self.cur_size.line_height) as i32;
+            let point = mouse_point(e.position.sub(origin), self.cur_size, self.last_offset);
 
-            if let Some(scrolls) = scroll_report(point, scroll_lines as i32, scroll, self.last_mode)
-            {
+            if let Some(scrolls) = scroll_report(point, scroll_lines as i32, e, self.last_mode) {
                 for scroll in scrolls {
                     self.pty_tx.notify(scroll);
                 }
@@ -730,19 +774,19 @@ impl Terminal {
             } else if self
                 .last_mode
                 .contains(TermMode::ALT_SCREEN | TermMode::ALTERNATE_SCROLL)
-                && !scroll.shift
+                && !e.shift
             {
                 //TODO: See above TODO, also applies here.
-                let scroll_lines = ((scroll.delta.y() * ALACRITTY_SCROLL_MULTIPLIER)
-                    / self.cur_size.line_height) as i32;
+                let scroll_lines =
+                    ((e.delta.y() * ALACRITTY_SCROLL_MULTIPLIER) / self.cur_size.line_height) as i32;
 
                 self.pty_tx.notify(alt_scroll(scroll_lines))
             } else {
-                let scroll_lines = ((scroll.delta.y() * ALACRITTY_SCROLL_MULTIPLIER)
-                    / self.cur_size.line_height) as i32;
+                let scroll_lines =
+                    ((e.delta.y() * ALACRITTY_SCROLL_MULTIPLIER) / self.cur_size.line_height) as i32;
                 if scroll_lines != 0 {
                     let scroll = Scroll::Delta(scroll_lines);
-                    self.events.push(InternalEvent::Scroll(scroll));
+                    self.scroll(scroll);
                 }
             }
         }
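
Note on the events change above: the terminal's internal queue moved from Vec with pop() (which drains newest-first) to VecDeque with pop_front() (oldest-first), so queued InternalEvents such as SetSelection followed by Scroll are now processed in the order they were pushed. A tiny standalone illustration of the difference, using only std (not code from this commit):

    use std::collections::VecDeque;

    fn main() {
        // With a Vec, pop() removes from the back: events would run newest-first.
        let mut stack = vec!["scroll", "set_selection", "copy"];
        let mut lifo = Vec::new();
        while let Some(e) = stack.pop() {
            lifo.push(e);
        }
        assert_eq!(lifo, ["copy", "set_selection", "scroll"]);

        // With a VecDeque, pop_front() removes from the front: events run in the
        // order they were queued, which is what process_terminal_event now relies on.
        let mut queue = VecDeque::from(vec!["scroll", "set_selection", "copy"]);
        let mut fifo = Vec::new();
        while let Some(e) = queue.pop_front() {
            fifo.push(e);
        }
        assert_eq!(fifo, ["scroll", "set_selection", "copy"]);
    }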

View file

@@ -828,7 +828,7 @@ impl Element for TerminalElement {
                 let origin = bounds.origin() + vec2f(layout.size.cell_width, 0.);
 
                 if let Some(terminal) = self.terminal.upgrade(cx.app) {
-                    terminal.update(cx.app, |term, _| term.scroll(e, origin));
+                    terminal.update(cx.app, |term, _| term.scroll_wheel(e, origin));
                     cx.notify();
                 }
             })

View file

@@ -45,7 +45,7 @@ use std::{
 };
 pub use subscription::*;
 pub use sum_tree::Bias;
-use sum_tree::{FilterCursor, SumTree};
+use sum_tree::{FilterCursor, SumTree, TreeMap};
 
 lazy_static! {
     static ref CARRIAGE_RETURNS_REGEX: Regex = Regex::new("\r\n|\r").unwrap();
@@ -109,7 +109,7 @@ impl HistoryEntry {
 struct History {
     // TODO: Turn this into a String or Rope, maybe.
     base_text: Arc<str>,
-    operations: HashMap<clock::Local, Operation>,
+    operations: TreeMap<clock::Local, Operation>,
     insertion_slices: HashMap<clock::Local, Vec<InsertionSlice>>,
     undo_stack: Vec<HistoryEntry>,
     redo_stack: Vec<HistoryEntry>,
@@ -1213,8 +1213,8 @@ impl Buffer {
         &self.history.base_text
     }
 
-    pub fn history(&self) -> impl Iterator<Item = &Operation> {
-        self.history.operations.values()
+    pub fn operations(&self) -> &TreeMap<clock::Local, Operation> {
+        &self.history.operations
     }
 
     pub fn undo_history(&self) -> impl Iterator<Item = (&clock::Local, &[(clock::Local, u32)])> {