commit 8d7bb8b1a3
Merge pull request #313 from zed-industries/polish-project-diagnostics

Polish project diagnostics UX

52 changed files with 2596 additions and 1703 deletions
Cargo.lock (generated): 2 changes
@@ -3502,6 +3502,7 @@ dependencies = [
 "project",
 "serde_json",
 "theme",
 "util",
 "workspace",
]

@@ -5650,6 +5651,7 @@ dependencies = [
 "anyhow",
 "client",
 "clock",
 "collections",
 "gpui",
 "language",
 "log",

@@ -233,7 +233,7 @@ impl ChatPanel
            Empty::new().boxed()
        };

        Expanded::new(1., messages).boxed()
        Flexible::new(1., true, messages).boxed()
    }

    fn render_message(&self, message: &ChannelMessage) -> ElementBox {

@@ -214,7 +214,7 @@ impl ContactsPanel
                }));
            }
        })
        .expanded(1.0)
        .flexible(1., true)
        .boxed()
    })
    .constrained()

@@ -1,37 +1,40 @@
pub mod items;

use anyhow::Result;
use collections::{HashMap, HashSet};
use editor::{
    context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer,
    display_map::{BlockDisposition, BlockId, BlockProperties},
    BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer,
    items::BufferItemHandle,
    Autoscroll, BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer, ToOffset,
};
use gpui::{
    action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext,
    RenderContext, Task, View, ViewContext, ViewHandle,
    RenderContext, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use language::{Bias, Buffer, Diagnostic, DiagnosticEntry, Point};
use language::{Bias, Buffer, DiagnosticEntry, Point, Selection, SelectionGoal};
use postage::watch;
use project::Project;
use std::{cmp::Ordering, ops::Range, path::Path, sync::Arc};
use project::{Project, ProjectPath, WorktreeId};
use std::{cmp::Ordering, mem, ops::Range, path::Path, sync::Arc};
use util::TryFutureExt;
use workspace::Workspace;

action!(Toggle);
action!(ClearInvalid);
action!(Deploy);
action!(OpenExcerpts);

const CONTEXT_LINE_COUNT: u32 = 1;

pub fn init(cx: &mut MutableAppContext) {
    cx.add_bindings([
        Binding::new("alt-shift-D", Toggle, None),
        Binding::new("alt-shift-D", Deploy, Some("Workspace")),
        Binding::new(
            "alt-shift-C",
            ClearInvalid,
            "alt-shift-D",
            OpenExcerpts,
            Some("ProjectDiagnosticsEditor"),
        ),
    ]);
    cx.add_action(ProjectDiagnosticsEditor::toggle);
    cx.add_action(ProjectDiagnosticsEditor::clear_invalid);
    cx.add_action(ProjectDiagnosticsEditor::deploy);
    cx.add_action(ProjectDiagnosticsEditor::open_excerpts);
}

type Event = editor::Event;

@@ -41,24 +44,22 @@ struct ProjectDiagnostics {
}

struct ProjectDiagnosticsEditor {
    model: ModelHandle<ProjectDiagnostics>,
    workspace: WeakViewHandle<Workspace>,
    editor: ViewHandle<Editor>,
    excerpts: ModelHandle<MultiBuffer>,
    path_states: Vec<(Arc<Path>, Vec<DiagnosticGroupState>)>,
    paths_to_update: HashMap<WorktreeId, HashSet<ProjectPath>>,
    build_settings: BuildSettings,
    settings: watch::Receiver<workspace::Settings>,
}

struct DiagnosticGroupState {
    primary_diagnostic: DiagnosticEntry<language::Anchor>,
    primary_excerpt_ix: usize,
    excerpts: Vec<ExcerptId>,
    blocks: HashMap<BlockId, DiagnosticBlock>,
    blocks: HashSet<BlockId>,
    block_count: usize,
    is_valid: bool,
}

enum DiagnosticBlock {
    Header(Diagnostic),
    Inline(Diagnostic),
    Context,
}

impl ProjectDiagnostics {

@@ -81,55 +82,49 @@ impl View for ProjectDiagnosticsEditor {
    }

    fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
        ChildView::new(self.editor.id()).boxed()
        if self.path_states.is_empty() {
            let theme = &self.settings.borrow().theme.project_diagnostics;
            Label::new(
                "No problems detected in the project".to_string(),
                theme.empty_message.clone(),
            )
            .aligned()
            .contained()
            .with_style(theme.container)
            .boxed()
        } else {
            ChildView::new(self.editor.id()).boxed()
        }
    }

    fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
        cx.focus(&self.editor);
        if !self.path_states.is_empty() {
            cx.focus(&self.editor);
        }
    }
}

impl ProjectDiagnosticsEditor {
    fn new(
        project: ModelHandle<Project>,
        model: ModelHandle<ProjectDiagnostics>,
        workspace: WeakViewHandle<Workspace>,
        settings: watch::Receiver<workspace::Settings>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        let project_paths = project
            .read(cx)
            .diagnostic_summaries(cx)
            .map(|e| e.0)
            .collect::<Vec<_>>();

        cx.spawn(|this, mut cx| {
            let project = project.clone();
            async move {
                for project_path in project_paths {
                    let buffer = project
                        .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
                        .await?;
                    this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx))
        let project = model.read(cx).project.clone();
        cx.subscribe(&project, |this, _, event, cx| match event {
            project::Event::DiskBasedDiagnosticsUpdated { worktree_id } => {
                if let Some(paths) = this.paths_to_update.remove(&worktree_id) {
                    this.update_excerpts(paths, cx);
                }
                Result::<_, anyhow::Error>::Ok(())
            }
        })
        .detach();

        cx.subscribe(&project, |_, project, event, cx| {
            if let project::Event::DiagnosticsUpdated(project_path) = event {
                let project_path = project_path.clone();
                cx.spawn(|this, mut cx| {
                    async move {
                        let buffer = project
                            .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
                            .await?;
                        this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx));
                        Ok(())
                    }
                    .log_err()
                })
                .detach();
            project::Event::DiagnosticsUpdated(path) => {
                this.paths_to_update
                    .entry(path.worktree_id)
                    .or_default()
                    .insert(path.clone());
            }
            _ => {}
        })
        .detach();

@@ -139,12 +134,24 @@ impl ProjectDiagnosticsEditor {
            cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx));
        cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event))
            .detach();
        Self {

        let paths_to_update = project
            .read(cx)
            .diagnostic_summaries(cx)
            .map(|e| e.0)
            .collect();
        let this = Self {
            model,
            workspace,
            excerpts,
            editor,
            build_settings,
            settings,
            path_states: Default::default(),
        }
            paths_to_update: Default::default(),
        };
        this.update_excerpts(paths_to_update, cx);
        this
    }

    #[cfg(test)]

@ -152,41 +159,68 @@ impl ProjectDiagnosticsEditor {
|
|||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
|
||||
let diagnostics = cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone()));
|
||||
workspace.add_item(diagnostics, cx);
|
||||
fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
|
||||
if let Some(existing) = workspace.item_of_type::<ProjectDiagnostics>(cx) {
|
||||
workspace.activate_item(&existing, cx);
|
||||
} else {
|
||||
let diagnostics =
|
||||
cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone()));
|
||||
workspace.open_item(diagnostics, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn clear_invalid(&mut self, _: &ClearInvalid, cx: &mut ViewContext<Self>) {
|
||||
let mut blocks_to_delete = HashSet::default();
|
||||
let mut excerpts_to_delete = Vec::new();
|
||||
let mut path_ixs_to_delete = Vec::new();
|
||||
for (ix, (_, groups)) in self.path_states.iter_mut().enumerate() {
|
||||
groups.retain(|group| {
|
||||
if group.is_valid {
|
||||
true
|
||||
} else {
|
||||
blocks_to_delete.extend(group.blocks.keys().copied());
|
||||
excerpts_to_delete.extend(group.excerpts.iter().cloned());
|
||||
false
|
||||
fn open_excerpts(&mut self, _: &OpenExcerpts, cx: &mut ViewContext<Self>) {
|
||||
if let Some(workspace) = self.workspace.upgrade(cx) {
|
||||
let editor = self.editor.read(cx);
|
||||
let excerpts = self.excerpts.read(cx);
|
||||
let mut new_selections_by_buffer = HashMap::default();
|
||||
|
||||
for selection in editor.local_selections::<usize>(cx) {
|
||||
for (buffer, mut range) in
|
||||
excerpts.excerpted_buffers(selection.start..selection.end, cx)
|
||||
{
|
||||
if selection.reversed {
|
||||
mem::swap(&mut range.start, &mut range.end);
|
||||
}
|
||||
new_selections_by_buffer
|
||||
.entry(buffer)
|
||||
.or_insert(Vec::new())
|
||||
.push(range)
|
||||
}
|
||||
}
|
||||
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
for (buffer, ranges) in new_selections_by_buffer {
|
||||
let buffer = BufferItemHandle(buffer);
|
||||
workspace.activate_pane_for_item(&buffer, cx);
|
||||
let editor = workspace
|
||||
.open_item(buffer, cx)
|
||||
.to_any()
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.select_ranges(ranges, Some(Autoscroll::Center), cx)
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if groups.is_empty() {
|
||||
path_ixs_to_delete.push(ix);
|
||||
fn update_excerpts(&self, paths: HashSet<ProjectPath>, cx: &mut ViewContext<Self>) {
|
||||
let project = self.model.read(cx).project.clone();
|
||||
cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
for path in paths {
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| project.open_buffer(path, cx))
|
||||
.await?;
|
||||
this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx))
|
||||
}
|
||||
Result::<_, anyhow::Error>::Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
for ix in path_ixs_to_delete.into_iter().rev() {
|
||||
self.path_states.remove(ix);
|
||||
}
|
||||
|
||||
self.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts_to_delete.sort_unstable();
|
||||
excerpts.remove_excerpts(&excerpts_to_delete, cx)
|
||||
});
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.remove_blocks(blocks_to_delete, cx));
|
||||
.log_err()
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn populate_excerpts(&mut self, buffer: ModelHandle<Buffer>, cx: &mut ViewContext<Self>) {
|
||||
|
@ -202,6 +236,7 @@ impl ProjectDiagnosticsEditor {
|
|||
}
|
||||
}
|
||||
|
||||
let was_empty = self.path_states.is_empty();
|
||||
let path_ix = match self
|
||||
.path_states
|
||||
.binary_search_by_key(&path.as_ref(), |e| e.0.as_ref())
|
||||
|
@ -225,18 +260,9 @@ impl ProjectDiagnosticsEditor {
|
|||
let mut groups_to_add = Vec::new();
|
||||
let mut group_ixs_to_remove = Vec::new();
|
||||
let mut blocks_to_add = Vec::new();
|
||||
let mut blocks_to_restyle = HashMap::default();
|
||||
let mut blocks_to_remove = HashSet::default();
|
||||
let selected_excerpts = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.local_anchor_selections()
|
||||
.iter()
|
||||
.flat_map(|s| [s.start.excerpt_id().clone(), s.end.excerpt_id().clone()])
|
||||
.collect::<HashSet<_>>();
|
||||
let mut diagnostic_blocks = Vec::new();
|
||||
let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
|
||||
let mut old_groups = groups.iter_mut().enumerate().peekable();
|
||||
let mut old_groups = groups.iter().enumerate().peekable();
|
||||
let mut new_groups = snapshot
|
||||
.diagnostic_groups()
|
||||
.into_iter()
|
||||
|
@ -246,7 +272,7 @@ impl ProjectDiagnosticsEditor {
|
|||
loop {
|
||||
let mut to_insert = None;
|
||||
let mut to_invalidate = None;
|
||||
let mut to_validate = None;
|
||||
let mut to_keep = None;
|
||||
match (old_groups.peek(), new_groups.peek()) {
|
||||
(None, None) => break,
|
||||
(None, Some(_)) => to_insert = new_groups.next(),
|
||||
|
@ -257,7 +283,7 @@ impl ProjectDiagnosticsEditor {
|
|||
match compare_diagnostics(old_primary, new_primary, &snapshot) {
|
||||
Ordering::Less => to_invalidate = old_groups.next(),
|
||||
Ordering::Equal => {
|
||||
to_validate = old_groups.next();
|
||||
to_keep = old_groups.next();
|
||||
new_groups.next();
|
||||
}
|
||||
Ordering::Greater => to_insert = new_groups.next(),
|
||||
|
@ -268,10 +294,10 @@ impl ProjectDiagnosticsEditor {
|
|||
if let Some(group) = to_insert {
|
||||
let mut group_state = DiagnosticGroupState {
|
||||
primary_diagnostic: group.entries[group.primary_ix].clone(),
|
||||
primary_excerpt_ix: 0,
|
||||
excerpts: Default::default(),
|
||||
blocks: Default::default(),
|
||||
block_count: 0,
|
||||
is_valid: true,
|
||||
};
|
||||
let mut pending_range: Option<(Range<Point>, usize)> = None;
|
||||
let mut is_first_excerpt_for_group = true;
|
||||
|
@ -309,14 +335,16 @@ impl ProjectDiagnosticsEditor {
|
|||
if is_first_excerpt_for_group {
|
||||
is_first_excerpt_for_group = false;
|
||||
let primary = &group.entries[group.primary_ix].diagnostic;
|
||||
let mut header = primary.clone();
|
||||
header.message =
|
||||
primary.message.split('\n').next().unwrap().to_string();
|
||||
group_state.block_count += 1;
|
||||
diagnostic_blocks.push(DiagnosticBlock::Header(primary.clone()));
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: header_position,
|
||||
height: 2,
|
||||
height: 3,
|
||||
render: diagnostic_header_renderer(
|
||||
buffer.clone(),
|
||||
primary.clone(),
|
||||
header,
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
|
@ -324,7 +352,6 @@ impl ProjectDiagnosticsEditor {
|
|||
});
|
||||
} else {
|
||||
group_state.block_count += 1;
|
||||
diagnostic_blocks.push(DiagnosticBlock::Context);
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: header_position,
|
||||
height: 1,
|
||||
|
@ -334,17 +361,20 @@ impl ProjectDiagnosticsEditor {
|
|||
}
|
||||
|
||||
for entry in &group.entries[*start_ix..ix] {
|
||||
if !entry.diagnostic.is_primary {
|
||||
let mut diagnostic = entry.diagnostic.clone();
|
||||
if diagnostic.is_primary {
|
||||
group_state.primary_excerpt_ix = group_state.excerpts.len() - 1;
|
||||
diagnostic.message =
|
||||
entry.diagnostic.message.split('\n').skip(1).collect();
|
||||
}
|
||||
|
||||
if !diagnostic.message.is_empty() {
|
||||
group_state.block_count += 1;
|
||||
diagnostic_blocks
|
||||
.push(DiagnosticBlock::Inline(entry.diagnostic.clone()));
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: (excerpt_id.clone(), entry.range.start.clone()),
|
||||
height: entry.diagnostic.message.matches('\n').count()
|
||||
as u8
|
||||
+ 1,
|
||||
height: diagnostic.message.matches('\n').count() as u8 + 1,
|
||||
render: diagnostic_block_renderer(
|
||||
entry.diagnostic.clone(),
|
||||
diagnostic,
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
|
@ -363,76 +393,11 @@ impl ProjectDiagnosticsEditor {
|
|||
|
||||
groups_to_add.push(group_state);
|
||||
} else if let Some((group_ix, group_state)) = to_invalidate {
|
||||
if group_state
|
||||
.excerpts
|
||||
.iter()
|
||||
.any(|excerpt_id| selected_excerpts.contains(excerpt_id))
|
||||
{
|
||||
for (block_id, block) in &group_state.blocks {
|
||||
match block {
|
||||
DiagnosticBlock::Header(diagnostic) => {
|
||||
blocks_to_restyle.insert(
|
||||
*block_id,
|
||||
diagnostic_header_renderer(
|
||||
buffer.clone(),
|
||||
diagnostic.clone(),
|
||||
false,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
);
|
||||
}
|
||||
DiagnosticBlock::Inline(diagnostic) => {
|
||||
blocks_to_restyle.insert(
|
||||
*block_id,
|
||||
diagnostic_block_renderer(
|
||||
diagnostic.clone(),
|
||||
false,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
);
|
||||
}
|
||||
DiagnosticBlock::Context => {}
|
||||
}
|
||||
}
|
||||
|
||||
group_state.is_valid = false;
|
||||
prev_excerpt_id = group_state.excerpts.last().unwrap().clone();
|
||||
} else {
|
||||
excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
|
||||
group_ixs_to_remove.push(group_ix);
|
||||
blocks_to_remove.extend(group_state.blocks.keys().copied());
|
||||
}
|
||||
} else if let Some((_, group_state)) = to_validate {
|
||||
for (block_id, block) in &group_state.blocks {
|
||||
match block {
|
||||
DiagnosticBlock::Header(diagnostic) => {
|
||||
blocks_to_restyle.insert(
|
||||
*block_id,
|
||||
diagnostic_header_renderer(
|
||||
buffer.clone(),
|
||||
diagnostic.clone(),
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
);
|
||||
}
|
||||
DiagnosticBlock::Inline(diagnostic) => {
|
||||
blocks_to_restyle.insert(
|
||||
*block_id,
|
||||
diagnostic_block_renderer(
|
||||
diagnostic.clone(),
|
||||
true,
|
||||
self.build_settings.clone(),
|
||||
),
|
||||
);
|
||||
}
|
||||
DiagnosticBlock::Context => {}
|
||||
}
|
||||
}
|
||||
group_state.is_valid = true;
|
||||
prev_excerpt_id = group_state.excerpts.last().unwrap().clone();
|
||||
} else {
|
||||
unreachable!();
|
||||
excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
|
||||
group_ixs_to_remove.push(group_ix);
|
||||
blocks_to_remove.extend(group_state.blocks.iter().copied());
|
||||
} else if let Some((_, group)) = to_keep {
|
||||
prev_excerpt_id = group.excerpts.last().unwrap().clone();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -441,7 +406,6 @@ impl ProjectDiagnosticsEditor {
|
|||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.remove_blocks(blocks_to_remove, cx);
|
||||
editor.replace_blocks(blocks_to_restyle, cx);
|
||||
let mut block_ids = editor
|
||||
.insert_blocks(
|
||||
blocks_to_add.into_iter().map(|block| {
|
||||
|
@ -455,8 +419,7 @@ impl ProjectDiagnosticsEditor {
|
|||
}),
|
||||
cx,
|
||||
)
|
||||
.into_iter()
|
||||
.zip(diagnostic_blocks);
|
||||
.into_iter();
|
||||
|
||||
for group_state in &mut groups_to_add {
|
||||
group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
|
||||
|
@ -481,6 +444,58 @@ impl ProjectDiagnosticsEditor {
|
|||
self.path_states.remove(path_ix);
|
||||
}
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let groups = self.path_states.get(path_ix)?.1.as_slice();
|
||||
|
||||
let mut selections;
|
||||
let new_excerpt_ids_by_selection_id;
|
||||
if was_empty {
|
||||
new_excerpt_ids_by_selection_id = [(0, ExcerptId::min())].into_iter().collect();
|
||||
selections = vec![Selection {
|
||||
id: 0,
|
||||
start: 0,
|
||||
end: 0,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}];
|
||||
} else {
|
||||
new_excerpt_ids_by_selection_id = editor.refresh_selections(cx);
|
||||
selections = editor.local_selections::<usize>(cx);
|
||||
}
|
||||
|
||||
// If any selection has lost its position, move it to start of the next primary diagnostic.
|
||||
for selection in &mut selections {
|
||||
if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
|
||||
let group_ix = match groups.binary_search_by(|probe| {
|
||||
probe.excerpts.last().unwrap().cmp(&new_excerpt_id)
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
if let Some(group) = groups.get(group_ix) {
|
||||
let offset = excerpts_snapshot
|
||||
.anchor_in_excerpt(
|
||||
group.excerpts[group.primary_excerpt_ix].clone(),
|
||||
group.primary_diagnostic.range.start.clone(),
|
||||
)
|
||||
.to_offset(&excerpts_snapshot);
|
||||
selection.start = offset;
|
||||
selection.end = offset;
|
||||
}
|
||||
}
|
||||
}
|
||||
editor.update_selections(selections, None, cx);
|
||||
Some(())
|
||||
});
|
||||
|
||||
if self.path_states.is_empty() {
|
||||
if self.editor.is_focused(cx) {
|
||||
cx.focus_self();
|
||||
}
|
||||
} else {
|
||||
if cx.handle().is_focused(cx) {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
@ -490,11 +505,10 @@ impl workspace::Item for ProjectDiagnostics {
|
|||
|
||||
fn build_view(
|
||||
handle: ModelHandle<Self>,
|
||||
settings: watch::Receiver<workspace::Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut ViewContext<Self::View>,
|
||||
) -> Self::View {
|
||||
let project = handle.read(cx).project.clone();
|
||||
ProjectDiagnosticsEditor::new(project, settings, cx)
|
||||
ProjectDiagnosticsEditor::new(handle, workspace.weak_handle(), workspace.settings(), cx)
|
||||
}
|
||||
|
||||
fn project_path(&self) -> Option<project::ProjectPath> {
|
||||
|
@ -503,6 +517,12 @@ impl workspace::Item for ProjectDiagnostics {
|
|||
}
|
||||
|
||||
impl workspace::ItemView for ProjectDiagnosticsEditor {
|
||||
type ItemHandle = ModelHandle<ProjectDiagnostics>;
|
||||
|
||||
fn item_handle(&self, _: &AppContext) -> Self::ItemHandle {
|
||||
self.model.clone()
|
||||
}
|
||||
|
||||
fn title(&self, _: &AppContext) -> String {
|
||||
"Project Diagnostics".to_string()
|
||||
}
|
||||
|
@ -511,10 +531,26 @@ impl workspace::ItemView for ProjectDiagnosticsEditor {
|
|||
None
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
|
||||
self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx))
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
_: ModelHandle<project::Worktree>,
|
||||
|
@ -524,28 +560,12 @@ impl workspace::ItemView for ProjectDiagnosticsEditor {
|
|||
unreachable!()
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
matches!(
|
||||
event,
|
||||
Event::Saved | Event::Dirtied | Event::FileHandleChanged
|
||||
)
|
||||
}
|
||||
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
|
||||
|
@ -570,9 +590,10 @@ fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
|
|||
mod tests {
|
||||
use super::*;
|
||||
use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore};
|
||||
use editor::DisplayPoint;
|
||||
use gpui::TestAppContext;
|
||||
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry};
|
||||
use project::FakeFs;
|
||||
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry, PointUtf16};
|
||||
use project::{worktree, FakeFs};
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use unindent::Unindent as _;
|
||||
|
@ -580,7 +601,8 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_diagnostics(mut cx: TestAppContext) {
|
||||
let settings = cx.update(WorkspaceParams::test).settings;
|
||||
let workspace_params = cx.update(WorkspaceParams::test);
|
||||
let settings = workspace_params.settings.clone();
|
||||
let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) });
|
||||
let client = Client::new(http_client.clone());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
|
||||
|
@ -629,11 +651,12 @@ mod tests {
|
|||
|
||||
worktree.update(&mut cx, |worktree, cx| {
|
||||
worktree
|
||||
.update_diagnostics_from_provider(
|
||||
.update_diagnostic_entries(
|
||||
Arc::from("/test/main.rs".as_ref()),
|
||||
None,
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
range: 20..21,
|
||||
range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
message:
|
||||
"move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"
|
||||
|
@ -646,7 +669,7 @@ mod tests {
|
|||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 40..41,
|
||||
range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9),
|
||||
diagnostic: Diagnostic {
|
||||
message:
|
||||
"move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"
|
||||
|
@ -659,7 +682,7 @@ mod tests {
|
|||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 58..59,
|
||||
range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "value moved here".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
|
@ -670,7 +693,7 @@ mod tests {
|
|||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 68..69,
|
||||
range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "value moved here".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
|
@ -681,9 +704,9 @@ mod tests {
|
|||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 112..113,
|
||||
range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "use of moved value".to_string(),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
|
@ -692,20 +715,9 @@ mod tests {
|
|||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 112..113,
|
||||
range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7),
|
||||
diagnostic: Diagnostic {
|
||||
message: "value used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 122..123,
|
||||
diagnostic: Diagnostic {
|
||||
message: "use of moved value".to_string(),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
|
@ -713,25 +725,17 @@ mod tests {
|
|||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 122..123,
|
||||
diagnostic: Diagnostic {
|
||||
message: "value used here after move".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
let view = cx.add_view(Default::default(), |cx| {
|
||||
ProjectDiagnosticsEditor::new(project.clone(), settings, cx)
|
||||
let model = cx.add_model(|_| ProjectDiagnostics::new(project.clone()));
|
||||
let workspace = cx.add_view(0, |cx| Workspace::new(&workspace_params, cx));
|
||||
|
||||
let view = cx.add_view(0, |cx| {
|
||||
ProjectDiagnosticsEditor::new(model, workspace.downgrade(), settings, cx)
|
||||
});
|
||||
|
||||
view.condition(&mut cx, |view, cx| view.text(cx).contains("fn main()"))
|
||||
|
@ -746,6 +750,7 @@ mod tests {
|
|||
//
|
||||
// main.rs, diagnostic group 1
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
" let x = vec![];\n",
|
||||
|
@ -762,6 +767,7 @@ mod tests {
|
|||
//
|
||||
// main.rs, diagnostic group 2
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"fn main() {\n",
|
||||
|
@ -778,39 +784,35 @@ mod tests {
|
|||
"}"
|
||||
)
|
||||
);
|
||||
|
||||
view.editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.selected_display_ranges(cx),
|
||||
[DisplayPoint::new(11, 6)..DisplayPoint::new(11, 6)]
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
worktree.update(&mut cx, |worktree, cx| {
|
||||
worktree
|
||||
.update_diagnostics_from_provider(
|
||||
.update_diagnostic_entries(
|
||||
Arc::from("/test/a.rs".as_ref()),
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
range: 15..15,
|
||||
diagnostic: Diagnostic {
|
||||
message: "mismatched types".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
None,
|
||||
vec![DiagnosticEntry {
|
||||
range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
|
||||
diagnostic: Diagnostic {
|
||||
message: "mismatched types\nexpected `usize`, found `char`".to_string(),
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: 15..15,
|
||||
diagnostic: Diagnostic {
|
||||
message: "expected `usize`, found `char`".to_string(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
is_primary: false,
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
],
|
||||
}],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
cx.emit(worktree::Event::DiskBasedDiagnosticsUpdated);
|
||||
});
|
||||
|
||||
view.condition(&mut cx, |view, cx| view.text(cx).contains("const a"))
|
||||
|
@ -825,6 +827,7 @@ mod tests {
|
|||
//
|
||||
// a.rs
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"const a: i32 = 'a';\n",
|
||||
|
@ -833,6 +836,7 @@ mod tests {
|
|||
//
|
||||
// main.rs, diagnostic group 1
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
" let x = vec![];\n",
|
||||
|
@ -849,6 +853,7 @@ mod tests {
|
|||
//
|
||||
// main.rs, diagnostic group 2
|
||||
//
|
||||
"\n", // padding
|
||||
"\n", // primary message
|
||||
"\n", // filename
|
||||
"fn main() {\n",
|
||||
|
|
crates/diagnostics/src/items.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
use gpui::{
    elements::*, platform::CursorStyle, Entity, ModelHandle, RenderContext, View, ViewContext,
};
use postage::watch;
use project::Project;
use std::fmt::Write;
use workspace::{Settings, StatusItemView};

pub struct DiagnosticSummary {
    settings: watch::Receiver<Settings>,
    summary: project::DiagnosticSummary,
    in_progress: bool,
}

impl DiagnosticSummary {
    pub fn new(
        project: &ModelHandle<Project>,
        settings: watch::Receiver<Settings>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        cx.subscribe(project, |this, project, event, cx| match event {
            project::Event::DiskBasedDiagnosticsUpdated { .. } => {
                this.summary = project.read(cx).diagnostic_summary(cx);
                cx.notify();
            }
            project::Event::DiskBasedDiagnosticsStarted => {
                this.in_progress = true;
                cx.notify();
            }
            project::Event::DiskBasedDiagnosticsFinished => {
                this.in_progress = false;
                cx.notify();
            }
            _ => {}
        })
        .detach();
        Self {
            settings,
            summary: project.read(cx).diagnostic_summary(cx),
            in_progress: project.read(cx).is_running_disk_based_diagnostics(),
        }
    }
}

impl Entity for DiagnosticSummary {
    type Event = ();
}

impl View for DiagnosticSummary {
    fn ui_name() -> &'static str {
        "DiagnosticSummary"
    }

    fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
        enum Tag {}

        let theme = &self.settings.borrow().theme.project_diagnostics;
        let mut message = String::new();
        if self.in_progress {
            message.push_str("Checking... ");
        }
        write!(
            message,
            "Errors: {}, Warnings: {}",
            self.summary.error_count, self.summary.warning_count
        )
        .unwrap();
        MouseEventHandler::new::<Tag, _, _, _>(0, cx, |_, _| {
            Label::new(message, theme.status_bar_item.text.clone())
                .contained()
                .with_style(theme.status_bar_item.container)
                .boxed()
        })
        .with_cursor_style(CursorStyle::PointingHand)
        .on_click(|cx| cx.dispatch_action(crate::Deploy))
        .boxed()
    }
}

impl StatusItemView for DiagnosticSummary {
    fn set_active_pane_item(
        &mut self,
        _: Option<&dyn workspace::ItemViewHandle>,
        _: &mut ViewContext<Self>,
    ) {
    }
}

@ -199,7 +199,10 @@ impl DisplaySnapshot {
|
|||
|
||||
pub fn prev_line_boundary(&self, mut point: Point) -> (Point, DisplayPoint) {
|
||||
loop {
|
||||
point.column = 0;
|
||||
let mut fold_point = point.to_fold_point(&self.folds_snapshot, Bias::Left);
|
||||
*fold_point.column_mut() = 0;
|
||||
point = fold_point.to_buffer_point(&self.folds_snapshot);
|
||||
|
||||
let mut display_point = self.point_to_display_point(point, Bias::Left);
|
||||
*display_point.column_mut() = 0;
|
||||
let next_point = self.display_point_to_point(display_point, Bias::Left);
|
||||
|
@ -212,7 +215,10 @@ impl DisplaySnapshot {
|
|||
|
||||
pub fn next_line_boundary(&self, mut point: Point) -> (Point, DisplayPoint) {
|
||||
loop {
|
||||
point.column = self.buffer_snapshot.line_len(point.row);
|
||||
let mut fold_point = point.to_fold_point(&self.folds_snapshot, Bias::Right);
|
||||
*fold_point.column_mut() = self.folds_snapshot.line_len(fold_point.row());
|
||||
point = fold_point.to_buffer_point(&self.folds_snapshot);
|
||||
|
||||
let mut display_point = self.point_to_display_point(point, Bias::Right);
|
||||
*display_point.column_mut() = self.line_len(display_point.row());
|
||||
let next_point = self.display_point_to_point(display_point, Bias::Right);
|
||||
|
@ -446,10 +452,11 @@ impl ToDisplayPoint for Anchor {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{movement, test::*};
|
||||
use gpui::{color::Color, elements::*, MutableAppContext};
|
||||
use crate::movement;
|
||||
use gpui::{color::Color, elements::*, test::observe, MutableAppContext};
|
||||
use language::{Buffer, Language, LanguageConfig, RandomCharIter, SelectionGoal};
|
||||
use rand::{prelude::*, Rng};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{env, sync::Arc};
|
||||
use theme::SyntaxTheme;
|
||||
use util::test::sample_text;
|
||||
|
@ -493,7 +500,7 @@ mod tests {
|
|||
let map = cx.add_model(|cx| {
|
||||
DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
|
||||
});
|
||||
let (_observer, notifications) = Observer::new(&map, &mut cx);
|
||||
let mut notifications = observe(&map, &mut cx);
|
||||
let mut fold_count = 0;
|
||||
let mut blocks = Vec::new();
|
||||
|
||||
|
@ -589,7 +596,7 @@ mod tests {
|
|||
}
|
||||
|
||||
if map.read_with(&cx, |map, cx| map.is_rewrapping(cx)) {
|
||||
notifications.recv().await.unwrap();
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
|
||||
let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
|
||||
|
|
|
@ -37,7 +37,6 @@ impl FoldPoint {
|
|||
&mut self.0.row
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn column_mut(&mut self) -> &mut u32 {
|
||||
&mut self.0.column
|
||||
}
|
||||
|
@ -549,7 +548,6 @@ impl FoldSnapshot {
|
|||
FoldOffset(self.transforms.summary().output.bytes)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn line_len(&self, row: u32) -> u32 {
|
||||
let line_start = FoldPoint::new(row, 0).to_offset(self).0;
|
||||
let line_end = if row >= self.max_point().row() {
|
||||
|
|
|
@ -1014,11 +1014,12 @@ mod tests {
|
|||
use super::*;
|
||||
use crate::{
|
||||
display_map::{fold_map::FoldMap, tab_map::TabMap},
|
||||
test::Observer,
|
||||
MultiBuffer,
|
||||
};
|
||||
use gpui::test::observe;
|
||||
use language::RandomCharIter;
|
||||
use rand::prelude::*;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{cmp, env};
|
||||
use text::Rope;
|
||||
|
||||
|
@ -1072,10 +1073,10 @@ mod tests {
|
|||
|
||||
let (wrap_map, _) =
|
||||
cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx));
|
||||
let (_observer, notifications) = Observer::new(&wrap_map, &mut cx);
|
||||
let mut notifications = observe(&wrap_map, &mut cx);
|
||||
|
||||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
|
||||
let (initial_snapshot, _) = wrap_map.update(&mut cx, |map, cx| {
|
||||
|
@ -1148,7 +1149,7 @@ mod tests {
|
|||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
}
|
||||
|
@ -1236,7 +1237,7 @@ mod tests {
|
|||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
}
|
||||
wrap_map.read_with(&cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
|
|
|
@ -28,8 +28,8 @@ use language::{
|
|||
BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal,
|
||||
TransactionId,
|
||||
};
|
||||
pub use multi_buffer::{Anchor, ExcerptId, ExcerptProperties, MultiBuffer};
|
||||
use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint};
|
||||
pub use multi_buffer::{Anchor, ExcerptId, ExcerptProperties, MultiBuffer, ToOffset, ToPoint};
|
||||
use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot};
|
||||
use postage::watch;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
|
@ -46,7 +46,7 @@ use sum_tree::Bias;
|
|||
use text::rope::TextDimension;
|
||||
use theme::{DiagnosticStyle, EditorStyle};
|
||||
use util::post_inc;
|
||||
use workspace::{EntryOpener, Workspace};
|
||||
use workspace::{PathOpener, Workspace};
|
||||
|
||||
const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
|
||||
const MAX_LINE_LEN: usize = 1024;
|
||||
|
@ -111,8 +111,8 @@ action!(FoldSelectedRanges);
|
|||
action!(Scroll, Vector2F);
|
||||
action!(Select, SelectPhase);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext, entry_openers: &mut Vec<Box<dyn EntryOpener>>) {
|
||||
entry_openers.push(Box::new(items::BufferOpener));
|
||||
pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec<Box<dyn PathOpener>>) {
|
||||
path_openers.push(Box::new(items::BufferOpener));
|
||||
cx.add_bindings(vec![
|
||||
Binding::new("escape", Cancel, Some("Editor")),
|
||||
Binding::new("backspace", Backspace, Some("Editor")),
|
||||
|
@ -365,7 +365,7 @@ pub struct Editor {
|
|||
select_larger_syntax_node_stack: Vec<Box<[Selection<usize>]>>,
|
||||
active_diagnostics: Option<ActiveDiagnosticGroup>,
|
||||
scroll_position: Vector2F,
|
||||
scroll_top_anchor: Anchor,
|
||||
scroll_top_anchor: Option<Anchor>,
|
||||
autoscroll_request: Option<Autoscroll>,
|
||||
build_settings: BuildSettings,
|
||||
focused: bool,
|
||||
|
@ -383,7 +383,7 @@ pub struct EditorSnapshot {
|
|||
pub placeholder_text: Option<Arc<str>>,
|
||||
is_focused: bool,
|
||||
scroll_position: Vector2F,
|
||||
scroll_top_anchor: Anchor,
|
||||
scroll_top_anchor: Option<Anchor>,
|
||||
}
|
||||
|
||||
struct PendingSelection {
|
||||
|
@ -495,7 +495,7 @@ impl Editor {
|
|||
active_diagnostics: None,
|
||||
build_settings,
|
||||
scroll_position: Vector2F::zero(),
|
||||
scroll_top_anchor: Anchor::min(),
|
||||
scroll_top_anchor: None,
|
||||
autoscroll_request: None,
|
||||
focused: false,
|
||||
show_local_cursors: false,
|
||||
|
@ -524,8 +524,7 @@ impl Editor {
|
|||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, "", cx).with_language(Some(language::PLAIN_TEXT.clone()), None, cx)
|
||||
});
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
workspace.add_item(BufferItemHandle(buffer), cx);
|
||||
workspace.open_item(BufferItemHandle(buffer), cx);
|
||||
}
|
||||
|
||||
pub fn replica_id(&self, cx: &AppContext) -> ReplicaId {
|
||||
|
@ -565,15 +564,22 @@ impl Editor {
|
|||
|
||||
pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
|
||||
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let scroll_top_buffer_offset =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
|
||||
self.scroll_top_anchor = map
|
||||
.buffer_snapshot
|
||||
.anchor_at(scroll_top_buffer_offset, Bias::Right);
|
||||
self.scroll_position = vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - self.scroll_top_anchor.to_display_point(&map).row() as f32,
|
||||
);
|
||||
|
||||
if scroll_position.y() == 0. {
|
||||
self.scroll_top_anchor = None;
|
||||
self.scroll_position = scroll_position;
|
||||
} else {
|
||||
let scroll_top_buffer_offset =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
|
||||
let anchor = map
|
||||
.buffer_snapshot
|
||||
.anchor_at(scroll_top_buffer_offset, Bias::Right);
|
||||
self.scroll_position = vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - anchor.to_display_point(&map).row() as f32,
|
||||
);
|
||||
self.scroll_top_anchor = Some(anchor);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
@ -1049,6 +1055,45 @@ impl Editor {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn selected_ranges<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Vec<Range<D>> {
|
||||
self.local_selections::<D>(cx)
|
||||
.iter()
|
||||
.map(|s| {
|
||||
if s.reversed {
|
||||
s.end.clone()..s.start.clone()
|
||||
} else {
|
||||
s.start.clone()..s.end.clone()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn selected_display_ranges(&self, cx: &mut MutableAppContext) -> Vec<Range<DisplayPoint>> {
|
||||
let display_map = self
|
||||
.display_map
|
||||
.update(cx, |display_map, cx| display_map.snapshot(cx));
|
||||
self.selections
|
||||
.iter()
|
||||
.chain(
|
||||
self.pending_selection
|
||||
.as_ref()
|
||||
.map(|pending| &pending.selection),
|
||||
)
|
||||
.map(|s| {
|
||||
if s.reversed {
|
||||
s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map)
|
||||
} else {
|
||||
s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map)
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn select_ranges<I, T>(
|
||||
&mut self,
|
||||
ranges: I,
|
||||
|
@ -1059,7 +1104,7 @@ impl Editor {
|
|||
T: ToOffset,
|
||||
{
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let selections = ranges
|
||||
let mut selections = ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let mut start = range.start.to_offset(&buffer);
|
||||
|
@ -1078,7 +1123,8 @@ impl Editor {
|
|||
goal: SelectionGoal::None,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
.collect::<Vec<_>>();
|
||||
selections.sort_unstable_by_key(|s| s.start);
|
||||
self.update_selections(selections, autoscroll, cx);
|
||||
}
|
||||
|
||||
|
@ -1564,7 +1610,6 @@ impl Editor {
|
|||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let mut row_delta = 0;
|
||||
let mut new_cursors = Vec::new();
|
||||
let mut edit_ranges = Vec::new();
|
||||
let mut selections = selections.iter().peekable();
|
||||
|
@ -1590,7 +1635,7 @@ impl Editor {
|
|||
// If there's a line after the range, delete the \n from the end of the row range
|
||||
// and position the cursor on the next line.
|
||||
edit_end = Point::new(rows.end, 0).to_offset(&buffer);
|
||||
cursor_buffer_row = rows.start;
|
||||
cursor_buffer_row = rows.end;
|
||||
} else {
|
||||
// If there isn't a line after the range, delete the \n from the line before the
|
||||
// start of the row range and position the cursor there.
|
||||
|
@ -1599,29 +1644,35 @@ impl Editor {
|
|||
cursor_buffer_row = rows.start.saturating_sub(1);
|
||||
}
|
||||
|
||||
let mut cursor =
|
||||
Point::new(cursor_buffer_row - row_delta, 0).to_display_point(&display_map);
|
||||
let mut cursor = Point::new(cursor_buffer_row, 0).to_display_point(&display_map);
|
||||
*cursor.column_mut() =
|
||||
cmp::min(goal_display_column, display_map.line_len(cursor.row()));
|
||||
row_delta += rows.len() as u32;
|
||||
|
||||
new_cursors.push((selection.id, cursor.to_point(&display_map)));
|
||||
new_cursors.push((
|
||||
selection.id,
|
||||
buffer.anchor_after(cursor.to_point(&display_map)),
|
||||
));
|
||||
edit_ranges.push(edit_start..edit_end);
|
||||
}
|
||||
|
||||
new_cursors.sort_unstable_by_key(|(_, point)| point.clone());
|
||||
new_cursors.sort_unstable_by(|a, b| a.1.cmp(&b.1, &buffer).unwrap());
|
||||
let buffer = self.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edit_ranges, "", cx);
|
||||
buffer.snapshot(cx)
|
||||
});
|
||||
let new_selections = new_cursors
|
||||
.into_iter()
|
||||
.map(|(id, cursor)| Selection {
|
||||
id,
|
||||
start: cursor,
|
||||
end: cursor,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
.map(|(id, cursor)| {
|
||||
let cursor = cursor.to_point(&buffer);
|
||||
Selection {
|
||||
id,
|
||||
start: cursor,
|
||||
end: cursor,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.edit(edit_ranges, "", cx));
|
||||
self.update_selections(new_selections, Some(Autoscroll::Fit), cx);
|
||||
self.end_transaction(cx);
|
||||
}
|
||||
|
@ -1629,7 +1680,7 @@ impl Editor {
|
|||
pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext<Self>) {
|
||||
self.start_transaction(cx);
|
||||
|
||||
let mut selections = self.local_selections::<Point>(cx);
|
||||
let selections = self.local_selections::<Point>(cx);
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = &display_map.buffer_snapshot;
|
||||
|
||||
|
@ -1659,28 +1710,13 @@ impl Editor {
|
|||
edits.push((start, text, rows.len() as u32));
|
||||
}
|
||||
|
||||
let mut edits_iter = edits.iter().peekable();
|
||||
let mut row_delta = 0;
|
||||
for selection in selections.iter_mut() {
|
||||
while let Some((point, _, line_count)) = edits_iter.peek() {
|
||||
if *point <= selection.start {
|
||||
row_delta += line_count;
|
||||
edits_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
selection.start.row += row_delta;
|
||||
selection.end.row += row_delta;
|
||||
}
|
||||
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
for (point, text, _) in edits.into_iter().rev() {
|
||||
buffer.edit(Some(point..point), text, cx);
|
||||
}
|
||||
});
|
||||
|
||||
self.update_selections(selections, Some(Autoscroll::Fit), cx);
|
||||
self.request_autoscroll(Autoscroll::Fit, cx);
|
||||
self.end_transaction(cx);
|
||||
}
|
||||
|
||||
|
@ -2867,19 +2903,19 @@ impl Editor {
|
|||
loop {
|
||||
let next_group = buffer
|
||||
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
|
||||
.find_map(|(provider_name, entry)| {
|
||||
.find_map(|entry| {
|
||||
if entry.diagnostic.is_primary
|
||||
&& !entry.range.is_empty()
|
||||
&& Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
|
||||
{
|
||||
Some((provider_name, entry.range, entry.diagnostic.group_id))
|
||||
Some((entry.range, entry.diagnostic.group_id))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
if let Some((provider_name, primary_range, group_id)) = next_group {
|
||||
self.activate_diagnostics(provider_name, group_id, cx);
|
||||
if let Some((primary_range, group_id)) = next_group {
|
||||
self.activate_diagnostics(group_id, cx);
|
||||
self.update_selections(
|
||||
vec![Selection {
|
||||
id: selection.id,
|
||||
|
@ -2907,7 +2943,7 @@ impl Editor {
|
|||
let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
|
||||
let is_valid = buffer
|
||||
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
|
||||
.any(|(_, entry)| {
|
||||
.any(|entry| {
|
||||
entry.diagnostic.is_primary
|
||||
&& !entry.range.is_empty()
|
||||
&& entry.range.start == primary_range_start
|
||||
|
@ -2933,12 +2969,7 @@ impl Editor {
|
|||
}
|
||||
}
|
||||
|
||||
fn activate_diagnostics(
|
||||
&mut self,
|
||||
provider_name: &str,
|
||||
group_id: usize,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
fn activate_diagnostics(&mut self, group_id: usize, cx: &mut ViewContext<Self>) {
|
||||
self.dismiss_diagnostics(cx);
|
||||
self.active_diagnostics = self.display_map.update(cx, |display_map, cx| {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
|
@ -2947,7 +2978,7 @@ impl Editor {
|
|||
let mut primary_message = None;
|
||||
let mut group_end = Point::zero();
|
||||
let diagnostic_group = buffer
|
||||
.diagnostic_group::<Point>(provider_name, group_id)
|
||||
.diagnostic_group::<Point>(group_id)
|
||||
.map(|entry| {
|
||||
if entry.range.end > group_end {
|
||||
group_end = entry.range.end;
|
||||
|
@ -3113,10 +3144,6 @@ impl Editor {
|
|||
.collect()
|
||||
}
|
||||
|
||||
pub fn local_anchor_selections(&self) -> &Arc<[Selection<Anchor>]> {
|
||||
&self.selections
|
||||
}
|
||||
|
||||
fn resolve_selections<'a, D, I>(
|
||||
&self,
|
||||
selections: I,
|
||||
|
@@ -3270,6 +3297,45 @@
        );
    }

    /// Compute new ranges for any selections that were located in excerpts that have
    /// since been removed.
    ///
    /// Returns a `HashMap` whose keys are the ids of selections whose former head
    /// position is no longer present, and whose values are the ids of the excerpts
    /// where each such selection's head has been moved.
    pub fn refresh_selections(&mut self, cx: &mut ViewContext<Self>) -> HashMap<usize, ExcerptId> {
        let anchors_with_status = self.buffer.update(cx, |buffer, cx| {
            let snapshot = buffer.read(cx);
            snapshot.refresh_anchors(
                self.selections
                    .iter()
                    .flat_map(|selection| [&selection.start, &selection.end]),
            )
        });
        let mut selections_with_lost_position = HashMap::default();
        self.selections = self
            .selections
            .iter()
            .cloned()
            .zip(anchors_with_status.chunks(2))
            .map(|(mut selection, anchors)| {
                selection.start = anchors[0].0.clone();
                selection.end = anchors[1].0.clone();
                let kept_head_position = if selection.reversed {
                    anchors[0].1
                } else {
                    anchors[1].1
                };
                if !kept_head_position {
                    selections_with_lost_position
                        .insert(selection.id, selection.head().excerpt_id.clone());
                }
                selection
            })
            .collect();
        selections_with_lost_position
    }

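    // Example (illustrative sketch, not part of this diff): one way a caller can consume the
    // map returned by `refresh_selections`, modeled on ProjectDiagnosticsEditor::populate_excerpts
    // elsewhere in this commit; the re-homing logic in the loop body is a placeholder assumption.
    //
    //     let lost_heads = editor.refresh_selections(cx);
    //     let mut selections = editor.local_selections::<usize>(cx);
    //     for selection in &mut selections {
    //         if lost_heads.contains_key(&selection.id) {
    //             // This selection's head anchor was in a removed excerpt; move it to a
    //             // still-valid spot, e.g. the start of the nearest primary diagnostic.
    //         }
    //     }
    //     editor.update_selections(selections, None, cx);
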
fn set_selections(&mut self, selections: Arc<[Selection<Anchor>]>, cx: &mut ViewContext<Self>) {
|
||||
self.selections = selections;
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
|
@ -3650,10 +3716,14 @@ impl EditorSettings {
|
|||
fn compute_scroll_position(
|
||||
snapshot: &DisplaySnapshot,
|
||||
mut scroll_position: Vector2F,
|
||||
scroll_top_anchor: &Anchor,
|
||||
scroll_top_anchor: &Option<Anchor>,
|
||||
) -> Vector2F {
|
||||
let scroll_top = scroll_top_anchor.to_display_point(snapshot).row() as f32;
|
||||
scroll_position.set_y(scroll_top + scroll_position.y());
|
||||
if let Some(anchor) = scroll_top_anchor {
|
||||
let scroll_top = anchor.to_display_point(snapshot).row() as f32;
|
||||
scroll_position.set_y(scroll_top + scroll_position.y());
|
||||
} else {
|
||||
scroll_position.set_y(0.);
|
||||
}
|
||||
scroll_position
|
||||
}
|
||||
|
||||
|
@ -3786,6 +3856,7 @@ pub fn diagnostic_block_renderer(
|
|||
let mut text_style = settings.style.text.clone();
|
||||
text_style.color = diagnostic_style(diagnostic.severity, is_valid, &settings.style).text;
|
||||
Text::new(diagnostic.message.clone(), text_style)
|
||||
.with_soft_wrap(false)
|
||||
.contained()
|
||||
.with_margin_left(cx.anchor_x)
|
||||
.boxed()
|
||||
|
@ -3801,7 +3872,8 @@ pub fn diagnostic_header_renderer(
|
|||
Arc::new(move |cx| {
|
||||
let settings = build_settings(cx);
|
||||
let mut text_style = settings.style.text.clone();
|
||||
text_style.color = diagnostic_style(diagnostic.severity, is_valid, &settings.style).text;
|
||||
let diagnostic_style = diagnostic_style(diagnostic.severity, is_valid, &settings.style);
|
||||
text_style.color = diagnostic_style.text;
|
||||
let file_path = if let Some(file) = buffer.read(&**cx).file() {
|
||||
file.path().to_string_lossy().to_string()
|
||||
} else {
|
||||
|
@ -3809,8 +3881,17 @@ pub fn diagnostic_header_renderer(
|
|||
};
|
||||
|
||||
Flex::column()
|
||||
.with_child(Label::new(diagnostic.message.clone(), text_style).boxed())
|
||||
.with_child(
|
||||
Text::new(diagnostic.message.clone(), text_style)
|
||||
.with_soft_wrap(false)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(Label::new(file_path, settings.style.text.clone()).boxed())
|
||||
.aligned()
|
||||
.left()
|
||||
.contained()
|
||||
.with_style(diagnostic_style.header)
|
||||
.expanded()
|
||||
.boxed()
|
||||
})
|
||||
}
|
||||
|
@ -6160,45 +6241,6 @@ mod tests {
|
|||
});
|
||||
}
|
||||
|
||||
impl Editor {
|
||||
fn selected_ranges<D: TextDimension + Ord + Sub<D, Output = D>>(
|
||||
&self,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Vec<Range<D>> {
|
||||
self.local_selections::<D>(cx)
|
||||
.iter()
|
||||
.map(|s| {
|
||||
if s.reversed {
|
||||
s.end.clone()..s.start.clone()
|
||||
} else {
|
||||
s.start.clone()..s.end.clone()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn selected_display_ranges(&self, cx: &mut MutableAppContext) -> Vec<Range<DisplayPoint>> {
|
||||
let display_map = self
|
||||
.display_map
|
||||
.update(cx, |display_map, cx| display_map.snapshot(cx));
|
||||
self.selections
|
||||
.iter()
|
||||
.chain(
|
||||
self.pending_selection
|
||||
.as_ref()
|
||||
.map(|pending| &pending.selection),
|
||||
)
|
||||
.map(|s| {
|
||||
if s.reversed {
|
||||
s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map)
|
||||
} else {
|
||||
s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map)
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
|
||||
let point = DisplayPoint::new(row as u32, column as u32);
|
||||
point..point
|
||||
|
|
|
@ -5,25 +5,26 @@ use gpui::{
|
|||
elements::*, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext,
|
||||
Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle,
|
||||
};
|
||||
use language::{Diagnostic, File as _};
|
||||
use language::{Buffer, Diagnostic, File as _};
|
||||
use postage::watch;
|
||||
use project::{File, ProjectPath, Worktree};
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
use text::{Point, Selection};
|
||||
use workspace::{
|
||||
EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView, WeakItemHandle,
|
||||
ItemHandle, ItemView, ItemViewHandle, PathOpener, Settings, StatusItemView, WeakItemHandle,
|
||||
Workspace,
|
||||
};
|
||||
|
||||
pub struct BufferOpener;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BufferItemHandle(pub ModelHandle<MultiBuffer>);
|
||||
pub struct BufferItemHandle(pub ModelHandle<Buffer>);
|
||||
|
||||
#[derive(Clone)]
|
||||
struct WeakBufferItemHandle(WeakModelHandle<MultiBuffer>);
|
||||
struct WeakBufferItemHandle(WeakModelHandle<Buffer>);
|
||||
|
||||
impl EntryOpener for BufferOpener {
|
||||
impl PathOpener for BufferOpener {
|
||||
fn open(
|
||||
&self,
|
||||
worktree: &mut Worktree,
|
||||
|
@ -31,9 +32,8 @@ impl EntryOpener for BufferOpener {
|
|||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Box<dyn ItemHandle>>>> {
|
||||
let buffer = worktree.open_buffer(project_path.path, cx);
|
||||
let task = cx.spawn(|_, mut cx| async move {
|
||||
let task = cx.spawn(|_, _| async move {
|
||||
let buffer = buffer.await?;
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Ok(Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
|
||||
});
|
||||
Some(task)
|
||||
|
@ -44,14 +44,15 @@ impl ItemHandle for BufferItemHandle {
|
|||
fn add_view(
|
||||
&self,
|
||||
window_id: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Box<dyn ItemViewHandle> {
|
||||
let buffer = self.0.downgrade();
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(self.0.clone(), cx));
|
||||
let weak_buffer = buffer.downgrade();
|
||||
Box::new(cx.add_view(window_id, |cx| {
|
||||
Editor::for_buffer(
|
||||
self.0.clone(),
|
||||
crate::settings_builder(buffer, settings),
|
||||
buffer,
|
||||
crate::settings_builder(weak_buffer, workspace.settings()),
|
||||
cx,
|
||||
)
|
||||
}))
|
||||
|
@ -61,16 +62,24 @@ impl ItemHandle for BufferItemHandle {
|
|||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
fn to_any(&self) -> gpui::AnyModelHandle {
|
||||
self.0.clone().into()
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> Box<dyn workspace::WeakItemHandle> {
|
||||
Box::new(WeakBufferItemHandle(self.0.downgrade()))
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
File::from_dyn(self.0.read(cx).file(cx)).map(|f| ProjectPath {
|
||||
File::from_dyn(self.0.read(cx).file()).map(|f| ProjectPath {
|
||||
worktree_id: f.worktree_id(cx),
|
||||
path: f.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn id(&self) -> usize {
|
||||
self.0.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl WeakItemHandle for WeakBufferItemHandle {
|
||||
|
@ -79,22 +88,17 @@ impl WeakItemHandle for WeakBufferItemHandle {
|
|||
.upgrade(cx)
|
||||
.map(|buffer| Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
|
||||
}
|
||||
|
||||
fn id(&self) -> usize {
|
||||
self.0.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl ItemView for Editor {
|
||||
fn should_activate_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Activate)
|
||||
}
|
||||
type ItemHandle = BufferItemHandle;
|
||||
|
||||
fn should_close_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Closed)
|
||||
}
|
||||
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
matches!(
|
||||
event,
|
||||
Event::Saved | Event::Dirtied | Event::FileHandleChanged
|
||||
)
|
||||
fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle {
|
||||
BufferItemHandle(self.buffer.read(cx).as_singleton().unwrap())
|
||||
}
|
||||
|
||||
fn title(&self, cx: &AppContext) -> String {
|
||||
|
@ -124,6 +128,18 @@ impl ItemView for Editor {
|
|||
Some(self.clone(cx))
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
self.project_path(cx).is_some()
|
||||
}
|
||||
|
||||
fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
|
||||
let save = self.buffer().update(cx, |b, cx| b.save(cx))?;
|
||||
Ok(cx.spawn(|_, _| async move {
|
||||
|
@ -132,6 +148,10 @@ impl ItemView for Editor {
|
|||
}))
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
worktree: ModelHandle<Worktree>,
|
||||
|
@ -180,20 +200,19 @@ impl ItemView for Editor {
|
|||
})
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).is_dirty()
|
||||
fn should_activate_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Activate)
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).has_conflict()
|
||||
fn should_close_item_on_event(event: &Event) -> bool {
|
||||
matches!(event, Event::Closed)
|
||||
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
self.project_path(cx).is_some()
|
||||
}
|
||||
|
||||
fn can_save_as(&self, _: &AppContext) -> bool {
|
||||
true
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
matches!(
|
||||
event,
|
||||
Event::Saved | Event::Dirtied | Event::FileHandleChanged
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -298,9 +317,9 @@ impl DiagnosticMessage {
        let new_diagnostic = buffer
            .read(cx)
            .diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
            .filter(|(_, entry)| !entry.range.is_empty())
            .min_by_key(|(_, entry)| (entry.diagnostic.severity, entry.range.len()))
            .map(|(_, entry)| entry.diagnostic);
            .filter(|entry| !entry.range.is_empty())
            .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
            .map(|entry| entry.diagnostic);
        if new_diagnostic != self.diagnostic {
            self.diagnostic = new_diagnostic;
            cx.notify();
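Note: with a single DiagnosticSet per buffer, diagnostics_in_range now yields
DiagnosticEntry values directly instead of (provider_name, entry) tuples. A minimal
sketch of the new call shape (the snapshot variable and the Point range are
hypothetical, not taken from this commit):

    // Collect the messages of every diagnostic overlapping a range of a
    // language::BufferSnapshot.
    use language::Point;

    let messages: Vec<String> = buffer_snapshot
        .diagnostics_in_range::<_, Point>(Point::new(0, 0)..Point::new(10, 0))
        .map(|entry| entry.diagnostic.message)
        .collect();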
@@ -172,7 +172,7 @@ pub fn next_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> Dis
        }
        prev_char_kind = Some(char_kind);
    }
    point
    map.clip_point(point, Bias::Right)
}

pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
@@ -496,6 +496,14 @@ impl MultiBuffer {
            }
        }

        for (buffer_id, buffer_state) in self.buffers.borrow().iter() {
            if !selections_by_buffer.contains_key(buffer_id) {
                buffer_state
                    .buffer
                    .update(cx, |buffer, cx| buffer.remove_active_selections(cx));
            }
        }

        for (buffer_id, mut selections) in selections_by_buffer {
            self.buffers.borrow()[&buffer_id]
                .buffer
@@ -681,6 +689,38 @@ impl MultiBuffer {
            .map_or(Vec::new(), |state| state.excerpts.clone())
    }

    pub fn excerpted_buffers<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        cx: &AppContext,
    ) -> Vec<(ModelHandle<Buffer>, Range<usize>)> {
        let snapshot = self.snapshot(cx);
        let start = range.start.to_offset(&snapshot);
        let end = range.end.to_offset(&snapshot);

        let mut result = Vec::new();
        let mut cursor = snapshot.excerpts.cursor::<usize>();
        cursor.seek(&start, Bias::Right, &());
        while let Some(excerpt) = cursor.item() {
            if *cursor.start() > end {
                break;
            }

            let mut end_before_newline = cursor.end(&());
            if excerpt.has_trailing_newline {
                end_before_newline -= 1;
            }
            let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer);
            let start = excerpt_start + (cmp::max(start, *cursor.start()) - *cursor.start());
            let end = excerpt_start + (cmp::min(end, end_before_newline) - *cursor.start());
            let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone();
            result.push((buffer, start..end));
            cursor.next(&());
        }

        result
    }

    pub fn remove_excerpts<'a>(
        &mut self,
        excerpt_ids: impl IntoIterator<Item = &'a ExcerptId>,
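Note: excerpted_buffers maps a range in the multibuffer back to the underlying
buffers and the offset ranges they contribute. A usage sketch mirroring the
randomized test later in this diff (the multibuffer handle and the 0..42 range are
hypothetical):

    // Reassemble the text that a multibuffer range draws from its source buffers.
    let pieces: Vec<String> = multibuffer
        .read(cx)
        .excerpted_buffers(0..42, cx)
        .into_iter()
        .map(|(buffer, range)| buffer.read(cx).text_for_range(range).collect::<String>())
        .collect();

    // Joining with "\n" reproduces the multibuffer's own text for that range,
    // since each excerpt contributes a trailing newline.
    let text = pieces.join("\n");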
@ -1291,7 +1331,7 @@ impl MultiBufferSnapshot {
|
|||
|
||||
let mut position = D::from_text_summary(&cursor.start().text);
|
||||
if let Some(excerpt) = cursor.item() {
|
||||
if excerpt.id == anchor.excerpt_id {
|
||||
if excerpt.id == anchor.excerpt_id && excerpt.buffer_id == anchor.buffer_id {
|
||||
let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
|
||||
let buffer_position = anchor.text_anchor.summary::<D>(&excerpt.buffer);
|
||||
if buffer_position > excerpt_buffer_start {
|
||||
|
@ -1302,6 +1342,87 @@ impl MultiBufferSnapshot {
|
|||
position
|
||||
}
|
||||
|
||||
pub fn refresh_anchors<'a, I>(&'a self, anchors: I) -> Vec<(Anchor, bool)>
|
||||
where
|
||||
I: 'a + IntoIterator<Item = &'a Anchor>,
|
||||
{
|
||||
let mut anchors = anchors.into_iter().peekable();
|
||||
let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
|
||||
let mut result = Vec::new();
|
||||
while let Some(anchor) = anchors.peek() {
|
||||
let old_excerpt_id = &anchor.excerpt_id;
|
||||
|
||||
// Find the location where this anchor's excerpt should be.
|
||||
cursor.seek_forward(&Some(old_excerpt_id), Bias::Left, &());
|
||||
if cursor.item().is_none() {
|
||||
cursor.next(&());
|
||||
}
|
||||
|
||||
let next_excerpt = cursor.item();
|
||||
let prev_excerpt = cursor.prev_item();
|
||||
|
||||
// Process all of the anchors for this excerpt.
|
||||
while let Some(&anchor) = anchors.peek() {
|
||||
if anchor.excerpt_id != *old_excerpt_id {
|
||||
break;
|
||||
}
|
||||
let mut kept_position = false;
|
||||
let mut anchor = anchors.next().unwrap().clone();
|
||||
|
||||
// Leave min and max anchors unchanged.
|
||||
if *old_excerpt_id == ExcerptId::max() || *old_excerpt_id == ExcerptId::min() {
|
||||
kept_position = true;
|
||||
}
|
||||
// If the old excerpt still exists at this location, then leave
|
||||
// the anchor unchanged.
|
||||
else if next_excerpt.map_or(false, |excerpt| {
|
||||
excerpt.id == *old_excerpt_id && excerpt.buffer_id == anchor.buffer_id
|
||||
}) {
|
||||
kept_position = true;
|
||||
}
|
||||
// If the old excerpt no longer exists at this location, then attempt to
|
||||
// find an equivalent position for this anchor in an adjacent excerpt.
|
||||
else {
|
||||
for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) {
|
||||
if excerpt.contains(&anchor) {
|
||||
anchor.excerpt_id = excerpt.id.clone();
|
||||
kept_position = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// If there's no adjacent excerpt that contains the anchor's position,
|
||||
// then report that the anchor has lost its position.
|
||||
if !kept_position {
|
||||
anchor = if let Some(excerpt) = next_excerpt {
|
||||
Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: excerpt
|
||||
.buffer
|
||||
.anchor_at(&excerpt.range.start, anchor.text_anchor.bias),
|
||||
}
|
||||
} else if let Some(excerpt) = prev_excerpt {
|
||||
Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: excerpt
|
||||
.buffer
|
||||
.anchor_at(&excerpt.range.end, anchor.text_anchor.bias),
|
||||
}
|
||||
} else if anchor.text_anchor.bias == Bias::Left {
|
||||
Anchor::min()
|
||||
} else {
|
||||
Anchor::max()
|
||||
};
|
||||
}
|
||||
|
||||
result.push((anchor, kept_position));
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec<D>
|
||||
where
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
|
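Note: refresh_anchors re-resolves anchors created against an older snapshot after
excerpts have been removed or replaced, returning each adjusted anchor along with
whether its position could be kept. A sketch of the intended call pattern, based on
the multibuffer tests later in this diff (snapshot variables and offsets are
hypothetical):

    let anchors = [old_snapshot.anchor_before(2), old_snapshot.anchor_after(3)];
    for (anchor, kept_position) in new_snapshot.refresh_anchors(&anchors) {
        // kept_position == true: the anchor still points at the same buffer
        // position, possibly re-homed into an adjacent excerpt that contains it.
        // kept_position == false: the position was lost, and the anchor was
        // snapped to the edge of the nearest surviving excerpt (or to
        // Anchor::min() / Anchor::max()).
        let offset = new_snapshot.summary_for_anchor::<usize>(&anchor);
        let _ = (offset, kept_position);
    }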
@ -1312,6 +1433,7 @@ impl MultiBufferSnapshot {
|
|||
let mut summaries = Vec::new();
|
||||
while let Some(anchor) = anchors.peek() {
|
||||
let excerpt_id = &anchor.excerpt_id;
|
||||
let buffer_id = anchor.buffer_id;
|
||||
let excerpt_anchors = iter::from_fn(|| {
|
||||
let anchor = anchors.peek()?;
|
||||
if anchor.excerpt_id == *excerpt_id {
|
||||
|
@ -1328,7 +1450,7 @@ impl MultiBufferSnapshot {
|
|||
|
||||
let position = D::from_text_summary(&cursor.start().text);
|
||||
if let Some(excerpt) = cursor.item() {
|
||||
if excerpt.id == *excerpt_id {
|
||||
if excerpt.id == *excerpt_id && excerpt.buffer_id == buffer_id {
|
||||
let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
|
||||
summaries.extend(
|
||||
excerpt
|
||||
|
@ -1379,6 +1501,7 @@ impl MultiBufferSnapshot {
|
|||
let text_anchor =
|
||||
excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias));
|
||||
Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor,
|
||||
}
|
||||
|
@ -1397,6 +1520,7 @@ impl MultiBufferSnapshot {
|
|||
let text_anchor = excerpt.clip_anchor(text_anchor);
|
||||
drop(cursor);
|
||||
return Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
};
|
||||
|
@ -1497,7 +1621,6 @@ impl MultiBufferSnapshot {
|
|||
|
||||
pub fn diagnostic_group<'a, O>(
|
||||
&'a self,
|
||||
provider_name: &'a str,
|
||||
group_id: usize,
|
||||
) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
|
||||
where
|
||||
|
@ -1505,13 +1628,13 @@ impl MultiBufferSnapshot {
|
|||
{
|
||||
self.as_singleton()
|
||||
.into_iter()
|
||||
.flat_map(move |buffer| buffer.diagnostic_group(provider_name, group_id))
|
||||
.flat_map(move |buffer| buffer.diagnostic_group(group_id))
|
||||
}
|
||||
|
||||
pub fn diagnostics_in_range<'a, T, O>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
) -> impl Iterator<Item = (&'a str, DiagnosticEntry<O>)> + 'a
|
||||
) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
|
||||
where
|
||||
T: 'a + ToOffset,
|
||||
O: 'a + text::FromAnchor,
|
||||
|
@ -1596,10 +1719,12 @@ impl MultiBufferSnapshot {
|
|||
.flat_map(move |(replica_id, selections)| {
|
||||
selections.map(move |selection| {
|
||||
let mut start = Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: selection.start.clone(),
|
||||
};
|
||||
let mut end = Anchor {
|
||||
buffer_id: excerpt.buffer_id,
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: selection.end.clone(),
|
||||
};
|
||||
|
@@ -1795,6 +1920,22 @@ impl Excerpt {
            text_anchor
        }
    }

    fn contains(&self, anchor: &Anchor) -> bool {
        self.buffer_id == anchor.buffer_id
            && self
                .range
                .start
                .cmp(&anchor.text_anchor, &self.buffer)
                .unwrap()
                .is_le()
            && self
                .range
                .end
                .cmp(&anchor.text_anchor, &self.buffer)
                .unwrap()
                .is_ge()
    }
}

impl fmt::Debug for Excerpt {
@ -2370,6 +2511,131 @@ mod tests {
|
|||
assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_multibuffer_resolving_anchors_after_replacing_their_excerpts(
|
||||
cx: &mut MutableAppContext,
|
||||
) {
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "ABCDEFGHIJKLMNOP", cx));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
||||
// Create an insertion id in buffer 1 that doesn't exist in buffer 2.
|
||||
// Add an excerpt from buffer 1 that spans this new insertion.
|
||||
buffer_1.update(cx, |buffer, cx| buffer.edit([4..4], "123", cx));
|
||||
let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_1,
|
||||
range: 0..7,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot_1 = multibuffer.read(cx).snapshot(cx);
|
||||
assert_eq!(snapshot_1.text(), "abcd123");
|
||||
|
||||
// Replace the buffer 1 excerpt with new excerpts from buffer 2.
|
||||
let (excerpt_id_2, excerpt_id_3, _) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.remove_excerpts([&excerpt_id_1], cx);
|
||||
(
|
||||
multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_2,
|
||||
range: 0..4,
|
||||
},
|
||||
cx,
|
||||
),
|
||||
multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_2,
|
||||
range: 6..10,
|
||||
},
|
||||
cx,
|
||||
),
|
||||
multibuffer.push_excerpt(
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_2,
|
||||
range: 12..16,
|
||||
},
|
||||
cx,
|
||||
),
|
||||
)
|
||||
});
|
||||
let snapshot_2 = multibuffer.read(cx).snapshot(cx);
|
||||
assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP");
|
||||
|
||||
// The old excerpt id has been reused.
|
||||
assert_eq!(excerpt_id_2, excerpt_id_1);
|
||||
|
||||
// Resolve some anchors from the previous snapshot in the new snapshot.
|
||||
// Although there is still an excerpt with the same id, it is for
|
||||
// a different buffer, so we don't attempt to resolve the old text
|
||||
// anchor in the new buffer.
|
||||
assert_eq!(
|
||||
snapshot_2.summary_for_anchor::<usize>(&snapshot_1.anchor_before(2)),
|
||||
0
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot_2.summaries_for_anchors::<usize, _>(&[
|
||||
snapshot_1.anchor_before(2),
|
||||
snapshot_1.anchor_after(3)
|
||||
]),
|
||||
vec![0, 0]
|
||||
);
|
||||
let refresh =
|
||||
snapshot_2.refresh_anchors(&[snapshot_1.anchor_before(2), snapshot_1.anchor_after(3)]);
|
||||
assert_eq!(
|
||||
refresh,
|
||||
&[
|
||||
(snapshot_2.anchor_before(0), false),
|
||||
(snapshot_2.anchor_after(0), false),
|
||||
]
|
||||
);
|
||||
|
||||
// Replace the middle excerpt with a smaller excerpt in buffer 2,
|
||||
// that intersects the old excerpt.
|
||||
let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.remove_excerpts([&excerpt_id_3], cx);
|
||||
multibuffer.insert_excerpt_after(
|
||||
&excerpt_id_3,
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_2,
|
||||
range: 5..8,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let snapshot_3 = multibuffer.read(cx).snapshot(cx);
|
||||
assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP");
|
||||
assert_ne!(excerpt_id_5, excerpt_id_3);
|
||||
|
||||
// Resolve some anchors from the previous snapshot in the new snapshot.
|
||||
// The anchor in the middle excerpt snaps to the beginning of the
|
||||
// excerpt, since it is not
|
||||
let anchors = [
|
||||
snapshot_2.anchor_before(0),
|
||||
snapshot_2.anchor_after(2),
|
||||
snapshot_2.anchor_after(6),
|
||||
snapshot_2.anchor_after(14),
|
||||
];
|
||||
assert_eq!(
|
||||
snapshot_3.summaries_for_anchors::<usize, _>(&anchors),
|
||||
&[0, 2, 9, 13]
|
||||
);
|
||||
|
||||
let new_anchors = snapshot_3.refresh_anchors(&anchors);
|
||||
assert_eq!(
|
||||
new_anchors.iter().map(|a| a.1).collect::<Vec<_>>(),
|
||||
&[true, true, true, true]
|
||||
);
|
||||
assert_eq!(
|
||||
snapshot_3.summaries_for_anchors::<usize, _>(new_anchors.iter().map(|a| &a.0)),
|
||||
&[0, 2, 7, 13]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_excerpts(cx: &mut MutableAppContext, mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
|
@ -2377,7 +2643,7 @@ mod tests {
|
|||
.unwrap_or(10);
|
||||
|
||||
let mut buffers: Vec<ModelHandle<Buffer>> = Vec::new();
|
||||
let list = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let mut excerpt_ids = Vec::new();
|
||||
let mut expected_excerpts = Vec::<(ModelHandle<Buffer>, Range<text::Anchor>)>::new();
|
||||
let mut old_versions = Vec::new();
|
||||
|
@ -2408,7 +2674,9 @@ mod tests {
|
|||
);
|
||||
}
|
||||
ids_to_remove.sort_unstable();
|
||||
list.update(cx, |list, cx| list.remove_excerpts(&ids_to_remove, cx));
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.remove_excerpts(&ids_to_remove, cx)
|
||||
});
|
||||
}
|
||||
_ => {
|
||||
let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) {
|
||||
|
@ -2440,8 +2708,8 @@ mod tests {
|
|||
&buffer.text()[start_ix..end_ix]
|
||||
);
|
||||
|
||||
let excerpt_id = list.update(cx, |list, cx| {
|
||||
list.insert_excerpt_after(
|
||||
let excerpt_id = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.insert_excerpt_after(
|
||||
&prev_excerpt_id,
|
||||
ExcerptProperties {
|
||||
buffer: &buffer_handle,
|
||||
|
@ -2457,12 +2725,12 @@ mod tests {
|
|||
}
|
||||
|
||||
if rng.gen_bool(0.3) {
|
||||
list.update(cx, |list, cx| {
|
||||
old_versions.push((list.snapshot(cx), list.subscribe()));
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe()));
|
||||
})
|
||||
}
|
||||
|
||||
let snapshot = list.read(cx).snapshot(cx);
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
|
||||
let mut excerpt_starts = Vec::new();
|
||||
let mut expected_text = String::new();
|
||||
|
@ -2657,15 +2925,30 @@ mod tests {
|
|||
let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
|
||||
let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
|
||||
|
||||
let text_for_range = snapshot
|
||||
.text_for_range(start_ix..end_ix)
|
||||
.collect::<String>();
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.text_for_range(start_ix..end_ix)
|
||||
.collect::<String>(),
|
||||
text_for_range,
|
||||
&expected_text[start_ix..end_ix],
|
||||
"incorrect text for range {:?}",
|
||||
start_ix..end_ix
|
||||
);
|
||||
|
||||
let excerpted_buffer_ranges =
|
||||
multibuffer.read(cx).excerpted_buffers(start_ix..end_ix, cx);
|
||||
let excerpted_buffers_text = excerpted_buffer_ranges
|
||||
.into_iter()
|
||||
.map(|(buffer, buffer_range)| {
|
||||
buffer
|
||||
.read(cx)
|
||||
.text_for_range(buffer_range)
|
||||
.collect::<String>()
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
assert_eq!(excerpted_buffers_text, text_for_range);
|
||||
|
||||
let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]);
|
||||
assert_eq!(
|
||||
snapshot.text_summary_for_range::<TextSummary, _>(start_ix..end_ix),
|
||||
|
@ -2699,7 +2982,7 @@ mod tests {
|
|||
}
|
||||
}
|
||||
|
||||
let snapshot = list.read(cx).snapshot(cx);
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
for (old_snapshot, subscription) in old_versions {
|
||||
let edits = subscription.consume().into_inner();
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@ use text::{rope::TextDimension, Point};
|
|||
|
||||
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
|
||||
pub struct Anchor {
|
||||
pub(crate) buffer_id: usize,
|
||||
pub(crate) excerpt_id: ExcerptId,
|
||||
pub(crate) text_anchor: text::Anchor,
|
||||
}
|
||||
|
@ -16,6 +17,7 @@ pub struct Anchor {
|
|||
impl Anchor {
|
||||
pub fn min() -> Self {
|
||||
Self {
|
||||
buffer_id: 0,
|
||||
excerpt_id: ExcerptId::min(),
|
||||
text_anchor: text::Anchor::min(),
|
||||
}
|
||||
|
@ -23,6 +25,7 @@ impl Anchor {
|
|||
|
||||
pub fn max() -> Self {
|
||||
Self {
|
||||
buffer_id: 0,
|
||||
excerpt_id: ExcerptId::max(),
|
||||
text_anchor: text::Anchor::max(),
|
||||
}
|
||||
|
@ -54,6 +57,7 @@ impl Anchor {
|
|||
if self.text_anchor.bias != Bias::Left {
|
||||
if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id.clone(),
|
||||
text_anchor: self.text_anchor.bias_left(buffer_snapshot),
|
||||
};
|
||||
|
@ -66,6 +70,7 @@ impl Anchor {
|
|||
if self.text_anchor.bias != Bias::Right {
|
||||
if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id.clone(),
|
||||
text_anchor: self.text_anchor.bias_right(buffer_snapshot),
|
||||
};
|
||||
|
|
|
@ -1,33 +1,6 @@
|
|||
use gpui::{Entity, ModelHandle};
|
||||
use smol::channel;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
// std::env::set_var("RUST_LOG", "info");
|
||||
env_logger::init();
|
||||
}
|
||||
|
||||
pub struct Observer<T>(PhantomData<T>);
|
||||
|
||||
impl<T: 'static> Entity for Observer<T> {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl<T: Entity> Observer<T> {
|
||||
pub fn new(
|
||||
handle: &ModelHandle<T>,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> (ModelHandle<Self>, channel::Receiver<()>) {
|
||||
let (notify_tx, notify_rx) = channel::unbounded();
|
||||
let observer = cx.add_model(|cx| {
|
||||
cx.observe(handle, move |_, _, _| {
|
||||
let _ = notify_tx.try_send(());
|
||||
})
|
||||
.detach();
|
||||
Observer(PhantomData)
|
||||
});
|
||||
(observer, notify_rx)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -83,7 +83,7 @@ impl View for FileFinder {
|
|||
.with_style(settings.theme.selector.input_editor.container)
|
||||
.boxed(),
|
||||
)
|
||||
.with_child(Flexible::new(1.0, self.render_matches()).boxed())
|
||||
.with_child(Flexible::new(1.0, false, self.render_matches()).boxed())
|
||||
.boxed(),
|
||||
)
|
||||
.with_style(settings.theme.selector.container)
|
||||
|
@ -175,6 +175,7 @@ impl FileFinder {
|
|||
.with_child(
|
||||
Flexible::new(
|
||||
1.0,
|
||||
false,
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Label::new(file_name.to_string(), style.label.clone())
|
||||
|
@ -249,8 +250,8 @@ impl FileFinder {
|
|||
match event {
|
||||
Event::Selected(project_path) => {
|
||||
workspace
|
||||
.open_entry(project_path.clone(), cx)
|
||||
.map(|d| d.detach());
|
||||
.open_path(project_path.clone(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
workspace.dismiss_modal(cx);
|
||||
}
|
||||
Event::Dismissed => {
|
||||
|
@ -430,14 +431,14 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_matching_paths(mut cx: gpui::TestAppContext) {
|
||||
let mut entry_openers = Vec::new();
|
||||
let mut path_openers = Vec::new();
|
||||
cx.update(|cx| {
|
||||
super::init(cx);
|
||||
editor::init(cx, &mut entry_openers);
|
||||
editor::init(cx, &mut path_openers);
|
||||
});
|
||||
|
||||
let mut params = cx.update(WorkspaceParams::test);
|
||||
params.entry_openers = Arc::from(entry_openers);
|
||||
params.path_openers = Arc::from(path_openers);
|
||||
params
|
||||
.fs
|
||||
.as_fake()
|
||||
|
|
|
@ -992,7 +992,7 @@ impl MutableAppContext {
|
|||
})
|
||||
}
|
||||
|
||||
fn observe<E, H, F>(&mut self, handle: &H, mut callback: F) -> Subscription
|
||||
pub fn observe<E, H, F>(&mut self, handle: &H, mut callback: F) -> Subscription
|
||||
where
|
||||
E: Entity,
|
||||
E::Event: 'static,
|
||||
|
@ -2672,9 +2672,11 @@ impl<T: Entity> ModelHandle<T> {
|
|||
}
|
||||
}
|
||||
|
||||
cx.borrow().foreground().start_waiting();
|
||||
rx.recv()
|
||||
.await
|
||||
.expect("model dropped with pending condition");
|
||||
cx.borrow().foreground().finish_waiting();
|
||||
}
|
||||
})
|
||||
.await
|
||||
|
@ -2771,6 +2773,10 @@ impl<T: Entity> WeakModelHandle<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
self.model_id
|
||||
}
|
||||
|
||||
pub fn upgrade(self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<T>> {
|
||||
cx.upgrade_model_handle(self)
|
||||
}
|
||||
|
@ -2914,9 +2920,11 @@ impl<T: View> ViewHandle<T> {
|
|||
}
|
||||
}
|
||||
|
||||
cx.borrow().foreground().start_waiting();
|
||||
rx.recv()
|
||||
.await
|
||||
.expect("view dropped with pending condition");
|
||||
cx.borrow().foreground().finish_waiting();
|
||||
}
|
||||
})
|
||||
.await
|
||||
|
@@ -3089,14 +3097,39 @@ impl Drop for AnyViewHandle {

pub struct AnyModelHandle {
    model_id: usize,
    model_type: TypeId,
    ref_counts: Arc<Mutex<RefCounts>>,
}

impl AnyModelHandle {
    pub fn downcast<T: Entity>(self) -> Option<ModelHandle<T>> {
        if self.is::<T>() {
            let result = Some(ModelHandle {
                model_id: self.model_id,
                model_type: PhantomData,
                ref_counts: self.ref_counts.clone(),
            });
            unsafe {
                Arc::decrement_strong_count(&self.ref_counts);
            }
            std::mem::forget(self);
            result
        } else {
            None
        }
    }

    pub fn is<T: Entity>(&self) -> bool {
        self.model_type == TypeId::of::<T>()
    }
}

impl<T: Entity> From<ModelHandle<T>> for AnyModelHandle {
    fn from(handle: ModelHandle<T>) -> Self {
        handle.ref_counts.lock().inc_model(handle.model_id);
        Self {
            model_id: handle.model_id,
            model_type: TypeId::of::<T>(),
            ref_counts: handle.ref_counts.clone(),
        }
    }
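Note: AnyModelHandle is a type-erased model handle; the ItemHandle::to_any method
earlier in this diff relies on the From impl, and downcast recovers the typed handle
when the stored TypeId matches. A minimal round-trip sketch (the Buffer model is
just an example entity; the handle variable is hypothetical):

    use gpui::AnyModelHandle;

    // Erase the concrete model type, e.g. to store mixed handles in one collection.
    let any_handle: AnyModelHandle = buffer_handle.clone().into();

    // Recover the typed handle later; downcast returns None if the type differs.
    if let Some(buffer) = any_handle.downcast::<language::Buffer>() {
        let _ = buffer;
    }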
@@ -4,6 +4,7 @@ mod constrained_box;
mod container;
mod empty;
mod event_handler;
mod expanded;
mod flex;
mod hook;
mod image;

@@ -16,6 +17,7 @@ mod svg;
mod text;
mod uniform_list;

use self::expanded::Expanded;
pub use self::{
    align::*, canvas::*, constrained_box::*, container::*, empty::*, event_handler::*, flex::*,
    hook::*, image::*, label::*, list::*, mouse_event_handler::*, overlay::*, stack::*, svg::*,

@@ -130,11 +132,18 @@ pub trait Element {
        Container::new(self.boxed())
    }

    fn expanded(self, flex: f32) -> Expanded
    fn expanded(self) -> Expanded
    where
        Self: 'static + Sized,
    {
        Expanded::new(flex, self.boxed())
        Expanded::new(self.boxed())
    }

    fn flexible(self, flex: f32, expanded: bool) -> Flexible
    where
        Self: 'static + Sized,
    {
        Flexible::new(flex, expanded, self.boxed())
    }
}
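Note: the Element trait now separates two concerns: expanded() only stretches an
element to fill its constraints, while participation in a Flex parent's flex layout
goes through the new flexible(flex, expanded) helper. A sketch of the two call sites
(the child elements are hypothetical, and the reading of the expanded flag follows
the Flexible element further down in this diff):

    // Inside a Flex row/column: claim a share of the remaining space. With
    // expanded = true the child is stretched to fill that share; with
    // expanded = false it keeps its natural size within it.
    let flexible_child = some_element.flexible(1.0, true).boxed();

    // Independent of any Flex parent: stretch the child to the maximum constraint.
    let stretched_child = other_element.expanded().boxed();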
crates/gpui/src/elements/expanded.rs (new file, 90 lines)
@ -0,0 +1,90 @@
|
|||
use crate::{
|
||||
geometry::{rect::RectF, vector::Vector2F},
|
||||
json, DebugContext, Element, ElementBox, Event, EventContext, LayoutContext, PaintContext,
|
||||
SizeConstraint,
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
pub struct Expanded {
|
||||
child: ElementBox,
|
||||
full_width: bool,
|
||||
full_height: bool,
|
||||
}
|
||||
|
||||
impl Expanded {
|
||||
pub fn new(child: ElementBox) -> Self {
|
||||
Self {
|
||||
child,
|
||||
full_width: true,
|
||||
full_height: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_full_width(mut self) -> Self {
|
||||
self.full_width = true;
|
||||
self.full_height = false;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn to_full_height(mut self) -> Self {
|
||||
self.full_width = false;
|
||||
self.full_height = true;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Expanded {
|
||||
type LayoutState = ();
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
mut constraint: SizeConstraint,
|
||||
cx: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
if self.full_width {
|
||||
constraint.min.set_x(constraint.max.x());
|
||||
}
|
||||
if self.full_height {
|
||||
constraint.min.set_y(constraint.max.y());
|
||||
}
|
||||
let size = self.child.layout(constraint, cx);
|
||||
(size, ())
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: RectF,
|
||||
visible_bounds: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) -> Self::PaintState {
|
||||
self.child.paint(bounds.origin(), visible_bounds, cx);
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
&mut self,
|
||||
event: &Event,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
_: &mut Self::PaintState,
|
||||
cx: &mut EventContext,
|
||||
) -> bool {
|
||||
self.child.dispatch_event(event, cx)
|
||||
}
|
||||
|
||||
fn debug(
|
||||
&self,
|
||||
_: RectF,
|
||||
_: &Self::LayoutState,
|
||||
_: &Self::PaintState,
|
||||
cx: &DebugContext,
|
||||
) -> json::Value {
|
||||
json!({
|
||||
"type": "Expanded",
|
||||
"full_width": self.full_width,
|
||||
"full_height": self.full_height,
|
||||
"child": self.child.debug(cx)
|
||||
})
|
||||
}
|
||||
}
|
|
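Note: the new Expanded element raises its child's minimum size to the maximum
constraint on one or both axes. A brief sketch, assuming Expanded and ElementBox are
reachable from gpui's exports (the helper names are hypothetical):

    use gpui::{elements::*, ElementBox};

    fn fill_width(child: ElementBox) -> ElementBox {
        // Stretch horizontally only; keep the child's natural height.
        Expanded::new(child).to_full_width().boxed()
    }

    fn fill_height(child: ElementBox) -> ElementBox {
        // Stretch vertically only; keep the child's natural width.
        Expanded::new(child).to_full_height().boxed()
    }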
@ -228,88 +228,15 @@ struct FlexParentData {
|
|||
expanded: bool,
|
||||
}
|
||||
|
||||
pub struct Expanded {
|
||||
metadata: FlexParentData,
|
||||
child: ElementBox,
|
||||
}
|
||||
|
||||
impl Expanded {
|
||||
pub fn new(flex: f32, child: ElementBox) -> Self {
|
||||
Expanded {
|
||||
metadata: FlexParentData {
|
||||
flex,
|
||||
expanded: true,
|
||||
},
|
||||
child,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Expanded {
|
||||
type LayoutState = ();
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
constraint: SizeConstraint,
|
||||
cx: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
let size = self.child.layout(constraint, cx);
|
||||
(size, ())
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: RectF,
|
||||
visible_bounds: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) -> Self::PaintState {
|
||||
self.child.paint(bounds.origin(), visible_bounds, cx)
|
||||
}
|
||||
|
||||
fn dispatch_event(
|
||||
&mut self,
|
||||
event: &Event,
|
||||
_: RectF,
|
||||
_: &mut Self::LayoutState,
|
||||
_: &mut Self::PaintState,
|
||||
cx: &mut EventContext,
|
||||
) -> bool {
|
||||
self.child.dispatch_event(event, cx)
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Option<&dyn Any> {
|
||||
Some(&self.metadata)
|
||||
}
|
||||
|
||||
fn debug(
|
||||
&self,
|
||||
_: RectF,
|
||||
_: &Self::LayoutState,
|
||||
_: &Self::PaintState,
|
||||
cx: &DebugContext,
|
||||
) -> Value {
|
||||
json!({
|
||||
"type": "Expanded",
|
||||
"flex": self.metadata.flex,
|
||||
"child": self.child.debug(cx)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Flexible {
    metadata: FlexParentData,
    child: ElementBox,
}

impl Flexible {
    pub fn new(flex: f32, child: ElementBox) -> Self {
    pub fn new(flex: f32, expanded: bool, child: ElementBox) -> Self {
        Flexible {
            metadata: FlexParentData {
                flex,
                expanded: false,
            },
            metadata: FlexParentData { flex, expanded },
            child,
        }
    }
@@ -14,6 +14,7 @@ use serde_json::json;
pub struct Text {
    text: String,
    style: TextStyle,
    soft_wrap: bool,
}

pub struct LayoutState {

@@ -23,13 +24,22 @@ pub struct LayoutState {

impl Text {
    pub fn new(text: String, style: TextStyle) -> Self {
        Self { text, style }
        Self {
            text,
            style,
            soft_wrap: true,
        }
    }

    pub fn with_default_color(mut self, color: Color) -> Self {
        self.style.color = color;
        self
    }

    pub fn with_soft_wrap(mut self, soft_wrap: bool) -> Self {
        self.soft_wrap = soft_wrap;
        self
    }
}

impl Element for Text {

@@ -54,9 +64,13 @@ impl Element for Text {
            self.style.font_size,
            &[(line.len(), self.style.to_run())],
        );
        let wrap_boundaries = wrapper
            .wrap_shaped_line(line, &shaped_line, constraint.max.x())
            .collect::<Vec<_>>();
        let wrap_boundaries = if self.soft_wrap {
            wrapper
                .wrap_shaped_line(line, &shaped_line, constraint.max.x())
                .collect::<Vec<_>>()
        } else {
            Vec::new()
        };

        max_line_width = max_line_width.max(shaped_line.width());
        line_count += wrap_boundaries.len() + 1;
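Note: Text now defaults to soft wrapping and exposes with_soft_wrap to opt out,
which the diagnostic header renderer earlier in this diff uses so long messages
stay on a single line. A small sketch (the message string and style value are
hypothetical):

    // No wrap boundaries are computed during layout, so the line keeps its shaped
    // width instead of wrapping at the width constraint.
    let header_text = Text::new("mismatched types".to_string(), text_style.clone())
        .with_soft_wrap(false)
        .boxed();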
@ -7,7 +7,7 @@ use rand::prelude::*;
|
|||
use smol::{channel, prelude::*, Executor, Timer};
|
||||
use std::{
|
||||
any::Any,
|
||||
fmt::{self, Debug},
|
||||
fmt::{self, Debug, Display},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
ops::RangeInclusive,
|
||||
|
@ -25,7 +25,7 @@ use waker_fn::waker_fn;
|
|||
|
||||
use crate::{
|
||||
platform::{self, Dispatcher},
|
||||
util,
|
||||
util, MutableAppContext,
|
||||
};
|
||||
|
||||
pub enum Foreground {
|
||||
|
@ -77,6 +77,7 @@ struct DeterministicState {
|
|||
block_on_ticks: RangeInclusive<usize>,
|
||||
now: Instant,
|
||||
pending_timers: Vec<(Instant, barrier::Sender)>,
|
||||
waiting_backtrace: Option<Backtrace>,
|
||||
}
|
||||
|
||||
pub struct Deterministic {
|
||||
|
@ -97,6 +98,7 @@ impl Deterministic {
|
|||
block_on_ticks: 0..=1000,
|
||||
now: Instant::now(),
|
||||
pending_timers: Default::default(),
|
||||
waiting_backtrace: None,
|
||||
})),
|
||||
parker: Default::default(),
|
||||
}
|
||||
|
@ -143,8 +145,8 @@ impl Deterministic {
|
|||
return result;
|
||||
}
|
||||
|
||||
if !woken.load(SeqCst) && self.state.lock().forbid_parking {
|
||||
panic!("deterministic executor parked after a call to forbid_parking");
|
||||
if !woken.load(SeqCst) {
|
||||
self.state.lock().will_park();
|
||||
}
|
||||
|
||||
woken.store(false, SeqCst);
|
||||
|
@ -206,6 +208,7 @@ impl Deterministic {
|
|||
}
|
||||
|
||||
let state = self.state.lock();
|
||||
|
||||
if state.scheduled_from_foreground.is_empty()
|
||||
&& state.scheduled_from_background.is_empty()
|
||||
&& state.spawned_from_foreground.is_empty()
|
||||
|
@ -244,11 +247,9 @@ impl Deterministic {
|
|||
if let Poll::Ready(result) = future.as_mut().poll(&mut cx) {
|
||||
return Some(result);
|
||||
}
|
||||
let state = self.state.lock();
|
||||
let mut state = self.state.lock();
|
||||
if state.scheduled_from_background.is_empty() {
|
||||
if state.forbid_parking {
|
||||
panic!("deterministic executor parked after a call to forbid_parking");
|
||||
}
|
||||
state.will_park();
|
||||
drop(state);
|
||||
self.parker.lock().park();
|
||||
}
|
||||
|
@ -261,6 +262,26 @@ impl Deterministic {
|
|||
}
|
||||
}
|
||||
|
||||
impl DeterministicState {
|
||||
fn will_park(&mut self) {
|
||||
if self.forbid_parking {
|
||||
let mut backtrace_message = String::new();
|
||||
if let Some(backtrace) = self.waiting_backtrace.as_mut() {
|
||||
backtrace.resolve();
|
||||
backtrace_message = format!(
|
||||
"\nbacktrace of waiting future:\n{:?}",
|
||||
CwdBacktrace::new(backtrace)
|
||||
);
|
||||
}
|
||||
|
||||
panic!(
|
||||
"deterministic executor parked after a call to forbid_parking{}",
|
||||
backtrace_message
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Trace {
|
||||
executed: Vec<Backtrace>,
|
||||
|
@ -306,32 +327,53 @@ impl Trace {
|
|||
}
|
||||
}
|
||||
|
||||
impl Debug for Trace {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
struct FirstCwdFrameInBacktrace<'a>(&'a Backtrace);
|
||||
struct CwdBacktrace<'a> {
|
||||
backtrace: &'a Backtrace,
|
||||
first_frame_only: bool,
|
||||
}
|
||||
|
||||
impl<'a> Debug for FirstCwdFrameInBacktrace<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
|
||||
fmt::Display::fmt(&path, fmt)
|
||||
};
|
||||
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
|
||||
for frame in self.0.frames() {
|
||||
let mut formatted_frame = fmt.frame();
|
||||
if frame
|
||||
.symbols()
|
||||
.iter()
|
||||
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
|
||||
{
|
||||
formatted_frame.backtrace_frame(frame)?;
|
||||
break;
|
||||
}
|
||||
impl<'a> CwdBacktrace<'a> {
|
||||
fn new(backtrace: &'a Backtrace) -> Self {
|
||||
Self {
|
||||
backtrace,
|
||||
first_frame_only: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn first_frame(backtrace: &'a Backtrace) -> Self {
|
||||
Self {
|
||||
backtrace,
|
||||
first_frame_only: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for CwdBacktrace<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
let mut print_path = |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| {
|
||||
fmt::Display::fmt(&path, fmt)
|
||||
};
|
||||
let mut fmt = BacktraceFmt::new(f, backtrace::PrintFmt::Full, &mut print_path);
|
||||
for frame in self.backtrace.frames() {
|
||||
let mut formatted_frame = fmt.frame();
|
||||
if frame
|
||||
.symbols()
|
||||
.iter()
|
||||
.any(|s| s.filename().map_or(false, |f| f.starts_with(&cwd)))
|
||||
{
|
||||
formatted_frame.backtrace_frame(frame)?;
|
||||
if self.first_frame_only {
|
||||
break;
|
||||
}
|
||||
fmt.finish()
|
||||
}
|
||||
}
|
||||
fmt.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Trace {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for ((backtrace, scheduled), spawned_from_foreground) in self
|
||||
.executed
|
||||
.iter()
|
||||
|
@ -340,7 +382,7 @@ impl Debug for Trace {
|
|||
{
|
||||
writeln!(f, "Scheduled")?;
|
||||
for backtrace in scheduled {
|
||||
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
}
|
||||
if scheduled.is_empty() {
|
||||
writeln!(f, "None")?;
|
||||
|
@ -349,14 +391,14 @@ impl Debug for Trace {
|
|||
|
||||
writeln!(f, "Spawned from foreground")?;
|
||||
for backtrace in spawned_from_foreground {
|
||||
writeln!(f, "- {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "- {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
}
|
||||
if spawned_from_foreground.is_empty() {
|
||||
writeln!(f, "None")?;
|
||||
}
|
||||
writeln!(f, "==========")?;
|
||||
|
||||
writeln!(f, "Run: {:?}", FirstCwdFrameInBacktrace(backtrace))?;
|
||||
writeln!(f, "Run: {:?}", CwdBacktrace::first_frame(backtrace))?;
|
||||
writeln!(f, "+++++++++++++++++++")?;
|
||||
}
|
||||
|
||||
|
@ -433,6 +475,31 @@ impl Foreground {
|
|||
*any_value.downcast().unwrap()
|
||||
}
|
||||
|
||||
pub fn parking_forbidden(&self) -> bool {
|
||||
match self {
|
||||
Self::Deterministic(executor) => executor.state.lock().forbid_parking,
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_waiting(&self) {
|
||||
match self {
|
||||
Self::Deterministic(executor) => {
|
||||
executor.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
|
||||
}
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finish_waiting(&self) {
|
||||
match self {
|
||||
Self::Deterministic(executor) => {
|
||||
executor.state.lock().waiting_backtrace.take();
|
||||
}
|
||||
_ => panic!("this method can only be called on a deterministic executor"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn forbid_parking(&self) {
|
||||
match self {
|
||||
Self::Deterministic(executor) => {
|
||||
|
@@ -615,6 +682,17 @@ impl<T> Task<T> {
    }
}

impl<T: 'static, E: 'static + Display> Task<Result<T, E>> {
    pub fn detach_and_log_err(self, cx: &mut MutableAppContext) {
        cx.spawn(|_| async move {
            if let Err(err) = self.await {
                log::error!("{}", err);
            }
        })
        .detach();
    }
}

impl<T: Send> Task<T> {
    fn send(any_task: AnyTask) -> Self {
        Self::Send {
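Note: detach_and_log_err lets a fallible Task run in the background while logging
any error instead of silently dropping it; the file finder's open_path call earlier
in this diff uses exactly this pattern. A sketch (workspace, project_path and cx are
hypothetical stand-ins for the caller's context):

    // Fire and forget: detach the task, but surface failures via log::error!.
    workspace
        .open_path(project_path.clone(), cx)
        .detach_and_log_err(cx);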
@@ -7,7 +7,13 @@ use std::{
    },
};

use crate::{executor, platform, FontCache, MutableAppContext, Platform, TestAppContext};
use futures::StreamExt;
use smol::channel;

use crate::{
    executor, platform, Entity, FontCache, Handle, MutableAppContext, Platform, Subscription,
    TestAppContext,
};

#[cfg(test)]
#[ctor::ctor]
@@ -87,3 +93,47 @@ pub fn run_test(
        }
    }
}

pub struct Observation<T> {
    rx: channel::Receiver<T>,
    _subscription: Subscription,
}

impl<T> futures::Stream for Observation<T> {
    type Item = T;

    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Option<Self::Item>> {
        self.rx.poll_next_unpin(cx)
    }
}

pub fn observe<T: Entity>(entity: &impl Handle<T>, cx: &mut TestAppContext) -> Observation<()> {
    let (tx, rx) = smol::channel::unbounded();
    let _subscription = cx.update(|cx| {
        cx.observe(entity, move |_, _| {
            let _ = smol::block_on(tx.send(()));
        })
    });

    Observation { rx, _subscription }
}

pub fn subscribe<T: Entity>(
    entity: &impl Handle<T>,
    cx: &mut TestAppContext,
) -> Observation<T::Event>
where
    T::Event: Clone,
{
    let (tx, rx) = smol::channel::unbounded();
    let _subscription = cx.update(|cx| {
        cx.subscribe(entity, move |_, event, _| {
            let _ = smol::block_on(tx.send(event.clone()));
        })
    });

    Observation { rx, _subscription }
}
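Note: the observe and subscribe helpers wrap a gpui notification or event stream in
a futures::Stream, so tests can await the next notification instead of wiring
channels by hand. A sketch of a test using observe (MyModel and bump are
hypothetical stand-ins for a real Entity and one of its methods):

    use futures::StreamExt;

    #[gpui::test]
    async fn test_observe_helper(mut cx: gpui::TestAppContext) {
        let model = cx.add_model(|_| MyModel::default());
        let mut notifications = observe(&model, &mut cx);

        model.update(&mut cx, |model, cx| model.bump(cx));

        // Resolves once the model calls cx.notify().
        notifications.next().await;
    }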
@ -65,9 +65,9 @@ pub struct Buffer {
|
|||
syntax_tree: Mutex<Option<SyntaxTree>>,
|
||||
parsing_in_background: bool,
|
||||
parse_count: usize,
|
||||
diagnostics: DiagnosticSet,
|
||||
remote_selections: TreeMap<ReplicaId, SelectionSet>,
|
||||
selections_update_count: usize,
|
||||
diagnostic_sets: Vec<DiagnosticSet>,
|
||||
diagnostics_update_count: usize,
|
||||
language_server: Option<LanguageServerState>,
|
||||
deferred_ops: OperationQueue<Operation>,
|
||||
|
@ -78,7 +78,7 @@ pub struct Buffer {
|
|||
pub struct BufferSnapshot {
|
||||
text: text::BufferSnapshot,
|
||||
tree: Option<Tree>,
|
||||
diagnostic_sets: Vec<DiagnosticSet>,
|
||||
diagnostics: DiagnosticSet,
|
||||
diagnostics_update_count: usize,
|
||||
remote_selections: TreeMap<ReplicaId, SelectionSet>,
|
||||
selections_update_count: usize,
|
||||
|
@ -129,7 +129,6 @@ struct LanguageServerSnapshot {
|
|||
pub enum Operation {
|
||||
Buffer(text::Operation),
|
||||
UpdateDiagnostics {
|
||||
provider_name: String,
|
||||
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
|
||||
lamport_timestamp: clock::Lamport,
|
||||
},
|
||||
|
@ -323,17 +322,11 @@ impl Buffer {
|
|||
);
|
||||
}
|
||||
let snapshot = this.snapshot();
|
||||
for diagnostic_set in message.diagnostic_sets {
|
||||
let (provider_name, entries) = proto::deserialize_diagnostic_set(diagnostic_set);
|
||||
this.apply_diagnostic_update(
|
||||
DiagnosticSet::from_sorted_entries(
|
||||
provider_name,
|
||||
entries.into_iter().cloned(),
|
||||
&snapshot,
|
||||
),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
let entries = proto::deserialize_diagnostics(message.diagnostics);
|
||||
this.apply_diagnostic_update(
|
||||
DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
|
||||
cx,
|
||||
);
|
||||
|
||||
let deferred_ops = message
|
||||
.deferred_operations
|
||||
|
@ -371,13 +364,7 @@ impl Buffer {
|
|||
lamport_timestamp: set.lamport_timestamp.value,
|
||||
})
|
||||
.collect(),
|
||||
diagnostic_sets: self
|
||||
.diagnostic_sets
|
||||
.iter()
|
||||
.map(|set| {
|
||||
proto::serialize_diagnostic_set(set.provider_name().to_string(), set.iter())
|
||||
})
|
||||
.collect(),
|
||||
diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
|
||||
deferred_operations: self
|
||||
.deferred_ops
|
||||
.iter()
|
||||
|
@ -423,7 +410,7 @@ impl Buffer {
|
|||
language: None,
|
||||
remote_selections: Default::default(),
|
||||
selections_update_count: 0,
|
||||
diagnostic_sets: Default::default(),
|
||||
diagnostics: Default::default(),
|
||||
diagnostics_update_count: 0,
|
||||
language_server: None,
|
||||
deferred_ops: OperationQueue::new(),
|
||||
|
@ -437,7 +424,7 @@ impl Buffer {
|
|||
text: self.text.snapshot(),
|
||||
tree: self.syntax_tree(),
|
||||
remote_selections: self.remote_selections.clone(),
|
||||
diagnostic_sets: self.diagnostic_sets.clone(),
|
||||
diagnostics: self.diagnostics.clone(),
|
||||
diagnostics_update_count: self.diagnostics_update_count,
|
||||
is_parsing: self.parsing_in_background,
|
||||
language: self.language.clone(),
|
||||
|
@ -793,7 +780,6 @@ impl Buffer {
|
|||
|
||||
pub fn update_diagnostics<T>(
|
||||
&mut self,
|
||||
provider_name: Arc<str>,
|
||||
version: Option<i32>,
|
||||
mut diagnostics: Vec<DiagnosticEntry<T>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
|
@ -883,10 +869,9 @@ impl Buffer {
|
|||
}
|
||||
drop(edits_since_save);
|
||||
|
||||
let set = DiagnosticSet::new(provider_name, sanitized_diagnostics, content);
|
||||
let set = DiagnosticSet::new(sanitized_diagnostics, content);
|
||||
self.apply_diagnostic_update(set.clone(), cx);
|
||||
Ok(Operation::UpdateDiagnostics {
|
||||
provider_name: set.provider_name().to_string(),
|
||||
diagnostics: set.iter().cloned().collect(),
|
||||
lamport_timestamp: self.text.lamport_clock.tick(),
|
||||
})
|
||||
|
@ -1395,17 +1380,12 @@ impl Buffer {
|
|||
unreachable!("buffer operations should never be applied at this layer")
|
||||
}
|
||||
Operation::UpdateDiagnostics {
|
||||
provider_name,
|
||||
diagnostics: diagnostic_set,
|
||||
..
|
||||
} => {
|
||||
let snapshot = self.snapshot();
|
||||
self.apply_diagnostic_update(
|
||||
DiagnosticSet::from_sorted_entries(
|
||||
provider_name,
|
||||
diagnostic_set.iter().cloned(),
|
||||
&snapshot,
|
||||
),
|
||||
DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
@ -1433,15 +1413,8 @@ impl Buffer {
|
|||
}
|
||||
}
|
||||
|
||||
fn apply_diagnostic_update(&mut self, set: DiagnosticSet, cx: &mut ModelContext<Self>) {
|
||||
match self
|
||||
.diagnostic_sets
|
||||
.binary_search_by_key(&set.provider_name(), |set| set.provider_name())
|
||||
{
|
||||
Ok(ix) => self.diagnostic_sets[ix] = set.clone(),
|
||||
Err(ix) => self.diagnostic_sets.insert(ix, set.clone()),
|
||||
}
|
||||
|
||||
fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
|
||||
self.diagnostics = diagnostics;
|
||||
self.diagnostics_update_count += 1;
|
||||
cx.notify();
|
||||
cx.emit(Event::DiagnosticsUpdated);
|
||||
|
@ -1712,7 +1685,7 @@ impl BufferSnapshot {
|
|||
let mut highlights = None;
|
||||
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
|
||||
if let Some(theme) = theme {
|
||||
for (_, entry) in self.diagnostics_in_range::<_, usize>(range.clone()) {
|
||||
for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
|
||||
diagnostic_endpoints.push(DiagnosticEndpoint {
|
||||
offset: entry.range.start,
|
||||
is_start: true,
|
||||
|
@ -1853,38 +1826,28 @@ impl BufferSnapshot {
|
|||
pub fn diagnostics_in_range<'a, T, O>(
|
||||
&'a self,
|
||||
search_range: Range<T>,
|
||||
) -> impl 'a + Iterator<Item = (&'a str, DiagnosticEntry<O>)>
|
||||
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
||||
where
|
||||
T: 'a + Clone + ToOffset,
|
||||
O: 'a + FromAnchor,
|
||||
{
|
||||
self.diagnostic_sets.iter().flat_map(move |set| {
|
||||
set.range(search_range.clone(), self, true)
|
||||
.map(|e| (set.provider_name(), e))
|
||||
})
|
||||
self.diagnostics.range(search_range.clone(), self, true)
|
||||
}
|
||||
|
||||
pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
|
||||
let mut groups = Vec::new();
|
||||
for set in &self.diagnostic_sets {
|
||||
set.groups(&mut groups, self);
|
||||
}
|
||||
self.diagnostics.groups(&mut groups, self);
|
||||
groups
|
||||
}
|
||||
|
||||
pub fn diagnostic_group<'a, O>(
|
||||
&'a self,
|
||||
provider_name: &str,
|
||||
group_id: usize,
|
||||
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
||||
where
|
||||
O: 'a + FromAnchor,
|
||||
{
|
||||
self.diagnostic_sets
|
||||
.iter()
|
||||
.find(|s| s.provider_name() == provider_name)
|
||||
.into_iter()
|
||||
.flat_map(move |s| s.group(group_id, self))
|
||||
self.diagnostics.group(group_id, self)
|
||||
}
|
||||
|
||||
pub fn diagnostics_update_count(&self) -> usize {
|
||||
|
@ -1906,8 +1869,8 @@ impl Clone for BufferSnapshot {
|
|||
text: self.text.clone(),
|
||||
tree: self.tree.clone(),
|
||||
remote_selections: self.remote_selections.clone(),
|
||||
diagnostics: self.diagnostics.clone(),
|
||||
selections_update_count: self.selections_update_count,
|
||||
diagnostic_sets: self.diagnostic_sets.clone(),
|
||||
diagnostics_update_count: self.diagnostics_update_count,
|
||||
is_parsing: self.is_parsing,
|
||||
language: self.language.clone(),
|
||||
|
|
|
@ -4,14 +4,12 @@ use std::{
|
|||
cmp::{Ordering, Reverse},
|
||||
iter,
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
};
|
||||
use sum_tree::{self, Bias, SumTree};
|
||||
use text::{Anchor, FromAnchor, Point, ToOffset};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DiagnosticSet {
|
||||
provider_name: Arc<str>,
|
||||
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
|
||||
}
|
||||
|
||||
|
@ -21,6 +19,7 @@ pub struct DiagnosticEntry<T> {
|
|||
pub diagnostic: Diagnostic,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DiagnosticGroup<T> {
|
||||
pub entries: Vec<DiagnosticEntry<T>>,
|
||||
pub primary_ix: usize,
|
||||
|
@ -36,32 +35,22 @@ pub struct Summary {
|
|||
}
|
||||
|
||||
impl DiagnosticSet {
|
||||
pub fn provider_name(&self) -> &str {
|
||||
&self.provider_name
|
||||
}
|
||||
|
||||
pub fn from_sorted_entries<I>(
|
||||
provider_name: impl Into<Arc<str>>,
|
||||
iter: I,
|
||||
buffer: &text::BufferSnapshot,
|
||||
) -> Self
|
||||
pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
|
||||
{
|
||||
Self {
|
||||
provider_name: provider_name.into(),
|
||||
diagnostics: SumTree::from_iter(iter, buffer),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new<I>(provider_name: Arc<str>, iter: I, buffer: &text::BufferSnapshot) -> Self
|
||||
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = DiagnosticEntry<Point>>,
|
||||
{
|
||||
let mut entries = iter.into_iter().collect::<Vec<_>>();
|
||||
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
|
||||
Self {
|
||||
provider_name,
|
||||
diagnostics: SumTree::from_iter(
|
||||
entries.into_iter().map(|entry| DiagnosticEntry {
|
||||
range: buffer.anchor_before(entry.range.start)
|
||||
|
@ -159,7 +148,6 @@ impl DiagnosticSet {
|
|||
impl Default for DiagnosticSet {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider_name: "".into(),
|
||||
diagnostics: Default::default(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,10 +6,9 @@ pub mod proto;
|
|||
mod tests;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
pub use buffer::Operation;
|
||||
pub use buffer::*;
|
||||
use collections::{HashMap, HashSet};
|
||||
use collections::HashSet;
|
||||
pub use diagnostic_set::DiagnosticEntry;
|
||||
use gpui::AppContext;
|
||||
use highlight_map::HighlightMap;
|
||||
|
@ -47,6 +46,7 @@ pub struct LanguageConfig {
|
|||
pub struct LanguageServerConfig {
|
||||
pub binary: String,
|
||||
pub disk_based_diagnostic_sources: HashSet<String>,
|
||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
#[serde(skip)]
|
||||
pub fake_server: Option<(Arc<lsp::LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
|
||||
|
@ -60,18 +60,9 @@ pub struct BracketPair {
|
|||
pub newline: bool,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait DiagnosticProvider: 'static + Send + Sync {
|
||||
async fn diagnose(
|
||||
&self,
|
||||
path: Arc<Path>,
|
||||
) -> Result<HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>>;
|
||||
}
|
||||
|
||||
pub struct Language {
|
||||
pub(crate) config: LanguageConfig,
|
||||
pub(crate) grammar: Option<Arc<Grammar>>,
|
||||
pub(crate) diagnostic_provider: Option<Arc<dyn DiagnosticProvider>>,
|
||||
}
|
||||
|
||||
pub struct Grammar {
|
||||
|
@ -136,7 +127,6 @@ impl Language {
|
|||
highlight_map: Default::default(),
|
||||
})
|
||||
}),
|
||||
diagnostic_provider: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,11 +160,6 @@ impl Language {
|
|||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_diagnostic_provider(mut self, source: impl DiagnosticProvider) -> Self {
|
||||
self.diagnostic_provider = Some(Arc::new(source));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
self.config.name.as_str()
|
||||
}
|
||||
|
@ -208,10 +193,6 @@ impl Language {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn diagnostic_provider(&self) -> Option<&Arc<dyn DiagnosticProvider>> {
|
||||
self.diagnostic_provider.as_ref()
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet<String>> {
|
||||
self.config
|
||||
.language_server
|
||||
|
@ -219,6 +200,13 @@ impl Language {
|
|||
.map(|config| &config.disk_based_diagnostic_sources)
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostics_progress_token(&self) -> Option<&String> {
|
||||
self.config
|
||||
.language_server
|
||||
.as_ref()
|
||||
.and_then(|config| config.disk_based_diagnostics_progress_token.as_ref())
|
||||
}
|
||||
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
&self.config.brackets
|
||||
}
|
||||
|
@ -249,6 +237,7 @@ impl LanguageServerConfig {
|
|||
(
|
||||
Self {
|
||||
fake_server: Some((server, started)),
|
||||
disk_based_diagnostics_progress_token: Some("fakeServer/check".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
fake,
|
||||
|
|
|
@@ -51,16 +51,12 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
selections: serialize_selections(selections),
}),
Operation::UpdateDiagnostics {
provider_name,
diagnostics,
lamport_timestamp,
} => proto::operation::Variant::UpdateDiagnosticSet(proto::UpdateDiagnosticSet {
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
diagnostic_set: Some(serialize_diagnostic_set(
provider_name.clone(),
diagnostics.iter(),
)),
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
}),
}
@ -134,33 +130,29 @@ pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto:
|
|||
.collect()
|
||||
}
|
||||
|
||||
pub fn serialize_diagnostic_set<'a>(
|
||||
provider_name: String,
|
||||
pub fn serialize_diagnostics<'a>(
|
||||
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
|
||||
) -> proto::DiagnosticSet {
|
||||
proto::DiagnosticSet {
|
||||
provider_name,
|
||||
diagnostics: diagnostics
|
||||
.into_iter()
|
||||
.map(|entry| proto::Diagnostic {
|
||||
start: Some(serialize_anchor(&entry.range.start)),
|
||||
end: Some(serialize_anchor(&entry.range.end)),
|
||||
message: entry.diagnostic.message.clone(),
|
||||
severity: match entry.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
||||
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
||||
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
||||
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
||||
_ => proto::diagnostic::Severity::None,
|
||||
} as i32,
|
||||
group_id: entry.diagnostic.group_id as u64,
|
||||
is_primary: entry.diagnostic.is_primary,
|
||||
is_valid: entry.diagnostic.is_valid,
|
||||
code: entry.diagnostic.code.clone(),
|
||||
is_disk_based: entry.diagnostic.is_disk_based,
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
) -> Vec<proto::Diagnostic> {
|
||||
diagnostics
|
||||
.into_iter()
|
||||
.map(|entry| proto::Diagnostic {
|
||||
start: Some(serialize_anchor(&entry.range.start)),
|
||||
end: Some(serialize_anchor(&entry.range.end)),
|
||||
message: entry.diagnostic.message.clone(),
|
||||
severity: match entry.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
||||
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
||||
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
||||
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
||||
_ => proto::diagnostic::Severity::None,
|
||||
} as i32,
|
||||
group_id: entry.diagnostic.group_id as u64,
|
||||
is_primary: entry.diagnostic.is_primary,
|
||||
is_valid: entry.diagnostic.is_valid,
|
||||
code: entry.diagnostic.code.clone(),
|
||||
is_disk_based: entry.diagnostic.is_disk_based,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
|
||||
|
@ -239,21 +231,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
|
|||
selections: Arc::from(selections),
|
||||
}
|
||||
}
|
||||
proto::operation::Variant::UpdateDiagnosticSet(message) => {
|
||||
let (provider_name, diagnostics) = deserialize_diagnostic_set(
|
||||
message
|
||||
.diagnostic_set
|
||||
.ok_or_else(|| anyhow!("missing diagnostic set"))?,
|
||||
);
|
||||
Operation::UpdateDiagnostics {
|
||||
provider_name,
|
||||
diagnostics,
|
||||
lamport_timestamp: clock::Lamport {
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
value: message.lamport_timestamp,
|
||||
},
|
||||
}
|
||||
}
|
||||
proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
|
||||
diagnostics: deserialize_diagnostics(message.diagnostics),
|
||||
lamport_timestamp: clock::Lamport {
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
value: message.lamport_timestamp,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
|
@ -340,40 +324,32 @@ pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selecti
|
|||
)
|
||||
}
|
||||
|
||||
pub fn deserialize_diagnostic_set(
|
||||
message: proto::DiagnosticSet,
|
||||
) -> (String, Arc<[DiagnosticEntry<Anchor>]>) {
|
||||
(
|
||||
message.provider_name,
|
||||
message
|
||||
.diagnostics
|
||||
.into_iter()
|
||||
.filter_map(|diagnostic| {
|
||||
Some(DiagnosticEntry {
|
||||
range: deserialize_anchor(diagnostic.start?)?
|
||||
..deserialize_anchor(diagnostic.end?)?,
|
||||
diagnostic: Diagnostic {
|
||||
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)?
|
||||
{
|
||||
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
||||
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
||||
proto::diagnostic::Severity::Information => {
|
||||
DiagnosticSeverity::INFORMATION
|
||||
}
|
||||
proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
|
||||
proto::diagnostic::Severity::None => return None,
|
||||
},
|
||||
message: diagnostic.message,
|
||||
group_id: diagnostic.group_id as usize,
|
||||
code: diagnostic.code,
|
||||
is_valid: diagnostic.is_valid,
|
||||
is_primary: diagnostic.is_primary,
|
||||
is_disk_based: diagnostic.is_disk_based,
|
||||
pub fn deserialize_diagnostics(
|
||||
diagnostics: Vec<proto::Diagnostic>,
|
||||
) -> Arc<[DiagnosticEntry<Anchor>]> {
|
||||
diagnostics
|
||||
.into_iter()
|
||||
.filter_map(|diagnostic| {
|
||||
Some(DiagnosticEntry {
|
||||
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
|
||||
diagnostic: Diagnostic {
|
||||
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
|
||||
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
||||
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
||||
proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
|
||||
proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
|
||||
proto::diagnostic::Severity::None => return None,
|
||||
},
|
||||
})
|
||||
message: diagnostic.message,
|
||||
group_id: diagnostic.group_id as usize,
|
||||
code: diagnostic.code,
|
||||
is_valid: diagnostic.is_valid,
|
||||
is_primary: diagnostic.is_primary,
|
||||
is_disk_based: diagnostic.is_disk_based,
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
|
||||
|
|
|
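The proto.rs changes in this file drop the per-provider DiagnosticSet wrapper: diagnostics now cross the wire as a flat list of Diagnostic messages, with group_id, is_primary, is_valid and is_disk_based carried as plain fields on each entry. A minimal round-trip sketch of the two functions above, assuming they are reachable as language::proto::{serialize_diagnostics, deserialize_diagnostics}:

    // Sketch only: module paths are assumptions; only the signatures shown in this diff are used.
    use language::proto::{deserialize_diagnostics, serialize_diagnostics};
    use language::{Anchor, DiagnosticEntry};
    use std::sync::Arc;

    fn roundtrip(entries: &[DiagnosticEntry<Anchor>]) -> Arc<[DiagnosticEntry<Anchor>]> {
        // Flatten to the wire representation.
        let wire = serialize_diagnostics(entries.iter());
        // Rebuild on the receiving side; entries whose severity does not map to a known
        // proto value are dropped by the filter_map in deserialize_diagnostics.
        deserialize_diagnostics(wire)
    }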
@ -460,7 +460,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
// Receive diagnostics for an earlier version of the buffer.
|
||||
buffer
|
||||
.update_diagnostics(
|
||||
"lsp".into(),
|
||||
Some(open_notification.text_document.version),
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
|
@ -508,34 +507,28 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
.diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(3, 9)..Point::new(3, 11),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'BB'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(3, 9)..Point::new(3, 11),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'BB'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(4, 9)..Point::new(4, 12),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'CCC'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 2,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(4, 9)..Point::new(4, 12),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'CCC'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 2,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
|
@ -562,7 +555,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
// Ensure overlapping diagnostics are highlighted correctly.
|
||||
buffer
|
||||
.update_diagnostics(
|
||||
"lsp".into(),
|
||||
Some(open_notification.text_document.version),
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
|
@ -596,33 +588,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
.diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 9)..Point::new(2, 12),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::WARNING,
|
||||
message: "unreachable statement".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 9)..Point::new(2, 12),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::WARNING,
|
||||
message: "unreachable statement".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 9)..Point::new(2, 10),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
)
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 9)..Point::new(2, 10),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
|
@ -659,7 +645,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer
|
||||
.update_diagnostics(
|
||||
"lsp".into(),
|
||||
Some(change_notification_2.text_document.version),
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
|
@ -694,34 +679,28 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
|||
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 21)..Point::new(2, 22),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 21)..Point::new(2, 22),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
"lsp",
|
||||
DiagnosticEntry {
|
||||
range: Point::new(3, 9)..Point::new(3, 11),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'BB'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
)
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(3, 9)..Point::new(3, 11),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'BB'".to_string(),
|
||||
is_disk_based: true,
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
]
|
||||
);
|
||||
});
|
||||
|
@ -740,7 +719,6 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
|
|||
buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
|
||||
buffer
|
||||
.update_diagnostics(
|
||||
"lsp".into(),
|
||||
None,
|
||||
vec![
|
||||
DiagnosticEntry {
|
||||
|
|
|
@ -28,7 +28,7 @@ pub use lsp_types::*;
|
|||
const JSON_RPC_VERSION: &'static str = "2.0";
|
||||
const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
|
||||
|
||||
type NotificationHandler = Box<dyn Send + Sync + Fn(&str)>;
|
||||
type NotificationHandler = Box<dyn Send + Sync + FnMut(&str)>;
|
||||
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
|
||||
|
||||
pub struct LanguageServer {
|
||||
|
@ -139,7 +139,7 @@ impl LanguageServer {
|
|||
if let Ok(AnyNotification { method, params }) =
|
||||
serde_json::from_slice(&buffer)
|
||||
{
|
||||
if let Some(handler) = notification_handlers.read().get(method) {
|
||||
if let Some(handler) = notification_handlers.write().get_mut(method) {
|
||||
handler(params.get());
|
||||
} else {
|
||||
log::info!(
|
||||
|
@ -226,15 +226,15 @@ impl LanguageServer {
|
|||
process_id: Default::default(),
|
||||
root_path: Default::default(),
|
||||
root_uri: Some(root_uri),
|
||||
initialization_options: Some(json!({
|
||||
"checkOnSave": {
|
||||
"enable": false
|
||||
},
|
||||
})),
|
||||
initialization_options: Default::default(),
|
||||
capabilities: lsp_types::ClientCapabilities {
|
||||
experimental: Some(json!({
|
||||
"serverStatusNotification": true,
|
||||
})),
|
||||
window: Some(lsp_types::WindowClientCapabilities {
|
||||
work_done_progress: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
trace: Default::default(),
|
||||
|
@ -283,10 +283,10 @@ impl LanguageServer {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn on_notification<T, F>(&self, f: F) -> Subscription
|
||||
pub fn on_notification<T, F>(&self, mut f: F) -> Subscription
|
||||
where
|
||||
T: lsp_types::notification::Notification,
|
||||
F: 'static + Send + Sync + Fn(T::Params),
|
||||
F: 'static + Send + Sync + FnMut(T::Params),
|
||||
{
|
||||
let prev_handler = self.notification_handlers.write().insert(
|
||||
T::METHOD,
|
||||
|
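The Fn to FnMut loosening above lets a notification handler mutate state it owns. A sketch, assuming an Arc<lsp::LanguageServer> named server is in scope and reusing the PublishDiagnostics registration that appears later in this PR:

    // Sketch: mutating a captured variable is exactly what FnMut (and the `mut f` parameter) allow.
    let mut seen = 0usize;
    let _subscription = server.on_notification::<lsp::notification::PublishDiagnostics, _>(
        move |params| {
            seen += 1;
            log::info!("diagnostics update #{} for {}", seen, params.uri);
        },
    );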
@ -514,6 +514,22 @@ impl FakeLanguageServer {
|
|||
notification.params
|
||||
}
|
||||
|
||||
pub async fn start_progress(&mut self, token: impl Into<String>) {
|
||||
self.notify::<notification::Progress>(ProgressParams {
|
||||
token: NumberOrString::String(token.into()),
|
||||
value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(Default::default())),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
pub async fn end_progress(&mut self, token: impl Into<String>) {
|
||||
self.notify::<notification::Progress>(ProgressParams {
|
||||
token: NumberOrString::String(token.into()),
|
||||
value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(Default::default())),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn send(&mut self, message: Vec<u8>) {
|
||||
self.stdout
|
||||
.write_all(CONTENT_LEN_HEADER.as_bytes())
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
pub mod fs;
|
||||
mod ignore;
|
||||
mod worktree;
|
||||
pub mod worktree;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
|
||||
|
@ -18,7 +18,7 @@ use std::{
|
|||
path::Path,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt as _};
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
pub use fs::*;
|
||||
pub use worktree::*;
|
||||
|
@ -33,6 +33,7 @@ pub struct Project {
|
|||
client_state: ProjectClientState,
|
||||
collaborators: HashMap<PeerId, Collaborator>,
|
||||
subscriptions: Vec<client::Subscription>,
|
||||
pending_disk_based_diagnostics: isize,
|
||||
}
|
||||
|
||||
enum ProjectClientState {
|
||||
|
@@ -60,6 +61,9 @@ pub struct Collaborator {
pub enum Event {
ActiveEntryChanged(Option<ProjectEntry>),
WorktreeRemoved(WorktreeId),
DiskBasedDiagnosticsStarted,
DiskBasedDiagnosticsUpdated { worktree_id: WorktreeId },
DiskBasedDiagnosticsFinished,
DiagnosticsUpdated(ProjectPath),
}

@ -69,7 +73,7 @@ pub struct ProjectPath {
|
|||
pub path: Arc<Path>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug, Default, PartialEq)]
|
||||
pub struct DiagnosticSummary {
|
||||
pub error_count: usize,
|
||||
pub warning_count: usize,
|
||||
|
@ -100,6 +104,16 @@ impl DiagnosticSummary {
|
|||
|
||||
this
|
||||
}
|
||||
|
||||
pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
|
||||
proto::DiagnosticSummary {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
error_count: self.error_count as u32,
|
||||
warning_count: self.warning_count as u32,
|
||||
info_count: self.info_count as u32,
|
||||
hint_count: self.hint_count as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
|
@ -176,6 +190,7 @@ impl Project {
|
|||
client,
|
||||
user_store,
|
||||
fs,
|
||||
pending_disk_based_diagnostics: 0,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -228,29 +243,51 @@ impl Project {
|
|||
collaborators.insert(collaborator.peer_id, collaborator);
|
||||
}
|
||||
|
||||
Ok(cx.add_model(|cx| Self {
|
||||
worktrees,
|
||||
active_entry: None,
|
||||
collaborators,
|
||||
languages,
|
||||
user_store,
|
||||
fs,
|
||||
subscriptions: vec![
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
|
||||
],
|
||||
client,
|
||||
client_state: ProjectClientState::Remote {
|
||||
sharing_has_stopped: false,
|
||||
remote_id,
|
||||
replica_id,
|
||||
},
|
||||
Ok(cx.add_model(|cx| {
|
||||
let mut this = Self {
|
||||
worktrees: Vec::new(),
|
||||
active_entry: None,
|
||||
collaborators,
|
||||
languages,
|
||||
user_store,
|
||||
fs,
|
||||
subscriptions: vec![
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
|
||||
client.subscribe_to_entity(
|
||||
remote_id,
|
||||
cx,
|
||||
Self::handle_update_diagnostic_summary,
|
||||
),
|
||||
client.subscribe_to_entity(
|
||||
remote_id,
|
||||
cx,
|
||||
Self::handle_disk_based_diagnostics_updating,
|
||||
),
|
||||
client.subscribe_to_entity(
|
||||
remote_id,
|
||||
cx,
|
||||
Self::handle_disk_based_diagnostics_updated,
|
||||
),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
|
||||
client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
|
||||
],
|
||||
client,
|
||||
client_state: ProjectClientState::Remote {
|
||||
sharing_has_stopped: false,
|
||||
remote_id,
|
||||
replica_id,
|
||||
},
|
||||
pending_disk_based_diagnostics: 0,
|
||||
};
|
||||
for worktree in worktrees {
|
||||
this.add_worktree(worktree, cx);
|
||||
}
|
||||
this
|
||||
}))
|
||||
}
|
||||
|
||||
|
@@ -479,13 +516,30 @@ impl Project {

fn add_worktree(&mut self, worktree: ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
cx.subscribe(&worktree, |_, worktree, event, cx| match event {
cx.subscribe(&worktree, move |this, worktree, event, cx| match event {
worktree::Event::DiagnosticsUpdated(path) => {
cx.emit(Event::DiagnosticsUpdated(ProjectPath {
worktree_id: worktree.read(cx).id(),
path: path.clone(),
}));
}
worktree::Event::DiskBasedDiagnosticsUpdating => {
if this.pending_disk_based_diagnostics == 0 {
cx.emit(Event::DiskBasedDiagnosticsStarted);
}
this.pending_disk_based_diagnostics += 1;
}
worktree::Event::DiskBasedDiagnosticsUpdated => {
this.pending_disk_based_diagnostics -= 1;
cx.emit(Event::DiskBasedDiagnosticsUpdated {
worktree_id: worktree.read(cx).id(),
});
if this.pending_disk_based_diagnostics == 0 {
if this.pending_disk_based_diagnostics == 0 {
cx.emit(Event::DiskBasedDiagnosticsFinished);
}
}
}
})
.detach();
self.worktrees.push(worktree);
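The subscription above turns per-worktree DiskBasedDiagnosticsUpdating/Updated events into the project-wide Started/Finished pair by reference counting. A standalone sketch of that counting rule, with a plain struct standing in for Project (which keeps the count in pending_disk_based_diagnostics):

    // Sketch only: the real code lives on Project and emits gpui events instead of returning bools.
    struct DiskDiagnosticsCounter {
        pending: isize,
    }

    impl DiskDiagnosticsCounter {
        fn worktree_started(&mut self) -> bool {
            let first = self.pending == 0; // only the first running worktree fires "started"
            self.pending += 1;
            first
        }

        fn worktree_finished(&mut self) -> bool {
            self.pending -= 1;
            self.pending == 0 // only the last worktree to finish fires "finished"
        }
    }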
@ -507,34 +561,19 @@ impl Project {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn diagnose(&self, cx: &mut ModelContext<Self>) {
|
||||
for worktree_handle in &self.worktrees {
|
||||
if let Some(worktree) = worktree_handle.read(cx).as_local() {
|
||||
for language in worktree.languages() {
|
||||
if let Some(provider) = language.diagnostic_provider().cloned() {
|
||||
let worktree_path = worktree.abs_path().clone();
|
||||
let worktree_handle = worktree_handle.downgrade();
|
||||
cx.spawn_weak(|_, mut cx| async move {
|
||||
let diagnostics = provider.diagnose(worktree_path).await.log_err()?;
|
||||
let worktree_handle = worktree_handle.upgrade(&cx)?;
|
||||
worktree_handle.update(&mut cx, |worktree, cx| {
|
||||
for (path, diagnostics) in diagnostics {
|
||||
worktree
|
||||
.update_diagnostics_from_provider(
|
||||
path.into(),
|
||||
diagnostics,
|
||||
cx,
|
||||
)
|
||||
.log_err()?;
|
||||
}
|
||||
Some(())
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn is_running_disk_based_diagnostics(&self) -> bool {
|
||||
self.pending_disk_based_diagnostics > 0
|
||||
}
|
||||
|
||||
pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
|
||||
let mut summary = DiagnosticSummary::default();
|
||||
for (_, path_summary) in self.diagnostic_summaries(cx) {
|
||||
summary.error_count += path_summary.error_count;
|
||||
summary.warning_count += path_summary.warning_count;
|
||||
summary.info_count += path_summary.info_count;
|
||||
summary.hint_count += path_summary.hint_count;
|
||||
}
|
||||
summary
|
||||
}
|
||||
|
||||
pub fn diagnostic_summaries<'a>(
|
||||
|
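diagnostic_summary above folds every path's DiagnosticSummary into one project-wide total. A standalone sketch of that fold, with a plain struct standing in for project::DiagnosticSummary (field names follow the diff):

    // Sketch: the project-wide summary is the field-wise sum of the per-path summaries.
    #[derive(Default)]
    struct Summary {
        error_count: usize,
        warning_count: usize,
        info_count: usize,
        hint_count: usize,
    }

    fn total<'a>(paths: impl Iterator<Item = &'a Summary>) -> Summary {
        paths.fold(Summary::default(), |mut acc, s| {
            acc.error_count += s.error_count;
            acc.warning_count += s.warning_count;
            acc.info_count += s.info_count;
            acc.hint_count += s.hint_count;
            acc
        })
    }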
@ -685,6 +724,60 @@ impl Project {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_update_diagnostic_summary(
|
||||
&mut self,
|
||||
envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
|
||||
_: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
|
||||
if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
worktree
|
||||
.as_remote_mut()
|
||||
.unwrap()
|
||||
.update_diagnostic_summary(envelope, cx);
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_disk_based_diagnostics_updating(
|
||||
&mut self,
|
||||
envelope: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
|
||||
_: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
|
||||
if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
worktree
|
||||
.as_remote()
|
||||
.unwrap()
|
||||
.disk_based_diagnostics_updating(cx);
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_disk_based_diagnostics_updated(
|
||||
&mut self,
|
||||
envelope: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
|
||||
_: Arc<Client>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
|
||||
if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
worktree
|
||||
.as_remote()
|
||||
.unwrap()
|
||||
.disk_based_diagnostics_updated(cx);
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn handle_update_buffer(
|
||||
&mut self,
|
||||
envelope: TypedEnvelope<proto::UpdateBuffer>,
|
||||
|
|
|
@ -7,8 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
|||
use anyhow::{anyhow, Context, Result};
|
||||
use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
|
||||
use clock::ReplicaId;
|
||||
use collections::{hash_map, HashMap};
|
||||
use collections::{BTreeMap, HashSet};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{Stream, StreamExt};
|
||||
use fuzzy::CharBag;
|
||||
use gpui::{
|
||||
|
@ -35,7 +34,6 @@ use std::{
|
|||
ffi::{OsStr, OsString},
|
||||
fmt,
|
||||
future::Future,
|
||||
mem,
|
||||
ops::{Deref, Range},
|
||||
path::{Path, PathBuf},
|
||||
sync::{
|
||||
|
@ -44,14 +42,12 @@ use std::{
|
|||
},
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
use sum_tree::Bias;
|
||||
use sum_tree::{Bias, TreeMap};
|
||||
use sum_tree::{Edit, SeekTarget, SumTree};
|
||||
use util::{post_inc, ResultExt, TryFutureExt};
|
||||
|
||||
lazy_static! {
|
||||
static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
|
||||
static ref DIAGNOSTIC_PROVIDER_NAME: Arc<str> = Arc::from("diagnostic_source");
|
||||
static ref LSP_PROVIDER_NAME: Arc<str> = Arc::from("lsp");
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
|
@ -69,8 +65,10 @@ pub enum Worktree {
|
|||
Remote(RemoteWorktree),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum Event {
|
||||
DiskBasedDiagnosticsUpdating,
|
||||
DiskBasedDiagnosticsUpdated,
|
||||
DiagnosticsUpdated(Arc<Path>),
|
||||
}
|
||||
|
||||
|
@ -143,7 +141,7 @@ impl Worktree {
|
|||
.map(|c| c.to_ascii_lowercase())
|
||||
.collect();
|
||||
let root_name = worktree.root_name.clone();
|
||||
let (entries_by_path, entries_by_id) = cx
|
||||
let (entries_by_path, entries_by_id, diagnostic_summaries) = cx
|
||||
.background()
|
||||
.spawn(async move {
|
||||
let mut entries_by_path_edits = Vec::new();
|
||||
|
@ -167,7 +165,22 @@ impl Worktree {
|
|||
let mut entries_by_id = SumTree::new();
|
||||
entries_by_path.edit(entries_by_path_edits, &());
|
||||
entries_by_id.edit(entries_by_id_edits, &());
|
||||
(entries_by_path, entries_by_id)
|
||||
|
||||
let diagnostic_summaries = TreeMap::from_ordered_entries(
|
||||
worktree.diagnostic_summaries.into_iter().map(|summary| {
|
||||
(
|
||||
PathKey(PathBuf::from(summary.path).into()),
|
||||
DiagnosticSummary {
|
||||
error_count: summary.error_count as usize,
|
||||
warning_count: summary.warning_count as usize,
|
||||
info_count: summary.info_count as usize,
|
||||
hint_count: summary.hint_count as usize,
|
||||
},
|
||||
)
|
||||
}),
|
||||
);
|
||||
|
||||
(entries_by_path, entries_by_id, diagnostic_summaries)
|
||||
})
|
||||
.await;
|
||||
|
||||
|
@ -224,10 +237,10 @@ impl Worktree {
|
|||
client: client.clone(),
|
||||
loading_buffers: Default::default(),
|
||||
open_buffers: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
queued_operations: Default::default(),
|
||||
languages,
|
||||
user_store,
|
||||
diagnostic_summaries,
|
||||
})
|
||||
})
|
||||
});
|
||||
|
@ -352,7 +365,7 @@ impl Worktree {
|
|||
Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
|
||||
}
|
||||
.iter()
|
||||
.map(|(path, summary)| (path.clone(), summary.clone()))
|
||||
.map(|(path, summary)| (path.0.clone(), summary.clone()))
|
||||
}
|
||||
|
||||
pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers {
|
||||
|
@ -676,9 +689,9 @@ impl Worktree {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn update_diagnostics_from_lsp(
|
||||
pub fn update_diagnostics(
|
||||
&mut self,
|
||||
mut params: lsp::PublishDiagnosticsParams,
|
||||
params: lsp::PublishDiagnosticsParams,
|
||||
disk_based_sources: &HashSet<String>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Result<()> {
|
||||
|
@ -693,59 +706,104 @@ impl Worktree {
|
|||
.context("path is not within worktree")?,
|
||||
);
|
||||
|
||||
let mut group_ids_by_diagnostic_range = HashMap::default();
|
||||
let mut diagnostics_by_group_id = HashMap::default();
|
||||
let mut next_group_id = 0;
|
||||
for diagnostic in &mut params.diagnostics {
|
||||
let mut diagnostics = Vec::default();
|
||||
let mut primary_diagnostic_group_ids = HashMap::default();
|
||||
let mut sources_by_group_id = HashMap::default();
|
||||
let mut supporting_diagnostic_severities = HashMap::default();
|
||||
for diagnostic in ¶ms.diagnostics {
|
||||
let source = diagnostic.source.as_ref();
|
||||
let code = diagnostic.code.as_ref();
|
||||
let group_id = diagnostic_ranges(&diagnostic, &abs_path)
|
||||
.find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
|
||||
.copied()
|
||||
.unwrap_or_else(|| {
|
||||
let group_id = post_inc(&mut next_group_id);
|
||||
for range in diagnostic_ranges(&diagnostic, &abs_path) {
|
||||
group_ids_by_diagnostic_range.insert((source, code, range), group_id);
|
||||
}
|
||||
group_id
|
||||
let code = diagnostic.code.as_ref().map(|code| match code {
|
||||
lsp::NumberOrString::Number(code) => code.to_string(),
|
||||
lsp::NumberOrString::String(code) => code.clone(),
|
||||
});
|
||||
let range = range_from_lsp(diagnostic.range);
|
||||
let is_supporting = diagnostic
|
||||
.related_information
|
||||
.as_ref()
|
||||
.map_or(false, |infos| {
|
||||
infos.iter().any(|info| {
|
||||
primary_diagnostic_group_ids.contains_key(&(
|
||||
source,
|
||||
code.clone(),
|
||||
range_from_lsp(info.location.range),
|
||||
))
|
||||
})
|
||||
});
|
||||
|
||||
diagnostics_by_group_id
|
||||
.entry(group_id)
|
||||
.or_insert(Vec::new())
|
||||
.push(DiagnosticEntry {
|
||||
range: diagnostic.range.start.to_point_utf16()
|
||||
..diagnostic.range.end.to_point_utf16(),
|
||||
if is_supporting {
|
||||
if let Some(severity) = diagnostic.severity {
|
||||
supporting_diagnostic_severities
|
||||
.insert((source, code.clone(), range), severity);
|
||||
}
|
||||
} else {
|
||||
let group_id = post_inc(&mut next_group_id);
|
||||
let is_disk_based =
|
||||
source.map_or(false, |source| disk_based_sources.contains(source));
|
||||
|
||||
sources_by_group_id.insert(group_id, source);
|
||||
primary_diagnostic_group_ids
|
||||
.insert((source, code.clone(), range.clone()), group_id);
|
||||
|
||||
diagnostics.push(DiagnosticEntry {
|
||||
range,
|
||||
diagnostic: Diagnostic {
|
||||
code: diagnostic.code.clone().map(|code| match code {
|
||||
lsp::NumberOrString::Number(code) => code.to_string(),
|
||||
lsp::NumberOrString::String(code) => code,
|
||||
}),
|
||||
code: code.clone(),
|
||||
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
|
||||
message: mem::take(&mut diagnostic.message),
|
||||
message: diagnostic.message.clone(),
|
||||
group_id,
|
||||
is_primary: false,
|
||||
is_primary: true,
|
||||
is_valid: true,
|
||||
is_disk_based: diagnostic
|
||||
.source
|
||||
.as_ref()
|
||||
.map_or(false, |source| disk_based_sources.contains(source)),
|
||||
is_disk_based,
|
||||
},
|
||||
});
|
||||
if let Some(infos) = &diagnostic.related_information {
|
||||
for info in infos {
|
||||
if info.location.uri == params.uri {
|
||||
let range = range_from_lsp(info.location.range);
|
||||
diagnostics.push(DiagnosticEntry {
|
||||
range,
|
||||
diagnostic: Diagnostic {
|
||||
code: code.clone(),
|
||||
severity: DiagnosticSeverity::INFORMATION,
|
||||
message: info.message.clone(),
|
||||
group_id,
|
||||
is_primary: false,
|
||||
is_valid: true,
|
||||
is_disk_based,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let diagnostics = diagnostics_by_group_id
|
||||
.into_values()
|
||||
.flat_map(|mut diagnostics| {
|
||||
let primary = diagnostics
|
||||
.iter_mut()
|
||||
.min_by_key(|entry| entry.diagnostic.severity)
|
||||
.unwrap();
|
||||
primary.diagnostic.is_primary = true;
|
||||
diagnostics
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
for entry in &mut diagnostics {
|
||||
let diagnostic = &mut entry.diagnostic;
|
||||
if !diagnostic.is_primary {
|
||||
let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
|
||||
if let Some(&severity) = supporting_diagnostic_severities.get(&(
|
||||
source,
|
||||
diagnostic.code.clone(),
|
||||
entry.range.clone(),
|
||||
)) {
|
||||
diagnostic.severity = severity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_diagnostic_entries(
|
||||
&mut self,
|
||||
worktree_path: Arc<Path>,
|
||||
version: Option<i32>,
|
||||
diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<()> {
|
||||
let this = self.as_local_mut().unwrap();
|
||||
for buffer in this.open_buffers.values() {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
|
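In the rewritten update_diagnostics above, each primary LSP diagnostic gets a fresh group_id, its related_information entries are emitted as non-primary members of the same group, and a final pass copies a server-reported severity onto a supporting entry when the server also published it separately. A small sketch of what that invariant gives consumers, assuming the language crate's Point and DiagnosticEntry and a buffer snapshot type exposing the diagnostic_group method used in the tests later in this diff:

    // Sketch only: type and module names are assumptions; "one primary entry per group" is
    // the invariant established by the grouping pass above.
    use language::{DiagnosticEntry, Point};

    fn primary_of_group(
        snapshot: &language::BufferSnapshot,
        group_id: usize,
    ) -> Option<DiagnosticEntry<Point>> {
        snapshot
            .diagnostic_group::<Point>(group_id)
            .find(|entry| entry.diagnostic.is_primary)
    }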
@ -757,12 +815,7 @@ impl Worktree {
|
|||
let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
|
||||
(
|
||||
buffer.remote_id(),
|
||||
buffer.update_diagnostics(
|
||||
LSP_PROVIDER_NAME.clone(),
|
||||
params.version,
|
||||
diagnostics.clone(),
|
||||
cx,
|
||||
),
|
||||
buffer.update_diagnostics(version, diagnostics.clone(), cx),
|
||||
)
|
||||
});
|
||||
self.send_buffer_update(remote_id, operation?, cx);
|
||||
|
@ -772,50 +825,40 @@ impl Worktree {
|
|||
}
|
||||
|
||||
let this = self.as_local_mut().unwrap();
|
||||
let summary = DiagnosticSummary::new(&diagnostics);
|
||||
this.diagnostic_summaries
|
||||
.insert(worktree_path.clone(), DiagnosticSummary::new(&diagnostics));
|
||||
this.lsp_diagnostics
|
||||
.insert(worktree_path.clone(), diagnostics);
|
||||
.insert(PathKey(worktree_path.clone()), summary.clone());
|
||||
this.diagnostics.insert(worktree_path.clone(), diagnostics);
|
||||
|
||||
cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_diagnostics_from_provider(
|
||||
&mut self,
|
||||
path: Arc<Path>,
|
||||
diagnostics: Vec<DiagnosticEntry<usize>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Result<()> {
|
||||
let this = self.as_local_mut().unwrap();
|
||||
for buffer in this.open_buffers.values() {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
if buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map_or(false, |file| *file.path() == path)
|
||||
{
|
||||
let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
|
||||
(
|
||||
buffer.remote_id(),
|
||||
buffer.update_diagnostics(
|
||||
DIAGNOSTIC_PROVIDER_NAME.clone(),
|
||||
None,
|
||||
diagnostics.clone(),
|
||||
cx,
|
||||
),
|
||||
)
|
||||
});
|
||||
self.send_buffer_update(remote_id, operation?, cx);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(share) = this.share.as_ref() {
|
||||
cx.foreground()
|
||||
.spawn({
|
||||
let client = this.client.clone();
|
||||
let project_id = share.project_id;
|
||||
let worktree_id = this.id().to_proto();
|
||||
let path = worktree_path.to_string_lossy().to_string();
|
||||
async move {
|
||||
client
|
||||
.send(proto::UpdateDiagnosticSummary {
|
||||
project_id,
|
||||
worktree_id,
|
||||
summary: Some(proto::DiagnosticSummary {
|
||||
path,
|
||||
error_count: summary.error_count as u32,
|
||||
warning_count: summary.warning_count as u32,
|
||||
info_count: summary.info_count as u32,
|
||||
hint_count: summary.hint_count as u32,
|
||||
}),
|
||||
})
|
||||
.await
|
||||
.log_err()
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
let this = self.as_local_mut().unwrap();
|
||||
this.diagnostic_summaries
|
||||
.insert(path.clone(), DiagnosticSummary::new(&diagnostics));
|
||||
this.provider_diagnostics.insert(path.clone(), diagnostics);
|
||||
cx.emit(Event::DiagnosticsUpdated(path.clone()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -910,9 +953,8 @@ pub struct LocalWorktree {
|
|||
loading_buffers: LoadingBuffers,
|
||||
open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
|
||||
shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
|
||||
lsp_diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
|
||||
provider_diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>,
|
||||
diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
|
||||
diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
|
||||
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
|
||||
queued_operations: Vec<(u64, Operation)>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
client: Arc<Client>,
|
||||
|
@ -936,10 +978,10 @@ pub struct RemoteWorktree {
|
|||
replica_id: ReplicaId,
|
||||
loading_buffers: LoadingBuffers,
|
||||
open_buffers: HashMap<usize, RemoteBuffer>,
|
||||
diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
|
||||
languages: Arc<LanguageRegistry>,
|
||||
user_store: ModelHandle<UserStore>,
|
||||
queued_operations: Vec<(u64, Operation)>,
|
||||
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
|
||||
}
|
||||
|
||||
type LoadingBuffers = HashMap<
|
||||
|
@ -1018,8 +1060,7 @@ impl LocalWorktree {
|
|||
loading_buffers: Default::default(),
|
||||
open_buffers: Default::default(),
|
||||
shared_buffers: Default::default(),
|
||||
lsp_diagnostics: Default::default(),
|
||||
provider_diagnostics: Default::default(),
|
||||
diagnostics: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
queued_operations: Default::default(),
|
||||
language_registry: languages,
|
||||
|
@ -1093,23 +1134,133 @@ impl LocalWorktree {
|
|||
.log_err()
|
||||
.flatten()
|
||||
{
|
||||
enum DiagnosticProgress {
|
||||
Updating,
|
||||
Updated,
|
||||
}
|
||||
|
||||
let disk_based_sources = language
|
||||
.disk_based_diagnostic_sources()
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let disk_based_diagnostics_progress_token =
|
||||
language.disk_based_diagnostics_progress_token().cloned();
|
||||
let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
|
||||
let (disk_based_diagnostics_done_tx, disk_based_diagnostics_done_rx) =
|
||||
smol::channel::unbounded();
|
||||
language_server
|
||||
.on_notification::<lsp::notification::PublishDiagnostics, _>(move |params| {
|
||||
smol::block_on(diagnostics_tx.send(params)).ok();
|
||||
})
|
||||
.detach();
|
||||
cx.spawn_weak(|this, mut cx| {
|
||||
let has_disk_based_diagnostic_progress_token =
|
||||
disk_based_diagnostics_progress_token.is_some();
|
||||
let disk_based_diagnostics_done_tx = disk_based_diagnostics_done_tx.clone();
|
||||
async move {
|
||||
while let Ok(diagnostics) = diagnostics_rx.recv().await {
|
||||
if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
|
||||
handle.update(&mut cx, |this, cx| {
|
||||
if !has_disk_based_diagnostic_progress_token {
|
||||
smol::block_on(
|
||||
disk_based_diagnostics_done_tx
|
||||
.send(DiagnosticProgress::Updating),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
this.update_diagnostics(diagnostics, &disk_based_sources, cx)
|
||||
.log_err();
|
||||
if !has_disk_based_diagnostic_progress_token {
|
||||
smol::block_on(
|
||||
disk_based_diagnostics_done_tx
|
||||
.send(DiagnosticProgress::Updated),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let mut pending_disk_based_diagnostics: i32 = 0;
|
||||
language_server
|
||||
.on_notification::<lsp::notification::Progress, _>(move |params| {
|
||||
let token = match params.token {
|
||||
lsp::NumberOrString::Number(_) => None,
|
||||
lsp::NumberOrString::String(token) => Some(token),
|
||||
};
|
||||
|
||||
if token == disk_based_diagnostics_progress_token {
|
||||
match params.value {
|
||||
lsp::ProgressParamsValue::WorkDone(progress) => match progress {
|
||||
lsp::WorkDoneProgress::Begin(_) => {
|
||||
if pending_disk_based_diagnostics == 0 {
|
||||
smol::block_on(
|
||||
disk_based_diagnostics_done_tx
|
||||
.send(DiagnosticProgress::Updating),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
pending_disk_based_diagnostics += 1;
|
||||
}
|
||||
lsp::WorkDoneProgress::End(_) => {
|
||||
pending_disk_based_diagnostics -= 1;
|
||||
if pending_disk_based_diagnostics == 0 {
|
||||
smol::block_on(
|
||||
disk_based_diagnostics_done_tx
|
||||
.send(DiagnosticProgress::Updated),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
let rpc = self.client.clone();
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Ok(diagnostics) = diagnostics_rx.recv().await {
|
||||
while let Ok(progress) = disk_based_diagnostics_done_rx.recv().await {
|
||||
if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
|
||||
handle.update(&mut cx, |this, cx| {
|
||||
this.update_diagnostics_from_lsp(diagnostics, &disk_based_sources, cx)
|
||||
.log_err();
|
||||
});
|
||||
match progress {
|
||||
DiagnosticProgress::Updating => {
|
||||
let message = handle.update(&mut cx, |this, cx| {
|
||||
cx.emit(Event::DiskBasedDiagnosticsUpdating);
|
||||
let this = this.as_local().unwrap();
|
||||
this.share.as_ref().map(|share| {
|
||||
proto::DiskBasedDiagnosticsUpdating {
|
||||
project_id: share.project_id,
|
||||
worktree_id: this.id().to_proto(),
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(message) = message {
|
||||
rpc.send(message).await.log_err();
|
||||
}
|
||||
}
|
||||
DiagnosticProgress::Updated => {
|
||||
let message = handle.update(&mut cx, |this, cx| {
|
||||
cx.emit(Event::DiskBasedDiagnosticsUpdated);
|
||||
let this = this.as_local().unwrap();
|
||||
this.share.as_ref().map(|share| {
|
||||
proto::DiskBasedDiagnosticsUpdated {
|
||||
project_id: share.project_id,
|
||||
worktree_id: this.id().to_proto(),
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(message) = message {
|
||||
rpc.send(message).await.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
|
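The progress handler above only counts $/progress notifications whose token matches the language's configured disk-based-diagnostics token; Begin increments, End decrements, and just the 0-to-1 and 1-to-0 transitions are forwarded as Updating/Updated (languages without a token instead bracket each publishDiagnostics batch). A sketch of the token filter, using only the lsp-types re-export already used in this file:

    // Sketch: numeric tokens never match; string tokens must equal the configured token.
    fn is_disk_based_progress(token: &lsp::NumberOrString, configured: Option<&String>) -> bool {
        match token {
            lsp::NumberOrString::Number(_) => false,
            lsp::NumberOrString::String(token) => Some(token) == configured,
        }
    }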
@ -1158,35 +1309,25 @@ impl LocalWorktree {
|
|||
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
|
||||
.await?;
|
||||
|
||||
let (lsp_diagnostics, provider_diagnostics, language, language_server) =
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
let lsp_diagnostics = this.lsp_diagnostics.remove(&path);
|
||||
let provider_diagnostics = this.provider_diagnostics.remove(&path);
|
||||
let language = this
|
||||
.language_registry
|
||||
.select_language(file.full_path())
|
||||
.cloned();
|
||||
let server = language
|
||||
.as_ref()
|
||||
.and_then(|language| this.register_language(language, cx));
|
||||
(lsp_diagnostics, provider_diagnostics, language, server)
|
||||
});
|
||||
let (diagnostics, language, language_server) = this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
let diagnostics = this.diagnostics.get(&path).cloned();
|
||||
let language = this
|
||||
.language_registry
|
||||
.select_language(file.full_path())
|
||||
.cloned();
|
||||
let server = language
|
||||
.as_ref()
|
||||
.and_then(|language| this.register_language(language, cx));
|
||||
(diagnostics, language, server)
|
||||
});
|
||||
|
||||
let mut buffer_operations = Vec::new();
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx);
|
||||
buffer.set_language(language, language_server, cx);
|
||||
if let Some(diagnostics) = lsp_diagnostics {
|
||||
let op = buffer
|
||||
.update_diagnostics(LSP_PROVIDER_NAME.clone(), None, diagnostics, cx)
|
||||
.unwrap();
|
||||
buffer_operations.push(op);
|
||||
}
|
||||
if let Some(diagnostics) = provider_diagnostics {
|
||||
let op = buffer
|
||||
.update_diagnostics(DIAGNOSTIC_PROVIDER_NAME.clone(), None, diagnostics, cx)
|
||||
.unwrap();
|
||||
if let Some(diagnostics) = diagnostics {
|
||||
let op = buffer.update_diagnostics(None, diagnostics, cx).unwrap();
|
||||
buffer_operations.push(op);
|
||||
}
|
||||
buffer
|
||||
|
@ -1405,10 +1546,11 @@ impl LocalWorktree {
|
|||
})
|
||||
.detach();
|
||||
|
||||
let diagnostic_summaries = self.diagnostic_summaries.clone();
|
||||
let share_message = cx.background().spawn(async move {
|
||||
proto::ShareWorktree {
|
||||
project_id,
|
||||
worktree: Some(snapshot.to_proto()),
|
||||
worktree: Some(snapshot.to_proto(&diagnostic_summaries)),
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -1576,6 +1718,34 @@ impl RemoteWorktree {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_diagnostic_summary(
|
||||
&mut self,
|
||||
envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) {
|
||||
if let Some(summary) = envelope.payload.summary {
|
||||
let path: Arc<Path> = Path::new(&summary.path).into();
|
||||
self.diagnostic_summaries.insert(
|
||||
PathKey(path.clone()),
|
||||
DiagnosticSummary {
|
||||
error_count: summary.error_count as usize,
|
||||
warning_count: summary.warning_count as usize,
|
||||
info_count: summary.info_count as usize,
|
||||
hint_count: summary.hint_count as usize,
|
||||
},
|
||||
);
|
||||
cx.emit(Event::DiagnosticsUpdated(path));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostics_updating(&self, cx: &mut ModelContext<Worktree>) {
|
||||
cx.emit(Event::DiskBasedDiagnosticsUpdating);
|
||||
}
|
||||
|
||||
pub fn disk_based_diagnostics_updated(&self, cx: &mut ModelContext<Worktree>) {
|
||||
cx.emit(Event::DiskBasedDiagnosticsUpdated);
|
||||
}
|
||||
|
||||
pub fn remove_collaborator(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Worktree>) {
|
||||
for (_, buffer) in &self.open_buffers {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
|
@ -1605,17 +1775,24 @@ impl Snapshot {
|
|||
self.id
|
||||
}
|
||||
|
||||
pub fn to_proto(&self) -> proto::Worktree {
|
||||
pub fn to_proto(
|
||||
&self,
|
||||
diagnostic_summaries: &TreeMap<PathKey, DiagnosticSummary>,
|
||||
) -> proto::Worktree {
|
||||
let root_name = self.root_name.clone();
|
||||
proto::Worktree {
|
||||
id: self.id.0 as u64,
|
||||
root_name,
|
||||
entries: self
|
||||
.entries_by_path
|
||||
.cursor::<()>()
|
||||
.iter()
|
||||
.filter(|e| !e.is_ignored)
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
diagnostic_summaries: diagnostic_summaries
|
||||
.iter()
|
||||
.map(|(path, summary)| summary.to_proto(path.0.clone()))
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3013,32 +3190,10 @@ impl ToPointUtf16 for lsp::Position {
|
|||
}
|
||||
}
|
||||
|
||||
fn diagnostic_ranges<'a>(
|
||||
diagnostic: &'a lsp::Diagnostic,
|
||||
abs_path: &'a Path,
|
||||
) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
|
||||
diagnostic
|
||||
.related_information
|
||||
.iter()
|
||||
.flatten()
|
||||
.filter_map(move |info| {
|
||||
if info.location.uri.to_file_path().ok()? == abs_path {
|
||||
let info_start = PointUtf16::new(
|
||||
info.location.range.start.line,
|
||||
info.location.range.start.character,
|
||||
);
|
||||
let info_end = PointUtf16::new(
|
||||
info.location.range.end.line,
|
||||
info.location.range.end.character,
|
||||
);
|
||||
Some(info_start..info_end)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.chain(Some(
|
||||
diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
|
||||
))
|
||||
fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
|
||||
let start = PointUtf16::new(range.start.line, range.start.character);
|
||||
let end = PointUtf16::new(range.end.line, range.end.character);
|
||||
start..end
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -3048,6 +3203,7 @@ mod tests {
|
|||
use anyhow::Result;
|
||||
use client::test::{FakeHttpClient, FakeServer};
|
||||
use fs::RealFs;
|
||||
use gpui::test::subscribe;
|
||||
use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig};
|
||||
use language::{Diagnostic, LanguageConfig};
|
||||
use lsp::Url;
|
||||
|
@ -3245,7 +3401,7 @@ mod tests {
|
|||
let remote = Worktree::remote(
|
||||
1,
|
||||
1,
|
||||
initial_snapshot.to_proto(),
|
||||
initial_snapshot.to_proto(&Default::default()),
|
||||
Client::new(http_client.clone()),
|
||||
user_store,
|
||||
Default::default(),
|
||||
|
@ -3697,6 +3853,10 @@ mod tests {
|
|||
async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
let (language_server_config, mut fake_server) =
|
||||
LanguageServerConfig::fake(cx.background()).await;
|
||||
let progress_token = language_server_config
|
||||
.disk_based_diagnostics_progress_token
|
||||
.clone()
|
||||
.unwrap();
|
||||
let mut languages = LanguageRegistry::new();
|
||||
languages.add(Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
|
@ -3736,6 +3896,18 @@ mod tests {
|
|||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut events = subscribe(&tree, &mut cx);
|
||||
|
||||
fake_server.start_progress(&progress_token).await;
|
||||
assert_eq!(
|
||||
events.next().await.unwrap(),
|
||||
Event::DiskBasedDiagnosticsUpdating
|
||||
);
|
||||
|
||||
fake_server.start_progress(&progress_token).await;
|
||||
fake_server.end_progress(&progress_token).await;
|
||||
fake_server.start_progress(&progress_token).await;
|
||||
|
||||
fake_server
|
||||
.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
|
||||
uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
|
||||
|
@ -3748,6 +3920,17 @@ mod tests {
|
|||
}],
|
||||
})
|
||||
.await;
|
||||
assert_eq!(
|
||||
events.next().await.unwrap(),
|
||||
Event::DiagnosticsUpdated(Arc::from(Path::new("a.rs")))
|
||||
);
|
||||
|
||||
fake_server.end_progress(&progress_token).await;
|
||||
fake_server.end_progress(&progress_token).await;
|
||||
assert_eq!(
|
||||
events.next().await.unwrap(),
|
||||
Event::DiskBasedDiagnosticsUpdated
|
||||
);
|
||||
|
||||
let buffer = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
|
||||
|
@ -3761,19 +3944,16 @@ mod tests {
|
|||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
diagnostics,
|
||||
&[(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(0, 9)..Point::new(0, 10),
|
||||
diagnostic: Diagnostic {
|
||||
severity: lsp::DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
&[DiagnosticEntry {
|
||||
range: Point::new(0, 9)..Point::new(0, 10),
|
||||
diagnostic: Diagnostic {
|
||||
severity: lsp::DiagnosticSeverity::ERROR,
|
||||
message: "undefined variable 'A'".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
)]
|
||||
}]
|
||||
)
|
||||
});
|
||||
}
|
||||
|
@ -3918,7 +4098,7 @@ mod tests {
|
|||
|
||||
worktree
|
||||
.update(&mut cx, |tree, cx| {
|
||||
tree.update_diagnostics_from_lsp(message, &Default::default(), cx)
|
||||
tree.update_diagnostics(message, &Default::default(), cx)
|
||||
})
|
||||
.unwrap();
|
||||
let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
|
@ -3928,78 +4108,61 @@ mod tests {
|
|||
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 8)..Point::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::WARNING,
|
||||
message: "error 1".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 8)..Point::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::WARNING,
|
||||
message: "error 1".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 8)..Point::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 1 hint 1".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 8)..Point::new(1, 9),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 1 hint 1".to_string(),
|
||||
group_id: 0,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 13)..Point::new(1, 15),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 2 hint 1".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 13)..Point::new(1, 15),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 2 hint 1".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 13)..Point::new(1, 15),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 2 hint 2".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 13)..Point::new(1, 15),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::HINT,
|
||||
message: "error 2 hint 2".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: false,
|
||||
..Default::default()
|
||||
}
|
||||
),
|
||||
(
|
||||
LSP_PROVIDER_NAME.as_ref(),
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 8)..Point::new(2, 17),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "error 2".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
},
|
||||
DiagnosticEntry {
|
||||
range: Point::new(2, 8)..Point::new(2, 17),
|
||||
diagnostic: Diagnostic {
|
||||
severity: DiagnosticSeverity::ERROR,
|
||||
message: "error 2".to_string(),
|
||||
group_id: 1,
|
||||
is_primary: true,
|
||||
..Default::default()
|
||||
}
|
||||
)
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostic_group::<Point>(&LSP_PROVIDER_NAME, 0)
|
||||
.collect::<Vec<_>>(),
|
||||
buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
|
||||
&[
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 8)..Point::new(1, 9),
|
||||
|
@ -4024,9 +4187,7 @@ mod tests {
|
|||
]
|
||||
);
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostic_group::<Point>(&LSP_PROVIDER_NAME, 1)
|
||||
.collect::<Vec<_>>(),
|
||||
buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
|
||||
&[
|
||||
DiagnosticEntry {
|
||||
range: Point::new(1, 13)..Point::new(1, 15),
|
||||
|
|
|
@@ -10,6 +10,7 @@ path = "src/project_panel.rs"
gpui = { path = "../gpui" }
project = { path = "../project" }
theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" }
postage = { version = "0.4.1", features = ["futures-traits"] }

@ -124,14 +124,14 @@ impl ProjectPanel {
|
|||
if let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) {
|
||||
if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) {
|
||||
workspace
|
||||
.open_entry(
|
||||
.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.map(|t| t.detach());
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -23,32 +23,34 @@ message Envelope {

        RegisterWorktree register_worktree = 17;
        UnregisterWorktree unregister_worktree = 18;
        ShareWorktree share_worktree = 100;
        UpdateWorktree update_worktree = 19;
        UpdateDiagnosticSummary update_diagnostic_summary = 20;
        ShareWorktree share_worktree = 19;
        UpdateWorktree update_worktree = 20;
        UpdateDiagnosticSummary update_diagnostic_summary = 21;
        DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 22;
        DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 23;

        OpenBuffer open_buffer = 22;
        OpenBufferResponse open_buffer_response = 23;
        CloseBuffer close_buffer = 24;
        UpdateBuffer update_buffer = 25;
        SaveBuffer save_buffer = 26;
        BufferSaved buffer_saved = 27;
        OpenBuffer open_buffer = 24;
        OpenBufferResponse open_buffer_response = 25;
        CloseBuffer close_buffer = 26;
        UpdateBuffer update_buffer = 27;
        SaveBuffer save_buffer = 28;
        BufferSaved buffer_saved = 29;

        GetChannels get_channels = 28;
        GetChannelsResponse get_channels_response = 29;
        JoinChannel join_channel = 30;
        JoinChannelResponse join_channel_response = 31;
        LeaveChannel leave_channel = 32;
        SendChannelMessage send_channel_message = 33;
        SendChannelMessageResponse send_channel_message_response = 34;
        ChannelMessageSent channel_message_sent = 35;
        GetChannelMessages get_channel_messages = 36;
        GetChannelMessagesResponse get_channel_messages_response = 37;
        GetChannels get_channels = 30;
        GetChannelsResponse get_channels_response = 31;
        JoinChannel join_channel = 32;
        JoinChannelResponse join_channel_response = 33;
        LeaveChannel leave_channel = 34;
        SendChannelMessage send_channel_message = 35;
        SendChannelMessageResponse send_channel_message_response = 36;
        ChannelMessageSent channel_message_sent = 37;
        GetChannelMessages get_channel_messages = 38;
        GetChannelMessagesResponse get_channel_messages_response = 39;

        UpdateContacts update_contacts = 38;
        UpdateContacts update_contacts = 40;

        GetUsers get_users = 39;
        GetUsersResponse get_users_response = 40;
        GetUsers get_users = 41;
        GetUsersResponse get_users_response = 42;
    }
}

@@ -169,9 +171,25 @@ message BufferSaved {
message UpdateDiagnosticSummary {
    uint64 project_id = 1;
    uint64 worktree_id = 2;
    DiagnosticSummary summary = 3;
}

message DiagnosticSummary {
    string path = 3;
    uint32 error_count = 4;
    uint32 warning_count = 5;
    uint32 info_count = 6;
    uint32 hint_count = 7;
}

message DiskBasedDiagnosticsUpdating {
    uint64 project_id = 1;
    uint64 worktree_id = 2;
}

message DiskBasedDiagnosticsUpdated {
    uint64 project_id = 1;
    uint64 worktree_id = 2;
}

message GetChannels {}

@@ -248,6 +266,7 @@ message Worktree {
    uint64 id = 1;
    string root_name = 2;
    repeated Entry entries = 3;
    repeated DiagnosticSummary diagnostic_summaries = 4;
}

message Entry {

@@ -268,7 +287,7 @@ message Buffer {
    repeated UndoMapEntry undo_map = 5;
    repeated VectorClockEntry version = 6;
    repeated SelectionSet selections = 7;
    repeated DiagnosticSet diagnostic_sets = 8;
    repeated Diagnostic diagnostics = 8;
    uint32 lamport_timestamp = 9;
    repeated Operation deferred_operations = 10;
}

@@ -309,15 +328,10 @@ enum Bias {
    Right = 1;
}

message UpdateDiagnosticSet {
message UpdateDiagnostics {
    uint32 replica_id = 1;
    uint32 lamport_timestamp = 2;
    DiagnosticSet diagnostic_set = 3;
}

message DiagnosticSet {
    string provider_name = 1;
    repeated Diagnostic diagnostics = 2;
    repeated Diagnostic diagnostics = 3;
}

message Diagnostic {

@@ -345,7 +359,7 @@ message Operation {
    Edit edit = 1;
    Undo undo = 2;
    UpdateSelections update_selections = 3;
    UpdateDiagnosticSet update_diagnostic_set = 4;
    UpdateDiagnostics update_diagnostics = 4;
}

message Edit {

@@ -125,6 +125,8 @@ messages!(
    BufferSaved,
    ChannelMessageSent,
    CloseBuffer,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsUpdating,
    Error,
    GetChannelMessages,
    GetChannelMessagesResponse,

@@ -155,6 +157,7 @@ messages!(
    UnshareProject,
    UpdateBuffer,
    UpdateContacts,
    UpdateDiagnosticSummary,
    UpdateWorktree,
);

@@ -178,17 +181,20 @@ request_messages!(
entity_messages!(
    project_id,
    AddProjectCollaborator,
    RemoveProjectCollaborator,
    BufferSaved,
    CloseBuffer,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsUpdating,
    JoinProject,
    LeaveProject,
    BufferSaved,
    OpenBuffer,
    CloseBuffer,
    RemoveProjectCollaborator,
    SaveBuffer,
    ShareWorktree,
    UnregisterWorktree,
    UnshareProject,
    UpdateBuffer,
    UpdateDiagnosticSummary,
    UpdateWorktree,
);

@ -17,7 +17,7 @@ use rpc::{
|
|||
Connection, ConnectionId, Peer, TypedEnvelope,
|
||||
};
|
||||
use sha1::{Digest as _, Sha1};
|
||||
use std::{any::TypeId, future::Future, mem, sync::Arc, time::Instant};
|
||||
use std::{any::TypeId, future::Future, mem, path::PathBuf, sync::Arc, time::Instant};
|
||||
use store::{Store, Worktree};
|
||||
use surf::StatusCode;
|
||||
use tide::log;
|
||||
|
@ -71,6 +71,9 @@ impl Server {
|
|||
.add_handler(Server::unregister_worktree)
|
||||
.add_handler(Server::share_worktree)
|
||||
.add_handler(Server::update_worktree)
|
||||
.add_handler(Server::update_diagnostic_summary)
|
||||
.add_handler(Server::disk_based_diagnostics_updating)
|
||||
.add_handler(Server::disk_based_diagnostics_updated)
|
||||
.add_handler(Server::open_buffer)
|
||||
.add_handler(Server::close_buffer)
|
||||
.add_handler(Server::update_buffer)
|
||||
|
@ -300,6 +303,11 @@ impl Server {
|
|||
id: *id,
|
||||
root_name: worktree.root_name.clone(),
|
||||
entries: share.entries.values().cloned().collect(),
|
||||
diagnostic_summaries: share
|
||||
.diagnostic_summaries
|
||||
.values()
|
||||
.cloned()
|
||||
.collect(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
@ -471,11 +479,17 @@ impl Server {
|
|||
.map(|entry| (entry.id, entry))
|
||||
.collect();
|
||||
|
||||
let diagnostic_summaries = mem::take(&mut worktree.diagnostic_summaries)
|
||||
.into_iter()
|
||||
.map(|summary| (PathBuf::from(summary.path.clone()), summary))
|
||||
.collect();
|
||||
|
||||
let contact_user_ids = self.state_mut().share_worktree(
|
||||
request.payload.project_id,
|
||||
worktree.id,
|
||||
request.sender_id,
|
||||
entries,
|
||||
diagnostic_summaries,
|
||||
);
|
||||
if let Some(contact_user_ids) = contact_user_ids {
|
||||
self.peer.respond(request.receipt(), proto::Ack {}).await?;
|
||||
|
@ -517,6 +531,64 @@ impl Server {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn update_diagnostic_summary(
|
||||
mut self: Arc<Server>,
|
||||
request: TypedEnvelope<proto::UpdateDiagnosticSummary>,
|
||||
) -> tide::Result<()> {
|
||||
let receiver_ids = request
|
||||
.payload
|
||||
.summary
|
||||
.clone()
|
||||
.and_then(|summary| {
|
||||
self.state_mut().update_diagnostic_summary(
|
||||
request.payload.project_id,
|
||||
request.payload.worktree_id,
|
||||
request.sender_id,
|
||||
summary,
|
||||
)
|
||||
})
|
||||
.ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
|
||||
|
||||
broadcast(request.sender_id, receiver_ids, |connection_id| {
|
||||
self.peer
|
||||
.forward_send(request.sender_id, connection_id, request.payload.clone())
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn disk_based_diagnostics_updating(
|
||||
self: Arc<Server>,
|
||||
request: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
|
||||
) -> tide::Result<()> {
|
||||
let receiver_ids = self
|
||||
.state()
|
||||
.project_connection_ids(request.payload.project_id, request.sender_id)
|
||||
.ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
|
||||
broadcast(request.sender_id, receiver_ids, |connection_id| {
|
||||
self.peer
|
||||
.forward_send(request.sender_id, connection_id, request.payload.clone())
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn disk_based_diagnostics_updated(
|
||||
self: Arc<Server>,
|
||||
request: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
|
||||
) -> tide::Result<()> {
|
||||
let receiver_ids = self
|
||||
.state()
|
||||
.project_connection_ids(request.payload.project_id, request.sender_id)
|
||||
.ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
|
||||
broadcast(request.sender_id, receiver_ids, |connection_id| {
|
||||
self.peer
|
||||
.forward_send(request.sender_id, connection_id, request.payload.clone())
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn open_buffer(
|
||||
self: Arc<Server>,
|
||||
request: TypedEnvelope<proto::OpenBuffer>,
|
||||
|
@ -999,7 +1071,7 @@ mod tests {
|
|||
};
|
||||
use ::rpc::Peer;
|
||||
use async_std::task;
|
||||
use gpui::{ModelHandle, TestAppContext};
|
||||
use gpui::{executor, ModelHandle, TestAppContext};
|
||||
use parking_lot::Mutex;
|
||||
use postage::{mpsc, watch};
|
||||
use rpc::PeerId;
|
||||
|
@ -1008,6 +1080,7 @@ mod tests {
|
|||
use std::{
|
||||
ops::Deref,
|
||||
path::Path,
|
||||
rc::Rc,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering::SeqCst},
|
||||
Arc,
|
||||
|
@ -1026,7 +1099,7 @@ mod tests {
|
|||
LanguageRegistry, LanguageServerConfig, Point,
|
||||
},
|
||||
lsp,
|
||||
project::Project,
|
||||
project::{DiagnosticSummary, Project, ProjectPath},
|
||||
};
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -1037,7 +1110,7 @@ mod tests {
|
|||
cx_a.foreground().forbid_parking();
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1170,7 +1243,7 @@ mod tests {
|
|||
cx_a.foreground().forbid_parking();
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1246,7 +1319,7 @@ mod tests {
|
|||
cx_a.foreground().forbid_parking();
|
||||
|
||||
// Connect to a server as 3 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
let client_c = server.create_client(&mut cx_c, "user_c").await;
|
||||
|
@ -1396,7 +1469,7 @@ mod tests {
|
|||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1492,7 +1565,7 @@ mod tests {
|
|||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1572,7 +1645,7 @@ mod tests {
|
|||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1647,7 +1720,7 @@ mod tests {
|
|||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1734,7 +1807,7 @@ mod tests {
|
|||
)));
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -1767,6 +1840,7 @@ mod tests {
|
|||
let project_id = project_a
|
||||
.update(&mut cx_a, |project, _| project.next_remote_id())
|
||||
.await;
|
||||
let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
|
||||
project_a
|
||||
.update(&mut cx_a, |project, cx| project.share(cx))
|
||||
.await
|
||||
|
@ -1782,6 +1856,68 @@ mod tests {
|
|||
.unwrap();
|
||||
|
||||
// Simulate a language server reporting errors for a file.
|
||||
fake_language_server
|
||||
.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
|
||||
message: "message 1".to_string(),
|
||||
..Default::default()
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
|
||||
// Wait for server to see the diagnostics update.
|
||||
server
|
||||
.condition(|store| {
|
||||
let worktree = store
|
||||
.project(project_id)
|
||||
.unwrap()
|
||||
.worktrees
|
||||
.get(&worktree_id.to_proto())
|
||||
.unwrap();
|
||||
|
||||
!worktree
|
||||
.share
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.diagnostic_summaries
|
||||
.is_empty()
|
||||
})
|
||||
.await;
|
||||
|
||||
// Join the worktree as client B.
|
||||
let project_b = Project::remote(
|
||||
project_id,
|
||||
client_b.clone(),
|
||||
client_b.user_store.clone(),
|
||||
lang_registry.clone(),
|
||||
fs.clone(),
|
||||
&mut cx_b.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
project_b.read_with(&cx_b, |project, cx| {
|
||||
assert_eq!(
|
||||
project.diagnostic_summaries(cx).collect::<Vec<_>>(),
|
||||
&[(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: Arc::from(Path::new("a.rs")),
|
||||
},
|
||||
DiagnosticSummary {
|
||||
error_count: 1,
|
||||
warning_count: 0,
|
||||
..Default::default()
|
||||
},
|
||||
)]
|
||||
)
|
||||
});
|
||||
|
||||
// Simulate a language server reporting more errors for a file.
|
||||
fake_language_server
|
||||
.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
|
@ -1806,20 +1942,26 @@ mod tests {
|
|||
})
|
||||
.await;
|
||||
|
||||
// Join the worktree as client B.
|
||||
let project_b = Project::remote(
|
||||
project_id,
|
||||
client_b.clone(),
|
||||
client_b.user_store.clone(),
|
||||
lang_registry.clone(),
|
||||
fs.clone(),
|
||||
&mut cx_b.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
|
||||
// Client b gets the updated summaries
|
||||
project_b
|
||||
.condition(&cx_b, |project, cx| {
|
||||
project.diagnostic_summaries(cx).collect::<Vec<_>>()
|
||||
== &[(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: Arc::from(Path::new("a.rs")),
|
||||
},
|
||||
DiagnosticSummary {
|
||||
error_count: 1,
|
||||
warning_count: 1,
|
||||
..Default::default()
|
||||
},
|
||||
)]
|
||||
})
|
||||
.await;
|
||||
|
||||
// Open the file with the errors.
|
||||
// Open the file with the errors on client B. They should be present.
|
||||
let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
|
||||
let buffer_b = cx_b
|
||||
.background()
|
||||
.spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.rs", cx)))
|
||||
|
@ -1831,7 +1973,7 @@ mod tests {
|
|||
buffer
|
||||
.snapshot()
|
||||
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||
.map(|(_, entry)| entry)
|
||||
.map(|entry| entry)
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
DiagnosticEntry {
|
||||
|
@ -1864,7 +2006,7 @@ mod tests {
|
|||
cx_a.foreground().forbid_parking();
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
|
||||
|
@ -2003,7 +2145,7 @@ mod tests {
|
|||
async fn test_chat_message_validation(mut cx_a: TestAppContext) {
|
||||
cx_a.foreground().forbid_parking();
|
||||
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
|
||||
let db = &server.app_state.db;
|
||||
|
@ -2064,7 +2206,7 @@ mod tests {
|
|||
cx_a.foreground().forbid_parking();
|
||||
|
||||
// Connect to a server as 2 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
let mut status_b = client_b.status();
|
||||
|
@ -2282,7 +2424,7 @@ mod tests {
|
|||
let fs = Arc::new(FakeFs::new());
|
||||
|
||||
// Connect to a server as 3 clients.
|
||||
let mut server = TestServer::start().await;
|
||||
let mut server = TestServer::start(cx_a.foreground()).await;
|
||||
let client_a = server.create_client(&mut cx_a, "user_a").await;
|
||||
let client_b = server.create_client(&mut cx_b, "user_b").await;
|
||||
let client_c = server.create_client(&mut cx_c, "user_c").await;
|
||||
|
@ -2415,6 +2557,7 @@ mod tests {
|
|||
peer: Arc<Peer>,
|
||||
app_state: Arc<AppState>,
|
||||
server: Arc<Server>,
|
||||
foreground: Rc<executor::Foreground>,
|
||||
notifications: mpsc::Receiver<()>,
|
||||
connection_killers: Arc<Mutex<HashMap<UserId, watch::Sender<Option<()>>>>>,
|
||||
forbid_connections: Arc<AtomicBool>,
|
||||
|
@ -2422,7 +2565,7 @@ mod tests {
|
|||
}
|
||||
|
||||
impl TestServer {
|
||||
async fn start() -> Self {
|
||||
async fn start(foreground: Rc<executor::Foreground>) -> Self {
|
||||
let test_db = TestDb::new();
|
||||
let app_state = Self::build_app_state(&test_db).await;
|
||||
let peer = Peer::new();
|
||||
|
@ -2432,6 +2575,7 @@ mod tests {
|
|||
peer,
|
||||
app_state,
|
||||
server,
|
||||
foreground,
|
||||
notifications: notifications.1,
|
||||
connection_killers: Default::default(),
|
||||
forbid_connections: Default::default(),
|
||||
|
@ -2547,7 +2691,9 @@ mod tests {
|
|||
{
|
||||
async_std::future::timeout(Duration::from_millis(500), async {
|
||||
while !(predicate)(&*self.server.store.read()) {
|
||||
self.foreground.start_waiting();
|
||||
self.notifications.recv().await;
|
||||
self.foreground.finish_waiting();
|
||||
}
|
||||
})
|
||||
.await
|
||||
|
|
|
@@ -1,8 +1,8 @@
use crate::db::{ChannelId, UserId};
use anyhow::anyhow;
use collections::{HashMap, HashSet};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::{proto, ConnectionId};
use std::collections::hash_map;
use std::{collections::hash_map, path::PathBuf};

#[derive(Default)]
pub struct Store {

@@ -41,6 +41,7 @@ pub struct ProjectShare {

pub struct WorktreeShare {
    pub entries: HashMap<u64, proto::Entry>,
    pub diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
}

#[derive(Default)]

@@ -385,17 +386,42 @@ impl Store {
        worktree_id: u64,
        connection_id: ConnectionId,
        entries: HashMap<u64, proto::Entry>,
        diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
    ) -> Option<Vec<UserId>> {
        let project = self.projects.get_mut(&project_id)?;
        let worktree = project.worktrees.get_mut(&worktree_id)?;
        if project.host_connection_id == connection_id && project.share.is_some() {
            worktree.share = Some(WorktreeShare { entries });
            worktree.share = Some(WorktreeShare {
                entries,
                diagnostic_summaries,
            });
            Some(project.authorized_user_ids())
        } else {
            None
        }
    }

    pub fn update_diagnostic_summary(
        &mut self,
        project_id: u64,
        worktree_id: u64,
        connection_id: ConnectionId,
        summary: proto::DiagnosticSummary,
    ) -> Option<Vec<ConnectionId>> {
        let project = self.projects.get_mut(&project_id)?;
        let worktree = project.worktrees.get_mut(&worktree_id)?;
        if project.host_connection_id == connection_id {
            if let Some(share) = worktree.share.as_mut() {
                share
                    .diagnostic_summaries
                    .insert(summary.path.clone().into(), summary);
                return Some(project.connection_ids());
            }
        }

        None
    }

    pub fn join_project(
        &mut self,
        connection_id: ConnectionId,

@@ -497,6 +523,11 @@ impl Store {
        Some(self.channels.get(&channel_id)?.connection_ids())
    }

    #[cfg(test)]
    pub fn project(&self, project_id: u64) -> Option<&Project> {
        self.projects.get(&project_id)
    }

    pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
        let project = self.projects.get(&project_id)?;
        if project.host_connection_id == connection_id

@@ -21,6 +21,16 @@ pub struct MapKey<K>(K);
pub struct MapKeyRef<'a, K>(Option<&'a K>);

impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
    pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
        let tree = SumTree::from_iter(
            entries
                .into_iter()
                .map(|(key, value)| MapEntry { key, value }),
            &(),
        );
        Self(tree)
    }

    pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
        cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());

@@ -1536,7 +1536,7 @@ impl BufferSnapshot {
            insertion_cursor.prev(&());
        }
        let insertion = insertion_cursor.item().expect("invalid insertion");
        debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
        assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");

        fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
        let fragment = fragment_cursor.item().unwrap();

@@ -1578,7 +1578,7 @@ impl BufferSnapshot {
            insertion_cursor.prev(&());
        }
        let insertion = insertion_cursor.item().expect("invalid insertion");
        debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
        assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");

        let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
        fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);

@@ -24,6 +24,7 @@ pub struct Theme {
    pub project_panel: ProjectPanel,
    pub selector: Selector,
    pub editor: EditorStyle,
    pub project_diagnostics: ProjectDiagnostics,
}

#[derive(Deserialize, Default)]

@@ -226,6 +227,14 @@ pub struct ContainedLabel {
    pub label: LabelStyle,
}

#[derive(Clone, Deserialize, Default)]
pub struct ProjectDiagnostics {
    #[serde(flatten)]
    pub container: ContainerStyle,
    pub empty_message: TextStyle,
    pub status_bar_item: ContainedText,
}

#[derive(Clone, Deserialize, Default)]
pub struct EditorStyle {
    pub text: TextStyle,

@@ -253,6 +262,8 @@ pub struct EditorStyle {
#[derive(Copy, Clone, Deserialize, Default)]
pub struct DiagnosticStyle {
    pub text: Color,
    #[serde(default)]
    pub header: ContainerStyle,
}

#[derive(Clone, Copy, Default, Deserialize)]

@@ -293,7 +293,7 @@ impl View for ThemeSelector {
        Container::new(
            Flex::new(Axis::Vertical)
                .with_child(ChildView::new(self.query_editor.id()).boxed())
                .with_child(Flexible::new(1.0, self.render_matches(cx)).boxed())
                .with_child(Flexible::new(1.0, false, self.render_matches(cx)).boxed())
                .boxed(),
        )
        .with_style(settings.theme.selector.container)

@@ -12,6 +12,7 @@ test-support = ["client/test-support", "project/test-support"]
[dependencies]
client = { path = "../client" }
clock = { path = "../clock" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
language = { path = "../language" }
project = { path = "../project" }

@ -1,15 +1,14 @@
|
|||
use super::{ItemViewHandle, SplitDirection};
|
||||
use crate::Settings;
|
||||
use crate::{ItemHandle, Settings, Workspace};
|
||||
use gpui::{
|
||||
action,
|
||||
elements::*,
|
||||
geometry::{rect::RectF, vector::vec2f},
|
||||
keymap::Binding,
|
||||
platform::CursorStyle,
|
||||
Entity, MutableAppContext, Quad, RenderContext, View, ViewContext, ViewHandle,
|
||||
Entity, MutableAppContext, Quad, RenderContext, View, ViewContext,
|
||||
};
|
||||
use postage::watch;
|
||||
use project::ProjectPath;
|
||||
use std::cmp;
|
||||
|
||||
action!(Split, SplitDirection);
|
||||
|
@ -70,7 +69,7 @@ pub struct TabState {
|
|||
}
|
||||
|
||||
pub struct Pane {
|
||||
items: Vec<Box<dyn ItemViewHandle>>,
|
||||
item_views: Vec<(usize, Box<dyn ItemViewHandle>)>,
|
||||
active_item: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
}
|
||||
|
@ -78,7 +77,7 @@ pub struct Pane {
|
|||
impl Pane {
|
||||
pub fn new(settings: watch::Receiver<Settings>) -> Self {
|
||||
Self {
|
||||
items: Vec::new(),
|
||||
item_views: Vec::new(),
|
||||
active_item: 0,
|
||||
settings,
|
||||
}
|
||||
|
@ -88,43 +87,70 @@ impl Pane {
|
|||
cx.emit(Event::Activate);
|
||||
}
|
||||
|
||||
pub fn add_item(&mut self, item: Box<dyn ItemViewHandle>, cx: &mut ViewContext<Self>) -> usize {
|
||||
let item_idx = cmp::min(self.active_item + 1, self.items.len());
|
||||
self.items.insert(item_idx, item);
|
||||
cx.notify();
|
||||
item_idx
|
||||
pub fn open_item<T>(
|
||||
&mut self,
|
||||
item_handle: T,
|
||||
workspace: &Workspace,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Box<dyn ItemViewHandle>
|
||||
where
|
||||
T: 'static + ItemHandle,
|
||||
{
|
||||
for (ix, (item_id, item_view)) in self.item_views.iter().enumerate() {
|
||||
if *item_id == item_handle.id() {
|
||||
let item_view = item_view.boxed_clone();
|
||||
self.activate_item(ix, cx);
|
||||
return item_view;
|
||||
}
|
||||
}
|
||||
|
||||
let item_view = item_handle.add_view(cx.window_id(), workspace, cx);
|
||||
self.add_item_view(item_view.boxed_clone(), cx);
|
||||
item_view
|
||||
}
|
||||
|
||||
pub fn items(&self) -> &[Box<dyn ItemViewHandle>] {
|
||||
&self.items
|
||||
pub fn add_item_view(
|
||||
&mut self,
|
||||
item_view: Box<dyn ItemViewHandle>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
item_view.added_to_pane(cx);
|
||||
let item_idx = cmp::min(self.active_item + 1, self.item_views.len());
|
||||
self.item_views
|
||||
.insert(item_idx, (item_view.item_handle(cx).id(), item_view));
|
||||
self.activate_item(item_idx, cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn contains_item(&self, item: &dyn ItemHandle) -> bool {
|
||||
let item_id = item.id();
|
||||
self.item_views
|
||||
.iter()
|
||||
.any(|(existing_item_id, _)| *existing_item_id == item_id)
|
||||
}
|
||||
|
||||
pub fn item_views(&self) -> impl Iterator<Item = &Box<dyn ItemViewHandle>> {
|
||||
self.item_views.iter().map(|(_, view)| view)
|
||||
}
|
||||
|
||||
pub fn active_item(&self) -> Option<Box<dyn ItemViewHandle>> {
|
||||
self.items.get(self.active_item).cloned()
|
||||
self.item_views
|
||||
.get(self.active_item)
|
||||
.map(|(_, view)| view.clone())
|
||||
}
|
||||
|
||||
pub fn activate_entry(
|
||||
&mut self,
|
||||
project_path: ProjectPath,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Box<dyn ItemViewHandle>> {
|
||||
if let Some(index) = self.items.iter().position(|item| {
|
||||
item.project_path(cx.as_ref())
|
||||
.map_or(false, |item_path| item_path == project_path)
|
||||
}) {
|
||||
self.activate_item(index, cx);
|
||||
self.items.get(index).map(|handle| handle.boxed_clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
pub fn index_for_item_view(&self, item_view: &dyn ItemViewHandle) -> Option<usize> {
|
||||
self.item_views
|
||||
.iter()
|
||||
.position(|(_, i)| i.id() == item_view.id())
|
||||
}
|
||||
|
||||
pub fn item_index(&self, item: &dyn ItemViewHandle) -> Option<usize> {
|
||||
self.items.iter().position(|i| i.id() == item.id())
|
||||
pub fn index_for_item(&self, item: &dyn ItemHandle) -> Option<usize> {
|
||||
self.item_views.iter().position(|(id, _)| *id == item.id())
|
||||
}
|
||||
|
||||
pub fn activate_item(&mut self, index: usize, cx: &mut ViewContext<Self>) {
|
||||
if index < self.items.len() {
|
||||
if index < self.item_views.len() {
|
||||
self.active_item = index;
|
||||
self.focus_active_item(cx);
|
||||
cx.notify();
|
||||
|
@ -134,15 +160,15 @@ impl Pane {
|
|||
pub fn activate_prev_item(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if self.active_item > 0 {
|
||||
self.active_item -= 1;
|
||||
} else if self.items.len() > 0 {
|
||||
self.active_item = self.items.len() - 1;
|
||||
} else if self.item_views.len() > 0 {
|
||||
self.active_item = self.item_views.len() - 1;
|
||||
}
|
||||
self.focus_active_item(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn activate_next_item(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if self.active_item + 1 < self.items.len() {
|
||||
if self.active_item + 1 < self.item_views.len() {
|
||||
self.active_item += 1;
|
||||
} else {
|
||||
self.active_item = 0;
|
||||
|
@ -152,15 +178,15 @@ impl Pane {
|
|||
}
|
||||
|
||||
pub fn close_active_item(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if !self.items.is_empty() {
|
||||
self.close_item(self.items[self.active_item].id(), cx)
|
||||
if !self.item_views.is_empty() {
|
||||
self.close_item(self.item_views[self.active_item].1.id(), cx)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn close_item(&mut self, item_id: usize, cx: &mut ViewContext<Self>) {
|
||||
self.items.retain(|item| item.id() != item_id);
|
||||
self.active_item = cmp::min(self.active_item, self.items.len().saturating_sub(1));
|
||||
if self.items.is_empty() {
|
||||
self.item_views.retain(|(_, item)| item.id() != item_id);
|
||||
self.active_item = cmp::min(self.active_item, self.item_views.len().saturating_sub(1));
|
||||
if self.item_views.is_empty() {
|
||||
cx.emit(Event::Remove);
|
||||
}
|
||||
cx.notify();
|
||||
|
@ -183,11 +209,11 @@ impl Pane {
|
|||
enum Tabs {}
|
||||
let tabs = MouseEventHandler::new::<Tabs, _, _, _>(cx.view_id(), cx, |mouse_state, cx| {
|
||||
let mut row = Flex::row();
|
||||
for (ix, item) in self.items.iter().enumerate() {
|
||||
for (ix, (_, item_view)) in self.item_views.iter().enumerate() {
|
||||
let is_active = ix == self.active_item;
|
||||
|
||||
row.add_child({
|
||||
let mut title = item.title(cx);
|
||||
let mut title = item_view.title(cx);
|
||||
if title.len() > MAX_TAB_TITLE_LEN {
|
||||
let mut truncated_len = MAX_TAB_TITLE_LEN;
|
||||
while !title.is_char_boundary(truncated_len) {
|
||||
|
@ -212,9 +238,9 @@ impl Pane {
|
|||
.with_child(
|
||||
Align::new({
|
||||
let diameter = 7.0;
|
||||
let icon_color = if item.has_conflict(cx) {
|
||||
let icon_color = if item_view.has_conflict(cx) {
|
||||
Some(style.icon_conflict)
|
||||
} else if item.is_dirty(cx) {
|
||||
} else if item_view.is_dirty(cx) {
|
||||
Some(style.icon_dirty)
|
||||
} else {
|
||||
None
|
||||
|
@ -271,7 +297,7 @@ impl Pane {
|
|||
.with_child(
|
||||
Align::new(
|
||||
ConstrainedBox::new(if mouse_state.hovered {
|
||||
let item_id = item.id();
|
||||
let item_id = item_view.id();
|
||||
enum TabCloseButton {}
|
||||
let icon = Svg::new("icons/x.svg");
|
||||
MouseEventHandler::new::<TabCloseButton, _, _, _>(
|
||||
|
@ -314,13 +340,11 @@ impl Pane {
|
|||
}
|
||||
|
||||
row.add_child(
|
||||
Expanded::new(
|
||||
0.0,
|
||||
Container::new(Empty::new().boxed())
|
||||
.with_border(theme.workspace.tab.container.border)
|
||||
.boxed(),
|
||||
)
|
||||
.named("filler"),
|
||||
Empty::new()
|
||||
.contained()
|
||||
.with_border(theme.workspace.tab.container.border)
|
||||
.flexible(0., true)
|
||||
.named("filler"),
|
||||
);
|
||||
|
||||
row.boxed()
|
||||
|
@ -345,7 +369,7 @@ impl View for Pane {
|
|||
if let Some(active_item) = self.active_item() {
|
||||
Flex::column()
|
||||
.with_child(self.render_tabs(cx))
|
||||
.with_child(Expanded::new(1.0, ChildView::new(active_item.id()).boxed()).boxed())
|
||||
.with_child(ChildView::new(active_item.id()).flexible(1., true).boxed())
|
||||
.named("pane")
|
||||
} else {
|
||||
Empty::new().named("pane")
|
||||
|
@ -356,17 +380,3 @@ impl View for Pane {
|
|||
self.focus_active_item(cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PaneHandle {
|
||||
fn add_item_view(&self, item: Box<dyn ItemViewHandle>, cx: &mut MutableAppContext);
|
||||
}
|
||||
|
||||
impl PaneHandle for ViewHandle<Pane> {
|
||||
fn add_item_view(&self, item: Box<dyn ItemViewHandle>, cx: &mut MutableAppContext) {
|
||||
item.set_parent_pane(self, cx);
|
||||
self.update(cx, |pane, cx| {
|
||||
let item_idx = pane.add_item(item, cx);
|
||||
pane.activate_item(item_idx, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -183,7 +183,7 @@ impl PaneAxis {
                member = Container::new(member).with_border(border).boxed();
            }

            Expanded::new(1.0, member).boxed()
            Flexible::new(1.0, true, member).boxed()
        }))
        .boxed()
    }

@@ -135,19 +135,16 @@ impl Sidebar {
        }

        container.add_child(
            Flexible::new(
                1.,
                Hook::new(
                    ConstrainedBox::new(ChildView::new(active_item.id()).boxed())
                        .with_max_width(*self.width.borrow())
                        .boxed(),
                )
                .on_after_layout({
                    let width = self.width.clone();
                    move |size, _| *width.borrow_mut() = size.x()
                })
                .boxed(),
            Hook::new(
                ConstrainedBox::new(ChildView::new(active_item.id()).boxed())
                    .with_max_width(*self.width.borrow())
                    .boxed(),
            )
            .on_after_layout({
                let width = self.width.clone();
                move |size, _| *width.borrow_mut() = size.x()
            })
            .flexible(1., false)
            .boxed(),
        );
        if matches!(self.side, Side::Left) {

@@ -47,7 +47,7 @@ impl View for StatusBar {
                    .iter()
                    .map(|i| ChildView::new(i.id()).aligned().boxed()),
            )
            .with_child(Empty::new().expanded(1.).boxed())
            .with_child(Empty::new().flexible(1., true).boxed())
            .with_children(
                self.right_items
                    .iter()

@ -7,6 +7,7 @@ mod status_bar;
|
|||
use anyhow::{anyhow, Result};
|
||||
use client::{Authenticate, ChannelList, Client, User, UserStore};
|
||||
use clock::ReplicaId;
|
||||
use collections::HashSet;
|
||||
use gpui::{
|
||||
action,
|
||||
color::Color,
|
||||
|
@ -15,9 +16,9 @@ use gpui::{
|
|||
json::{self, to_string_pretty, ToJson},
|
||||
keymap::Binding,
|
||||
platform::{CursorStyle, WindowOptions},
|
||||
AnyViewHandle, AppContext, ClipboardItem, Entity, ModelContext, ModelHandle, MutableAppContext,
|
||||
PathPromptOptions, PromptLevel, RenderContext, Task, View, ViewContext, ViewHandle,
|
||||
WeakModelHandle,
|
||||
AnyModelHandle, AnyViewHandle, AppContext, ClipboardItem, Entity, ModelContext, ModelHandle,
|
||||
MutableAppContext, PathPromptOptions, PromptLevel, RenderContext, Task, View, ViewContext,
|
||||
ViewHandle, WeakModelHandle, WeakViewHandle,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use log::error;
|
||||
|
@ -32,6 +33,7 @@ use status_bar::StatusBar;
|
|||
pub use status_bar::StatusItemView;
|
||||
use std::{
|
||||
future::Future,
|
||||
hash::{Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
@ -94,7 +96,7 @@ pub struct AppState {
|
|||
pub user_store: ModelHandle<client::UserStore>,
|
||||
pub fs: Arc<dyn fs::Fs>,
|
||||
pub channel_list: ModelHandle<client::ChannelList>,
|
||||
pub entry_openers: Arc<[Box<dyn EntryOpener>]>,
|
||||
pub path_openers: Arc<[Box<dyn PathOpener>]>,
|
||||
pub build_window_options: &'static dyn Fn() -> WindowOptions<'static>,
|
||||
pub build_workspace: &'static dyn Fn(
|
||||
ModelHandle<Project>,
|
||||
|
@ -115,7 +117,7 @@ pub struct JoinProjectParams {
|
|||
pub app_state: Arc<AppState>,
|
||||
}
|
||||
|
||||
pub trait EntryOpener {
|
||||
pub trait PathOpener {
|
||||
fn open(
|
||||
&self,
|
||||
worktree: &mut Worktree,
|
||||
|
@ -129,7 +131,7 @@ pub trait Item: Entity + Sized {
|
|||
|
||||
fn build_view(
|
||||
handle: ModelHandle<Self>,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut ViewContext<Self::View>,
|
||||
) -> Self::View;
|
||||
|
||||
|
@ -137,6 +139,9 @@ pub trait Item: Entity + Sized {
|
|||
}
|
||||
|
||||
pub trait ItemView: View {
|
||||
type ItemHandle: ItemHandle;
|
||||
|
||||
fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle;
|
||||
fn title(&self, cx: &AppContext) -> String;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
fn clone_on_split(&self, _: &mut ViewContext<Self>) -> Option<Self>
|
||||
|
@ -172,27 +177,31 @@ pub trait ItemView: View {
|
|||
}
|
||||
|
||||
pub trait ItemHandle: Send + Sync {
|
||||
fn id(&self) -> usize;
|
||||
fn add_view(
|
||||
&self,
|
||||
window_id: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Box<dyn ItemViewHandle>;
|
||||
fn boxed_clone(&self) -> Box<dyn ItemHandle>;
|
||||
fn downgrade(&self) -> Box<dyn WeakItemHandle>;
|
||||
fn to_any(&self) -> AnyModelHandle;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
}
|
||||
|
||||
pub trait WeakItemHandle {
|
||||
fn id(&self) -> usize;
|
||||
fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>>;
|
||||
}
|
||||
|
||||
pub trait ItemViewHandle {
|
||||
fn item_handle(&self, cx: &AppContext) -> Box<dyn ItemHandle>;
|
||||
fn title(&self, cx: &AppContext) -> String;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
fn boxed_clone(&self) -> Box<dyn ItemViewHandle>;
|
||||
fn clone_on_split(&self, cx: &mut MutableAppContext) -> Option<Box<dyn ItemViewHandle>>;
|
||||
fn set_parent_pane(&self, pane: &ViewHandle<Pane>, cx: &mut MutableAppContext);
|
||||
fn added_to_pane(&self, cx: &mut ViewContext<Pane>);
|
||||
fn id(&self) -> usize;
|
||||
fn to_any(&self) -> AnyViewHandle;
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool;
|
||||
|
@ -209,13 +218,17 @@ pub trait ItemViewHandle {
|
|||
}
|
||||
|
||||
impl<T: Item> ItemHandle for ModelHandle<T> {
|
||||
fn id(&self) -> usize {
|
||||
self.id()
|
||||
}
|
||||
|
||||
fn add_view(
|
||||
&self,
|
||||
window_id: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Box<dyn ItemViewHandle> {
|
||||
Box::new(cx.add_view(window_id, |cx| T::build_view(self.clone(), settings, cx)))
|
||||
Box::new(cx.add_view(window_id, |cx| T::build_view(self.clone(), workspace, cx)))
|
||||
}
|
||||
|
||||
fn boxed_clone(&self) -> Box<dyn ItemHandle> {
|
||||
|
@ -226,19 +239,27 @@ impl<T: Item> ItemHandle for ModelHandle<T> {
|
|||
Box::new(self.downgrade())
|
||||
}
|
||||
|
||||
fn to_any(&self) -> AnyModelHandle {
|
||||
self.clone().into()
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
self.read(cx).project_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl ItemHandle for Box<dyn ItemHandle> {
|
||||
fn id(&self) -> usize {
|
||||
ItemHandle::id(self.as_ref())
|
||||
}
|
||||
|
||||
fn add_view(
|
||||
&self,
|
||||
window_id: usize,
|
||||
settings: watch::Receiver<Settings>,
|
||||
workspace: &Workspace,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Box<dyn ItemViewHandle> {
|
||||
ItemHandle::add_view(self.as_ref(), window_id, settings, cx)
|
||||
ItemHandle::add_view(self.as_ref(), window_id, workspace, cx)
|
||||
}
|
||||
|
||||
fn boxed_clone(&self) -> Box<dyn ItemHandle> {
|
||||
|
@ -249,18 +270,44 @@ impl ItemHandle for Box<dyn ItemHandle> {
|
|||
self.as_ref().downgrade()
|
||||
}
|
||||
|
||||
fn to_any(&self) -> AnyModelHandle {
|
||||
self.as_ref().to_any()
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
self.as_ref().project_path(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Item> WeakItemHandle for WeakModelHandle<T> {
|
||||
fn id(&self) -> usize {
|
||||
WeakModelHandle::id(self)
|
||||
}
|
||||
|
||||
fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
|
||||
WeakModelHandle::<T>::upgrade(*self, cx).map(|i| Box::new(i) as Box<dyn ItemHandle>)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Box<dyn WeakItemHandle> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.id().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Box<dyn WeakItemHandle> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.id() == other.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Box<dyn WeakItemHandle> {}
|
||||
|
||||
impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
|
||||
fn item_handle(&self, cx: &AppContext) -> Box<dyn ItemHandle> {
|
||||
Box::new(self.read(cx).item_handle(cx))
|
||||
}
|
||||
|
||||
fn title(&self, cx: &AppContext) -> String {
|
||||
self.read(cx).title(cx)
|
||||
}
|
||||
|
@ -280,25 +327,23 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
|
|||
.map(|handle| Box::new(handle) as Box<dyn ItemViewHandle>)
|
||||
}
|
||||
|
||||
fn set_parent_pane(&self, pane: &ViewHandle<Pane>, cx: &mut MutableAppContext) {
|
||||
pane.update(cx, |_, cx| {
|
||||
cx.subscribe(self, |pane, item, event, cx| {
|
||||
if T::should_close_item_on_event(event) {
|
||||
pane.close_item(item.id(), cx);
|
||||
return;
|
||||
fn added_to_pane(&self, cx: &mut ViewContext<Pane>) {
|
||||
cx.subscribe(self, |pane, item, event, cx| {
|
||||
if T::should_close_item_on_event(event) {
|
||||
pane.close_item(item.id(), cx);
|
||||
return;
|
||||
}
|
||||
if T::should_activate_item_on_event(event) {
|
||||
if let Some(ix) = pane.index_for_item_view(&item) {
|
||||
pane.activate_item(ix, cx);
|
||||
pane.activate(cx);
|
||||
}
|
||||
if T::should_activate_item_on_event(event) {
|
||||
if let Some(ix) = pane.item_index(&item) {
|
||||
pane.activate_item(ix, cx);
|
||||
pane.activate(cx);
|
||||
}
|
||||
}
|
||||
if T::should_update_tab_on_event(event) {
|
||||
cx.notify()
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
}
|
||||
if T::should_update_tab_on_event(event) {
|
||||
cx.notify()
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn save(&self, cx: &mut MutableAppContext) -> Result<Task<Result<()>>> {
|
||||
|
@ -360,7 +405,7 @@ pub struct WorkspaceParams {
|
|||
pub settings: watch::Receiver<Settings>,
|
||||
pub user_store: ModelHandle<UserStore>,
|
||||
pub channel_list: ModelHandle<ChannelList>,
|
||||
pub entry_openers: Arc<[Box<dyn EntryOpener>]>,
|
||||
pub path_openers: Arc<[Box<dyn PathOpener>]>,
|
||||
}
|
||||
|
||||
impl WorkspaceParams {
|
||||
|
@ -392,7 +437,7 @@ impl WorkspaceParams {
|
|||
languages,
|
||||
settings: watch::channel_with(settings).1,
|
||||
user_store,
|
||||
entry_openers: Arc::from([]),
|
||||
path_openers: Arc::from([]),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -412,13 +457,14 @@ impl WorkspaceParams {
|
|||
settings: app_state.settings.clone(),
|
||||
user_store: app_state.user_store.clone(),
|
||||
channel_list: app_state.channel_list.clone(),
|
||||
entry_openers: app_state.entry_openers.clone(),
|
||||
path_openers: app_state.path_openers.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Workspace {
|
||||
pub settings: watch::Receiver<Settings>,
|
||||
weak_self: WeakViewHandle<Self>,
|
||||
client: Arc<Client>,
|
||||
user_store: ModelHandle<client::UserStore>,
|
||||
fs: Arc<dyn Fs>,
|
||||
|
@ -430,8 +476,8 @@ pub struct Workspace {
|
|||
active_pane: ViewHandle<Pane>,
|
||||
status_bar: ViewHandle<StatusBar>,
|
||||
project: ModelHandle<Project>,
|
||||
entry_openers: Arc<[Box<dyn EntryOpener>]>,
|
||||
items: Vec<Box<dyn WeakItemHandle>>,
|
||||
path_openers: Arc<[Box<dyn PathOpener>]>,
|
||||
items: HashSet<Box<dyn WeakItemHandle>>,
|
||||
_observe_current_user: Task<()>,
|
||||
}
|
||||
|
||||
|
@ -473,6 +519,7 @@ impl Workspace {
|
|||
|
||||
Workspace {
|
||||
modal: None,
|
||||
weak_self: cx.weak_handle(),
|
||||
center: PaneGroup::new(pane.id()),
|
||||
panes: vec![pane.clone()],
|
||||
active_pane: pane.clone(),
|
||||
|
@ -484,12 +531,20 @@ impl Workspace {
|
|||
left_sidebar: Sidebar::new(Side::Left),
|
||||
right_sidebar: Sidebar::new(Side::Right),
|
||||
project: params.project.clone(),
|
||||
entry_openers: params.entry_openers.clone(),
|
||||
path_openers: params.path_openers.clone(),
|
||||
items: Default::default(),
|
||||
_observe_current_user,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn weak_handle(&self) -> WeakViewHandle<Self> {
|
||||
self.weak_self.clone()
|
||||
}
|
||||
|
||||
pub fn settings(&self) -> watch::Receiver<Settings> {
|
||||
self.settings.clone()
|
||||
}
|
||||
|
||||
pub fn left_sidebar_mut(&mut self) -> &mut Sidebar {
|
||||
&mut self.left_sidebar
|
||||
}
|
||||
|
@ -560,13 +615,13 @@ impl Workspace {
|
|||
async move {
|
||||
let project_path = project_path.await.ok()?;
|
||||
if fs.is_file(&abs_path).await {
|
||||
if let Some(entry) =
|
||||
this.update(&mut cx, |this, cx| this.open_entry(project_path, cx))
|
||||
{
|
||||
return Some(entry.await);
|
||||
}
|
||||
Some(
|
||||
this.update(&mut cx, |this, cx| this.open_path(project_path, cx))
|
||||
.await,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
|
@ -665,104 +720,59 @@ impl Workspace {
|
|||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn open_entry(
|
||||
pub fn open_path(
|
||||
&mut self,
|
||||
project_path: ProjectPath,
|
||||
path: ProjectPath,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Task<Result<Box<dyn ItemViewHandle>, Arc<anyhow::Error>>>> {
|
||||
let pane = self.active_pane().clone();
|
||||
if let Some(existing_item) =
|
||||
self.activate_or_open_existing_entry(project_path.clone(), &pane, cx)
|
||||
{
|
||||
return Some(cx.foreground().spawn(async move { Ok(existing_item) }));
|
||||
) -> Task<Result<Box<dyn ItemViewHandle>, Arc<anyhow::Error>>> {
|
||||
if let Some(existing_item) = self.item_for_path(&path, cx) {
|
||||
return Task::ready(Ok(self.open_item(existing_item, cx)));
|
||||
}
|
||||
|
||||
let worktree = match self
|
||||
.project
|
||||
.read(cx)
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
{
|
||||
let worktree = match self.project.read(cx).worktree_for_id(path.worktree_id, cx) {
|
||||
Some(worktree) => worktree,
|
||||
None => {
|
||||
log::error!("worktree {} does not exist", project_path.worktree_id);
|
||||
return None;
|
||||
return Task::ready(Err(Arc::new(anyhow!(
|
||||
"worktree {} does not exist",
|
||||
path.worktree_id
|
||||
))));
|
||||
}
|
||||
};
|
||||
|
||||
let project_path = project_path.clone();
|
||||
let entry_openers = self.entry_openers.clone();
|
||||
let task = worktree.update(cx, |worktree, cx| {
|
||||
for opener in entry_openers.iter() {
|
||||
let project_path = path.clone();
|
||||
let path_openers = self.path_openers.clone();
|
||||
let open_task = worktree.update(cx, |worktree, cx| {
|
||||
for opener in path_openers.iter() {
|
||||
if let Some(task) = opener.open(worktree, project_path.clone(), cx) {
|
||||
return Some(task);
|
||||
return task;
|
||||
}
|
||||
}
|
||||
log::error!("no opener for path {:?} found", project_path);
|
||||
None
|
||||
})?;
|
||||
Task::ready(Err(anyhow!("no opener found for path {:?}", project_path)))
|
||||
});
|
||||
|
||||
let pane = pane.downgrade();
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
let load_result = task.await;
|
||||
let pane = self.active_pane().clone().downgrade();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let item = open_task.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let pane = pane
|
||||
.upgrade(&cx)
|
||||
.ok_or_else(|| anyhow!("could not upgrade pane reference"))?;
|
||||
let item = load_result?;
|
||||
|
||||
// By the time loading finishes, the entry could have been already added
|
||||
// to the pane. If it was, we activate it, otherwise we'll store the
|
||||
// item and add a new view for it.
|
||||
if let Some(existing) =
|
||||
this.activate_or_open_existing_entry(project_path, &pane, cx)
|
||||
{
|
||||
Ok(existing)
|
||||
} else {
|
||||
Ok(this.add_item(item, cx))
|
||||
}
|
||||
Ok(this.open_item_in_pane(item, &pane, cx))
|
||||
})
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
fn activate_or_open_existing_entry(
|
||||
&mut self,
|
||||
project_path: ProjectPath,
|
||||
pane: &ViewHandle<Pane>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Box<dyn ItemViewHandle>> {
|
||||
// If the pane contains a view for this file, then activate
|
||||
// that item view.
|
||||
if let Some(existing_item_view) =
|
||||
pane.update(cx, |pane, cx| pane.activate_entry(project_path.clone(), cx))
|
||||
{
|
||||
return Some(existing_item_view);
|
||||
}
|
||||
fn item_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
|
||||
self.items
|
||||
.iter()
|
||||
.filter_map(|i| i.upgrade(cx))
|
||||
.find(|i| i.project_path(cx).as_ref() == Some(path))
|
||||
}
|
||||
|
||||
// Otherwise, if this file is already open somewhere in the workspace,
|
||||
// then add another view for it.
|
||||
let settings = self.settings.clone();
|
||||
let mut view_for_existing_item = None;
|
||||
self.items.retain(|item| {
|
||||
if let Some(item) = item.upgrade(cx) {
|
||||
if view_for_existing_item.is_none()
|
||||
&& item
|
||||
.project_path(cx)
|
||||
.map_or(false, |item_project_path| item_project_path == project_path)
|
||||
{
|
||||
view_for_existing_item =
|
||||
Some(item.add_view(cx.window_id(), settings.clone(), cx.as_mut()));
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
if let Some(view) = view_for_existing_item {
|
||||
pane.add_item_view(view.boxed_clone(), cx.as_mut());
|
||||
Some(view)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
pub fn item_of_type<T: Item>(&self, cx: &AppContext) -> Option<ModelHandle<T>> {
|
||||
self.items
|
||||
.iter()
|
||||
.find_map(|i| i.upgrade(cx).and_then(|i| i.to_any().downcast()))
|
||||
}
|
||||
|
||||
pub fn active_item(&self, cx: &AppContext) -> Option<Box<dyn ItemViewHandle>> {
|
||||
|
@ -791,24 +801,16 @@ impl Workspace {
|
|||
{
|
||||
error!("failed to save item: {:?}, ", error);
|
||||
}
|
||||
|
||||
handle.update(&mut cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| project.diagnose(cx))
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
},
|
||||
);
|
||||
} else {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await {
|
||||
error!("failed to save item: {:?}, ", error);
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| project.diagnose(cx))
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
@ -840,10 +842,6 @@ impl Workspace {
|
|||
if let Err(error) = result {
|
||||
error!("failed to save item: {:?}, ", error);
|
||||
}
|
||||
|
||||
handle.update(&mut cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| project.diagnose(cx))
|
||||
});
|
||||
})
|
||||
.detach()
|
||||
}
|
||||
|
@ -920,19 +918,65 @@ impl Workspace {
|
|||
pane
|
||||
}
|
||||
|
||||
pub fn add_item<T>(
|
||||
pub fn open_item<T>(
|
||||
&mut self,
|
||||
item_handle: T,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Box<dyn ItemViewHandle>
|
||||
where
|
||||
T: ItemHandle,
|
||||
T: 'static + ItemHandle,
|
||||
{
|
||||
let view = item_handle.add_view(cx.window_id(), self.settings.clone(), cx);
|
||||
self.items.push(item_handle.downgrade());
|
||||
self.active_pane()
|
||||
.add_item_view(view.boxed_clone(), cx.as_mut());
|
||||
view
|
||||
self.open_item_in_pane(item_handle, &self.active_pane().clone(), cx)
|
||||
}
|
||||
|
||||
pub fn open_item_in_pane<T>(
|
||||
&mut self,
|
||||
item_handle: T,
|
||||
pane: &ViewHandle<Pane>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Box<dyn ItemViewHandle>
|
||||
where
|
||||
T: 'static + ItemHandle,
|
||||
{
|
||||
self.items.insert(item_handle.downgrade());
|
||||
pane.update(cx, |pane, cx| pane.open_item(item_handle, self, cx))
|
||||
}
|
||||
|
||||
pub fn activate_pane_for_item(
|
||||
&mut self,
|
||||
item: &dyn ItemHandle,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> bool {
|
||||
let pane = self.panes.iter().find_map(|pane| {
|
||||
if pane.read(cx).contains_item(item) {
|
||||
Some(pane.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
if let Some(pane) = pane {
|
||||
self.activate_pane(pane.clone(), cx);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn activate_item(&mut self, item: &dyn ItemHandle, cx: &mut ViewContext<Self>) -> bool {
|
||||
let result = self.panes.iter().find_map(|pane| {
|
||||
if let Some(ix) = pane.read(cx).index_for_item(item) {
|
||||
Some((pane.clone(), ix))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
if let Some((pane, ix)) = result {
|
||||
self.activate_pane(pane.clone(), cx);
|
||||
pane.update(cx, |pane, cx| pane.activate_item(ix, cx));
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn activate_pane(&mut self, pane: ViewHandle<Pane>, cx: &mut ViewContext<Self>) {
|
||||
|
@ -977,7 +1021,7 @@ impl Workspace {
|
|||
self.activate_pane(new_pane.clone(), cx);
|
||||
if let Some(item) = pane.read(cx).active_item() {
|
||||
if let Some(clone) = item.clone_on_split(cx.as_mut()) {
|
||||
new_pane.add_item_view(clone, cx.as_mut());
|
||||
new_pane.update(cx, |new_pane, cx| new_pane.add_item_view(clone, cx));
|
||||
}
|
||||
}
|
||||
self.center
|
||||
|
@@ -1203,50 +1247,40 @@ impl View for Workspace {
    fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
        let settings = self.settings.borrow();
        let theme = &settings.theme;
        Container::new(
            Flex::column()
                .with_child(self.render_titlebar(&theme, cx))
                .with_child(
                    Expanded::new(
                        1.0,
                        Stack::new()
                            .with_child({
                                let mut content = Flex::row();
                                content.add_child(self.left_sidebar.render(&settings, cx));
                                if let Some(element) =
                                    self.left_sidebar.render_active_item(&settings, cx)
                                {
                                    content.add_child(Flexible::new(0.8, element).boxed());
                                }
                                content.add_child(
                                    Flex::column()
                                        .with_child(
                                            Expanded::new(1.0, self.center.render(&settings.theme))
                                                .boxed(),
                                        )
                                        .with_child(ChildView::new(self.status_bar.id()).boxed())
                                        .expanded(1.)
        Flex::column()
            .with_child(self.render_titlebar(&theme, cx))
            .with_child(
                Stack::new()
                    .with_child({
                        let mut content = Flex::row();
                        content.add_child(self.left_sidebar.render(&settings, cx));
                        if let Some(element) = self.left_sidebar.render_active_item(&settings, cx) {
                            content.add_child(Flexible::new(0.8, false, element).boxed());
                        }
                        content.add_child(
                            Flex::column()
                                .with_child(
                                    Flexible::new(1., true, self.center.render(&settings.theme))
                                        .boxed(),
                                );
                                if let Some(element) =
                                    self.right_sidebar.render_active_item(&settings, cx)
                                {
                                    content.add_child(Flexible::new(0.8, element).boxed());
                                }
                                content.add_child(self.right_sidebar.render(&settings, cx));
                                content.boxed()
                            })
                            .with_children(
                                self.modal.as_ref().map(|m| ChildView::new(m.id()).boxed()),
                            )
                            .boxed(),
                    )
                )
                .with_child(ChildView::new(self.status_bar.id()).boxed())
                .flexible(1., true)
                .boxed(),
        );
        if let Some(element) = self.right_sidebar.render_active_item(&settings, cx)
        {
            content.add_child(Flexible::new(0.8, false, element).boxed());
        }
        content.add_child(self.right_sidebar.render(&settings, cx));
        content.boxed()
        })
        .with_children(self.modal.as_ref().map(|m| ChildView::new(m.id()).boxed()))
        .flexible(1.0, true)
        .boxed(),
        )
        .boxed(),
        )
        .with_background_color(settings.theme.workspace.background)
        .named("workspace")
        )
        .contained()
        .with_background_color(settings.theme.workspace.background)
        .named("workspace")
    }

    fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
@@ -249,11 +249,28 @@ line_number_active = "$text.0.color"
selection = "$selection.host"
guest_selections = "$selection.guests"
error_color = "$status.bad"
error_diagnostic = { text = "$status.bad" }
invalid_error_diagnostic = { text = "$text.3.color" }
warning_diagnostic = { text = "$status.warn" }
invalid_warning_diagnostic = { text = "$text.3.color" }
information_diagnostic = { text = "$status.info" }
invalid_information_diagnostic = { text = "$text.3.color" }
hint_diagnostic = { text = "$status.info" }
invalid_hint_diagnostic = { text = "$text.3.color" }

[editor.error_diagnostic]
text = "$status.bad"
header = { padding = { left = 10 }, background = "#ffffff08" }

[editor.warning_diagnostic]
text = "$status.warn"
header = { padding = { left = 10 }, background = "#ffffff08" }

[editor.information_diagnostic]
text = "$status.info"
header = { padding = { left = 10 }, background = "#ffffff08" }

[editor.hint_diagnostic]
text = "$status.info"
header = { padding = { left = 10 }, background = "#ffffff08" }

[project_diagnostics]
background = "$surface.1"
empty_message = "$text.0"
status_bar_item = { extends = "$text.2", margin.right = 10 }
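For readers unfamiliar with how these theme tables get consumed, the following is a minimal, hypothetical sketch of deserializing the new [project_diagnostics] section with serde. The struct and field types are illustrative assumptions, not the actual definitions in the theme crate, which resolves "$..." variables into richer style types.

// Hypothetical sketch only: shows the shape of the new table, nothing more.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ProjectDiagnosticsTheme {
    background: String,           // e.g. "$surface.1" before variable resolution
    empty_message: String,        // e.g. "$text.0"
    status_bar_item: toml::Value, // extends "$text.2" and adds margin.right = 10
}

fn project_diagnostics_theme(theme_toml: &str) -> anyhow::Result<ProjectDiagnosticsTheme> {
    #[derive(Deserialize)]
    struct Root {
        project_diagnostics: ProjectDiagnosticsTheme,
    }
    Ok(toml::from_str::<Root>(theme_toml)?.project_diagnostics)
}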
@@ -13,3 +13,4 @@ brackets = [
[language_server]
binary = "rust-analyzer"
disk_based_diagnostic_sources = ["rustc"]
disk_based_diagnostics_progress_token = "rustAnalyzer/cargo check"
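A hedged sketch of a config struct that could pick up the new disk_based_diagnostics_progress_token key; the field names mirror the TOML keys above, but the struct itself is an illustrative assumption rather than the crate's actual type.

// Illustrative mapping for the [language_server] table shown above (assumed struct).
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct LanguageServerConfig {
    binary: String,
    #[serde(default)]
    disk_based_diagnostic_sources: Vec<String>,
    // New in this change: identifies the progress notifications that mark when a
    // disk-based diagnostics pass (e.g. "rustAnalyzer/cargo check") starts and ends.
    #[serde(default)]
    disk_based_diagnostics_progress_token: Option<String>,
}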
@@ -7,184 +7,6 @@ use std::{str, sync::Arc};
#[folder = "languages"]
struct LanguageDir;

mod rust {
    use anyhow::Result;
    use async_trait::async_trait;
    use collections::{HashMap, HashSet};
    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity};
    use parking_lot::Mutex;
    use serde::Deserialize;
    use serde_json::Deserializer;
    use smol::process::Command;
    use std::path::{Path, PathBuf};
    use std::sync::Arc;

    #[derive(Default)]
    pub struct DiagnosticProvider {
        reported_paths: Mutex<HashSet<Arc<Path>>>,
    }

    #[derive(Debug, Deserialize)]
    struct Check {
        message: CompilerMessage,
    }

    #[derive(Debug, Deserialize)]
    struct CompilerMessage {
        code: Option<ErrorCode>,
        spans: Vec<Span>,
        message: String,
        level: ErrorLevel,
        children: Vec<CompilerMessage>,
    }

    #[derive(Debug, Deserialize)]
    enum ErrorLevel {
        #[serde(rename = "warning")]
        Warning,
        #[serde(rename = "error")]
        Error,
        #[serde(rename = "help")]
        Help,
        #[serde(rename = "note")]
        Note,
    }

    #[derive(Debug, Deserialize)]
    struct ErrorCode {
        code: String,
    }

    #[derive(Clone, Debug, Deserialize)]
    struct Span {
        is_primary: bool,
        file_name: PathBuf,
        byte_start: usize,
        byte_end: usize,
        expansion: Option<Box<Expansion>>,
    }

    #[derive(Clone, Debug, Deserialize)]
    struct Expansion {
        span: Span,
    }
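For context on the structs above (which this diff removes in favor of LSP-provided diagnostics), here is a rough, abbreviated example of the kind of line they deserialize from `cargo check --message-format json`; this is a sketch, not verbatim cargo output.

// Sketch: one compiler-message line from `cargo check --message-format json`.
// Lines without a `message` object (artifact notices, build-script output, etc.)
// fail to deserialize into `Check` and are skipped by the `if let Ok(..)` below.
let line = r#"{
    "reason": "compiler-message",
    "message": {
        "code": { "code": "E0308" },
        "level": "error",
        "message": "mismatched types",
        "spans": [{
            "is_primary": true,
            "file_name": "src/main.rs",
            "byte_start": 100,
            "byte_end": 105,
            "expansion": null
        }],
        "children": []
    }
}"#;
let check: Check = serde_json::from_str(line).unwrap();
assert_eq!(check.message.spans.len(), 1);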
    #[async_trait]
    impl language::DiagnosticProvider for DiagnosticProvider {
        async fn diagnose(
            &self,
            root_path: Arc<Path>,
        ) -> Result<HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>> {
            let output = Command::new("cargo")
                .arg("check")
                .args(["--message-format", "json"])
                .current_dir(&root_path)
                .output()
                .await?;

            let mut group_id = 0;
            let mut diagnostics_by_path = HashMap::default();
            let mut new_reported_paths = HashSet::default();
            for value in
                Deserializer::from_slice(&output.stdout).into_iter::<&serde_json::value::RawValue>()
            {
                if let Ok(check) = serde_json::from_str::<Check>(value?.get()) {
                    let check_severity = match check.message.level {
                        ErrorLevel::Warning => DiagnosticSeverity::WARNING,
                        ErrorLevel::Error => DiagnosticSeverity::ERROR,
                        ErrorLevel::Help => DiagnosticSeverity::HINT,
                        ErrorLevel::Note => DiagnosticSeverity::INFORMATION,
                    };

                    let mut primary_span = None;
                    for mut span in check.message.spans {
                        if let Some(mut expansion) = span.expansion {
                            expansion.span.is_primary = span.is_primary;
                            span = expansion.span;
                        }

                        let span_path: Arc<Path> = span.file_name.as_path().into();
                        new_reported_paths.insert(span_path.clone());
                        diagnostics_by_path
                            .entry(span_path)
                            .or_insert(Vec::new())
                            .push(DiagnosticEntry {
                                range: span.byte_start..span.byte_end,
                                diagnostic: Diagnostic {
                                    code: check.message.code.as_ref().map(|c| c.code.clone()),
                                    severity: check_severity,
                                    message: check.message.message.clone(),
                                    group_id,
                                    is_valid: true,
                                    is_primary: span.is_primary,
                                    is_disk_based: true,
                                },
                            });

                        if span.is_primary {
                            primary_span = Some(span);
                        }
                    }

                    for mut child in check.message.children {
                        if child.spans.is_empty() {
                            if let Some(primary_span) = primary_span.clone() {
                                child.spans.push(primary_span);
                            }
                        } else {
                            // TODO
                            continue;
                        }

                        let child_severity = match child.level {
                            ErrorLevel::Warning => DiagnosticSeverity::WARNING,
                            ErrorLevel::Error => DiagnosticSeverity::ERROR,
                            ErrorLevel::Help => DiagnosticSeverity::HINT,
                            ErrorLevel::Note => DiagnosticSeverity::INFORMATION,
                        };

                        for mut span in child.spans {
                            if let Some(expansion) = span.expansion {
                                span = expansion.span;
                            }

                            let span_path: Arc<Path> = span.file_name.as_path().into();
                            new_reported_paths.insert(span_path.clone());
                            diagnostics_by_path
                                .entry(span_path)
                                .or_insert(Vec::new())
                                .push(DiagnosticEntry {
                                    range: span.byte_start..span.byte_end,
                                    diagnostic: Diagnostic {
                                        code: child.code.as_ref().map(|c| c.code.clone()),
                                        severity: child_severity,
                                        message: child.message.clone(),
                                        group_id,
                                        is_valid: true,
                                        is_primary: false,
                                        is_disk_based: true,
                                    },
                                });
                        }
                    }

                    group_id += 1;
                }
            }

            let reported_paths = &mut *self.reported_paths.lock();
            for old_reported_path in reported_paths.iter() {
                if !diagnostics_by_path.contains_key(old_reported_path) {
                    diagnostics_by_path.insert(old_reported_path.clone(), Default::default());
                }
            }
            *reported_paths = new_reported_paths;

            Ok(diagnostics_by_path)
        }
    }
}

pub fn build_language_registry() -> LanguageRegistry {
    let mut languages = LanguageRegistry::default();
    languages.add(Arc::new(rust()));
@@ -202,7 +24,6 @@ fn rust() -> Language {
        .unwrap()
        .with_indents_query(load_query("rust/indents.scm").as_ref())
        .unwrap()
        .with_diagnostic_provider(rust::DiagnosticProvider::default())
}

fn markdown() -> Language {
@@ -51,11 +51,11 @@ fn main() {
    let http = http::client();
    let client = client::Client::new(http.clone());
    let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
    let mut entry_openers = Vec::new();
    let mut path_openers = Vec::new();

    client::init(client.clone(), cx);
    workspace::init(cx);
    editor::init(cx, &mut entry_openers);
    editor::init(cx, &mut path_openers);
    go_to_line::init(cx);
    file_finder::init(cx);
    chat_panel::init(cx);

@@ -72,7 +72,7 @@ fn main() {
        client,
        user_store,
        fs: Arc::new(RealFs),
        entry_openers: Arc::from(entry_openers),
        path_openers: Arc::from(path_openers),
        build_window_options: &build_window_options,
        build_workspace: &build_workspace,
    });
@@ -16,8 +16,8 @@ fn init_logger() {
}

pub fn test_app_state(cx: &mut MutableAppContext) -> Arc<AppState> {
    let mut entry_openers = Vec::new();
    editor::init(cx, &mut entry_openers);
    let mut path_openers = Vec::new();
    editor::init(cx, &mut path_openers);
    let (settings_tx, settings) = watch::channel_with(build_settings(cx));
    let themes = ThemeRegistry::new(Assets, cx.font_cache().clone());
    let http = FakeHttpClient::with_404_response();

@@ -41,7 +41,7 @@ pub fn test_app_state(cx: &mut MutableAppContext) -> Arc<AppState> {
        client,
        user_store,
        fs: Arc::new(FakeFs::new()),
        entry_openers: Arc::from(entry_openers),
        path_openers: Arc::from(path_openers),
        build_window_options: &build_window_options,
        build_workspace: &build_workspace,
    })
@@ -62,7 +62,7 @@ pub fn build_workspace(
        settings: app_state.settings.clone(),
        user_store: app_state.user_store.clone(),
        channel_list: app_state.channel_list.clone(),
        entry_openers: app_state.entry_openers.clone(),
        path_openers: app_state.path_openers.clone(),
    };
    let mut workspace = Workspace::new(&workspace_params, cx);
    let project = workspace.project().clone();

@@ -88,12 +88,20 @@ pub fn build_workspace(
            .into(),
    );

    let diagnostic =
    let diagnostic_message =
        cx.add_view(|_| editor::items::DiagnosticMessage::new(app_state.settings.clone()));
    let diagnostic_summary = cx.add_view(|cx| {
        diagnostics::items::DiagnosticSummary::new(
            workspace.project(),
            app_state.settings.clone(),
            cx,
        )
    });
    let cursor_position =
        cx.add_view(|_| editor::items::CursorPosition::new(app_state.settings.clone()));
    workspace.status_bar().update(cx, |status_bar, cx| {
        status_bar.add_left_item(diagnostic, cx);
        status_bar.add_left_item(diagnostic_summary, cx);
        status_bar.add_left_item(diagnostic_message, cx);
        status_bar.add_right_item(cursor_position, cx);
    });
@@ -256,8 +264,7 @@ mod tests {

        // Open the first entry
        let entry_1 = workspace
            .update(&mut cx, |w, cx| w.open_entry(file1.clone(), cx))
            .unwrap()
            .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
            .await
            .unwrap();
        cx.read(|cx| {

@@ -266,13 +273,12 @@ mod tests {
                pane.active_item().unwrap().project_path(cx),
                Some(file1.clone())
            );
            assert_eq!(pane.items().len(), 1);
            assert_eq!(pane.item_views().count(), 1);
        });

        // Open the second entry
        workspace
            .update(&mut cx, |w, cx| w.open_entry(file2.clone(), cx))
            .unwrap()
            .update(&mut cx, |w, cx| w.open_path(file2.clone(), cx))
            .await
            .unwrap();
        cx.read(|cx| {

@@ -281,12 +287,12 @@ mod tests {
                pane.active_item().unwrap().project_path(cx),
                Some(file2.clone())
            );
            assert_eq!(pane.items().len(), 2);
            assert_eq!(pane.item_views().count(), 2);
        });

        // Open the first entry again. The existing pane item is activated.
        let entry_1b = workspace
            .update(&mut cx, |w, cx| w.open_entry(file1.clone(), cx).unwrap())
            .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
            .await
            .unwrap();
        assert_eq!(entry_1.id(), entry_1b.id());
@@ -297,14 +303,14 @@ mod tests {
                pane.active_item().unwrap().project_path(cx),
                Some(file1.clone())
            );
            assert_eq!(pane.items().len(), 2);
            assert_eq!(pane.item_views().count(), 2);
        });

        // Split the pane with the first entry, then open the second entry again.
        workspace
            .update(&mut cx, |w, cx| {
                w.split_pane(w.active_pane().clone(), SplitDirection::Right, cx);
                w.open_entry(file2.clone(), cx).unwrap()
                w.open_path(file2.clone(), cx)
            })
            .await
            .unwrap();

@@ -323,8 +329,8 @@ mod tests {
        // Open the third entry twice concurrently. Only one pane item is added.
        let (t1, t2) = workspace.update(&mut cx, |w, cx| {
            (
                w.open_entry(file3.clone(), cx).unwrap(),
                w.open_entry(file3.clone(), cx).unwrap(),
                w.open_path(file3.clone(), cx),
                w.open_path(file3.clone(), cx),
            )
        });
        t1.await.unwrap();

@@ -336,8 +342,7 @@ mod tests {
                Some(file3.clone())
            );
            let pane_entries = pane
                .items()
                .iter()
                .item_views()
                .map(|i| i.project_path(cx).unwrap())
                .collect::<Vec<_>>();
            assert_eq!(pane_entries, &[file1, file2, file3]);
@@ -553,15 +558,13 @@ mod tests {
        workspace
            .update(&mut cx, |workspace, cx| {
                workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
                workspace
                    .open_entry(
                        ProjectPath {
                            worktree_id: worktree.read(cx).id(),
                            path: Path::new("the-new-name.rs").into(),
                        },
                        cx,
                    )
                    .unwrap()
                workspace.open_path(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: Path::new("the-new-name.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

@@ -574,7 +577,10 @@ mod tests {
                .unwrap()
        });
        cx.read(|cx| {
            assert_eq!(editor2.read(cx).buffer(), editor.read(cx).buffer());
            assert_eq!(
                editor2.read(cx).buffer().read(cx).as_singleton().unwrap(),
                editor.read(cx).buffer().read(cx).as_singleton().unwrap()
            );
        })
    }

@@ -658,8 +664,7 @@ mod tests {
        let pane_1 = cx.read(|cx| workspace.read(cx).active_pane().clone());

        workspace
            .update(&mut cx, |w, cx| w.open_entry(file1.clone(), cx))
            .unwrap()
            .update(&mut cx, |w, cx| w.open_path(file1.clone(), cx))
            .await
            .unwrap();
        cx.read(|cx| {