Start on adding support for editing via the assistant panel (#14795)
Note that this shouldn't have any visible user-facing behavior yet. The feature is incomplete, but we want to merge it early to avoid a long-running branch.

Release Notes:

- N/A

---------

Co-authored-by: Nathan <nathan@zed.dev>
This commit is contained in:
parent 87457f9ae8
commit 4d177918c1
44 changed files with 1999 additions and 968 deletions
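The central API change in this diff is that completion providers now expose `stream_completion`, which resolves to a `CompletionResponse` that implements `futures::Stream` over text chunks, with a `complete` convenience wrapper that collects the chunks into a single `String`. The sketch below is not the Zed code itself — it omits the gpui `Task`/executor plumbing and the semaphore guard field that the real `CompletionResponse` holds — it only illustrates the stream-wrapper pattern the diff introduces.

```rust
// Minimal sketch (assumptions noted above): a response type wrapping a boxed
// chunk stream, mirroring the `CompletionResponse` shape in this diff, plus a
// helper that drains it the way the new `complete` wrapper does.
use anyhow::Result;
use futures::{stream::BoxStream, Stream, StreamExt};
use std::{
    pin::Pin,
    task::{Context, Poll},
};

pub struct CompletionResponse {
    inner: BoxStream<'static, Result<String>>,
    // The real type also holds a `_lock: SemaphoreGuardArc` so the
    // rate-limiter slot is released when the response is dropped.
}

impl Stream for CompletionResponse {
    type Item = Result<String>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Delegate directly to the wrapped stream of chunks.
        Pin::new(&mut self.inner).poll_next(cx)
    }
}

/// Drain the stream and concatenate the chunks into one completion string.
pub async fn collect_completion(mut response: CompletionResponse) -> Result<String> {
    let mut completion = String::new();
    while let Some(chunk) = response.next().await {
        completion.push_str(&chunk?);
    }
    Ok(completion)
}
```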
@@ -7,7 +7,6 @@ mod inline_assistant;
mod model_selector;
mod prompt_library;
mod prompts;
mod search;
mod slash_command;
mod streaming_diff;
mod terminal_inline_assistant;

@@ -53,9 +52,9 @@ actions!(
InsertActivePrompt,
DeployHistory,
DeployPromptLibrary,
ApplyEdit,
ConfirmCommand,
ToggleModelSelector
ToggleModelSelector,
DebugEditSteps
]
);

@@ -1,19 +1,19 @@
use crate::{
assistant_settings::{AssistantDockPosition, AssistantSettings},
humanize_token_count, parse_next_edit_suggestion,
humanize_token_count,
prompt_library::open_prompt_library,
search::*,
slash_command::{
default_command::DefaultSlashCommand,
docs_command::{DocsSlashCommand, DocsSlashCommandArgs},
SlashCommandCompletionProvider, SlashCommandRegistry,
},
terminal_inline_assistant::TerminalInlineAssistant,
ApplyEdit, Assist, CompletionProvider, ConfirmCommand, Context, ContextEvent, ContextId,
ContextStore, CycleMessageRole, DeployHistory, DeployPromptLibrary, EditSuggestion,
InlineAssist, InlineAssistant, InsertIntoEditor, MessageStatus, ModelSelector,
PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata,
ResetKey, Role, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector,
Assist, CompletionProvider, ConfirmCommand, Context, ContextEvent, ContextId, ContextStore,
CycleMessageRole, DebugEditSteps, DeployHistory, DeployPromptLibrary, EditStep,
EditStepOperations, EditSuggestionGroup, InlineAssist, InlineAssistId, InlineAssistant,
InsertIntoEditor, MessageStatus, ModelSelector, PendingSlashCommand, PendingSlashCommandStatus,
QuoteSelection, RemoteContextMetadata, ResetKey, Role, SavedContextMetadata, Split,
ToggleFocus, ToggleModelSelector,
};
use anyhow::{anyhow, Result};
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection};

@@ -25,29 +25,36 @@ use editor::{
display_map::{
BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, RenderBlock, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorEvent, RowExt, ToOffset as _, ToPoint,
scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor},
Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, RowExt, ToOffset as _, ToPoint,
};
use editor::{display_map::CreaseId, FoldPlaceholder};
use fs::Fs;
use gpui::{
div, percentage, point, Action, Animation, AnimationExt, AnyElement, AnyView, AppContext,
AsyncWindowContext, ClipboardItem, DismissEvent, Empty, EventEmitter, FocusHandle,
FocusableView, InteractiveElement, IntoElement, Model, ParentElement, Pixels, Render,
SharedString, StatefulInteractiveElement, Styled, Subscription, Task, Transformation,
AsyncWindowContext, ClipboardItem, Context as _, DismissEvent, Empty, Entity, EventEmitter,
FocusHandle, FocusableView, InteractiveElement, IntoElement, Model, ParentElement, Pixels,
Render, SharedString, StatefulInteractiveElement, Styled, Subscription, Task, Transformation,
UpdateGlobal, View, ViewContext, VisualContext, WeakView, WindowContext,
};
use indexed_docs::IndexedDocsStore;
use language::{
language_settings::SoftWrap, AutoindentMode, Buffer, LanguageRegistry, LspAdapterDelegate,
OffsetRangeExt as _, Point, ToOffset,
language_settings::SoftWrap, Buffer, Capability, LanguageRegistry, LspAdapterDelegate, Point,
ToOffset,
};
use multi_buffer::MultiBufferRow;
use picker::{Picker, PickerDelegate};
use project::{Project, ProjectLspAdapterDelegate, ProjectTransaction};
use project::{Project, ProjectLspAdapterDelegate};
use search::{buffer_search::DivRegistrar, BufferSearchBar};
use settings::Settings;
use std::{cmp, fmt::Write, ops::Range, path::PathBuf, sync::Arc, time::Duration};
use std::{
cmp::{self, Ordering},
fmt::Write,
ops::Range,
path::PathBuf,
sync::Arc,
time::Duration,
};
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
use theme::ThemeSettings;
use ui::{

@@ -60,7 +67,8 @@ use util::ResultExt;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
item::{self, BreadcrumbText, FollowableItem, Item, ItemHandle},
pane,
notifications::NotifyTaskExt,
pane::{self, SaveIntent},
searchable::{SearchEvent, SearchableItem},
Pane, Save, ToggleZoom, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
};
@@ -591,6 +599,7 @@ impl AssistantPanel {
make_lsp_adapter_delegate(workspace.project(), cx).log_err()
});

let assistant_panel = cx.view().downgrade();
let editor = cx.new_view(|cx| {
let mut editor = ContextEditor::for_context(
context,

@@ -598,6 +607,7 @@ impl AssistantPanel {
workspace.clone(),
self.project.clone(),
lsp_adapter_delegate,
assistant_panel,
cx,
);
editor.insert_default_prompt(cx);

@@ -720,6 +730,7 @@ impl AssistantPanel {

cx.spawn(|this, mut cx| async move {
let context = context.await?;
let assistant_panel = this.clone();
this.update(&mut cx, |this, cx| {
let workspace = workspace
.upgrade()

@@ -731,6 +742,7 @@ impl AssistantPanel {
workspace,
project,
lsp_adapter_delegate,
assistant_panel,
cx,
)
});

@@ -774,6 +786,7 @@ impl AssistantPanel {

cx.spawn(|this, mut cx| async move {
let context = context.await?;
let assistant_panel = this.clone();
this.update(&mut cx, |this, cx| {
let workspace = workspace
.upgrade()

@@ -785,6 +798,7 @@ impl AssistantPanel {
workspace,
this.project.clone(),
lsp_adapter_delegate,
assistant_panel,
cx,
)
});
@@ -956,10 +970,18 @@ struct ScrollPosition {
cursor: Anchor,
}

struct ActiveEditStep {
start: language::Anchor,
assist_ids: Vec<InlineAssistId>,
editor: Option<WeakView<Editor>>,
_open_editor: Task<Result<()>>,
}

pub struct ContextEditor {
context: Model<Context>,
fs: Arc<dyn Fs>,
workspace: WeakView<Workspace>,
project: Model<Project>,
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
editor: View<Editor>,
blocks: HashSet<BlockId>,

@@ -968,6 +990,8 @@ pub struct ContextEditor {
pending_slash_command_creases: HashMap<Range<language::Anchor>, CreaseId>,
pending_slash_command_blocks: HashMap<Range<language::Anchor>, BlockId>,
_subscriptions: Vec<Subscription>,
active_edit_step: Option<ActiveEditStep>,
assistant_panel: WeakView<AssistantPanel>,
}

impl ContextEditor {
@@ -979,6 +1003,7 @@ impl ContextEditor {
workspace: View<Workspace>,
project: Model<Project>,
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
assistant_panel: WeakView<AssistantPanel>,
cx: &mut ViewContext<Self>,
) -> Self {
let completion_provider = SlashCommandCompletionProvider::new(

@@ -996,7 +1021,7 @@ impl ContextEditor {
editor.set_show_wrap_guides(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_completion_provider(Box::new(completion_provider));
editor.set_collaboration_hub(Box::new(project));
editor.set_collaboration_hub(Box::new(project.clone()));
editor
});

@@ -1017,9 +1042,12 @@ impl ContextEditor {
remote_id: None,
fs,
workspace: workspace.downgrade(),
project,
pending_slash_command_creases: HashMap::default(),
pending_slash_command_blocks: HashMap::default(),
_subscriptions,
active_edit_step: None,
assistant_panel,
};
this.update_message_headers(cx);
this.insert_slash_command_output_sections(sections, cx);
@@ -1052,31 +1080,37 @@ impl ContextEditor {
}

fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
let cursors = self.cursors(cx);
if !self.apply_edit_step(cx) {
self.send_to_model(cx);
}
}

let user_messages = self.context.update(cx, |context, cx| {
let selected_messages = context
.messages_for_offsets(cursors, cx)
.into_iter()
.map(|message| message.id)
.collect();
context.assist(selected_messages, cx)
});
let new_selections = user_messages
.iter()
.map(|message| {
let cursor = message
fn apply_edit_step(&mut self, cx: &mut ViewContext<Self>) -> bool {
if let Some(step) = self.active_edit_step.as_ref() {
InlineAssistant::update_global(cx, |assistant, cx| {
for assist_id in &step.assist_ids {
assistant.start_assist(*assist_id, cx);
}
!step.assist_ids.is_empty()
})
} else {
false
}
}

fn send_to_model(&mut self, cx: &mut ViewContext<Self>) {
if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) {
let new_selection = {
let cursor = user_message
.start
.to_offset(self.context.read(cx).buffer().read(cx));
cursor..cursor
})
.collect::<Vec<_>>();
if !new_selections.is_empty() {
};
self.editor.update(cx, |editor, cx| {
editor.change_selections(
Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
cx,
|selections| selections.select_ranges(new_selections),
|selections| selections.select_ranges([new_selection]),
);
});
// Avoid scrolling to the new cursor position so the assistant's output is stable.
@@ -1093,6 +1127,53 @@ impl ContextEditor {
}
}

fn debug_edit_steps(&mut self, _: &DebugEditSteps, cx: &mut ViewContext<Self>) {
let mut output = String::new();
for (i, step) in self.context.read(cx).edit_steps().iter().enumerate() {
output.push_str(&format!("Step {}:\n", i + 1));
output.push_str(&format!(
"Content: {}\n",
self.context
.read(cx)
.buffer()
.read(cx)
.text_for_range(step.source_range.clone())
.collect::<String>()
));
match &step.operations {
Some(EditStepOperations::Parsed {
operations,
raw_output,
}) => {
output.push_str(&format!("Raw Output:\n{raw_output}\n"));
output.push_str("Parsed Operations:\n");
for op in operations {
output.push_str(&format!(" {:?}\n", op));
}
}
Some(EditStepOperations::Pending(_)) => {
output.push_str("Operations: Pending\n");
}
None => {
output.push_str("Operations: None\n");
}
}
output.push('\n');
}

let editor = self
.workspace
.update(cx, |workspace, cx| Editor::new_in_workspace(workspace, cx));

if let Ok(editor) = editor {
cx.spawn(|_, mut cx| async move {
let editor = editor.await?;
editor.update(&mut cx, |editor, cx| editor.set_text(output, cx))
})
.detach_and_notify_err(cx);
}
}

fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext<Self>) {
let cursors = self.cursors(cx);
self.context.update(cx, |context, cx| {
@ -1222,39 +1303,8 @@ impl ContextEditor {
|
|||
context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
|
||||
});
|
||||
}
|
||||
ContextEvent::EditSuggestionsChanged => {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let excerpt_id = *buffer.as_singleton().unwrap().0;
|
||||
let context = self.context.read(cx);
|
||||
let highlighted_rows = context
|
||||
.edit_suggestions()
|
||||
.iter()
|
||||
.map(|suggestion| {
|
||||
let start = buffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.source_range.start)
|
||||
.unwrap();
|
||||
let end = buffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.source_range.end)
|
||||
.unwrap();
|
||||
start..=end
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
editor.clear_row_highlights::<EditSuggestion>();
|
||||
for range in highlighted_rows {
|
||||
editor.highlight_rows::<EditSuggestion>(
|
||||
range,
|
||||
Some(
|
||||
cx.theme()
|
||||
.colors()
|
||||
.editor_document_highlight_read_background,
|
||||
),
|
||||
false,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
});
|
||||
ContextEvent::EditStepsChanged => {
|
||||
cx.notify();
|
||||
}
|
||||
ContextEvent::SummaryChanged => {
|
||||
cx.emit(EditorEvent::TitleChanged);
|
||||
|
@ -1515,12 +1565,200 @@ impl ContextEditor {
|
|||
}
|
||||
EditorEvent::SelectionsChanged { .. } => {
|
||||
self.scroll_position = self.cursor_scroll_position(cx);
|
||||
if self
|
||||
.edit_step_for_cursor(cx)
|
||||
.map(|step| step.source_range.start)
|
||||
!= self.active_edit_step.as_ref().map(|step| step.start)
|
||||
{
|
||||
if let Some(old_active_edit_step) = self.active_edit_step.take() {
|
||||
if let Some(editor) = old_active_edit_step
|
||||
.editor
|
||||
.and_then(|editor| editor.upgrade())
|
||||
{
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
if let Some(pane) = workspace.pane_for(&editor) {
|
||||
pane.update(cx, |pane, cx| {
|
||||
let item_id = editor.entity_id();
|
||||
if pane.is_active_preview_item(item_id) {
|
||||
pane.close_item_by_id(
|
||||
item_id,
|
||||
SaveIntent::Skip,
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(new_active_step) = self.edit_step_for_cursor(cx) {
|
||||
let suggestions = new_active_step.edit_suggestions(&self.project, cx);
|
||||
self.active_edit_step = Some(ActiveEditStep {
|
||||
start: new_active_step.source_range.start,
|
||||
assist_ids: Vec::new(),
|
||||
editor: None,
|
||||
_open_editor: self.open_editor_for_edit_suggestions(suggestions, cx),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
cx.emit(event.clone());
|
||||
}
|
||||
|
||||
fn open_editor_for_edit_suggestions(
|
||||
&mut self,
|
||||
edit_suggestions: Task<HashMap<Model<Buffer>, Vec<EditSuggestionGroup>>>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let workspace = self.workspace.clone();
|
||||
let project = self.project.clone();
|
||||
let assistant_panel = self.assistant_panel.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let edit_suggestions = edit_suggestions.await;
|
||||
|
||||
let mut assist_ids = Vec::new();
|
||||
let editor = if edit_suggestions.is_empty() {
|
||||
return Ok(());
|
||||
} else if edit_suggestions.len() == 1
|
||||
&& edit_suggestions.values().next().unwrap().len() == 1
|
||||
{
|
||||
// If there's only one buffer and one suggestion group, open it directly
|
||||
let (buffer, suggestion_groups) = edit_suggestions.into_iter().next().unwrap();
|
||||
let suggestion_group = suggestion_groups.into_iter().next().unwrap();
|
||||
let editor = workspace.update(&mut cx, |workspace, cx| {
|
||||
let active_pane = workspace.active_pane().clone();
|
||||
workspace.open_project_item::<Editor>(active_pane, buffer, false, false, cx)
|
||||
})?;
|
||||
|
||||
cx.update(|cx| {
|
||||
for suggestion in suggestion_group.suggestions {
|
||||
let description = suggestion.description.unwrap_or_else(|| "Delete".into());
|
||||
let range = {
|
||||
let buffer = editor.read(cx).buffer().read(cx).read(cx);
|
||||
let (&excerpt_id, _, _) = buffer.as_singleton().unwrap();
|
||||
buffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.range.start)
|
||||
.unwrap()
|
||||
..buffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.range.end)
|
||||
.unwrap()
|
||||
};
|
||||
let initial_text = suggestion.prepend_newline.then(|| "\n".into());
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assist_ids.push(assistant.suggest_assist(
|
||||
&editor,
|
||||
range,
|
||||
description,
|
||||
initial_text,
|
||||
Some(workspace.clone()),
|
||||
assistant_panel.upgrade().as_ref(),
|
||||
cx,
|
||||
));
|
||||
});
|
||||
}
|
||||
|
||||
// Scroll the editor to the suggested assist
|
||||
editor.update(cx, |editor, cx| {
|
||||
let anchor = {
|
||||
let buffer = editor.buffer().read(cx).read(cx);
|
||||
let (&excerpt_id, _, _) = buffer.as_singleton().unwrap();
|
||||
buffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion_group.context_range.start)
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
editor.set_scroll_anchor(
|
||||
ScrollAnchor {
|
||||
offset: gpui::Point::default(),
|
||||
anchor,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
})?;
|
||||
|
||||
editor
|
||||
} else {
|
||||
// If there are multiple buffers or suggestion groups, create a multibuffer
|
||||
let mut inline_assist_suggestions = Vec::new();
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let replica_id = project.read(cx).replica_id();
|
||||
let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite);
|
||||
for (buffer, suggestion_groups) in edit_suggestions {
|
||||
let excerpt_ids = multibuffer.push_excerpts(
|
||||
buffer,
|
||||
suggestion_groups
|
||||
.iter()
|
||||
.map(|suggestion_group| ExcerptRange {
|
||||
context: suggestion_group.context_range.clone(),
|
||||
primary: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
||||
for (excerpt_id, suggestion_group) in
|
||||
excerpt_ids.into_iter().zip(suggestion_groups)
|
||||
{
|
||||
for suggestion in suggestion_group.suggestions {
|
||||
let description =
|
||||
suggestion.description.unwrap_or_else(|| "Delete".into());
|
||||
let range = {
|
||||
let multibuffer = multibuffer.read(cx);
|
||||
multibuffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.range.start)
|
||||
.unwrap()
|
||||
..multibuffer
|
||||
.anchor_in_excerpt(excerpt_id, suggestion.range.end)
|
||||
.unwrap()
|
||||
};
|
||||
let initial_text =
|
||||
suggestion.prepend_newline.then(|| "\n".to_string());
|
||||
inline_assist_suggestions.push((range, description, initial_text));
|
||||
}
|
||||
}
|
||||
}
|
||||
multibuffer
|
||||
})?;
|
||||
|
||||
let editor = cx
|
||||
.new_view(|cx| Editor::for_multibuffer(multibuffer, Some(project), true, cx))?;
|
||||
cx.update(|cx| {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
for (range, description, initial_text) in inline_assist_suggestions {
|
||||
assist_ids.push(assistant.suggest_assist(
|
||||
&editor,
|
||||
range,
|
||||
description,
|
||||
initial_text,
|
||||
Some(workspace.clone()),
|
||||
assistant_panel.upgrade().as_ref(),
|
||||
cx,
|
||||
));
|
||||
}
|
||||
})
|
||||
})?;
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, cx)
|
||||
})?;
|
||||
|
||||
editor
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
if let Some(step) = this.active_edit_step.as_mut() {
|
||||
step.assist_ids = assist_ids;
|
||||
step.editor = Some(editor.downgrade());
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_editor_search_event(
|
||||
&mut self,
|
||||
_: View<Editor>,
|
||||
|
@ -1785,173 +2023,6 @@ impl ContextEditor {
|
|||
});
|
||||
}
|
||||
|
||||
fn apply_edit(&mut self, _: &ApplyEdit, cx: &mut ViewContext<Self>) {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let project = workspace.read(cx).project().clone();
|
||||
|
||||
struct Edit {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
}
|
||||
|
||||
let context = self.context.read(cx);
|
||||
let context_buffer = context.buffer().read(cx);
|
||||
let context_buffer_snapshot = context_buffer.snapshot();
|
||||
|
||||
let selections = self.editor.read(cx).selections.disjoint_anchors();
|
||||
let mut selections = selections.iter().peekable();
|
||||
let selected_suggestions = context
|
||||
.edit_suggestions()
|
||||
.iter()
|
||||
.filter(|suggestion| {
|
||||
while let Some(selection) = selections.peek() {
|
||||
if selection
|
||||
.end
|
||||
.text_anchor
|
||||
.cmp(&suggestion.source_range.start, context_buffer)
|
||||
.is_lt()
|
||||
{
|
||||
selections.next();
|
||||
continue;
|
||||
}
|
||||
if selection
|
||||
.start
|
||||
.text_anchor
|
||||
.cmp(&suggestion.source_range.end, context_buffer)
|
||||
.is_gt()
|
||||
{
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
false
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut opened_buffers: HashMap<PathBuf, Task<Result<Model<Buffer>>>> = HashMap::default();
|
||||
project.update(cx, |project, cx| {
|
||||
for suggestion in &selected_suggestions {
|
||||
opened_buffers
|
||||
.entry(suggestion.full_path.clone())
|
||||
.or_insert_with(|| {
|
||||
project.open_buffer_for_full_path(&suggestion.full_path, cx)
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let mut buffers_by_full_path = HashMap::default();
|
||||
for (full_path, buffer) in opened_buffers {
|
||||
if let Some(buffer) = buffer.await.log_err() {
|
||||
buffers_by_full_path.insert(full_path, buffer);
|
||||
}
|
||||
}
|
||||
|
||||
let mut suggestions_by_buffer = HashMap::default();
|
||||
cx.update(|cx| {
|
||||
for suggestion in selected_suggestions {
|
||||
if let Some(buffer) = buffers_by_full_path.get(&suggestion.full_path) {
|
||||
let (_, edits) = suggestions_by_buffer
|
||||
.entry(buffer.clone())
|
||||
.or_insert_with(|| (buffer.read(cx).snapshot(), Vec::new()));
|
||||
|
||||
let mut lines = context_buffer_snapshot
|
||||
.as_rope()
|
||||
.chunks_in_range(
|
||||
suggestion.source_range.to_offset(&context_buffer_snapshot),
|
||||
)
|
||||
.lines();
|
||||
if let Some(suggestion) = parse_next_edit_suggestion(&mut lines) {
|
||||
let old_text = context_buffer_snapshot
|
||||
.text_for_range(suggestion.old_text_range)
|
||||
.collect();
|
||||
let new_text = context_buffer_snapshot
|
||||
.text_for_range(suggestion.new_text_range)
|
||||
.collect();
|
||||
edits.push(Edit { old_text, new_text });
|
||||
}
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
let edits_by_buffer = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let mut result = HashMap::default();
|
||||
for (buffer, (snapshot, suggestions)) in suggestions_by_buffer {
|
||||
let edits =
|
||||
result
|
||||
.entry(buffer)
|
||||
.or_insert(Vec::<(Range<language::Anchor>, _)>::new());
|
||||
for suggestion in suggestions {
|
||||
if let Some(range) =
|
||||
fuzzy_search_lines(snapshot.as_rope(), &suggestion.old_text)
|
||||
{
|
||||
let edit_start = snapshot.anchor_after(range.start);
|
||||
let edit_end = snapshot.anchor_before(range.end);
|
||||
if let Err(ix) = edits.binary_search_by(|(range, _)| {
|
||||
range.start.cmp(&edit_start, &snapshot)
|
||||
}) {
|
||||
edits.insert(
|
||||
ix,
|
||||
(edit_start..edit_end, suggestion.new_text.clone()),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
log::info!(
|
||||
"assistant edit did not match any text in buffer {:?}",
|
||||
&suggestion.old_text
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut project_transaction = ProjectTransaction::default();
|
||||
let (editor, workspace, title) = this.update(&mut cx, |this, cx| {
|
||||
for (buffer_handle, edits) in edits_by_buffer {
|
||||
buffer_handle.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction();
|
||||
buffer.edit(
|
||||
edits,
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
buffer.end_transaction(cx);
|
||||
if let Some(transaction) = buffer.finalize_last_transaction() {
|
||||
project_transaction
|
||||
.0
|
||||
.insert(buffer_handle.clone(), transaction.clone());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
(
|
||||
this.editor.downgrade(),
|
||||
this.workspace.clone(),
|
||||
this.title(cx),
|
||||
)
|
||||
})?;
|
||||
|
||||
Editor::open_project_transaction(
|
||||
&editor,
|
||||
workspace,
|
||||
project_transaction,
|
||||
format!("Edits from {}", title),
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn save(&mut self, _: &Save, cx: &mut ViewContext<Self>) {
|
||||
self.context
|
||||
.update(cx, |context, cx| context.save(None, self.fs.clone(), cx));
|
||||
|
@ -1967,6 +2038,14 @@ impl ContextEditor {
|
|||
|
||||
fn render_send_button(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let focus_handle = self.focus_handle(cx).clone();
|
||||
let button_text = match self.edit_step_for_cursor(cx) {
|
||||
Some(edit_step) => match &edit_step.operations {
|
||||
Some(EditStepOperations::Pending(_)) => "Computing Changes...",
|
||||
Some(EditStepOperations::Parsed { .. }) => "Apply Changes",
|
||||
None => "Send",
|
||||
},
|
||||
None => "Send",
|
||||
};
|
||||
ButtonLike::new("send_button")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
|
@ -1974,11 +2053,38 @@ impl ContextEditor {
|
|||
KeyBinding::for_action_in(&Assist, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element()),
|
||||
)
|
||||
.child(Label::new("Send"))
|
||||
.child(Label::new(button_text))
|
||||
.on_click(move |_event, cx| {
|
||||
focus_handle.dispatch_action(&Assist, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn edit_step_for_cursor<'a>(&'a self, cx: &'a AppContext) -> Option<&'a EditStep> {
|
||||
let newest_cursor = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.head()
|
||||
.text_anchor;
|
||||
let context = self.context.read(cx);
|
||||
let buffer = context.buffer().read(cx);
|
||||
|
||||
let edit_steps = context.edit_steps();
|
||||
edit_steps
|
||||
.binary_search_by(|step| {
|
||||
let step_range = step.source_range.clone();
|
||||
if newest_cursor.cmp(&step_range.start, buffer).is_lt() {
|
||||
Ordering::Greater
|
||||
} else if newest_cursor.cmp(&step_range.end, buffer).is_gt() {
|
||||
Ordering::Less
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
.map(|index| &edit_steps[index])
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<EditorEvent> for ContextEditor {}
|
||||
|
@ -1995,7 +2101,7 @@ impl Render for ContextEditor {
|
|||
.capture_action(cx.listener(ContextEditor::confirm_command))
|
||||
.on_action(cx.listener(ContextEditor::assist))
|
||||
.on_action(cx.listener(ContextEditor::split))
|
||||
.on_action(cx.listener(ContextEditor::apply_edit))
|
||||
.on_action(cx.listener(ContextEditor::debug_edit_steps))
|
||||
.size_full()
|
||||
.v_flex()
|
||||
.child(
|
||||
|
|
|
@@ -20,11 +20,10 @@ use crate::{
};
use anyhow::Result;
use client::Client;
use futures::{future::BoxFuture, stream::BoxStream};
use futures::{future::BoxFuture, stream::BoxStream, StreamExt};
use gpui::{AnyView, AppContext, BorrowAppContext, Task, WindowContext};
use settings::{Settings, SettingsStore};
use std::time::Duration;
use std::{any::Any, sync::Arc};
use std::{any::Any, pin::Pin, sync::Arc, task::Poll, time::Duration};

/// Choose which model to use for openai provider.
/// If the model is not available, try to use the first available model, or fallback to the original model.

@@ -55,10 +54,21 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
}

pub struct CompletionResponse {
pub inner: BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>,
inner: BoxStream<'static, Result<String>>,
_lock: SemaphoreGuardArc,
}

impl futures::Stream for CompletionResponse {
type Item = Result<String>;

fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.inner).poll_next(cx)
}
}

pub trait LanguageModelCompletionProvider: Send + Sync {
fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel>;
fn settings_version(&self) -> usize;

@@ -72,7 +82,7 @@ pub trait LanguageModelCompletionProvider: Send + Sync {
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>>;
fn complete(
fn stream_completion(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;

@@ -136,20 +146,34 @@ impl CompletionProvider {
self.provider.read().count_tokens(request, cx)
}

pub fn complete(
pub fn stream_completion(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<CompletionResponse> {
) -> Task<Result<CompletionResponse>> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.background_executor().spawn(async move {
cx.foreground_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().complete(request);
CompletionResponse {
let response = provider.read().stream_completion(request);
let response = response.await?;
Ok(CompletionResponse {
inner: response,
_lock: lock,
})
})
}

pub fn complete(&self, request: LanguageModelRequest, cx: &AppContext) -> Task<Result<String>> {
let response = self.stream_completion(request, cx);
cx.foreground_executor().spawn(async move {
let mut chunks = response.await?;
let mut completion = String::new();
while let Some(chunk) = chunks.next().await {
let chunk = chunk?;
completion.push_str(&chunk);
}
Ok(completion)
})
}
}
@@ -300,7 +324,7 @@ mod tests {

// Enqueue some requests
for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
let response = provider.complete(
let response = provider.stream_completion(
LanguageModelRequest {
temperature: i as f32 / 10.0,
..Default::default()

@@ -309,8 +333,7 @@ mod tests {
);
cx.background_executor()
.spawn(async move {
let response = response.await;
let mut stream = response.inner.await.unwrap();
let mut stream = response.await.unwrap();
while let Some(message) = stream.next().await {
message.unwrap();
}

@@ -326,7 +349,7 @@ mod tests {

// Get the first completion request that is in flight and mark it as completed.
let completion = fake_provider
.running_completions()
.pending_completions()
.into_iter()
.next()
.unwrap();

@@ -347,7 +370,7 @@ mod tests {
);

// Mark all completion requests as finished that are in flight.
for request in fake_provider.running_completions() {
for request in fake_provider.pending_completions() {
fake_provider.finish_completion(&request);
}

@@ -362,7 +385,7 @@ mod tests {
);

// Finish all remaining completion requests.
for request in fake_provider.running_completions() {
for request in fake_provider.pending_completions() {
fake_provider.finish_completion(&request);
}

@@ -94,7 +94,7 @@ impl LanguageModelCompletionProvider for AnthropicCompletionProvider {
count_open_ai_tokens(request, cx.background_executor())
}

fn complete(
fn stream_completion(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

@@ -135,7 +135,7 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
}
}

fn complete(
fn stream_completion(
&self,
mut request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

@@ -23,7 +23,7 @@ impl FakeCompletionProvider {
this
}

pub fn running_completions(&self) -> Vec<LanguageModelRequest> {
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
.lock()
.keys()

@@ -35,7 +35,7 @@ impl FakeCompletionProvider {
self.current_completion_txs.lock().len()
}

pub fn send_completion(&self, request: &LanguageModelRequest, chunk: String) {
pub fn send_completion_chunk(&self, request: &LanguageModelRequest, chunk: String) {
let json = serde_json::to_string(request).unwrap();
self.current_completion_txs
.lock()

@@ -45,10 +45,19 @@ impl FakeCompletionProvider {
.unwrap();
}

pub fn send_last_completion_chunk(&self, chunk: String) {
self.send_completion_chunk(self.pending_completions().last().unwrap(), chunk);
}

pub fn finish_completion(&self, request: &LanguageModelRequest) {
self.current_completion_txs
.lock()
.remove(&serde_json::to_string(request).unwrap());
.remove(&serde_json::to_string(request).unwrap())
.unwrap();
}

pub fn finish_last_completion(&self) {
self.finish_completion(self.pending_completions().last().unwrap());
}
}

@@ -89,7 +98,7 @@ impl LanguageModelCompletionProvider for FakeCompletionProvider {
futures::future::ready(Ok(0)).boxed()
}

fn complete(
fn stream_completion(
&self,
_request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

@@ -91,7 +91,7 @@ impl LanguageModelCompletionProvider for OllamaCompletionProvider {
async move { Ok(token_count) }.boxed()
}

fn complete(
fn stream_completion(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

@@ -179,7 +179,7 @@ impl LanguageModelCompletionProvider for OpenAiCompletionProvider {
count_open_ai_tokens(request, cx.background_executor())
}

fn complete(
fn stream_completion(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

File diff suppressed because it is too large
@ -16,7 +16,12 @@ use editor::{
|
|||
ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use futures::{
|
||||
channel::mpsc,
|
||||
future::LocalBoxFuture,
|
||||
stream::{self, BoxStream},
|
||||
SinkExt, Stream, StreamExt,
|
||||
};
|
||||
use gpui::{
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, Global, HighlightStyle,
|
||||
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
|
||||
|
@ -28,8 +33,11 @@ use parking_lot::Mutex;
|
|||
use rope::Rope;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use similar::TextDiff;
|
||||
use smol::future::FutureExt;
|
||||
use std::{
|
||||
cmp, mem,
|
||||
cmp,
|
||||
future::Future,
|
||||
mem,
|
||||
ops::{Range, RangeInclusive},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
|
@ -134,7 +142,6 @@ impl InlineAssistant {
|
|||
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
|
||||
let mut assists = Vec::new();
|
||||
let mut assist_blocks = Vec::new();
|
||||
let mut assist_to_focus = None;
|
||||
for range in codegen_ranges {
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
|
@ -142,6 +149,7 @@ impl InlineAssistant {
|
|||
Codegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
None,
|
||||
self.telemetry.clone(),
|
||||
cx,
|
||||
)
|
||||
|
@ -174,42 +182,18 @@ impl InlineAssistant {
|
|||
}
|
||||
}
|
||||
|
||||
assist_blocks.push(BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.start,
|
||||
height: prompt_editor.read(cx).height_in_lines,
|
||||
render: build_assist_editor_renderer(&prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
});
|
||||
assist_blocks.push(BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.end,
|
||||
height: 1,
|
||||
render: Box::new(|cx| {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
});
|
||||
assists.push((assist_id, prompt_editor));
|
||||
}
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
let assist_block_ids = editor.update(cx, |editor, cx| {
|
||||
editor.insert_blocks(assist_blocks, None, cx)
|
||||
});
|
||||
assists.push((assist_id, prompt_editor, prompt_block_id, end_block_id));
|
||||
}
|
||||
|
||||
let editor_assists = self
|
||||
.assists_by_editor
|
||||
.entry(editor.downgrade())
|
||||
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
|
||||
let mut assist_group = InlineAssistGroup::new();
|
||||
for ((assist_id, prompt_editor), block_ids) in
|
||||
assists.into_iter().zip(assist_block_ids.chunks_exact(2))
|
||||
{
|
||||
for (assist_id, prompt_editor, prompt_block_id, end_block_id) in assists {
|
||||
self.assists.insert(
|
||||
assist_id,
|
||||
InlineAssist::new(
|
||||
|
@ -218,8 +202,8 @@ impl InlineAssistant {
|
|||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
block_ids[0],
|
||||
block_ids[1],
|
||||
prompt_block_id,
|
||||
end_block_id,
|
||||
prompt_editor.read(cx).codegen.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
|
@ -235,6 +219,128 @@ impl InlineAssistant {
|
|||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
mut range: Range<Anchor>,
|
||||
initial_prompt: String,
|
||||
initial_insertion: Option<String>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> InlineAssistId {
|
||||
let assist_group_id = self.next_assist_group_id.post_inc();
|
||||
let prompt_buffer = cx.new_model(|cx| Buffer::local(&initial_prompt, cx));
|
||||
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
|
||||
let buffer = editor.read(cx).buffer().clone();
|
||||
let prepend_transaction_id = initial_insertion.and_then(|initial_insertion| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
buffer.edit([(range.start..range.start, initial_insertion)], None, cx);
|
||||
buffer.end_transaction(cx)
|
||||
})
|
||||
});
|
||||
|
||||
range.start = range.start.bias_left(&buffer.read(cx).read(cx));
|
||||
range.end = range.end.bias_right(&buffer.read(cx).read(cx));
|
||||
|
||||
let codegen = cx.new_model(|cx| {
|
||||
Codegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
prepend_transaction_id,
|
||||
self.telemetry.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new_view(|cx| {
|
||||
PromptEditor::new(
|
||||
assist_id,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
editor,
|
||||
assistant_panel,
|
||||
workspace.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
let editor_assists = self
|
||||
.assists_by_editor
|
||||
.entry(editor.downgrade())
|
||||
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
|
||||
|
||||
let mut assist_group = InlineAssistGroup::new();
|
||||
self.assists.insert(
|
||||
assist_id,
|
||||
InlineAssist::new(
|
||||
assist_id,
|
||||
assist_group_id,
|
||||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
prompt_block_id,
|
||||
end_block_id,
|
||||
prompt_editor.read(cx).codegen.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
),
|
||||
);
|
||||
assist_group.assist_ids.push(assist_id);
|
||||
editor_assists.assist_ids.push(assist_id);
|
||||
self.assist_groups.insert(assist_group_id, assist_group);
|
||||
assist_id
|
||||
}
|
||||
|
||||
fn insert_assist_blocks(
|
||||
&self,
|
||||
editor: &View<Editor>,
|
||||
range: &Range<Anchor>,
|
||||
prompt_editor: &View<PromptEditor>,
|
||||
cx: &mut WindowContext,
|
||||
) -> [BlockId; 2] {
|
||||
let assist_blocks = vec![
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.start,
|
||||
height: prompt_editor.read(cx).height_in_lines,
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.end,
|
||||
height: 1,
|
||||
render: Box::new(|cx| {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
},
|
||||
];
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let block_ids = editor.insert_blocks(assist_blocks, None, cx);
|
||||
[block_ids[0], block_ids[1]]
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_prompt_editor_focus_in(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = &self.assists[&assist_id];
|
||||
let Some(decorations) = assist.decorations.as_ref() else {
|
||||
|
@ -379,6 +485,14 @@ impl InlineAssistant {
|
|||
cx.propagate();
|
||||
}
|
||||
|
||||
fn handle_editor_release(&mut self, editor: WeakView<Editor>, cx: &mut WindowContext) {
|
||||
if let Some(editor_assists) = self.assists_by_editor.get_mut(&editor) {
|
||||
for assist_id in editor_assists.assist_ids.clone() {
|
||||
self.finish_assist(assist_id, true, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_editor_change(&mut self, editor: View<Editor>, cx: &mut WindowContext) {
|
||||
let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) else {
|
||||
return;
|
||||
|
@ -698,7 +812,7 @@ impl InlineAssistant {
|
|||
assist_group.assist_ids.clone()
|
||||
}
|
||||
|
||||
fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
pub fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
|
||||
assist
|
||||
} else {
|
||||
|
@ -727,16 +841,26 @@ impl InlineAssistant {
|
|||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
let codegen = assist.codegen.clone();
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let request = request.await?;
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
let telemetry_id = CompletionProvider::global(cx).model().telemetry_id();
|
||||
let chunks: LocalBoxFuture<Result<BoxStream<Result<String>>>> =
|
||||
if user_prompt.trim().to_lowercase() == "delete" {
|
||||
async { Ok(stream::empty().boxed()) }.boxed_local()
|
||||
} else {
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
let mut cx = cx.to_async();
|
||||
async move {
|
||||
let request = request.await?;
|
||||
let chunks = cx
|
||||
.update(|cx| CompletionProvider::global(cx).stream_completion(request, cx))?
|
||||
.await?;
|
||||
Ok(chunks.boxed())
|
||||
}
|
||||
.boxed_local()
|
||||
};
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(telemetry_id, chunks, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn request_for_inline_assist(
|
||||
|
@ -855,7 +979,7 @@ impl InlineAssistant {
|
|||
})
|
||||
}
|
||||
|
||||
fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
pub fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
|
||||
assist
|
||||
} else {
|
||||
|
@ -1074,6 +1198,14 @@ impl EditorInlineAssists {
|
|||
}
|
||||
}),
|
||||
_subscriptions: vec![
|
||||
cx.observe_release(editor, {
|
||||
let editor = editor.downgrade();
|
||||
|_, cx| {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
this.handle_editor_release(editor, cx);
|
||||
})
|
||||
}
|
||||
}),
|
||||
cx.observe(editor, move |editor, cx| {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
this.handle_editor_change(editor, cx)
|
||||
|
@ -1138,7 +1270,7 @@ fn build_assist_editor_renderer(editor: &View<PromptEditor>) -> RenderBlock {
|
|||
}
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
|
||||
struct InlineAssistId(usize);
|
||||
pub struct InlineAssistId(usize);
|
||||
|
||||
impl InlineAssistId {
|
||||
fn post_inc(&mut self) -> InlineAssistId {
|
||||
|
@ -1882,7 +2014,8 @@ pub struct Codegen {
|
|||
range: Range<Anchor>,
|
||||
edit_position: Anchor,
|
||||
last_equal_ranges: Vec<Range<Anchor>>,
|
||||
transaction_id: Option<TransactionId>,
|
||||
prepend_transaction_id: Option<TransactionId>,
|
||||
generation_transaction_id: Option<TransactionId>,
|
||||
status: CodegenStatus,
|
||||
generation: Task<()>,
|
||||
diff: Diff,
|
||||
|
@ -1911,6 +2044,7 @@ impl Codegen {
|
|||
pub fn new(
|
||||
buffer: Model<MultiBuffer>,
|
||||
range: Range<Anchor>,
|
||||
prepend_transaction_id: Option<TransactionId>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
|
@ -1943,7 +2077,8 @@ impl Codegen {
|
|||
range,
|
||||
snapshot,
|
||||
last_equal_ranges: Default::default(),
|
||||
transaction_id: Default::default(),
|
||||
prepend_transaction_id,
|
||||
generation_transaction_id: None,
|
||||
status: CodegenStatus::Idle,
|
||||
generation: Task::ready(()),
|
||||
diff: Diff::default(),
|
||||
|
@ -1959,8 +2094,13 @@ impl Codegen {
|
|||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
if let multi_buffer::Event::TransactionUndone { transaction_id } = event {
|
||||
if self.transaction_id == Some(*transaction_id) {
|
||||
self.transaction_id = None;
|
||||
if self.generation_transaction_id == Some(*transaction_id) {
|
||||
self.generation_transaction_id = None;
|
||||
self.generation = Task::ready(());
|
||||
cx.emit(CodegenEvent::Undone);
|
||||
} else if self.prepend_transaction_id == Some(*transaction_id) {
|
||||
self.prepend_transaction_id = None;
|
||||
self.generation_transaction_id = None;
|
||||
self.generation = Task::ready(());
|
||||
cx.emit(CodegenEvent::Undone);
|
||||
}
|
||||
|
@ -1971,7 +2111,12 @@ impl Codegen {
|
|||
&self.last_equal_ranges
|
||||
}
|
||||
|
||||
pub fn start(&mut self, prompt: LanguageModelRequest, cx: &mut ModelContext<Self>) {
|
||||
pub fn start(
|
||||
&mut self,
|
||||
telemetry_id: String,
|
||||
stream: impl 'static + Future<Output = Result<BoxStream<'static, Result<String>>>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let range = self.range.clone();
|
||||
let snapshot = self.snapshot.clone();
|
||||
let selected_text = snapshot
|
||||
|
@ -1985,15 +2130,17 @@ impl Codegen {
|
|||
.next()
|
||||
.unwrap_or_else(|| snapshot.indent_size_for_line(MultiBufferRow(selection_start.row)));
|
||||
|
||||
let model_telemetry_id = prompt.model.telemetry_id();
|
||||
let response = CompletionProvider::global(cx).complete(prompt, cx);
|
||||
let telemetry = self.telemetry.clone();
|
||||
self.edit_position = range.start;
|
||||
self.diff = Diff::default();
|
||||
self.status = CodegenStatus::Pending;
|
||||
if let Some(transaction_id) = self.generation_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
self.generation = cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = response.await;
|
||||
let chunks = stream.await;
|
||||
let generate = async {
|
||||
let mut edit_start = range.start.to_offset(&snapshot);
|
||||
|
||||
|
@ -2003,7 +2150,7 @@ impl Codegen {
|
|||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(response.inner.await?);
|
||||
let chunks = StripInvalidSpans::new(chunks?);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
|
||||
|
@ -2086,7 +2233,7 @@ impl Codegen {
|
|||
telemetry.report_assistant_event(
|
||||
None,
|
||||
telemetry_events::AssistantKind::Inline,
|
||||
model_telemetry_id,
|
||||
telemetry_id,
|
||||
response_latency,
|
||||
error_message,
|
||||
);
|
||||
|
@ -2136,7 +2283,7 @@ impl Codegen {
|
|||
});
|
||||
|
||||
if let Some(transaction) = transaction {
|
||||
if let Some(first_transaction) = this.transaction_id {
|
||||
if let Some(first_transaction) = this.generation_transaction_id {
|
||||
// Group all assistant edits into the first transaction.
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.merge_transactions(
|
||||
|
@ -2146,7 +2293,7 @@ impl Codegen {
|
|||
)
|
||||
});
|
||||
} else {
|
||||
this.transaction_id = Some(transaction);
|
||||
this.generation_transaction_id = Some(transaction);
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction(cx)
|
||||
});
|
||||
|
@ -2189,7 +2336,12 @@ impl Codegen {
|
|||
}
|
||||
|
||||
pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if let Some(transaction_id) = self.transaction_id.take() {
|
||||
if let Some(transaction_id) = self.prepend_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
|
||||
if let Some(transaction_id) = self.generation_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
|
@ -2451,11 +2603,8 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::FakeCompletionProvider;
|
||||
|
||||
use super::*;
|
||||
use crate::FakeCompletionProvider;
|
||||
use futures::stream::{self};
|
||||
use gpui::{Context, TestAppContext};
|
||||
use indoc::indoc;
|
||||
|
@ -2466,6 +2615,7 @@ mod tests {
|
|||
use rand::prelude::*;
|
||||
use serde::Serialize;
|
||||
use settings::SettingsStore;
|
||||
use std::{future, sync::Arc};
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DummyCompletionRequest {
|
||||
|
@ -2475,7 +2625,7 @@ mod tests {
|
|||
#[gpui::test(iterations = 10)]
|
||||
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
|
||||
cx.update(|cx| FakeCompletionProvider::setup_test(cx));
|
||||
cx.update(language_settings::init);
|
||||
|
||||
let text = indoc! {"
|
||||
|
@ -2493,14 +2643,17 @@ mod tests {
|
|||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
|
||||
});
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
|
||||
|
||||
let (chunks_tx, chunks_rx) = mpsc::unbounded();
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(LanguageModelRequest::default(), cx)
|
||||
codegen.start(
|
||||
String::new(),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
let mut new_text = concat!(
|
||||
" let mut x = 0;\n",
|
||||
" while x < 10 {\n",
|
@@ -2511,11 +2664,11 @@ mod tests {
             let max_len = cmp::min(new_text.len(), 10);
             let len = rng.gen_range(1..=max_len);
             let (chunk, suffix) = new_text.split_at(len);
-            provider.send_completion(&LanguageModelRequest::default(), chunk.into());
+            chunks_tx.unbounded_send(chunk.to_string()).unwrap();
             new_text = suffix;
             cx.background_executor.run_until_parked();
         }
-        provider.finish_completion(&LanguageModelRequest::default());
+        drop(chunks_tx);
         cx.background_executor.run_until_parked();

         assert_eq!(
@@ -2536,7 +2689,6 @@ mod tests {
         cx: &mut TestAppContext,
         mut rng: StdRng,
     ) {
-        let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
         cx.set_global(cx.update(SettingsStore::test));
         cx.update(language_settings::init);

@@ -2552,10 +2704,16 @@ mod tests {
             let snapshot = buffer.snapshot(cx);
             snapshot.anchor_before(Point::new(1, 6))..snapshot.anchor_after(Point::new(1, 6))
         });
-        let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
+        let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));

-        let request = LanguageModelRequest::default();
-        codegen.update(cx, |codegen, cx| codegen.start(request, cx));
+        let (chunks_tx, chunks_rx) = mpsc::unbounded();
+        codegen.update(cx, |codegen, cx| {
+            codegen.start(
+                String::new(),
+                future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
+                cx,
+            )
+        });

         cx.background_executor.run_until_parked();

@@ -2569,11 +2727,11 @@ mod tests {
             let max_len = cmp::min(new_text.len(), 10);
             let len = rng.gen_range(1..=max_len);
             let (chunk, suffix) = new_text.split_at(len);
-            provider.send_completion(&LanguageModelRequest::default(), chunk.into());
+            chunks_tx.unbounded_send(chunk.to_string()).unwrap();
             new_text = suffix;
             cx.background_executor.run_until_parked();
         }
-        provider.finish_completion(&LanguageModelRequest::default());
+        drop(chunks_tx);
         cx.background_executor.run_until_parked();

         assert_eq!(
@@ -2594,7 +2752,7 @@ mod tests {
         cx: &mut TestAppContext,
         mut rng: StdRng,
     ) {
-        let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
+        cx.update(|cx| FakeCompletionProvider::setup_test(cx));
         cx.set_global(cx.update(SettingsStore::test));
         cx.update(language_settings::init);

@@ -2610,10 +2768,16 @@ mod tests {
             let snapshot = buffer.snapshot(cx);
             snapshot.anchor_before(Point::new(1, 2))..snapshot.anchor_after(Point::new(1, 2))
         });
-        let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
+        let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));

-        let request = LanguageModelRequest::default();
-        codegen.update(cx, |codegen, cx| codegen.start(request, cx));
+        let (chunks_tx, chunks_rx) = mpsc::unbounded();
+        codegen.update(cx, |codegen, cx| {
+            codegen.start(
+                String::new(),
+                future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
+                cx,
+            )
+        });

         cx.background_executor.run_until_parked();

@@ -2627,11 +2791,11 @@ mod tests {
             let max_len = cmp::min(new_text.len(), 10);
             let len = rng.gen_range(1..=max_len);
             let (chunk, suffix) = new_text.split_at(len);
-            provider.send_completion(&LanguageModelRequest::default(), chunk.into());
+            chunks_tx.unbounded_send(chunk.to_string()).unwrap();
            new_text = suffix;
             cx.background_executor.run_until_parked();
         }
-        provider.finish_completion(&LanguageModelRequest::default());
+        drop(chunks_tx);
         cx.background_executor.run_until_parked();

         assert_eq!(
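These test hunks swap the FakeCompletionProvider round-trip for a channel-backed stream that is handed straight to the new Codegen::start signature. A runtime-free sketch of that shape (the names and the block_on driver are illustrative, not Zed's test harness):

    use anyhow::Result;
    use futures::{channel::mpsc, executor::block_on, future, stream::BoxStream, StreamExt};

    fn main() -> Result<()> {
        // The sender stands in for the model: each unbounded_send is one streamed chunk.
        let (chunks_tx, chunks_rx) = mpsc::unbounded::<String>();

        // Shape the updated start() consumes per the hunk above: a future that resolves
        // to a boxed stream of Result<String>.
        let chunks: BoxStream<'static, Result<String>> = chunks_rx.map(Ok).boxed();
        let completion = future::ready(anyhow::Ok(chunks));

        chunks_tx.unbounded_send("fn main() {\n".to_string()).unwrap();
        chunks_tx.unbounded_send("}\n".to_string()).unwrap();
        drop(chunks_tx); // closing the sender ends the stream, as finish_completion used to

        // Consumer side: await the stream, then drain chunks as they arrive.
        let mut chunks = block_on(completion)?;
        while let Some(chunk) = block_on(chunks.next()) {
            print!("{}", chunk?);
        }
        Ok(())
    }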
@@ -3,6 +3,7 @@ use crate::{
     InlineAssist, InlineAssistant, LanguageModelRequest, LanguageModelRequestMessage, Role,
 };
 use anyhow::{anyhow, Result};
+use assets::Assets;
 use chrono::{DateTime, Utc};
 use collections::{HashMap, HashSet};
 use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
@@ -12,8 +13,8 @@ use futures::{
 };
 use fuzzy::StringMatchCandidate;
 use gpui::{
-    actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
-    Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
+    actions, point, size, transparent_black, AppContext, AssetSource, BackgroundExecutor, Bounds,
+    EventEmitter, Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
     TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
 };
 use heed::{types::SerdeBincode, Database, RoTxn};
@@ -1296,6 +1297,17 @@ impl PromptStore {
     fn first(&self) -> Option<PromptMetadata> {
         self.metadata_cache.read().metadata.first().cloned()
     }
+
+    pub fn operations_prompt(&self) -> String {
+        String::from_utf8(
+            Assets
+                .load("prompts/operations.md")
+                .unwrap()
+                .unwrap()
+                .to_vec(),
+        )
+        .unwrap()
+    }
 }

 /// Wraps a shared future to a prompt store so it can be assigned as a context global.
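The new operations_prompt loads the bundled prompts/operations.md asset and unwraps twice on the way to a String. A rough sketch of the same bytes-to-String path with the errors surfaced instead of unwrapped; the StaticAssets type below is a hypothetical stand-in, not gpui's AssetSource:

    use anyhow::{anyhow, Context as _, Result};
    use std::{borrow::Cow, collections::HashMap};

    // Stand-in for an embedded asset source: path -> bytes compiled into the binary.
    struct StaticAssets(HashMap<&'static str, &'static [u8]>);

    impl StaticAssets {
        fn load(&self, path: &str) -> Result<Cow<'static, [u8]>> {
            self.0
                .get(path)
                .map(|bytes| Cow::Borrowed(*bytes))
                .ok_or_else(|| anyhow!("missing embedded asset: {path}"))
        }
    }

    fn operations_prompt(assets: &StaticAssets) -> Result<String> {
        // Same two steps as the hunk above: fetch the bytes, then decode them as UTF-8.
        let bytes = assets
            .load("prompts/operations.md")
            .context("loading prompts/operations.md")?;
        String::from_utf8(bytes.to_vec()).context("prompt file is not valid UTF-8")
    }

    fn main() -> Result<()> {
        let assets = StaticAssets(HashMap::from([(
            "prompts/operations.md",
            &b"Describe the edits as a series of operations..."[..],
        )]));
        println!("{}", operations_prompt(&assets)?);
        Ok(())
    }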
@@ -1,171 +0,0 @@
-use language::Rope;
-use std::ops::Range;
-
-/// Search the given buffer for the given substring, ignoring any differences
-/// in line indentation between the query and the buffer.
-///
-/// Returns a vector of ranges of byte offsets in the buffer corresponding
-/// to the entire lines of the buffer.
-pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Option<Range<usize>> {
-    const SIMILARITY_THRESHOLD: f64 = 0.8;
-
-    let mut best_match: Option<(Range<usize>, f64)> = None; // (range, score)
-    let mut haystack_lines = haystack.chunks().lines();
-    let mut haystack_line_start = 0;
-    while let Some(mut haystack_line) = haystack_lines.next() {
-        let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1;
-        let mut advanced_to_next_haystack_line = false;
-
-        let mut matched = true;
-        let match_start = haystack_line_start;
-        let mut match_end = next_haystack_line_start;
-        let mut match_score = 0.0;
-        let mut needle_lines = needle.lines().peekable();
-        while let Some(needle_line) = needle_lines.next() {
-            let similarity = line_similarity(haystack_line, needle_line);
-            if similarity >= SIMILARITY_THRESHOLD {
-                match_end = haystack_lines.offset();
-                match_score += similarity;
-
-                if needle_lines.peek().is_some() {
-                    if let Some(next_haystack_line) = haystack_lines.next() {
-                        advanced_to_next_haystack_line = true;
-                        haystack_line = next_haystack_line;
-                    } else {
-                        matched = false;
-                        break;
-                    }
-                } else {
-                    break;
-                }
-            } else {
-                matched = false;
-                break;
-            }
-        }
-
-        if matched
-            && best_match
-                .as_ref()
-                .map(|(_, best_score)| match_score > *best_score)
-                .unwrap_or(true)
-        {
-            best_match = Some((match_start..match_end, match_score));
-        }
-
-        if advanced_to_next_haystack_line {
-            haystack_lines.seek(next_haystack_line_start);
-        }
-        haystack_line_start = next_haystack_line_start;
-    }
-
-    best_match.map(|(range, _)| range)
-}
-
-/// Calculates the similarity between two lines, ignoring leading and trailing whitespace,
-/// using the Jaro-Winkler distance.
-///
-/// Returns a value between 0.0 and 1.0, where 1.0 indicates an exact match.
-fn line_similarity(line1: &str, line2: &str) -> f64 {
-    strsim::jaro_winkler(line1.trim(), line2.trim())
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-    use gpui::{AppContext, Context as _};
-    use language::Buffer;
-    use unindent::Unindent as _;
-    use util::test::marked_text_ranges;
-
-    #[gpui::test]
-    fn test_fuzzy_search_lines(cx: &mut AppContext) {
-        let (text, expected_ranges) = marked_text_ranges(
-            &r#"
-            fn main() {
-                if a() {
-                    assert_eq!(
-                        1 + 2,
-                        does_not_match,
-                    );
-                }
-
-                println!("hi");
-
-                assert_eq!(
-                    1 + 2,
-                    3,
-                ); // this last line does not match
-
-            «    assert_eq!(
-                    1 + 2,
-                    3,
-                );
-            »
-
-            «    assert_eq!(
-                    "something",
-                    "else",
-                );
-            »
-            }
-            "#
-            .unindent(),
-            false,
-        );
-
-        let buffer = cx.new_model(|cx| Buffer::local(&text, cx));
-        let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-
-        let actual_range = fuzzy_search_lines(
-            snapshot.as_rope(),
-            &"
-            assert_eq!(
-                1 + 2,
-                3,
-            );
-            "
-            .unindent(),
-        )
-        .unwrap();
-        assert_eq!(actual_range, expected_ranges[0]);
-
-        let actual_range = fuzzy_search_lines(
-            snapshot.as_rope(),
-            &"
-            assert_eq!(
-                1 + 2,
-                3,
-            );
-            "
-            .unindent(),
-        )
-        .unwrap();
-        assert_eq!(actual_range, expected_ranges[0]);
-
-        let actual_range = fuzzy_search_lines(
-            snapshot.as_rope(),
-            &"
-            asst_eq!(
-                \"something\",
-                \"els\"
-            )
-            "
-            .unindent(),
-        )
-        .unwrap();
-        assert_eq!(actual_range, expected_ranges[1]);
-
-        let actual_range = fuzzy_search_lines(
-            snapshot.as_rope(),
-            &"
-            assert_eq!(
-                2 + 1,
-                3,
-            );
-            "
-            .unindent(),
-        );
-        assert_eq!(actual_range, None);
-    }
-}
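The module deleted above implemented fuzzy, indentation-insensitive line matching on top of a per-line Jaro-Winkler score. A small standalone sketch of that primitive, using the same strsim call and 0.8 threshold the removed code used; the sample line pairs are invented:

    // Same similarity test as the deleted line_similarity helper: Jaro-Winkler over
    // whitespace-trimmed lines, accepted at a 0.8 threshold. Requires the strsim crate.
    fn line_similarity(line1: &str, line2: &str) -> f64 {
        strsim::jaro_winkler(line1.trim(), line2.trim())
    }

    fn main() {
        const SIMILARITY_THRESHOLD: f64 = 0.8;
        let pairs = [
            ("    assert_eq!(", "assert_eq!("),   // same line, different indentation
            ("asst_eq!(", "assert_eq!("),         // small typo, as in the deleted test fixture
            ("println!(\"hi\");", "assert_eq!("), // unrelated lines should not match
        ];
        for (needle, haystack) in pairs {
            let score = line_similarity(needle, haystack);
            println!(
                "{needle:<20} vs {haystack:<12} -> {score:.2} (match: {})",
                score >= SIMILARITY_THRESHOLD
            );
        }
    }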
@@ -1026,7 +1026,7 @@ impl Codegen {

         let telemetry = self.telemetry.clone();
         let model_telemetry_id = prompt.model.telemetry_id();
-        let response = CompletionProvider::global(cx).complete(prompt, cx);
+        let response = CompletionProvider::global(cx).stream_completion(prompt, cx);

         self.generation = cx.spawn(|this, mut cx| async move {
             let response = response.await;
@@ -1037,8 +1037,8 @@
             let mut response_latency = None;
             let request_start = Instant::now();
             let task = async {
-                let mut response = response.inner.await?;
-                while let Some(chunk) = response.next().await {
+                let mut chunks = response?;
+                while let Some(chunk) = chunks.next().await {
                     if response_latency.is_none() {
                         response_latency = Some(request_start.elapsed());
                     }
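These Codegen hunks switch from complete to stream_completion and consume the result as a stream of chunks, recording latency when the first chunk arrives. A standalone sketch of that consumption pattern over a canned stream (not the provider's real types):

    use anyhow::Result;
    use futures::{executor::block_on, stream, StreamExt};
    use std::time::{Duration, Instant};

    fn main() -> Result<()> {
        // Stand-in for stream_completion's output: a Result wrapping a stream of chunk Results.
        let response: Result<_> = Ok(stream::iter(vec![
            Ok::<_, anyhow::Error>("let x = ".to_string()),
            Ok("42;".to_string()),
        ]));

        block_on(async {
            let mut response_latency: Option<Duration> = None;
            let request_start = Instant::now();

            let mut chunks = response?; // surfaces connection/auth errors before streaming starts
            let mut text = String::new();
            while let Some(chunk) = chunks.next().await {
                if response_latency.is_none() {
                    response_latency = Some(request_start.elapsed());
                }
                text.push_str(&chunk?);
            }
            println!(
                "{text} (first chunk after {:?})",
                response_latency.unwrap_or_default()
            );
            Ok(())
        })
    }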