Allow the assistant to suggest edits to files in the project (#11993)

### Todo

* [x] tuck the new system prompt away somehow
* for now, we're treating it as built-in, and not editable. once we have
a way to fold away default prompts, let's make it a default prompt.
* [x] when applying edits, re-parse the edit from the latest content of
the assistant buffer (to allow for manual editing of edits)
* [x] automatically adjust the indentation of edits suggested by the
assistant
* [x] fix edit row highlights persisting even when assistant messages
with edits are deleted
* ~adjust the fuzzy search to allow for small errors in the old text,
using some string similarity routine~

We decided to defer the fuzzy searching thing to a separate PR, since
it's a little bit involved, and the current functionality works well
enough to be worth landing. A couple of notes on the fuzzy searching:
* Sometimes the assistant accidentally omits line breaks from the text
that it wants to replace.
* When the old text has hallucinations, the new text often contains the
same hallucinations, so we'll probably need to use a more fine-grained
editing strategy where we perform a character-wise diff of the old and
new text as reported by the assistant, and then adjust that diff so that
it can be applied to the actual buffer text.

Release Notes:

- Added the ability to request edits to project files using the
assistant panel.

---------

Co-authored-by: Antonio Scandurra <me@as-cii.com>
Co-authored-by: Marshall <marshall@zed.dev>
Co-authored-by: Antonio <antonio@zed.dev>
Co-authored-by: Nathan <nathan@zed.dev>
This commit is contained in:
Max Brunsfeld 2024-05-17 15:38:14 -07:00 committed by GitHub
parent 4386268a94
commit 84affa96ff
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 912 additions and 181 deletions

2
Cargo.lock generated
View file

@ -361,6 +361,7 @@ dependencies = [
"project",
"rand 0.8.5",
"regex",
"rope",
"schemars",
"search",
"serde",
@ -372,6 +373,7 @@ dependencies = [
"tiktoken-rs",
"toml 0.8.10",
"ui",
"unindent",
"util",
"uuid",
"workspace",

View file

@ -33,6 +33,7 @@ ordered-float.workspace = true
parking_lot.workspace = true
project.workspace = true
regex.workspace = true
rope.workspace = true
schemars.workspace = true
search.workspace = true
serde.workspace = true
@ -55,3 +56,4 @@ env_logger.workspace = true
log.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
unindent.workspace = true

View file

@ -1,3 +0,0 @@
Push content to a deeper layer.
A context can have multiple sublayers.
You can enable or disable arbitrary sublayers at arbitrary nesting depths when viewing the document.

View file

@ -10,6 +10,19 @@ pub struct AmbientContext {
pub current_project: CurrentProjectContext,
}
impl AmbientContext {
/// Captures the current ambient context as an immutable snapshot.
///
/// Only the recent-buffers portion is captured; it is a clone of the
/// already-maintained `RecentBuffersSnapshot`.
pub fn snapshot(&self) -> AmbientContextSnapshot {
AmbientContextSnapshot {
recent_buffers: self.recent_buffers.snapshot.clone(),
}
}
}
/// Point-in-time copy of the ambient context, stored per message so that
/// later operations (e.g. applying edits) can refer to the context that was
/// in effect when the message was created.
#[derive(Clone, Default, Debug)]
pub struct AmbientContextSnapshot {
pub recent_buffers: RecentBuffersSnapshot,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub enum ContextUpdated {
Updating,

View file

@ -1,21 +1,14 @@
use std::fmt::Write;
use std::iter;
use std::path::PathBuf;
use std::time::Duration;
use anyhow::Result;
use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role};
use gpui::{ModelContext, Subscription, Task, WeakModel};
use language::{Buffer, BufferSnapshot, DiagnosticEntry, Point};
use util::ResultExt;
use language::{Buffer, BufferSnapshot, Rope};
use std::{fmt::Write, path::PathBuf, time::Duration};
use crate::ambient_context::ContextUpdated;
use crate::assistant_panel::Conversation;
use crate::{LanguageModelRequestMessage, Role};
use super::ContextUpdated;
pub struct RecentBuffersContext {
pub enabled: bool,
pub buffers: Vec<RecentBuffer>,
pub message: String,
pub snapshot: RecentBuffersSnapshot,
pub pending_message: Option<Task<()>>,
}
@ -29,27 +22,19 @@ impl Default for RecentBuffersContext {
Self {
enabled: true,
buffers: Vec::new(),
message: String::new(),
snapshot: RecentBuffersSnapshot::default(),
pending_message: None,
}
}
}
impl RecentBuffersContext {
/// Returns the [`RecentBuffersContext`] as a message to the language model.
pub fn to_message(&self) -> Option<LanguageModelRequestMessage> {
self.enabled.then(|| LanguageModelRequestMessage {
role: Role::System,
content: self.message.clone(),
})
}
pub fn update(&mut self, cx: &mut ModelContext<Conversation>) -> ContextUpdated {
let buffers = self
let source_buffers = self
.buffers
.iter()
.filter_map(|recent| {
recent
let (full_path, snapshot) = recent
.buffer
.read_with(cx, |buffer, cx| {
(
@ -57,12 +42,18 @@ impl RecentBuffersContext {
buffer.snapshot(),
)
})
.ok()
.ok()?;
Some(SourceBufferSnapshot {
full_path,
model: recent.buffer.clone(),
snapshot,
})
})
.collect::<Vec<_>>();
if !self.enabled || buffers.is_empty() {
self.message.clear();
if !self.enabled || source_buffers.is_empty() {
self.snapshot.message = Default::default();
self.snapshot.source_buffers.clear();
self.pending_message = None;
cx.notify();
ContextUpdated::Disabled
@ -71,131 +62,84 @@ impl RecentBuffersContext {
const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
let message_task = cx
.background_executor()
.spawn(async move { Self::build_message(&buffers) });
if let Some(message) = message_task.await.log_err() {
this.update(&mut cx, |conversation, cx| {
conversation.ambient_context.recent_buffers.message = message;
conversation.count_remaining_tokens(cx);
cx.notify();
})
.log_err();
}
let message = if source_buffers.is_empty() {
Rope::new()
} else {
cx.background_executor()
.spawn({
let source_buffers = source_buffers.clone();
async move { message_for_recent_buffers(source_buffers) }
})
.await
};
this.update(&mut cx, |this, cx| {
this.ambient_context.recent_buffers.snapshot.source_buffers = source_buffers;
this.ambient_context.recent_buffers.snapshot.message = message;
this.count_remaining_tokens(cx);
cx.notify();
})
.ok();
}));
ContextUpdated::Updating
}
}
fn build_message(buffers: &[(Option<PathBuf>, BufferSnapshot)]) -> Result<String> {
let mut message = String::new();
writeln!(
message,
"The following is a list of recent buffers that the user has opened."
)?;
writeln!(
message,
"For every line in the buffer, I will include a row number that line corresponds to."
)?;
writeln!(
message,
"Lines that don't have a number correspond to errors and warnings. For example:"
)?;
writeln!(message, "path/to/file.md")?;
writeln!(message, "```markdown")?;
writeln!(message, "1 The quick brown fox")?;
writeln!(message, "2 jumps over one active")?;
writeln!(message, " --- error: should be 'the'")?;
writeln!(message, " ------ error: should be 'lazy'")?;
writeln!(message, "3 dog")?;
writeln!(message, "```")?;
message.push('\n');
writeln!(message, "Here's the actual recent buffer list:")?;
for (path, buffer) in buffers {
if let Some(path) = path {
writeln!(message, "{}", path.display())?;
} else {
writeln!(message, "untitled")?;
}
if let Some(language) = buffer.language() {
writeln!(message, "```{}", language.name().to_lowercase())?;
} else {
writeln!(message, "```")?;
}
let mut diagnostics = buffer
.diagnostics_in_range::<_, Point>(
language::Anchor::MIN..language::Anchor::MAX,
false,
)
.peekable();
let mut active_diagnostics = Vec::<DiagnosticEntry<Point>>::new();
const GUTTER_PADDING: usize = 4;
let gutter_width =
((buffer.max_point().row + 1) as f32).log10() as usize + 1 + GUTTER_PADDING;
for buffer_row in 0..=buffer.max_point().row {
let display_row = buffer_row + 1;
active_diagnostics.retain(|diagnostic| {
(diagnostic.range.start.row..=diagnostic.range.end.row).contains(&buffer_row)
});
while diagnostics.peek().map_or(false, |diagnostic| {
(diagnostic.range.start.row..=diagnostic.range.end.row).contains(&buffer_row)
}) {
active_diagnostics.push(diagnostics.next().unwrap());
}
let row_width = (display_row as f32).log10() as usize + 1;
write!(message, "{}", display_row)?;
if row_width < gutter_width {
message.extend(iter::repeat(' ').take(gutter_width - row_width));
}
for chunk in buffer.text_for_range(
Point::new(buffer_row, 0)..Point::new(buffer_row, buffer.line_len(buffer_row)),
) {
message.push_str(chunk);
}
message.push('\n');
for diagnostic in &active_diagnostics {
message.extend(iter::repeat(' ').take(gutter_width));
let start_column = if diagnostic.range.start.row == buffer_row {
message
.extend(iter::repeat(' ').take(diagnostic.range.start.column as usize));
diagnostic.range.start.column
} else {
0
};
let end_column = if diagnostic.range.end.row == buffer_row {
diagnostic.range.end.column
} else {
buffer.line_len(buffer_row)
};
message.extend(iter::repeat('-').take((end_column - start_column) as usize));
writeln!(message, " {}", diagnostic.diagnostic.message)?;
}
}
message.push('\n');
}
writeln!(
message,
"When quoting the above code, mention which rows the code occurs at."
)?;
writeln!(
message,
"Never include rows in the quoted code itself and only report lines that didn't start with a row number."
)
?;
Ok(message)
/// Converts this context into a system message for the language model,
/// or returns `None` when the recent-buffers context is disabled.
pub fn to_message(&self) -> Option<LanguageModelRequestMessage> {
    if self.enabled {
        Some(LanguageModelRequestMessage {
            role: Role::System,
            content: self.snapshot.message.to_string(),
        })
    } else {
        None
    }
}
}
/// Point-in-time capture of the recent-buffers context: the rendered system
/// message plus the source buffers it was built from.
#[derive(Clone, Default, Debug)]
pub struct RecentBuffersSnapshot {
// The message sent to the language model describing the recent buffers.
pub message: Rope,
// The buffers (and their snapshots) the message was generated from.
pub source_buffers: Vec<SourceBufferSnapshot>,
}
/// A single buffer captured for the recent-buffers context.
#[derive(Clone)]
pub struct SourceBufferSnapshot {
// Path of the buffer, if it has one (rendered as "untitled" otherwise).
pub full_path: Option<PathBuf>,
// Weak handle back to the live buffer; upgraded later to apply edits.
pub model: WeakModel<Buffer>,
// Contents of the buffer at the time the snapshot was taken.
pub snapshot: BufferSnapshot,
}
/// Manual `Debug` impl that renders the weak model as its entity id and the
/// snapshot as its full text, since neither field derives `Debug` usefully.
impl std::fmt::Debug for SourceBufferSnapshot {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SourceBufferSnapshot")
.field("full_path", &self.full_path)
.field("model (entity id)", &self.model.entity_id())
.field("snapshot (text)", &self.snapshot.text())
.finish()
}
}
/// Renders the given source-buffer snapshots into a single system message:
/// a one-line preamble, then each buffer's path (or "untitled") as a fence
/// header followed by its full contents and a closing fence.
fn message_for_recent_buffers(buffers: Vec<SourceBufferSnapshot>) -> Rope {
    let mut text = String::new();
    text.push_str("The following is a list of recent buffers that the user has opened.\n");
    for source in buffers {
        // Open a fence labeled with the buffer's path, if it has one.
        let header = match source.full_path {
            Some(path) => format!("```{}\n", path.display()),
            None => "```untitled\n".to_string(),
        };
        text.push_str(&header);
        // Append the buffer contents chunk by chunk.
        for chunk in source.snapshot.chunks(0..source.snapshot.len(), false) {
            text.push_str(chunk.text);
        }
        // Guarantee the closing fence starts on its own line.
        if !text.ends_with('\n') {
            text.push('\n');
        }
        text.push_str("```\n");
    }
    Rope::from(text.as_str())
}

View file

@ -6,8 +6,10 @@ mod completion_provider;
mod prompt_library;
mod prompts;
mod saved_conversation;
mod search;
mod streaming_diff;
use ambient_context::AmbientContextSnapshot;
pub use assistant_panel::AssistantPanel;
use assistant_settings::{AnthropicModel, AssistantSettings, OpenAiModel, ZedDotDevModel};
use client::{proto, Client};
@ -35,6 +37,7 @@ actions!(
InsertActivePrompt,
ToggleIncludeConversation,
ToggleHistory,
ApplyEdit
]
);
@ -184,6 +187,9 @@ pub struct LanguageModelChoiceDelta {
struct MessageMetadata {
role: Role,
status: MessageStatus,
// todo!("delete this")
#[serde(skip)]
ambient_context: AmbientContextSnapshot,
}
#[derive(Clone, Debug, Serialize, Deserialize)]

View file

@ -1,14 +1,14 @@
use crate::ambient_context::{AmbientContext, ContextUpdated, RecentBuffer};
use crate::InsertActivePrompt;
use crate::{
ambient_context::*,
assistant_settings::{AssistantDockPosition, AssistantSettings, ZedDotDevModel},
codegen::{self, Codegen, CodegenKind},
prompt_library::{PromptLibrary, PromptManager},
prompts::generate_content_prompt,
Assist, CompletionProvider, CycleMessageRole, InlineAssist, LanguageModel,
LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata, MessageStatus,
QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata, SavedMessage,
Split, ToggleFocus, ToggleHistory, ToggleIncludeConversation,
search::*,
ApplyEdit, Assist, CompletionProvider, CycleMessageRole, InlineAssist, InsertActivePrompt,
LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata,
MessageStatus, QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata,
SavedMessage, Split, ToggleFocus, ToggleHistory, ToggleIncludeConversation,
};
use anyhow::{anyhow, Result};
use client::telemetry::Telemetry;
@ -30,17 +30,20 @@ use gpui::{
AsyncWindowContext, AvailableSpace, ClipboardItem, Context, Entity, EventEmitter, FocusHandle,
FocusableView, FontStyle, FontWeight, HighlightStyle, InteractiveElement, IntoElement, Model,
ModelContext, ParentElement, Pixels, Render, SharedString, StatefulInteractiveElement, Styled,
Subscription, Task, TextStyle, UniformListScrollHandle, UpdateGlobal, View, ViewContext,
VisualContext, WeakModel, WeakView, WhiteSpace, WindowContext,
Subscription, Task, TextStyle, UniformListScrollHandle, View, ViewContext, VisualContext,
WeakModel, WeakView, WhiteSpace, WindowContext,
};
use language::{
language_settings::SoftWrap, AutoindentMode, Buffer, BufferSnapshot, LanguageRegistry,
OffsetRangeExt as _, Point, ToOffset as _,
};
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry, Point, ToOffset as _};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::Project;
use project::{Project, ProjectTransaction};
use search::{buffer_search::DivRegistrar, BufferSearchBar};
use settings::Settings;
use std::{
cmp,
cmp::{self, Ordering},
fmt::Write,
iter,
ops::Range,
@ -252,7 +255,7 @@ impl AssistantPanel {
|| prev_settings_version != CompletionProvider::global(cx).settings_version()
{
self.authentication_prompt =
Some(CompletionProvider::update_global(cx, |provider, cx| {
Some(cx.update_global::<CompletionProvider, _>(|provider, cx| {
provider.authentication_prompt(cx)
}));
}
@ -1122,7 +1125,7 @@ impl AssistantPanel {
}
fn authenticate(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
CompletionProvider::update_global(cx, |provider, cx| provider.authenticate(cx))
cx.update_global::<CompletionProvider, _>(|provider, cx| provider.authenticate(cx))
}
fn render_signed_in(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
@ -1380,6 +1383,7 @@ impl FocusableView for AssistantPanel {
enum ConversationEvent {
MessagesEdited,
SummaryChanged,
EditSuggestionsChanged,
StreamedCompletion,
}
@ -1393,6 +1397,7 @@ pub struct Conversation {
id: Option<String>,
buffer: Model<Buffer>,
pub(crate) ambient_context: AmbientContext,
edit_suggestions: Vec<EditSuggestion>,
message_anchors: Vec<MessageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
next_message_id: MessageId,
@ -1403,10 +1408,12 @@ pub struct Conversation {
model: LanguageModel,
token_count: Option<usize>,
pending_token_count: Task<Option<()>>,
pending_edit_suggestion_parse: Option<Task<()>>,
pending_save: Task<Result<()>>,
path: Option<PathBuf>,
_subscriptions: Vec<Subscription>,
telemetry: Option<Arc<Telemetry>>,
language_registry: Arc<LanguageRegistry>,
}
impl EventEmitter<ConversationEvent> for Conversation {}
@ -1418,18 +1425,9 @@ impl Conversation {
telemetry: Option<Arc<Telemetry>>,
cx: &mut ModelContext<Self>,
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::local("", cx);
buffer.set_language_registry(language_registry);
cx.spawn(|buffer, mut cx| async move {
let markdown = markdown.await?;
buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
})?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
buffer.set_language_registry(language_registry.clone());
buffer
});
@ -1439,18 +1437,21 @@ impl Conversation {
messages_metadata: Default::default(),
next_message_id: Default::default(),
ambient_context: AmbientContext::default(),
edit_suggestions: Vec::new(),
summary: None,
pending_summary: Task::ready(None),
completion_count: Default::default(),
pending_completions: Default::default(),
token_count: None,
pending_token_count: Task::ready(None),
pending_edit_suggestion_parse: None,
model,
_subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
pending_save: Task::ready(Ok(())),
path: None,
buffer,
telemetry,
language_registry,
};
let message = MessageAnchor {
@ -1463,9 +1464,11 @@ impl Conversation {
MessageMetadata {
role: Role::User,
status: MessageStatus::Done,
ambient_context: AmbientContextSnapshot::default(),
},
);
this.set_language(cx);
this.count_remaining_tokens(cx);
this
}
@ -1517,7 +1520,7 @@ impl Conversation {
});
next_message_id = cmp::max(next_message_id, MessageId(message.id.0 + 1));
}
buffer.set_language_registry(language_registry);
buffer.set_language_registry(language_registry.clone());
cx.spawn(|buffer, mut cx| async move {
let markdown = markdown.await?;
buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
@ -1529,13 +1532,14 @@ impl Conversation {
buffer
})?;
cx.new_model(|cx| {
cx.new_model(move |cx| {
let mut this = Self {
id,
message_anchors,
messages_metadata: saved_conversation.message_metadata,
next_message_id,
ambient_context: AmbientContext::default(),
edit_suggestions: Vec::new(),
summary: Some(Summary {
text: saved_conversation.summary,
done: true,
@ -1544,6 +1548,7 @@ impl Conversation {
completion_count: Default::default(),
pending_completions: Default::default(),
token_count: None,
pending_edit_suggestion_parse: None,
pending_token_count: Task::ready(None),
model,
_subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
@ -1551,12 +1556,27 @@ impl Conversation {
path: Some(path),
buffer,
telemetry,
language_registry,
};
this.set_language(cx);
this.reparse_edit_suggestions(cx);
this.count_remaining_tokens(cx);
this
})
}
/// Asynchronously resolves the Markdown language from the registry and
/// assigns it to the conversation buffer. The task is detached; failures
/// are logged (`detach_and_log_err`) rather than surfaced to the caller.
fn set_language(&mut self, cx: &mut ModelContext<Self>) {
let markdown = self.language_registry.language_for_name("Markdown");
cx.spawn(|this, mut cx| async move {
let markdown = markdown.await?;
this.update(&mut cx, |this, cx| {
this.buffer
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
})
})
.detach_and_log_err(cx);
}
fn toggle_recent_buffers(&mut self, cx: &mut ModelContext<Self>) {
self.ambient_context.recent_buffers.enabled = !self.ambient_context.recent_buffers.enabled;
match self.ambient_context.recent_buffers.update(cx) {
@ -1619,6 +1639,7 @@ impl Conversation {
) {
if *event == language::Event::Edited {
self.count_remaining_tokens(cx);
self.reparse_edit_suggestions(cx);
cx.emit(ConversationEvent::MessagesEdited);
}
}
@ -1645,6 +1666,65 @@ impl Conversation {
});
}
/// Schedules a debounced re-parse of edit suggestions across the entire
/// conversation buffer, replacing any previously scheduled parse task.
fn reparse_edit_suggestions(&mut self, cx: &mut ModelContext<Self>) {
self.pending_edit_suggestion_parse = Some(cx.spawn(|this, mut cx| async move {
// Debounce: wait for edits to settle before re-parsing.
cx.background_executor()
.timer(Duration::from_millis(200))
.await;
this.update(&mut cx, |this, cx| {
this.reparse_edit_suggestions_in_range(0..this.buffer.read(cx).len(), cx);
})
.ok();
}));
}
/// Re-parses edit suggestions within the given byte range of the
/// conversation buffer and splices the fresh results into
/// `self.edit_suggestions`, which is kept ordered by buffer position.
fn reparse_edit_suggestions_in_range(
&mut self,
range: Range<usize>,
cx: &mut ModelContext<Self>,
) {
self.buffer.update(cx, |buffer, _| {
let range_start = buffer.anchor_before(range.start);
let range_end = buffer.anchor_after(range.end);
// The comparators below never return `Equal` (`.then(..)` rewrites it),
// so `binary_search_by` always yields `Err` with an insertion index.
// `start_ix` is the first suggestion whose end is at or after the
// range start.
let start_ix = self
.edit_suggestions
.binary_search_by(|probe| {
probe
.source_range
.end
.cmp(&range_start, buffer)
.then(Ordering::Greater)
})
.unwrap_err();
// `end_ix` is the first suggestion whose start is after the range end.
let end_ix = self
.edit_suggestions
.binary_search_by(|probe| {
probe
.source_range
.start
.cmp(&range_end, buffer)
.then(Ordering::Less)
})
.unwrap_err();
let mut new_edit_suggestions = Vec::new();
let mut message_lines = buffer.as_rope().chunks_in_range(range).lines();
while let Some(suggestion) = parse_next_edit_suggestion(&mut message_lines) {
let start_anchor = buffer.anchor_after(suggestion.outer_range.start);
let end_anchor = buffer.anchor_before(suggestion.outer_range.end);
new_edit_suggestions.push(EditSuggestion {
source_range: start_anchor..end_anchor,
full_path: suggestion.path,
});
}
// Replace every suggestion overlapping the range with the fresh parse.
self.edit_suggestions
.splice(start_ix..end_ix, new_edit_suggestions);
});
cx.emit(ConversationEvent::EditSuggestionsChanged);
cx.notify();
}
fn remaining_tokens(&self) -> Option<isize> {
Some(self.model.max_token_count() as isize - self.token_count? as isize)
}
@ -1733,15 +1813,26 @@ impl Conversation {
.message_anchors
.iter()
.position(|message| message.id == assistant_message_id)?;
this.buffer.update(cx, |buffer, cx| {
let offset = this.message_anchors[message_ix + 1..]
let message_range = this.buffer.update(cx, |buffer, cx| {
let message_start_offset =
this.message_anchors[message_ix].start.to_offset(buffer);
let message_old_end_offset = this.message_anchors
[message_ix + 1..]
.iter()
.find(|message| message.start.is_valid(buffer))
.map_or(buffer.len(), |message| {
message.start.to_offset(buffer).saturating_sub(1)
});
buffer.edit([(offset..offset, text)], None, cx);
let message_new_end_offset =
message_old_end_offset + text.len();
buffer.edit(
[(message_old_end_offset..message_old_end_offset, text)],
None,
cx,
);
message_start_offset..message_new_end_offset
});
this.reparse_edit_suggestions_in_range(message_range, cx);
cx.emit(ConversationEvent::StreamedCompletion);
Some(())
@ -1801,11 +1892,17 @@ impl Conversation {
}
fn to_completion_request(&self, cx: &mut ModelContext<Conversation>) -> LanguageModelRequest {
let edits_system_prompt = LanguageModelRequestMessage {
role: Role::System,
content: include_str!("./system_prompts/edits.md").to_string(),
};
let recent_buffers_context = self.ambient_context.recent_buffers.to_message();
let current_project_context = self.ambient_context.current_project.to_message();
let messages = recent_buffers_context
let messages = Some(edits_system_prompt)
.into_iter()
.chain(recent_buffers_context)
.chain(current_project_context)
.chain(
self.messages(cx)
@ -1870,8 +1967,14 @@ impl Conversation {
};
self.message_anchors
.insert(next_message_ix, message.clone());
self.messages_metadata
.insert(message.id, MessageMetadata { role, status });
self.messages_metadata.insert(
message.id,
MessageMetadata {
role,
status,
ambient_context: self.ambient_context.snapshot(),
},
);
cx.emit(ConversationEvent::MessagesEdited);
Some(message)
} else {
@ -1929,6 +2032,7 @@ impl Conversation {
MessageMetadata {
role,
status: MessageStatus::Done,
ambient_context: message.ambient_context.clone(),
},
);
@ -1973,6 +2077,7 @@ impl Conversation {
MessageMetadata {
role,
status: MessageStatus::Done,
ambient_context: message.ambient_context,
},
);
(Some(selection), Some(suffix))
@ -2104,6 +2209,7 @@ impl Conversation {
anchor: message_anchor.start,
role: metadata.role,
status: metadata.status.clone(),
ambient_context: metadata.ambient_context.clone(),
});
}
None
@ -2168,6 +2274,99 @@ impl Conversation {
}
}
/// State machine used by `parse_next_edit_suggestion` while scanning the
/// assistant's output line by line.
#[derive(Debug)]
enum EditParsingState {
/// Outside any edit block; looking for an opening fence line that starts
/// with "```edit " followed by a non-empty path.
None,
/// Inside the "old text" section, before the "---" separator line.
InOldText {
path: PathBuf,
start_offset: usize,
old_text_start_offset: usize,
},
/// Inside the "new text" section, before the closing "```" fence.
InNewText {
path: PathBuf,
start_offset: usize,
old_text_range: Range<usize>,
new_text_start_offset: usize,
},
}
/// An edit block found in the conversation buffer, tracked by anchors so it
/// stays attached to the right text as the buffer is edited.
#[derive(Clone, Debug, PartialEq)]
struct EditSuggestion {
// Anchored range covering the whole suggestion, fences included.
source_range: Range<language::Anchor>,
// Path of the target file, as written after "```edit" on the fence line.
full_path: PathBuf,
}
/// Byte offsets of one parsed edit block, relative to the text that was
/// scanned by `parse_next_edit_suggestion`.
struct ParsedEditSuggestion {
// Target file path taken from the opening fence line.
path: PathBuf,
// The whole block, from the opening fence through the closing "```".
outer_range: Range<usize>,
// The "old text" section (between the fence line and "---").
old_text_range: Range<usize>,
// The "new text" section (between "---" and the closing fence).
new_text_range: Range<usize>,
}
/// Scans `lines` for the next complete edit block: an opening fence line of
/// the form ```` ```edit <path> ````, the old text, a `---` separator line,
/// the new text, and a closing ```` ``` ```` fence.
///
/// Returns `None` when the lines are exhausted before a block completes.
/// Offsets in the returned suggestion are relative to the scanned text.
fn parse_next_edit_suggestion(lines: &mut rope::Lines) -> Option<ParsedEditSuggestion> {
let mut state = EditParsingState::None;
loop {
// Offset of the start of the line we're about to consume.
let offset = lines.offset();
let message_line = lines.next()?;
match state {
EditParsingState::None => {
// An opening fence with a non-empty path starts a block;
// otherwise we stay in `None`.
if let Some(rest) = message_line.strip_prefix("```edit ") {
let path = rest.trim();
if !path.is_empty() {
state = EditParsingState::InOldText {
path: PathBuf::from(path),
start_offset: offset,
old_text_start_offset: lines.offset(),
};
}
}
}
EditParsingState::InOldText {
path,
start_offset,
old_text_start_offset,
} => {
// "---" ends the old text and begins the new text.
if message_line == "---" {
state = EditParsingState::InNewText {
path,
start_offset,
old_text_range: old_text_start_offset..offset,
new_text_start_offset: lines.offset(),
};
} else {
// Still in the old text; the match moved `path` out of
// `state`, so the unchanged state must be rebuilt.
state = EditParsingState::InOldText {
path,
start_offset,
old_text_start_offset,
};
}
}
EditParsingState::InNewText {
path,
start_offset,
old_text_range,
new_text_start_offset,
} => {
// The closing fence completes the block.
if message_line == "```" {
return Some(ParsedEditSuggestion {
path,
outer_range: start_offset..offset + "```".len(),
old_text_range,
new_text_range: new_text_start_offset..offset,
});
} else {
state = EditParsingState::InNewText {
path,
start_offset,
old_text_range,
new_text_start_offset,
};
}
}
}
}
}
struct PendingCompletion {
id: usize,
_task: Task<()>,
@ -2317,6 +2516,40 @@ impl ConversationEditor {
conversation.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
});
}
ConversationEvent::EditSuggestionsChanged => {
self.editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx).snapshot(cx);
let excerpt_id = *buffer.as_singleton().unwrap().0;
let conversation = self.conversation.read(cx);
let highlighted_rows = conversation
.edit_suggestions
.iter()
.map(|suggestion| {
let start = buffer
.anchor_in_excerpt(excerpt_id, suggestion.source_range.start)
.unwrap();
let end = buffer
.anchor_in_excerpt(excerpt_id, suggestion.source_range.end)
.unwrap();
start..=end
})
.collect::<Vec<_>>();
editor.clear_row_highlights::<EditSuggestion>();
for range in highlighted_rows {
editor.highlight_rows::<EditSuggestion>(
range,
Some(
cx.theme()
.colors()
.editor_document_highlight_read_background,
),
false,
cx,
);
}
});
}
ConversationEvent::SummaryChanged => {
cx.emit(ConversationEditorEvent::TabContentChanged);
self.conversation.update(cx, |conversation, cx| {
@ -2737,6 +2970,159 @@ impl ConversationEditor {
});
}
/// Applies the edit suggestions that intersect the current editor selections
/// to their corresponding project buffers.
///
/// Flow: collect the suggestions under the selections, re-parse each one
/// from the latest conversation text (so manual tweaks to a suggestion are
/// honored), fuzzily locate each old text in its target buffer on a
/// background task, then apply all edits as one project transaction and open
/// it for review.
fn apply_edit(&mut self, _: &ApplyEdit, cx: &mut ViewContext<Self>) {
// One replacement parsed out of a suggestion: the text to find in the
// target buffer and the text to put in its place.
struct Edit {
old_text: String,
new_text: String,
}
let conversation = self.conversation.read(cx);
let conversation_buffer = conversation.buffer.read(cx);
let conversation_buffer_snapshot = conversation_buffer.snapshot();
// Keep only suggestions overlapping at least one selection. A single
// forward scan works here; it assumes both lists are ordered by buffer
// position.
let selections = self.editor.read(cx).selections.disjoint_anchors();
let mut selections = selections.iter().peekable();
let selected_suggestions = conversation.edit_suggestions.iter().filter(|suggestion| {
while let Some(selection) = selections.peek() {
// Selection ends before this suggestion: move to the next one.
if selection
.end
.text_anchor
.cmp(&suggestion.source_range.start, conversation_buffer)
.is_lt()
{
selections.next();
continue;
}
// Selection starts after this suggestion: no overlap.
if selection
.start
.text_anchor
.cmp(&suggestion.source_range.end, conversation_buffer)
.is_gt()
{
break;
}
return true;
}
false
});
// Group the parsed edits by the project buffer they target.
let mut suggestions_by_buffer =
HashMap::<Model<Buffer>, (BufferSnapshot, Vec<Edit>)>::default();
for suggestion in selected_suggestions {
let offset = suggestion.source_range.start.to_offset(conversation_buffer);
if let Some(message) = conversation.message_for_offset(offset, cx) {
// Resolve the suggestion's path against the buffers captured in
// the message's ambient context when the message was created.
if let Some(buffer) = message
.ambient_context
.recent_buffers
.source_buffers
.iter()
.find(|source_buffer| {
source_buffer.full_path.as_ref() == Some(&suggestion.full_path)
})
{
if let Some(buffer) = buffer.model.upgrade() {
let (_, edits) = suggestions_by_buffer
.entry(buffer.clone())
.or_insert_with(|| (buffer.read(cx).snapshot(), Vec::new()));
// Re-parse the block from the current conversation text so
// manual edits to the suggestion are picked up.
let mut lines = conversation_buffer_snapshot
.as_rope()
.chunks_in_range(
suggestion
.source_range
.to_offset(&conversation_buffer_snapshot),
)
.lines();
if let Some(suggestion) = parse_next_edit_suggestion(&mut lines) {
let old_text = conversation_buffer_snapshot
.text_for_range(suggestion.old_text_range)
.collect();
let new_text = conversation_buffer_snapshot
.text_for_range(suggestion.new_text_range)
.collect();
edits.push(Edit { old_text, new_text });
}
}
}
}
}
cx.spawn(|this, mut cx| async move {
// Locate each edit's old text in its buffer off the main thread.
let edits_by_buffer = cx
.background_executor()
.spawn(async move {
let mut result = HashMap::default();
for (buffer, (snapshot, suggestions)) in suggestions_by_buffer {
let edits =
result
.entry(buffer)
.or_insert(Vec::<(Range<language::Anchor>, _)>::new());
for suggestion in suggestions {
let ranges =
fuzzy_search_lines(snapshot.as_rope(), &suggestion.old_text);
// Take the first match only; keep the edit list sorted
// and drop a second edit targeting the same start.
if let Some(range) = ranges.first() {
let edit_start = snapshot.anchor_after(range.start);
let edit_end = snapshot.anchor_before(range.end);
if let Err(ix) = edits.binary_search_by(|(range, _)| {
range.start.cmp(&edit_start, &snapshot)
}) {
edits.insert(
ix,
(edit_start..edit_end, suggestion.new_text.clone()),
);
}
} else {
log::info!(
"assistant edit did not match any text in buffer {:?}",
&suggestion.old_text
);
}
}
}
result
})
.await;
// Apply each buffer's edits inside a single transaction (with block
// auto-indent), collecting them into one project transaction.
let mut project_transaction = ProjectTransaction::default();
let (editor, workspace, title) = this.update(&mut cx, |this, cx| {
for (buffer_handle, edits) in edits_by_buffer {
buffer_handle.update(cx, |buffer, cx| {
buffer.start_transaction();
buffer.edit(
edits,
Some(AutoindentMode::Block {
original_indent_columns: Vec::new(),
}),
cx,
);
buffer.end_transaction(cx);
if let Some(transaction) = buffer.finalize_last_transaction() {
project_transaction
.0
.insert(buffer_handle.clone(), transaction.clone());
}
});
}
(
this.editor.downgrade(),
this.workspace.clone(),
this.title(cx),
)
})?;
// Open the collected transaction so the user can review the edits.
Editor::open_project_transaction(
&editor,
workspace,
project_transaction,
format!("Edits from {}", title),
cx,
)
.await
})
.detach_and_log_err(cx);
}
fn save(&mut self, _: &Save, cx: &mut ViewContext<Self>) {
self.conversation.update(cx, |conversation, cx| {
conversation.save(None, self.fs.clone(), cx)
@ -2765,6 +3151,7 @@ impl Render for ConversationEditor {
.capture_action(cx.listener(ConversationEditor::cycle_message_role))
.on_action(cx.listener(ConversationEditor::assist))
.on_action(cx.listener(ConversationEditor::split))
.on_action(cx.listener(ConversationEditor::apply_edit))
.size_full()
.v_flex()
.child(
@ -2797,6 +3184,7 @@ pub struct Message {
anchor: language::Anchor,
role: Role,
status: MessageStatus,
ambient_context: AmbientContextSnapshot,
}
impl Message {
@ -3106,10 +3494,14 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
#[cfg(test)]
mod tests {
use std::path::Path;
use super::*;
use crate::{FakeCompletionProvider, MessageId};
use gpui::{AppContext, TestAppContext};
use rope::Rope;
use settings::SettingsStore;
use unindent::Unindent;
#[gpui::test]
fn test_inserting_and_removing_messages(cx: &mut AppContext) {
@ -3428,6 +3820,76 @@ mod tests {
}
}
#[test]
fn test_parse_next_edit_suggestion() {
// Two edit blocks in one message: the first replaces two lines with
// four, the second has an empty new-text section (a pure deletion).
let text = "
some output:
```edit src/foo.rs
let a = 1;
let b = 2;
---
let w = 1;
let x = 2;
let y = 3;
let z = 4;
```
some more output:
```edit src/foo.rs
let c = 1;
---
```
and the conclusion.
"
.unindent();
let rope = Rope::from(text.as_str());
let mut lines = rope.chunks().lines();
let mut suggestions = vec![];
// Drain the parser, collecting (path, old text, new text) triples.
while let Some(suggestion) = parse_next_edit_suggestion(&mut lines) {
suggestions.push((
suggestion.path.clone(),
text[suggestion.old_text_range].to_string(),
text[suggestion.new_text_range].to_string(),
));
}
assert_eq!(
suggestions,
vec![
(
Path::new("src/foo.rs").into(),
[
" let a = 1;", //
" let b = 2;",
"",
]
.join("\n"),
[
" let w = 1;",
" let x = 2;",
" let y = 3;",
" let z = 4;",
"",
]
.join("\n"),
),
(
Path::new("src/foo.rs").into(),
[
" let c = 1;", //
"",
]
.join("\n"),
String::new(),
)
]
);
}
#[gpui::test]
async fn test_serialization(cx: &mut TestAppContext) {
let settings_store = cx.update(SettingsStore::test);

View file

@ -0,0 +1,150 @@
use language::Rope;
use std::ops::Range;
/// Searches `haystack` for `needle`, ignoring differences in leading and
/// trailing whitespace between corresponding lines.
///
/// Returns byte ranges in `haystack`, each covering the entire run of lines
/// that matched the needle (including the newline after the last matched
/// line). Overlapping matches are all reported.
pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Vec<Range<usize>> {
let mut matches = Vec::new();
let mut haystack_lines = haystack.chunks().lines();
let mut haystack_line_start = 0;
while let Some(haystack_line) = haystack_lines.next() {
// Start of the following line (+1 for the newline terminator).
// NOTE(review): for a final line with no trailing newline this
// overshoots by one byte — confirm `lines()`/`seek` semantics if a
// needle can match the very last line of the rope.
let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1;
let mut trimmed_needle_lines = needle.lines().map(|line| line.trim());
// A candidate match begins wherever the first needle line matches.
if Some(haystack_line.trim()) == trimmed_needle_lines.next() {
let match_start = haystack_line_start;
let mut match_end = next_haystack_line_start;
let matched = loop {
match (haystack_lines.next(), trimmed_needle_lines.next()) {
(Some(haystack_line), Some(needle_line)) => {
if haystack_line.trim() == needle_line {
// Lines agree: extend the match through this line.
match_end = haystack_lines.offset();
} else {
// Haystack line differs from needle line: not a match.
break false;
}
}
// We exhausted the haystack but not the query: not a match.
(None, Some(_)) => break false,
// We exhausted the query: it's a match.
(_, None) => break true,
}
};
if matched {
matches.push(match_start..match_end)
}
// Rewind to just after the line where this attempt began, so
// overlapping candidates are still considered.
haystack_lines.seek(next_haystack_line_start);
}
haystack_line_start = next_haystack_line_start;
}
matches
}
#[cfg(test)]
mod test {
    use super::*;
    use gpui::{AppContext, Context as _};
    use language::{Buffer, OffsetRangeExt};
    use unindent::Unindent as _;
    use util::test::marked_text_ranges;

    #[gpui::test]
    fn test_fuzzy_search_lines(cx: &mut AppContext) {
        // The «» markers in this fixture delimit the byte ranges the search
        // is expected to return: occurrences of the query whose lines all
        // match after trimming leading/trailing whitespace.
        let (text, expected_ranges) = marked_text_ranges(
            &r#"
            fn main() {
                if a() {
                    assert_eq!(
                        1 + 2,
                        does_not_match,
                    );
                }
                println!("hi");
                assert_eq!(
                    1 + 2,
                    3,
                ); // this last line does not match
«                assert_eq!(
                    1 + 2,
                    3,
                );
»
                assert_eq!(
                    "something",
                    "else",
                );
                if b {
«                    assert_eq!(
                        1 + 2,
                        3,
                    );
»                }
            }
            "#
            .unindent(),
            false,
        );
        let buffer = cx.new_model(|cx| Buffer::local(&text, cx));
        let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // Search with the query as written.
        let actual_ranges = fuzzy_search_lines(
            snapshot.as_rope(),
            &"
            assert_eq!(
                1 + 2,
                3,
            );
            "
            .unindent(),
        );
        assert_eq!(
            actual_ranges,
            expected_ranges,
            "actual: {:?}, expected: {:?}",
            actual_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>(),
            expected_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>()
        );

        // NOTE(review): this second query appears identical to the first in
        // this view; presumably it originally differed only in leading
        // indentation, to exercise the indentation-insensitive matching —
        // confirm against the original source.
        let actual_ranges = fuzzy_search_lines(
            snapshot.as_rope(),
            &"
            assert_eq!(
                1 + 2,
                3,
            );
            "
            .unindent(),
        );
        assert_eq!(
            actual_ranges,
            expected_ranges,
            "actual: {:?}, expected: {:?}",
            actual_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>(),
            expected_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>()
        );
    }
}

View file

@ -0,0 +1,86 @@
When the user asks you to suggest edits for a buffer, use a strict template consisting of:
* A markdown code block that starts with ```edit followed by the path of the file being edited.
* The original text that should be replaced.
* A separator line (`---`).
* The new text that should replace the original text.
Each code block may only contain an edit for one single contiguous range of text. Use multiple code blocks for multiple edits.
## Example
If you have a buffer with the following lines:
```path/to/file.rs
fn quicksort(arr: &mut [i32]) {
if arr.len() <= 1 {
return;
}
let pivot_index = partition(arr);
let (left, right) = arr.split_at_mut(pivot_index);
quicksort(left);
quicksort(&mut right[1..]);
}
fn partition(arr: &mut [i32]) -> usize {
let last_index = arr.len() - 1;
let pivot = arr[last_index];
let mut i = 0;
for j in 0..last_index {
if arr[j] <= pivot {
arr.swap(i, j);
i += 1;
}
}
arr.swap(i, last_index);
i
}
```
If you wanted to replace the for loop inside `partition`, output the following:
```edit path/to/file.rs
for j in 0..last_index {
if arr[j] <= pivot {
arr.swap(i, j);
i += 1;
}
}
---
let mut j = 0;
while j < last_index {
if arr[j] <= pivot {
arr.swap(i, j);
i += 1;
}
j += 1;
}
```
If you wanted to insert comments above the partition function, output the following:
```edit path/to/file.rs
fn partition(arr: &mut [i32]) -> usize {
---
// A helper function used for quicksort.
fn partition(arr: &mut [i32]) -> usize {
```
If you wanted to delete the partition function, output the following:
```edit path/to/file.rs
fn partition(arr: &mut [i32]) -> usize {
let last_index = arr.len() - 1;
let pivot = arr[last_index];
let mut i = 0;
for j in 0..last_index {
if arr[j] <= pivot {
arr.swap(i, j);
i += 1;
}
}
arr.swap(i, last_index);
i
}
---
```

View file

@ -4074,7 +4074,7 @@ impl Editor {
}
}
async fn open_project_transaction(
pub async fn open_project_transaction(
this: &WeakView<Editor>,
workspace: WeakView<Workspace>,
transaction: ProjectTransaction,

View file

@ -617,6 +617,14 @@ impl<'a> Chunks<'a> {
let end = self.range.end - chunk_start;
Some(&chunk.0[start..chunk.0.len().min(end)])
}
    /// Consumes this chunk cursor, returning a [`Lines`] cursor that yields
    /// the remaining text one line at a time.
    pub fn lines(self) -> Lines<'a> {
        Lines {
            chunks: self,
            current_line: String::new(),
            done: false,
        }
    }
}
impl<'a> Iterator for Chunks<'a> {
@ -714,6 +722,49 @@ impl<'a> io::Read for Bytes<'a> {
}
}
/// A line-oriented cursor over a [`Chunks`] iterator, produced by
/// `Chunks::lines`. Yields one line at a time via its inherent `next`
/// method; the returned `&str` borrows an internal buffer, so this is a
/// lending cursor rather than an implementation of `Iterator`.
pub struct Lines<'a> {
    // Underlying chunk cursor the lines are read from.
    chunks: Chunks<'a>,
    // Reusable buffer holding the most recently assembled line; a line can
    // span chunk boundaries, so fragments are accumulated here.
    current_line: String,
    // Set once the final (newline-less) line has been yielded.
    done: bool,
}
impl<'a> Lines<'a> {
    /// Returns the next line, without its trailing `\n`, or `None` once the
    /// text is exhausted. Text ending in `\n` yields one final empty line
    /// (see `test_lines` below).
    pub fn next(&mut self) -> Option<&str> {
        if self.done {
            return None;
        }
        self.current_line.clear();
        while let Some(chunk) = self.chunks.peek() {
            let mut lines = chunk.split('\n').peekable();
            while let Some(line) = lines.next() {
                // Accumulate this fragment; a line may span multiple chunks.
                self.current_line.push_str(line);
                if lines.peek().is_some() {
                    // A `\n` follows within this chunk: reposition the cursor
                    // just past it and yield the completed line.
                    // NOTE(review): assumes `Chunks::offset` still points at
                    // the start of the peeked chunk here — confirm.
                    self.chunks
                        .seek(self.chunks.offset() + line.len() + "\n".len());
                    return Some(&self.current_line);
                }
            }
            // No newline in the rest of this chunk; continue into the next.
            self.chunks.next();
        }
        // Out of chunks: yield whatever was accumulated as the final line,
        // then report `None` on subsequent calls.
        self.done = true;
        Some(&self.current_line)
    }

    /// Repositions the cursor at `offset` (a byte offset into the rope,
    /// which should fall on a line boundary for subsequent `next` calls to
    /// yield whole lines) and resets the exhaustion state.
    pub fn seek(&mut self, offset: usize) {
        self.chunks.seek(offset);
        self.current_line.clear();
        self.done = false;
    }

    /// The current byte offset of the underlying chunk cursor.
    pub fn offset(&self) -> usize {
        self.chunks.offset()
    }
}
/// A leaf segment of rope text stored inline, with a fixed capacity of
/// `2 * CHUNK_BASE` bytes.
#[derive(Clone, Debug, Default)]
struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>);
@ -1288,6 +1339,24 @@ mod tests {
);
}
#[test]
fn test_lines() {
    // Each case pairs an input with the sequence of lines it should yield.
    // An input with a trailing newline produces one final empty line.
    let cases: [(&str, &[&str]); 2] = [
        ("abc\ndefg\nhi", &["abc", "defg", "hi"]),
        ("abc\ndefg\nhi\n", &["abc", "defg", "hi", ""]),
    ];
    for (text, expected) in cases {
        let rope = Rope::from(text);
        let mut lines = rope.chunks().lines();
        for want in expected {
            assert_eq!(lines.next(), Some(*want));
        }
        // Once exhausted, the cursor keeps reporting `None`.
        assert_eq!(lines.next(), None);
    }
}
#[gpui::test(iterations = 100)]
fn test_random_rope(mut rng: StdRng) {
let operations = env::var("OPERATIONS")