agent: Do not reuse assistant message across generations (#29360)

#29354 introduced a bug where we would append tool uses to the last
assistant message even if it was from a previous request.

Release Notes:

- N/A

Co-authored-by: Bennet Bo Fenner <bennetbo@gmx.de>
This commit is contained in:
Agus Zubiaga 2025-04-24 14:56:47 -03:00 committed by GitHub
parent b0609272c0
commit 58604fba86
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 32 additions and 31 deletions

View file

@@ -1267,6 +1267,8 @@ impl Thread {
                     .ok();
             }
 
+            let mut request_assistant_message_id = None;
+
             while let Some(event) = events.next().await {
                 if let Some((_, response_events)) = request_callback_parameters.as_mut() {
                     response_events
@@ -1278,11 +1280,11 @@ impl Thread {
                 thread.update(cx, |thread, cx| {
                     match event {
                         LanguageModelCompletionEvent::StartMessage { .. } => {
-                            thread.insert_message(
+                            request_assistant_message_id = Some(thread.insert_message(
                                 Role::Assistant,
                                 vec![MessageSegment::Text(String::new())],
                                 cx,
-                            );
+                            ));
                         }
                         LanguageModelCompletionEvent::Stop(reason) => {
                             stop_reason = reason;
@@ -1311,11 +1313,11 @@ impl Thread {
                                 //
                                 // Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it
                                 // will result in duplicating the text of the chunk in the rendered Markdown.
-                                thread.insert_message(
+                                request_assistant_message_id = Some(thread.insert_message(
                                     Role::Assistant,
                                     vec![MessageSegment::Text(chunk.to_string())],
                                     cx,
-                                );
+                                ));
                             };
                         }
                     }
@@ -1338,25 +1340,25 @@ impl Thread {
                                 //
                                 // Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it
                                 // will result in duplicating the text of the chunk in the rendered Markdown.
-                                thread.insert_message(
+                                request_assistant_message_id = Some(thread.insert_message(
                                     Role::Assistant,
                                     vec![MessageSegment::Thinking {
                                         text: chunk.to_string(),
                                         signature,
                                     }],
                                     cx,
-                                );
+                                ));
                             };
                         }
                     }
                     LanguageModelCompletionEvent::ToolUse(tool_use) => {
-                        let last_assistant_message_id = thread
-                            .messages
-                            .iter_mut()
-                            .rfind(|message| message.role == Role::Assistant)
-                            .map(|message| message.id)
+                        let last_assistant_message_id = request_assistant_message_id
                             .unwrap_or_else(|| {
-                                thread.insert_message(Role::Assistant, vec![], cx)
+                                let new_assistant_message_id =
+                                    thread.insert_message(Role::Assistant, vec![], cx);
+                                request_assistant_message_id =
+                                    Some(new_assistant_message_id);
+                                new_assistant_message_id
                             });
 
                         let tool_use_id = tool_use.id.clone();
@@ -1775,22 +1777,19 @@ impl Thread {
         window: Option<AnyWindowHandle>,
         cx: &mut Context<Self>,
     ) -> bool {
-        let canceled = if self.pending_completions.pop().is_some() {
-            true
-        } else {
-            let mut canceled = false;
-            for pending_tool_use in self.tool_use.cancel_pending() {
-                canceled = true;
-                self.tool_finished(
-                    pending_tool_use.id.clone(),
-                    Some(pending_tool_use),
-                    true,
-                    window,
-                    cx,
-                );
-            }
-            canceled
-        };
+        let mut canceled = self.pending_completions.pop().is_some();
+
+        for pending_tool_use in self.tool_use.cancel_pending() {
+            canceled = true;
+            self.tool_finished(
+                pending_tool_use.id.clone(),
+                Some(pending_tool_use),
+                true,
+                window,
+                cx,
+            );
+        }
+
         self.finalize_pending_checkpoint(cx);
         canceled
     }

View file

@@ -453,9 +453,11 @@ impl CopilotChatLanguageModel {
                         }
                     }
 
-                    messages.push(ChatMessage::User {
-                        content: text_content,
-                    });
+                    if !text_content.is_empty() {
+                        messages.push(ChatMessage::User {
+                            content: text_content,
+                        });
+                    }
                 }
                 Role::Assistant => {
                     let mut tool_calls = Vec::new();