acp: Never build a request with a tool use without its corresponding result (#36847)

Release Notes:

- N/A
Antonio Scandurra 2025-08-24 18:30:34 +02:00 committed by GitHub
parent d8bffd7ef2
commit a79aef7bdd
2 changed files with 113 additions and 37 deletions


@@ -448,24 +448,33 @@ impl AgentMessage {
             cache: false,
         };
         for chunk in &self.content {
-            let chunk = match chunk {
+            match chunk {
                 AgentMessageContent::Text(text) => {
-                    language_model::MessageContent::Text(text.clone())
+                    assistant_message
+                        .content
+                        .push(language_model::MessageContent::Text(text.clone()));
                 }
                 AgentMessageContent::Thinking { text, signature } => {
-                    language_model::MessageContent::Thinking {
-                        text: text.clone(),
-                        signature: signature.clone(),
-                    }
+                    assistant_message
+                        .content
+                        .push(language_model::MessageContent::Thinking {
+                            text: text.clone(),
+                            signature: signature.clone(),
+                        });
                 }
                 AgentMessageContent::RedactedThinking(value) => {
-                    language_model::MessageContent::RedactedThinking(value.clone())
+                    assistant_message.content.push(
+                        language_model::MessageContent::RedactedThinking(value.clone()),
+                    );
                 }
-                AgentMessageContent::ToolUse(value) => {
-                    language_model::MessageContent::ToolUse(value.clone())
+                AgentMessageContent::ToolUse(tool_use) => {
+                    if self.tool_results.contains_key(&tool_use.id) {
+                        assistant_message
+                            .content
+                            .push(language_model::MessageContent::ToolUse(tool_use.clone()));
+                    }
                 }
-            };
-            assistant_message.content.push(chunk);
+            }
         }
         let mut user_message = LanguageModelRequestMessage {
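Previously, every `AgentMessageContent::ToolUse` chunk was copied into the assistant message unconditionally, even when the thread had no entry for it in `tool_results`; model APIs typically reject a request in which an assistant `tool_use` block has no matching tool result in the following message (for example when a turn was cancelled before the tool finished). The rewritten loop pushes each chunk directly and skips a tool use unless its id has a recorded result. A minimal sketch of that invariant, using simplified stand-in types rather than the real `language_model` structs:

```rust
use std::collections::HashMap;

// Simplified stand-ins for the real agent message types; names are illustrative only.
struct ToolUse {
    id: String,
}

enum Content {
    Text(String),
    ToolUse(ToolUse),
}

/// Drop any tool use that has no recorded result, so the outgoing request
/// never contains a dangling `tool_use` block.
fn filter_tool_uses(
    chunks: Vec<Content>,
    tool_results: &HashMap<String, String>,
) -> Vec<Content> {
    chunks
        .into_iter()
        .filter(|chunk| match chunk {
            Content::ToolUse(tool_use) => tool_results.contains_key(&tool_use.id),
            _ => true,
        })
        .collect()
}
```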
@@ -1315,23 +1324,6 @@ impl Thread {
         }
     }
-    pub fn build_system_message(&self, cx: &App) -> LanguageModelRequestMessage {
-        log::debug!("Building system message");
-        let prompt = SystemPromptTemplate {
-            project: self.project_context.read(cx),
-            available_tools: self.tools.keys().cloned().collect(),
-        }
-        .render(&self.templates)
-        .context("failed to build system prompt")
-        .expect("Invalid template");
-        log::debug!("System message built");
-        LanguageModelRequestMessage {
-            role: Role::System,
-            content: vec![prompt.into()],
-            cache: true,
-        }
-    }
     /// A helper method that's called on every streamed completion event.
     /// Returns an optional tool result task, which the main agentic loop will
     /// send back to the model when it resolves.
@@ -1773,7 +1765,7 @@ impl Thread {
     pub(crate) fn build_completion_request(
         &self,
         completion_intent: CompletionIntent,
-        cx: &mut App,
+        cx: &App,
     ) -> Result<LanguageModelRequest> {
         let model = self.model().context("No language model configured")?;
         let tools = if let Some(turn) = self.running_turn.as_ref() {
@@ -1894,21 +1886,29 @@ impl Thread {
             "Building request messages from {} thread messages",
             self.messages.len()
         );
-        let mut messages = vec![self.build_system_message(cx)];
+        let system_prompt = SystemPromptTemplate {
+            project: self.project_context.read(cx),
+            available_tools: self.tools.keys().cloned().collect(),
+        }
+        .render(&self.templates)
+        .context("failed to build system prompt")
+        .expect("Invalid template");
+        let mut messages = vec![LanguageModelRequestMessage {
+            role: Role::System,
+            content: vec![system_prompt.into()],
+            cache: false,
+        }];
         for message in &self.messages {
             messages.extend(message.to_request());
         }
-        if let Some(message) = self.pending_message.as_ref() {
-            messages.extend(message.to_request());
-        }
-        if let Some(last_message) = messages.last_mut() {
-            last_message.cache = true;
-        }
+        if let Some(last_user_message) = messages
+            .iter_mut()
+            .rev()
+            .find(|message| message.role == Role::User)
+        {
+            last_user_message.cache = true;
+        }
+        if let Some(message) = self.pending_message.as_ref() {
+            messages.extend(message.to_request());
+        }
         messages
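This hunk also folds the removed `build_system_message` into request building (the system message is no longer marked `cache: true`), and moves the cache anchor: instead of caching whichever message happens to be last, which could be the still-changing pending message, the cache flag is now set on the last user message of the committed history, and the pending message is appended afterwards, presumably so the cache point stays on stable content. A rough sketch of that ordering, with a trimmed-down message type standing in for `LanguageModelRequestMessage`:

```rust
// Trimmed-down stand-ins for the real request message types.
#[derive(PartialEq)]
enum Role {
    System,
    User,
    Assistant,
}

struct Message {
    role: Role,
    cache: bool,
}

/// Mark the last user message of the committed history as the cache point,
/// then append the (possibly still-streaming) pending messages afterwards.
fn arrange_cache_point(mut history: Vec<Message>, pending: Vec<Message>) -> Vec<Message> {
    if let Some(last_user) = history.iter_mut().rev().find(|m| m.role == Role::User) {
        last_user.cache = true;
    }
    history.extend(pending);
    history
}
```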