New revision of the Assistant Panel (#10870)
This is a crate-only addition of a new version of the AssistantPanel. We'll be putting this behind a feature flag while we iron out the new experience.

Release Notes:

- N/A

---------

Co-authored-by: Nathan Sobo <nathan@zed.dev>
Co-authored-by: Antonio Scandurra <me@as-cii.com>
Co-authored-by: Conrad Irwin <conrad@zed.dev>
Co-authored-by: Marshall Bowers <elliott.codes@gmail.com>
Co-authored-by: Antonio Scandurra <antonio@zed.dev>
Co-authored-by: Nate Butler <nate@zed.dev>
Co-authored-by: Nate Butler <iamnbutler@gmail.com>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: Max <max@zed.dev>
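For readers unfamiliar with the approach, the feature-flag gating mentioned above would, in spirit, look something like the sketch below. This is a minimal, self-contained illustration; the flag name and the FeatureFlags type are hypothetical stand-ins, not the actual flag API used in the codebase.

    // Illustrative sketch only: the flag name and this FeatureFlags store are
    // hypothetical stand-ins, not the project's actual feature-flag API.
    use std::collections::HashSet;

    struct FeatureFlags {
        enabled: HashSet<String>,
    }

    impl FeatureFlags {
        fn has_flag(&self, name: &str) -> bool {
            self.enabled.contains(name)
        }
    }

    fn register_assistant_panel(flags: &FeatureFlags) {
        if flags.has_flag("new-assistant-panel") {
            // Wire up the new panel for flagged users.
        } else {
            // Keep using the existing AssistantPanel.
        }
    }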
parent e0c83a1d32
commit 68a1ad89bb
55 changed files with 2989 additions and 262 deletions
@@ -1,5 +1,6 @@
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context as _, Result};
 use rpc::proto;
+use util::ResultExt as _;
 
 pub fn language_model_request_to_open_ai(
     request: proto::CompleteWithLanguageModel,
@@ -9,24 +10,83 @@ pub fn language_model_request_to_open_ai(
         messages: request
             .messages
             .into_iter()
-            .map(|message| {
+            .map(|message: proto::LanguageModelRequestMessage| {
                 let role = proto::LanguageModelRole::from_i32(message.role)
                     .ok_or_else(|| anyhow!("invalid role {}", message.role))?;
-                Ok(open_ai::RequestMessage {
-                    role: match role {
-                        proto::LanguageModelRole::LanguageModelUser => open_ai::Role::User,
-                        proto::LanguageModelRole::LanguageModelAssistant => {
-                            open_ai::Role::Assistant
-                        }
-                        proto::LanguageModelRole::LanguageModelSystem => open_ai::Role::System,
-                    },
-                    content: message.content,
-                })
+
+                let openai_message = match role {
+                    proto::LanguageModelRole::LanguageModelUser => open_ai::RequestMessage::User {
+                        content: message.content,
+                    },
+                    proto::LanguageModelRole::LanguageModelAssistant => {
+                        open_ai::RequestMessage::Assistant {
+                            content: Some(message.content),
+                            tool_calls: message
+                                .tool_calls
+                                .into_iter()
+                                .filter_map(|call| {
+                                    Some(open_ai::ToolCall {
+                                        id: call.id,
+                                        content: match call.variant? {
+                                            proto::tool_call::Variant::Function(f) => {
+                                                open_ai::ToolCallContent::Function {
+                                                    function: open_ai::FunctionContent {
+                                                        name: f.name,
+                                                        arguments: f.arguments,
+                                                    },
+                                                }
+                                            }
+                                        },
+                                    })
+                                })
+                                .collect(),
+                        }
+                    }
+                    proto::LanguageModelRole::LanguageModelSystem => {
+                        open_ai::RequestMessage::System {
+                            content: message.content,
+                        }
+                    }
+                    proto::LanguageModelRole::LanguageModelTool => open_ai::RequestMessage::Tool {
+                        tool_call_id: message
+                            .tool_call_id
+                            .ok_or_else(|| anyhow!("tool message is missing tool call id"))?,
+                        content: message.content,
+                    },
+                };
+
+                Ok(openai_message)
             })
             .collect::<Result<Vec<open_ai::RequestMessage>>>()?,
         stream: true,
         stop: request.stop,
         temperature: request.temperature,
+        tools: request
+            .tools
+            .into_iter()
+            .filter_map(|tool| {
+                Some(match tool.variant? {
+                    proto::chat_completion_tool::Variant::Function(f) => {
+                        open_ai::ToolDefinition::Function {
+                            function: open_ai::FunctionDefinition {
+                                name: f.name,
+                                description: f.description,
+                                parameters: if let Some(params) = &f.parameters {
+                                    Some(
+                                        serde_json::from_str(params)
+                                            .context("failed to deserialize tool parameters")
+                                            .log_err()?,
+                                    )
+                                } else {
+                                    None
+                                },
+                            },
+                        }
+                    }
+                })
+            })
+            .collect(),
+        tool_choice: request.tool_choice,
     })
 }
 
@@ -58,6 +118,9 @@ pub fn language_model_request_message_to_google_ai(
             proto::LanguageModelRole::LanguageModelUser => google_ai::Role::User,
             proto::LanguageModelRole::LanguageModelAssistant => google_ai::Role::Model,
             proto::LanguageModelRole::LanguageModelSystem => google_ai::Role::User,
+            proto::LanguageModelRole::LanguageModelTool => {
+                Err(anyhow!("we don't handle tool calls with google ai yet"))?
+            }
         },
     })
 }
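For reference, the conversion in the first two hunks follows a common pattern: each protocol-level role maps to exactly one provider-specific request-message variant, and anything the provider cannot represent (here, a tool message without a tool call id) becomes an error rather than a panic. The sketch below restates that idea with simplified, self-contained types; Role and ProviderMessage are stand-ins for the proto and open_ai types, not the real ones from the rpc or open_ai crates.

    // Simplified stand-ins for the proto and open_ai types shown in the diff above.
    // Illustrative only; the real conversion also carries tool calls and tool definitions.
    use anyhow::{anyhow, Result};

    enum Role {
        User,
        Assistant,
        System,
        Tool,
    }

    enum ProviderMessage {
        User { content: String },
        Assistant { content: Option<String> },
        System { content: String },
        Tool { tool_call_id: String, content: String },
    }

    // Mirrors the shape of the match in language_model_request_to_open_ai:
    // every role maps to one provider variant, and missing data is surfaced as an error.
    fn to_provider_message(
        role: Role,
        content: String,
        tool_call_id: Option<String>,
    ) -> Result<ProviderMessage> {
        Ok(match role {
            Role::User => ProviderMessage::User { content },
            Role::Assistant => ProviderMessage::Assistant {
                content: Some(content),
            },
            Role::System => ProviderMessage::System { content },
            Role::Tool => ProviderMessage::Tool {
                tool_call_id: tool_call_id
                    .ok_or_else(|| anyhow!("tool message is missing tool call id"))?,
                content,
            },
        })
    }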