Add DeepSeek support (#23551)
- Added support for DeepSeek as a new language model provider in Zed Assistant.
- Implemented streaming API support for real-time responses from DeepSeek models.
- Added a configuration UI for DeepSeek API key management and settings.
- Updated documentation with detailed setup instructions for DeepSeek integration.
- Added DeepSeek-specific icons and model definitions for seamless integration into the Zed UI.
- Integrated DeepSeek into the language model registry, making it available alongside other providers like OpenAI and Anthropic.

Release Notes:

- Added support for DeepSeek to the Assistant.

---------

Co-authored-by: Marshall Bowers <git@maxdeviant.com>
parent f096a28a19
commit 29bfb56739
19 changed files with 1090 additions and 1 deletion
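For orientation before the diffs: DeepSeek exposes an OpenAI-compatible chat-completions API, and the request the new provider builds (see the `into_deepseek` hunk below) is that familiar shape with `stream: true` for server-sent-event streaming. Below is a minimal sketch of the wire format, assuming `serde`/`serde_json`; the struct mirrors the fields constructed in the diff, while `main` and the sample values are purely illustrative.

```rust
// Minimal sketch of the JSON body the new provider emits, assuming
// DeepSeek's OpenAI-compatible chat-completions API. Field names mirror
// the `deepseek::Request` constructed in the diff below; the sample
// values in `main` are illustrative only.
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "role", rename_all = "lowercase")]
enum RequestMessage {
    System { content: String },
    User { content: String },
    Assistant { content: Option<String> },
}

#[derive(Serialize)]
struct Request {
    model: String,
    messages: Vec<RequestMessage>,
    stream: bool, // true: responses arrive as server-sent-event chunks
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>, // omitted for deepseek-reasoner (see diff)
}

fn main() {
    let request = Request {
        model: "deepseek-chat".into(),
        messages: vec![
            RequestMessage::System { content: "You are a helpful assistant.".into() },
            RequestMessage::User { content: "Hello!".into() },
        ],
        stream: true,
        max_tokens: Some(4096),
        temperature: Some(0.7),
    };
    println!("{}", serde_json::to_string_pretty(&request).unwrap());
}
```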
```diff
@@ -29,6 +29,7 @@ log.workspace = true
 ollama = { workspace = true, features = ["schemars"] }
 open_ai = { workspace = true, features = ["schemars"] }
 lmstudio = { workspace = true, features = ["schemars"] }
+deepseek = { workspace = true, features = ["schemars"] }
 parking_lot.workspace = true
 proto.workspace = true
 schemars.workspace = true
```
```diff
@@ -410,6 +410,84 @@ impl LanguageModelRequest {
             top_p: None,
         }
     }
+
+    pub fn into_deepseek(self, model: String, max_output_tokens: Option<u32>) -> deepseek::Request {
+        let is_reasoner = model == "deepseek-reasoner";
+
+        let len = self.messages.len();
+        let merged_messages =
+            self.messages
+                .into_iter()
+                .fold(Vec::with_capacity(len), |mut acc, msg| {
+                    let role = msg.role;
+                    let content = msg.string_contents();
+
+                    if is_reasoner {
+                        if let Some(last_msg) = acc.last_mut() {
+                            match (last_msg, role) {
+                                (deepseek::RequestMessage::User { content: last }, Role::User) => {
+                                    last.push(' ');
+                                    last.push_str(&content);
+                                    return acc;
+                                }
+
+                                (
+                                    deepseek::RequestMessage::Assistant {
+                                        content: last_content,
+                                        ..
+                                    },
+                                    Role::Assistant,
+                                ) => {
+                                    *last_content = last_content
+                                        .take()
+                                        .map(|c| {
+                                            let mut s =
+                                                String::with_capacity(c.len() + content.len() + 1);
+                                            s.push_str(&c);
+                                            s.push(' ');
+                                            s.push_str(&content);
+                                            s
+                                        })
+                                        .or(Some(content));
+
+                                    return acc;
+                                }
+                                _ => {}
+                            }
+                        }
+                    }
+
+                    acc.push(match role {
+                        Role::User => deepseek::RequestMessage::User { content },
+                        Role::Assistant => deepseek::RequestMessage::Assistant {
+                            content: Some(content),
+                            tool_calls: Vec::new(),
+                        },
+                        Role::System => deepseek::RequestMessage::System { content },
+                    });
+                    acc
+                });
+
+        deepseek::Request {
+            model,
+            messages: merged_messages,
+            stream: true,
+            max_tokens: max_output_tokens,
+            temperature: if is_reasoner { None } else { self.temperature },
+            response_format: None,
+            tools: self
+                .tools
+                .into_iter()
+                .map(|tool| deepseek::ToolDefinition::Function {
+                    function: deepseek::FunctionDefinition {
+                        name: tool.name,
+                        description: Some(tool.description),
+                        parameters: Some(tool.input_schema),
+                    },
+                })
+                .collect(),
+        }
+    }
 }
 
 #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
```
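The notable wrinkle in this hunk is the fold: when the target model is `deepseek-reasoner`, consecutive messages with the same role are merged into one, presumably because the reasoner endpoint requires strictly alternating user/assistant turns and rejects successive same-role messages. The diff also sends `temperature: None` for the reasoner, consistent with that endpoint not honoring sampling parameters. Here is a standalone sketch of the same merging technique, using illustrative types rather than Zed's:

```rust
// Standalone sketch of the merging step applied in the fold above:
// adjacent messages with the same role are joined with a space so the
// sequence strictly alternates. `Msg` and the sample data are
// illustrative, not Zed's actual types.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Role {
    User,
    Assistant,
}

#[derive(Debug)]
struct Msg {
    role: Role,
    content: String,
}

fn merge_consecutive(messages: Vec<Msg>) -> Vec<Msg> {
    messages.into_iter().fold(Vec::new(), |mut acc, msg| {
        match acc.last_mut() {
            // Same role as the previous message: append instead of pushing.
            Some(last) if last.role == msg.role => {
                last.content.push(' ');
                last.content.push_str(&msg.content);
            }
            _ => acc.push(msg),
        }
        acc
    })
}

fn main() {
    let merged = merge_consecutive(vec![
        Msg { role: Role::User, content: "Summarize this file.".into() },
        Msg { role: Role::User, content: "Keep it short.".into() },
        Msg { role: Role::Assistant, content: "Sure.".into() },
    ]);
    // The two user turns collapse into one, leaving strict alternation.
    assert_eq!(merged.len(), 2);
    println!("{merged:?}");
}
```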
```diff
@@ -66,6 +66,16 @@ impl From<Role> for open_ai::Role {
     }
 }
 
+impl From<Role> for deepseek::Role {
+    fn from(val: Role) -> Self {
+        match val {
+            Role::User => deepseek::Role::User,
+            Role::Assistant => deepseek::Role::Assistant,
+            Role::System => deepseek::Role::System,
+        }
+    }
+}
+
 impl From<Role> for lmstudio::Role {
     fn from(val: Role) -> Self {
         match val {
```
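A `From` impl like this is all a call site needs: the standard library's blanket `impl<T, U: From<T>> Into<U> for T` means Zed's provider-agnostic `Role` can cross into the DeepSeek types with a plain `.into()`. A self-contained sketch with stand-in enums (neither is Zed's or DeepSeek's actual type):

```rust
// Stand-in enums demonstrating the conversion pattern from the hunk above.
#[allow(dead_code)]
#[derive(Debug)]
enum Role {
    User,
    Assistant,
    System,
}

mod deepseek {
    #[allow(dead_code)]
    #[derive(Debug)]
    pub enum Role {
        User,
        Assistant,
        System,
    }
}

impl From<Role> for deepseek::Role {
    fn from(val: Role) -> Self {
        match val {
            Role::User => deepseek::Role::User,
            Role::Assistant => deepseek::Role::Assistant,
            Role::System => deepseek::Role::System,
        }
    }
}

fn main() {
    // `.into()` resolves through the `From` impl above.
    let role: deepseek::Role = Role::Assistant.into();
    println!("{role:?}");
}
```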