copilot: Add support for Gemini 2.0 Flash model to Copilot Chat (#24952)

Co-authored-by: Peter Tripp <peter@zed.dev>
Authored by Richard Hao on 2025-02-18 03:25:38 +08:00; committed by GitHub.
parent 3e9722685b
commit f833a01a7e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 43 additions and 3 deletions

View file

@@ -25,6 +25,7 @@ use strum::IntoEnumIterator;
use ui::prelude::*;
use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
use super::open_ai::count_open_ai_tokens;
const PROVIDER_ID: &str = "copilot_chat";
@@ -174,13 +175,16 @@ impl LanguageModel for CopilotChatLanguageModel {
) -> BoxFuture<'static, Result<usize>> {
match self.model {
CopilotChatModel::Claude3_5Sonnet => count_anthropic_tokens(request, cx),
CopilotChatModel::Gemini20Flash => count_google_tokens(request, cx),
_ => {
let model = match self.model {
CopilotChatModel::Gpt4o => open_ai::Model::FourOmni,
CopilotChatModel::Gpt4 => open_ai::Model::Four,
CopilotChatModel::Gpt3_5Turbo => open_ai::Model::ThreePointFiveTurbo,
CopilotChatModel::O1 | CopilotChatModel::O3Mini => open_ai::Model::Four,
CopilotChatModel::Claude3_5Sonnet => unreachable!(),
CopilotChatModel::Claude3_5Sonnet | CopilotChatModel::Gemini20Flash => {
unreachable!()
}
};
count_open_ai_tokens(request, model, cx)
}