Remove Qwen2 model (#18444)

Removed deprecated Qwen2 7B Instruct model from zed.dev provider (staff
only).

Release Notes:

- N/A
This commit is contained in:
Richard Feldman 2024-09-27 13:30:25 -04:00 committed by GitHub
parent ffd1083cc1
commit caaa9a00a9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 2 additions and 176 deletions

View file

@@ -12,7 +12,6 @@ pub enum CloudModel {
Anthropic(anthropic::Model),
OpenAi(open_ai::Model),
Google(google_ai::Model),
Zed(ZedModel),
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)]
@@ -21,26 +20,6 @@ pub enum ZedModel {
Qwen2_7bInstruct,
}
impl ZedModel {
pub fn id(&self) -> &str {
match self {
ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct",
}
}
pub fn display_name(&self) -> &str {
match self {
ZedModel::Qwen2_7bInstruct => "Qwen2 7B Instruct",
}
}
pub fn max_token_count(&self) -> usize {
match self {
ZedModel::Qwen2_7bInstruct => 28000,
}
}
}
impl Default for CloudModel {
fn default() -> Self {
Self::Anthropic(anthropic::Model::default())
@@ -53,7 +32,6 @@ impl CloudModel {
Self::Anthropic(model) => model.id(),
Self::OpenAi(model) => model.id(),
Self::Google(model) => model.id(),
Self::Zed(model) => model.id(),
}
}
@@ -62,7 +40,6 @@ impl CloudModel {
Self::Anthropic(model) => model.display_name(),
Self::OpenAi(model) => model.display_name(),
Self::Google(model) => model.display_name(),
Self::Zed(model) => model.display_name(),
}
}
@@ -78,7 +55,6 @@ impl CloudModel {
Self::Anthropic(model) => model.max_token_count(),
Self::OpenAi(model) => model.max_token_count(),
Self::Google(model) => model.max_token_count(),
Self::Zed(model) => model.max_token_count(),
}
}
@@ -115,9 +91,6 @@ impl CloudModel {
LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
}
},
Self::Zed(model) => match model {
ZedModel::Qwen2_7bInstruct => LanguageModelAvailability::RequiresPlan(Plan::ZedPro),
},
}
}
}

View file

@@ -3,7 +3,7 @@ use crate::provider::anthropic::map_to_language_model_completion_events;
use crate::{
settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
LanguageModelProviderState, LanguageModelRequest, RateLimiter,
};
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
@@ -219,9 +219,6 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
models.insert(model.id().to_string(), CloudModel::Google(model));
}
}
for model in ZedModel::iter() {
models.insert(model.id().to_string(), CloudModel::Zed(model));
}
} else {
models.insert(
anthropic::Model::Claude3_5Sonnet.id().to_string(),
@@ -472,7 +469,7 @@ impl LanguageModel for CloudLanguageModel {
min_total_token: cache.min_total_token,
})
}
CloudModel::OpenAi(_) | CloudModel::Google(_) | CloudModel::Zed(_) => None,
CloudModel::OpenAi(_) | CloudModel::Google(_) => None,
}
}
@@ -502,9 +499,6 @@ impl LanguageModel for CloudLanguageModel {
}
.boxed()
}
CloudModel::Zed(_) => {
count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
}
}
}
@@ -603,35 +597,6 @@ impl LanguageModel for CloudLanguageModel {
}
.boxed()
}
CloudModel::Zed(model) => {
let client = self.client.clone();
let mut request = request.into_open_ai(model.id().into(), None);
request.max_tokens = Some(4000);
let llm_api_token = self.llm_api_token.clone();
let future = self.request_limiter.stream(async move {
let response = Self::perform_llm_completion(
client.clone(),
llm_api_token,
PerformCompletionParams {
provider: client::LanguageModelProvider::Zed,
model: request.model.clone(),
provider_request: RawValue::from_string(serde_json::to_string(
&request,
)?)?,
},
None,
)
.await?;
Ok(open_ai::extract_text_from_events(response_lines(response)))
});
async move {
Ok(future
.await?
.map(|result| result.map(LanguageModelCompletionEvent::Text))
.boxed())
}
.boxed()
}
}
}
@@ -735,51 +700,6 @@ impl LanguageModel for CloudLanguageModel {
CloudModel::Google(_) => {
future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
}
CloudModel::Zed(model) => {
// All Zed models are OpenAI-based at the time of writing.
let mut request = request.into_open_ai(model.id().into(), None);
request.tool_choice = Some(open_ai::ToolChoice::Other(
open_ai::ToolDefinition::Function {
function: open_ai::FunctionDefinition {
name: tool_name.clone(),
description: None,
parameters: None,
},
},
));
request.tools = vec![open_ai::ToolDefinition::Function {
function: open_ai::FunctionDefinition {
name: tool_name.clone(),
description: Some(tool_description),
parameters: Some(input_schema),
},
}];
self.request_limiter
.run(async move {
let response = Self::perform_llm_completion(
client.clone(),
llm_api_token,
PerformCompletionParams {
provider: client::LanguageModelProvider::Zed,
model: request.model.clone(),
provider_request: RawValue::from_string(serde_json::to_string(
&request,
)?)?,
},
None,
)
.await?;
Ok(open_ai::extract_tool_args_from_events(
tool_name,
Box::pin(response_lines(response)),
)
.await?
.boxed())
})
.boxed()
}
}
}
}