Allow using a custom model when using zed.dev (#14933)

Release Notes:

- N/A
This commit is contained in:
Antonio Scandurra 2024-07-22 12:25:53 +02:00 committed by GitHub
parent a334c69e05
commit 0155435142
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 114 additions and 110 deletions

View file

@@ -54,15 +54,15 @@ impl CloudCompletionProvider {
impl LanguageModelCompletionProvider for CloudCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
-        let mut custom_model = if let CloudModel::Custom(custom_model) = self.model.clone() {
-            Some(custom_model)
+        let mut custom_model = if matches!(self.model, CloudModel::Custom { .. }) {
+            Some(self.model.clone())
} else {
None
};
CloudModel::iter()
.filter_map(move |model| {
-                if let CloudModel::Custom(_) = model {
-                    Some(CloudModel::Custom(custom_model.take()?))
+                if let CloudModel::Custom { .. } = model {
+                    custom_model.take()
} else {
Some(model)
}
@@ -117,9 +117,9 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
-            LanguageModel::Cloud(CloudModel::Custom(model)) => {
+            LanguageModel::Cloud(CloudModel::Custom { name, .. }) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
-                    model,
+                    model: name,
messages: request
.messages
.iter()

View file

@@ -241,6 +241,7 @@ pub fn count_open_ai_tokens(
| LanguageModel::Cloud(CloudModel::Claude3Opus)
| LanguageModel::Cloud(CloudModel::Claude3Sonnet)
| LanguageModel::Cloud(CloudModel::Claude3Haiku)
+        | LanguageModel::Cloud(CloudModel::Custom { .. })
| LanguageModel::OpenAi(OpenAiModel::Custom { .. }) => {
// Tiktoken doesn't yet support these models, so we manually use the
// same tokenizer as GPT-4.