assistant: Make it easier to define custom models (#15442)

This PR makes it easier to specify custom models for the Google, OpenAI,
and Anthropic providers:

Before (google):

```json
{
  "language_models": {
    "google": {
      "available_models": [
        {
          "custom": {
            "name": "my-custom-google-model",
            "max_tokens": 12345
          }
        }
      ]
    }
  }
}
```

After (google):

```json
{
  "language_models": {
    "google": {
      "available_models": [
        {
          "name": "my-custom-google-model",
          "max_tokens": 12345
        }
      ]
    }
  }
}
```

Before (anthropic):

```json
{
  "language_models": {
    "anthropic": {
      "available_models": [
        {
          "custom": {
            "name": "my-custom-anthropic-model",
            "max_tokens": 12345
          }
        }
      ]
    }
  }
}
```

After (anthropic):

```json
{
  "language_models": {
    "anthropic": {
      "version": "1",
      "available_models": [
        {
          "name": "my-custom-anthropic-model",
          "max_tokens": 12345
        }
      ]
    }
  }
}
```

The settings will be auto-upgraded so the old versions will continue to
work (except for Google since that one has not been released).

/cc @as-cii 

Release Notes:

- N/A

---------

Co-authored-by: Thorsten <thorsten@zed.dev>
This commit is contained in:
Bennet Bo Fenner 2024-07-30 15:46:39 +02:00 committed by GitHub
parent 13dcb42c1c
commit 2ada2964c5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 256 additions and 47 deletions

View file

@@ -12,6 +12,8 @@ use gpui::{
WhiteSpace,
};
use http_client::HttpClient;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{sync::Arc, time::Duration};
use strum::IntoEnumIterator;
@@ -26,7 +28,14 @@ const PROVIDER_NAME: &str = "Anthropic";
pub struct AnthropicSettings {
pub api_url: String,
pub low_speed_timeout: Option<Duration>,
pub available_models: Vec<anthropic::Model>,
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
/// A custom Anthropic model entry as specified by the user in the
/// `language_models.anthropic.available_models` setting (see the PR
/// description above for the JSON shape).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
/// Name of the custom model; also used as its registry key when the
/// provider builds `anthropic::Model::Custom` from this entry.
pub name: String,
/// Maximum number of tokens for this model, forwarded to
/// `anthropic::Model::Custom { max_tokens, .. }`.
pub max_tokens: usize,
}
pub struct AnthropicLanguageModelProvider {
@@ -84,7 +93,13 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider {
.available_models
.iter()
{
models.insert(model.id().to_string(), model.clone());
models.insert(
model.name.clone(),
anthropic::Model::Custom {
name: model.name.clone(),
max_tokens: model.max_tokens,
},
);
}
models

View file

@@ -8,6 +8,8 @@ use gpui::{
WhiteSpace,
};
use http_client::HttpClient;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{future, sync::Arc, time::Duration};
use strum::IntoEnumIterator;
@@ -28,7 +30,13 @@ const PROVIDER_NAME: &str = "Google AI";
pub struct GoogleSettings {
pub api_url: String,
pub low_speed_timeout: Option<Duration>,
pub available_models: Vec<google_ai::Model>,
pub available_models: Vec<AvailableModel>,
}
/// A custom Google model entry as specified by the user in the
/// `language_models.google.available_models` setting (see the PR
/// description above for the JSON shape).
///
/// NOTE(review): fields are made `pub` for consistency with the parallel
/// `AvailableModel` structs of the Anthropic and OpenAI providers, which
/// both expose `pub name` / `pub max_tokens`. Widening visibility is
/// backward-compatible; the in-file accesses (`model.name`,
/// `model.max_tokens`) are unaffected.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// Name of the custom model; also used as its registry key when the
    /// provider builds `google_ai::Model::Custom` from this entry.
    pub name: String,
    /// Maximum number of tokens for this model, forwarded to
    /// `google_ai::Model::Custom { max_tokens, .. }`.
    pub max_tokens: usize,
}
pub struct GoogleLanguageModelProvider {
@@ -86,7 +94,13 @@ impl LanguageModelProvider for GoogleLanguageModelProvider {
.google
.available_models
{
models.insert(model.id().to_string(), model.clone());
models.insert(
model.name.clone(),
google_ai::Model::Custom {
name: model.name.clone(),
max_tokens: model.max_tokens,
},
);
}
models

View file

@@ -8,6 +8,8 @@ use gpui::{
};
use http_client::HttpClient;
use open_ai::stream_completion;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{future, sync::Arc, time::Duration};
use strum::IntoEnumIterator;
@@ -28,7 +30,14 @@ const PROVIDER_NAME: &str = "OpenAI";
pub struct OpenAiSettings {
pub api_url: String,
pub low_speed_timeout: Option<Duration>,
pub available_models: Vec<open_ai::Model>,
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
/// A custom OpenAI model entry as specified by the user in the
/// `language_models.openai.available_models` setting (see the PR
/// description above for the JSON shape).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
/// Name of the custom model; also used as its registry key when the
/// provider builds `open_ai::Model::Custom` from this entry.
pub name: String,
/// Maximum number of tokens for this model, forwarded to
/// `open_ai::Model::Custom { max_tokens, .. }`.
pub max_tokens: usize,
}
pub struct OpenAiLanguageModelProvider {
@@ -86,7 +95,13 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider {
.openai
.available_models
{
models.insert(model.id().to_string(), model.clone());
models.insert(
model.name.clone(),
open_ai::Model::Custom {
name: model.name.clone(),
max_tokens: model.max_tokens,
},
);
}
models