Add language_models crate to house language model providers (#20945)

This PR adds a new `language_models` crate to house the various language
model providers.

By extracting the provider definitions out of `language_model`, we're
able to remove `language_model`'s dependency on `editor`, which improves
incremental compilation when changing `editor`.
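
As a rough sketch of what the split means at call sites (the import names are taken from the diff below; grouping them into a single snippet here is purely illustrative, not an exhaustive list of what each crate exports):

```rust
// Before the split, provider-specific items and core types both came from
// `language_model`:
//
// use language_model::{
//     logging::report_assistant_event,
//     provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
//     LanguageModel, LanguageModelRegistry, Role,
// };

// After the split, `language_model` keeps the core traits and types, while the
// provider implementations, provider settings, and telemetry helpers move to
// the new `language_models` crate:
use language_model::{LanguageModel, LanguageModelRegistry, Role};
use language_models::{
    provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
    report_assistant_event, AllLanguageModelSettings,
};
```

The payoff, per the description above, is that `language_model` itself no longer depends on `editor`, so changes to `editor` no longer force rebuilds of everything that only needs the core types.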

Release Notes:

- N/A
commit cbba44900d (parent 335b112abd)
Author: Marshall Bowers
Date: 2024-11-20 18:49:34 -05:00 (committed by GitHub)
27 changed files with 265 additions and 199 deletions

@@ -50,11 +50,11 @@ use indexed_docs::IndexedDocsStore;
 use language::{
     language_settings::SoftWrap, BufferSnapshot, LanguageRegistry, LspAdapterDelegate, ToOffset,
 };
-use language_model::{
-    provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId,
-    LanguageModelRegistry, Role,
-};
 use language_model::{LanguageModelImage, LanguageModelToolUse};
+use language_model::{
+    LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, Role,
+    ZED_CLOUD_PROVIDER_ID,
+};
 use multi_buffer::MultiBufferRow;
 use picker::{Picker, PickerDelegate};
 use project::lsp_store::LocalLspAdapterDelegate;
@@ -664,7 +664,7 @@ impl AssistantPanel {
         // If we're signed out and don't have a provider configured, or we're signed-out AND Zed.dev is
         // the provider, we want to show a nudge to sign in.
         let show_zed_ai_notice = client_status.is_signed_out()
-            && active_provider.map_or(true, |provider| provider.id().0 == PROVIDER_ID);
+            && active_provider.map_or(true, |provider| provider.id().0 == ZED_CLOUD_PROVIDER_ID);
         self.show_zed_ai_notice = show_zed_ai_notice;
         cx.notify();

@@ -5,13 +5,12 @@ use anthropic::Model as AnthropicModel;
 use feature_flags::FeatureFlagAppExt;
 use fs::Fs;
 use gpui::{AppContext, Pixels};
-use language_model::provider::open_ai;
-use language_model::settings::{
-    AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
-    OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
-    VersionedOpenAiSettingsContent,
+use language_model::{CloudModel, LanguageModel};
+use language_models::{
+    provider::open_ai, AllLanguageModelSettings, AnthropicSettingsContent,
+    AnthropicSettingsContentV1, OllamaSettingsContent, OpenAiSettingsContent,
+    OpenAiSettingsContentV1, VersionedAnthropicSettingsContent, VersionedOpenAiSettingsContent,
 };
-use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
 use ollama::Model as OllamaModel;
 use schemars::{schema::Schema, JsonSchema};
 use serde::{Deserialize, Serialize};

@@ -25,13 +25,15 @@ use gpui::{
 use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
 use language_model::{
-    logging::report_assistant_event,
-    provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
     LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
     LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
     LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
     StopReason,
 };
+use language_models::{
+    provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
+    report_assistant_event,
+};
 use open_ai::Model as OpenAiModel;
 use paths::contexts_dir;
 use project::Project;

@@ -30,9 +30,10 @@ use gpui::{
 };
 use language::{Buffer, IndentKind, Point, Selection, TransactionId};
 use language_model::{
-    logging::report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest,
-    LanguageModelRequestMessage, LanguageModelTextStream, Role,
+    LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
+    LanguageModelTextStream, Role,
 };
+use language_models::report_assistant_event;
 use multi_buffer::MultiBufferRow;
 use parking_lot::Mutex;
 use project::{CodeAction, ProjectTransaction};

@@ -17,9 +17,9 @@ use gpui::{
 };
 use language::Buffer;
 use language_model::{
-    logging::report_assistant_event, LanguageModelRegistry, LanguageModelRequest,
-    LanguageModelRequestMessage, Role,
+    LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
 };
+use language_models::report_assistant_event;
 use settings::Settings;
 use std::{
     cmp,