Merge remote-tracking branch 'origin/main' into push-trzsxukkukpr
This commit is contained in: commit e58b089efe
446 changed files with 22587 additions and 8403 deletions
@@ -16,18 +16,17 @@ ai_onboarding.workspace = true
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
aws-config = { workspace = true, features = ["behavior-version-latest"] }
aws-credential-types = { workspace = true, features = [
"hardcoded-credentials",
] }
aws-credential-types = { workspace = true, features = ["hardcoded-credentials"] }
aws_http_client.workspace = true
bedrock.workspace = true
chrono.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
component.workspace = true
credentials_provider.workspace = true
convert_case.workspace = true
copilot.workspace = true
credentials_provider.workspace = true
deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true
fs.workspace = true
@@ -36,6 +35,7 @@ google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true
gpui_tokio.workspace = true
http_client.workspace = true
language.workspace = true
language_model.workspace = true
lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true
@@ -44,10 +44,7 @@ mistral = { workspace = true, features = ["schemars"] }
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
open_router = { workspace = true, features = ["schemars"] }
vercel = { workspace = true, features = ["schemars"] }
x_ai = { workspace = true, features = ["schemars"] }
partial-json-fixer.workspace = true
proto.workspace = true
release_channel.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -62,9 +59,9 @@ tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
ui.workspace = true
ui_input.workspace = true
util.workspace = true
vercel = { workspace = true, features = ["schemars"] }
workspace-hack.workspace = true
zed_llm_client.workspace = true
language.workspace = true
x_ai = { workspace = true, features = ["schemars"] }

[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }

@@ -1012,7 +1012,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
.child(Label::new("To use Zed's agent with Anthropic, you need to add an API key. Follow these steps:"))
.child(
List::new()
.child(

@@ -1251,7 +1251,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(ConfigurationView::save_credentials))
.child(Label::new("To use Zed's assistant with Bedrock, you can set a custom authentication strategy through the settings.json, or use static credentials."))
.child(Label::new("To use Zed's agent with Bedrock, you can set a custom authentication strategy through the settings.json, or use static credentials."))
.child(Label::new("But, to access models on AWS, you need to:").mt_1())
.child(
List::new()

@@ -3,6 +3,13 @@ use anthropic::AnthropicModelMode;
use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use client::{Client, ModelRequestUsage, UserStore, zed_urls};
use cloud_llm_client::{
CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CURRENT_PLAN_HEADER_NAME, CompletionBody,
CompletionEvent, CompletionRequestStatus, CountTokensBody, CountTokensResponse,
EXPIRED_LLM_TOKEN_HEADER_NAME, ListModelsResponse, MODEL_REQUESTS_RESOURCE_HEADER_VALUE, Plan,
SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME,
TOOL_USE_LIMIT_REACHED_HEADER_NAME, ZED_VERSION_HEADER_NAME,
};
use futures::{
AsyncBufReadExt, FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream,
};
@@ -20,7 +27,6 @@ use language_model::{
LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken,
ModelRequestLimitReachedError, PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
};
use proto::Plan;
use release_channel::AppVersion;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
@@ -33,13 +39,6 @@ use std::time::Duration;
use thiserror::Error;
use ui::{TintColor, prelude::*};
use util::{ResultExt as _, maybe};
use zed_llm_client::{
CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CURRENT_PLAN_HEADER_NAME, CompletionBody,
CompletionRequestStatus, CountTokensBody, CountTokensResponse, EXPIRED_LLM_TOKEN_HEADER_NAME,
ListModelsResponse, MODEL_REQUESTS_RESOURCE_HEADER_VALUE,
SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME,
TOOL_USE_LIMIT_REACHED_HEADER_NAME, ZED_VERSION_HEADER_NAME,
};

use crate::provider::anthropic::{AnthropicEventMapper, count_anthropic_tokens, into_anthropic};
use crate::provider::google::{GoogleEventMapper, into_google};
@@ -120,10 +119,10 @@ pub struct State {
user_store: Entity<UserStore>,
status: client::Status,
accept_terms_of_service_task: Option<Task<Result<()>>>,
models: Vec<Arc<zed_llm_client::LanguageModel>>,
default_model: Option<Arc<zed_llm_client::LanguageModel>>,
default_fast_model: Option<Arc<zed_llm_client::LanguageModel>>,
recommended_models: Vec<Arc<zed_llm_client::LanguageModel>>,
models: Vec<Arc<cloud_llm_client::LanguageModel>>,
default_model: Option<Arc<cloud_llm_client::LanguageModel>>,
default_fast_model: Option<Arc<cloud_llm_client::LanguageModel>>,
recommended_models: Vec<Arc<cloud_llm_client::LanguageModel>>,
_fetch_models_task: Task<()>,
_settings_subscription: Subscription,
_llm_token_subscription: Subscription,
@@ -137,11 +136,10 @@ impl State {
cx: &mut Context<Self>,
) -> Self {
let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);

Self {
client: client.clone(),
llm_api_token: LlmApiToken::default(),
user_store,
user_store: user_store.clone(),
status,
accept_terms_of_service_task: None,
models: Vec::new(),
@@ -154,8 +152,9 @@ impl State {
.read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?;

loop {
let status = this.read_with(cx, |this, _cx| this.status)?;
if matches!(status, client::Status::Connected { .. }) {
let is_authenticated = user_store
.read_with(cx, |user_store, _cx| user_store.current_user().is_some())?;
if is_authenticated {
break;
}

@@ -194,26 +193,20 @@ impl State {
}
}

fn is_signed_out(&self) -> bool {
self.status.is_signed_out()
fn is_signed_out(&self, cx: &App) -> bool {
self.user_store.read(cx).current_user().is_none()
}

fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(async move |state, cx| {
client
.authenticate_and_connect(true, &cx)
.await
.into_response()?;
client.sign_in_with_optional_connect(true, &cx).await?;
state.update(cx, |_, cx| cx.notify())
})
}

fn has_accepted_terms_of_service(&self, cx: &App) -> bool {
self.user_store
.read(cx)
.current_user_has_accepted_terms()
.unwrap_or(false)
self.user_store.read(cx).has_accepted_terms_of_service()
}

fn accept_terms_of_service(&mut self, cx: &mut Context<Self>) {
@@ -238,8 +231,8 @@ impl State {
// Right now we represent thinking variants of models as separate models on the client,
// so we need to insert variants for any model that supports thinking.
if model.supports_thinking {
models.push(Arc::new(zed_llm_client::LanguageModel {
id: zed_llm_client::LanguageModelId(format!("{}-thinking", model.id).into()),
models.push(Arc::new(cloud_llm_client::LanguageModel {
id: cloud_llm_client::LanguageModelId(format!("{}-thinking", model.id).into()),
display_name: format!("{} Thinking", model.display_name),
..model
}));
@@ -328,7 +321,7 @@ impl CloudLanguageModelProvider {

fn create_language_model(
&self,
model: Arc<zed_llm_client::LanguageModel>,
model: Arc<cloud_llm_client::LanguageModel>,
llm_api_token: LlmApiToken,
) -> Arc<dyn LanguageModel> {
Arc::new(CloudLanguageModel {
@@ -398,7 +391,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider {

fn is_authenticated(&self, cx: &App) -> bool {
let state = self.state.read(cx);
!state.is_signed_out() && state.has_accepted_terms_of_service(cx)
!state.is_signed_out(cx) && state.has_accepted_terms_of_service(cx)
}

fn authenticate(&self, _cx: &mut App) -> Task<Result<(), AuthenticateError>> {
@@ -518,7 +511,7 @@ fn render_accept_terms(

pub struct CloudLanguageModel {
id: LanguageModelId,
model: Arc<zed_llm_client::LanguageModel>,
model: Arc<cloud_llm_client::LanguageModel>,
llm_api_token: LlmApiToken,
client: Arc<Client>,
request_limiter: RateLimiter,
@@ -611,13 +604,8 @@ impl CloudLanguageModel {
.headers()
.get(CURRENT_PLAN_HEADER_NAME)
.and_then(|plan| plan.to_str().ok())
.and_then(|plan| zed_llm_client::Plan::from_str(plan).ok())
.and_then(|plan| cloud_llm_client::Plan::from_str(plan).ok())
{
let plan = match plan {
zed_llm_client::Plan::ZedFree => Plan::Free,
zed_llm_client::Plan::ZedPro => Plan::ZedPro,
zed_llm_client::Plan::ZedProTrial => Plan::ZedProTrial,
};
return Err(anyhow!(ModelRequestLimitReachedError { plan }));
}
}
@@ -729,7 +717,7 @@ impl LanguageModel for CloudLanguageModel {
}

fn upstream_provider_id(&self) -> LanguageModelProviderId {
use zed_llm_client::LanguageModelProvider::*;
use cloud_llm_client::LanguageModelProvider::*;
match self.model.provider {
Anthropic => language_model::ANTHROPIC_PROVIDER_ID,
OpenAi => language_model::OPEN_AI_PROVIDER_ID,
@@ -738,7 +726,7 @@ impl LanguageModel for CloudLanguageModel {
}

fn upstream_provider_name(&self) -> LanguageModelProviderName {
use zed_llm_client::LanguageModelProvider::*;
use cloud_llm_client::LanguageModelProvider::*;
match self.model.provider {
Anthropic => language_model::ANTHROPIC_PROVIDER_NAME,
OpenAi => language_model::OPEN_AI_PROVIDER_NAME,
@@ -772,11 +760,11 @@ impl LanguageModel for CloudLanguageModel {

fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
match self.model.provider {
zed_llm_client::LanguageModelProvider::Anthropic
| zed_llm_client::LanguageModelProvider::OpenAi => {
cloud_llm_client::LanguageModelProvider::Anthropic
| cloud_llm_client::LanguageModelProvider::OpenAi => {
LanguageModelToolSchemaFormat::JsonSchema
}
zed_llm_client::LanguageModelProvider::Google => {
cloud_llm_client::LanguageModelProvider::Google => {
LanguageModelToolSchemaFormat::JsonSchemaSubset
}
}
@@ -795,15 +783,15 @@ impl LanguageModel for CloudLanguageModel {

fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
match &self.model.provider {
zed_llm_client::LanguageModelProvider::Anthropic => {
cloud_llm_client::LanguageModelProvider::Anthropic => {
Some(LanguageModelCacheConfiguration {
min_total_token: 2_048,
should_speculate: true,
max_cache_anchors: 4,
})
}
zed_llm_client::LanguageModelProvider::OpenAi
| zed_llm_client::LanguageModelProvider::Google => None,
cloud_llm_client::LanguageModelProvider::OpenAi
| cloud_llm_client::LanguageModelProvider::Google => None,
}
}

@@ -813,15 +801,17 @@ impl LanguageModel for CloudLanguageModel {
cx: &App,
) -> BoxFuture<'static, Result<u64>> {
match self.model.provider {
zed_llm_client::LanguageModelProvider::Anthropic => count_anthropic_tokens(request, cx),
zed_llm_client::LanguageModelProvider::OpenAi => {
cloud_llm_client::LanguageModelProvider::Anthropic => {
count_anthropic_tokens(request, cx)
}
cloud_llm_client::LanguageModelProvider::OpenAi => {
let model = match open_ai::Model::from_id(&self.model.id.0) {
Ok(model) => model,
Err(err) => return async move { Err(anyhow!(err)) }.boxed(),
};
count_open_ai_tokens(request, model, cx)
}
zed_llm_client::LanguageModelProvider::Google => {
cloud_llm_client::LanguageModelProvider::Google => {
let client = self.client.clone();
let llm_api_token = self.llm_api_token.clone();
let model_id = self.model.id.to_string();
@@ -832,7 +822,7 @@ impl LanguageModel for CloudLanguageModel {
let token = llm_api_token.acquire(&client).await?;

let request_body = CountTokensBody {
provider: zed_llm_client::LanguageModelProvider::Google,
provider: cloud_llm_client::LanguageModelProvider::Google,
model: model_id,
provider_request: serde_json::to_value(&google_ai::CountTokensRequest {
generate_content_request,
@@ -893,7 +883,7 @@ impl LanguageModel for CloudLanguageModel {
let app_version = cx.update(|cx| AppVersion::global(cx)).ok();
let thinking_allowed = request.thinking_allowed;
match self.model.provider {
zed_llm_client::LanguageModelProvider::Anthropic => {
cloud_llm_client::LanguageModelProvider::Anthropic => {
let request = into_anthropic(
request,
self.model.id.to_string(),
@@ -924,7 +914,7 @@ impl LanguageModel for CloudLanguageModel {
prompt_id,
intent,
mode,
provider: zed_llm_client::LanguageModelProvider::Anthropic,
provider: cloud_llm_client::LanguageModelProvider::Anthropic,
model: request.model.clone(),
provider_request: serde_json::to_value(&request)
.map_err(|e| anyhow!(e))?,
@@ -948,7 +938,7 @@ impl LanguageModel for CloudLanguageModel {
});
async move { Ok(future.await?.boxed()) }.boxed()
}
zed_llm_client::LanguageModelProvider::OpenAi => {
cloud_llm_client::LanguageModelProvider::OpenAi => {
let client = self.client.clone();
let model = match open_ai::Model::from_id(&self.model.id.0) {
Ok(model) => model,
@@ -976,7 +966,7 @@ impl LanguageModel for CloudLanguageModel {
prompt_id,
intent,
mode,
provider: zed_llm_client::LanguageModelProvider::OpenAi,
provider: cloud_llm_client::LanguageModelProvider::OpenAi,
model: request.model.clone(),
provider_request: serde_json::to_value(&request)
.map_err(|e| anyhow!(e))?,
@@ -996,7 +986,7 @@ impl LanguageModel for CloudLanguageModel {
});
async move { Ok(future.await?.boxed()) }.boxed()
}
zed_llm_client::LanguageModelProvider::Google => {
cloud_llm_client::LanguageModelProvider::Google => {
let client = self.client.clone();
let request =
into_google(request, self.model.id.to_string(), GoogleModelMode::Default);
@@ -1016,7 +1006,7 @@ impl LanguageModel for CloudLanguageModel {
prompt_id,
intent,
mode,
provider: zed_llm_client::LanguageModelProvider::Google,
provider: cloud_llm_client::LanguageModelProvider::Google,
model: request.model.model_id.clone(),
provider_request: serde_json::to_value(&request)
.map_err(|e| anyhow!(e))?,
@@ -1040,15 +1030,8 @@ impl LanguageModel for CloudLanguageModel {
}
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CloudCompletionEvent<T> {
Status(CompletionRequestStatus),
Event(T),
}

fn map_cloud_completion_events<T, F>(
stream: Pin<Box<dyn Stream<Item = Result<CloudCompletionEvent<T>>> + Send>>,
stream: Pin<Box<dyn Stream<Item = Result<CompletionEvent<T>>> + Send>>,
mut map_callback: F,
) -> BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
where
@@ -1063,10 +1046,10 @@ where
Err(error) => {
vec![Err(LanguageModelCompletionError::from(error))]
}
Ok(CloudCompletionEvent::Status(event)) => {
Ok(CompletionEvent::Status(event)) => {
vec![Ok(LanguageModelCompletionEvent::StatusUpdate(event))]
}
Ok(CloudCompletionEvent::Event(event)) => map_callback(event),
Ok(CompletionEvent::Event(event)) => map_callback(event),
})
})
.boxed()
@@ -1074,9 +1057,9 @@ where

fn usage_updated_event<T>(
usage: Option<ModelRequestUsage>,
) -> impl Stream<Item = Result<CloudCompletionEvent<T>>> {
) -> impl Stream<Item = Result<CompletionEvent<T>>> {
futures::stream::iter(usage.map(|usage| {
Ok(CloudCompletionEvent::Status(
Ok(CompletionEvent::Status(
CompletionRequestStatus::UsageUpdated {
amount: usage.amount as usize,
limit: usage.limit,
@@ -1087,9 +1070,9 @@ fn usage_updated_event<T>(

fn tool_use_limit_reached_event<T>(
tool_use_limit_reached: bool,
) -> impl Stream<Item = Result<CloudCompletionEvent<T>>> {
) -> impl Stream<Item = Result<CompletionEvent<T>>> {
futures::stream::iter(tool_use_limit_reached.then(|| {
Ok(CloudCompletionEvent::Status(
Ok(CompletionEvent::Status(
CompletionRequestStatus::ToolUseLimitReached,
))
}))
@@ -1098,7 +1081,7 @@ fn tool_use_limit_reached_event<T>(
fn response_lines<T: DeserializeOwned>(
response: Response<AsyncBody>,
includes_status_messages: bool,
) -> impl Stream<Item = Result<CloudCompletionEvent<T>>> {
) -> impl Stream<Item = Result<CompletionEvent<T>>> {
futures::stream::try_unfold(
(String::new(), BufReader::new(response.into_body())),
move |(mut line, mut body)| async move {
@@ -1106,9 +1089,9 @@ fn response_lines<T: DeserializeOwned>(
Ok(0) => Ok(None),
Ok(_) => {
let event = if includes_status_messages {
serde_json::from_str::<CloudCompletionEvent<T>>(&line)?
serde_json::from_str::<CompletionEvent<T>>(&line)?
} else {
CloudCompletionEvent::Event(serde_json::from_str::<T>(&line)?)
CompletionEvent::Event(serde_json::from_str::<T>(&line)?)
};

line.clear();
@@ -1123,7 +1106,7 @@ fn response_lines<T: DeserializeOwned>(
#[derive(IntoElement, RegisterComponent)]
struct ZedAiConfiguration {
is_connected: bool,
plan: Option<proto::Plan>,
plan: Option<Plan>,
subscription_period: Option<(DateTime<Utc>, DateTime<Utc>)>,
eligible_for_trial: bool,
has_accepted_terms_of_service: bool,
@@ -1137,15 +1120,15 @@ impl RenderOnce for ZedAiConfiguration {
fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
let young_account_banner = YoungAccountBanner;

let is_pro = self.plan == Some(proto::Plan::ZedPro);
let is_pro = self.plan == Some(Plan::ZedPro);
let subscription_text = match (self.plan, self.subscription_period) {
(Some(proto::Plan::ZedPro), Some(_)) => {
(Some(Plan::ZedPro), Some(_)) => {
"You have access to Zed's hosted models through your Pro subscription."
}
(Some(proto::Plan::ZedProTrial), Some(_)) => {
(Some(Plan::ZedProTrial), Some(_)) => {
"You have access to Zed's hosted models through your Pro trial."
}
(Some(proto::Plan::Free), Some(_)) => {
(Some(Plan::ZedFree), Some(_)) => {
"You have basic access to Zed's hosted models through the Free plan."
}
_ => {
@@ -1270,8 +1253,8 @@ impl Render for ConfigurationView {
let user_store = state.user_store.read(cx);

ZedAiConfiguration {
is_connected: !state.is_signed_out(),
plan: user_store.current_plan(),
is_connected: !state.is_signed_out(cx),
plan: user_store.plan(),
subscription_period: user_store.subscription_period(),
eligible_for_trial: user_store.trial_started_at().is_none(),
has_accepted_terms_of_service: state.has_accepted_terms_of_service(cx),
@@ -1291,7 +1274,7 @@ impl Component for ZedAiConfiguration {
fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
fn configuration(
is_connected: bool,
plan: Option<proto::Plan>,
plan: Option<Plan>,
eligible_for_trial: bool,
account_too_young: bool,
has_accepted_terms_of_service: bool,
@@ -1335,15 +1318,15 @@ impl Component for ZedAiConfiguration {
),
single_example(
"Free Plan",
configuration(true, Some(proto::Plan::Free), true, false, true),
configuration(true, Some(Plan::ZedFree), true, false, true),
),
single_example(
"Zed Pro Trial Plan",
configuration(true, Some(proto::Plan::ZedProTrial), true, false, true),
configuration(true, Some(Plan::ZedProTrial), true, false, true),
),
single_example(
"Zed Pro Plan",
configuration(true, Some(proto::Plan::ZedPro), true, false, true),
configuration(true, Some(Plan::ZedPro), true, false, true),
),
])
.into_any_element(),

@@ -3,6 +3,7 @@ use std::str::FromStr as _;
use std::sync::Arc;

use anyhow::{Result, anyhow};
use cloud_llm_client::CompletionIntent;
use collections::HashMap;
use copilot::copilot_chat::{
ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, ImageUrl,
@@ -30,7 +31,6 @@ use settings::SettingsStore;
use std::time::Duration;
use ui::prelude::*;
use util::debug_panic;
use zed_llm_client::CompletionIntent;

use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
@@ -706,7 +706,8 @@ impl Render for ConfigurationView {
.child(svg().size_8().path(IconName::CopilotError.path()))
}
_ => {
const LABEL: &str = "To use Zed's assistant with GitHub Copilot, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot Chat subscription.";
const LABEL: &str = "To use Zed's agent with GitHub Copilot, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot Chat subscription.";

v_flex().gap_2().child(Label::new(LABEL)).child(
Button::new("sign_in", "Sign in to use GitHub Copilot")
.icon_color(Color::Muted)

@@ -880,7 +880,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with Google AI, you need to add an API key. Follow these steps:"))
.child(Label::new("To use Zed's agent with Google AI, you need to add an API key. Follow these steps:"))
.child(
List::new()
.child(InstructionListItem::new(

@@ -744,7 +744,7 @@ impl Render for ConfigurationView {
Button::new("retry_lmstudio_models", "Connect")
.icon_position(IconPosition::Start)
.icon_size(IconSize::XSmall)
.icon(IconName::Play)
.icon(IconName::PlayOutlined)
.on_click(cx.listener(move |this, _, _window, cx| {
this.retry_connection(cx)
})),

@@ -807,7 +807,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with Mistral, you need to add an API key. Follow these steps:"))
.child(Label::new("To use Zed's agent with Mistral, you need to add an API key. Follow these steps:"))
.child(
List::new()
.child(InstructionListItem::new(

@@ -23,7 +23,7 @@ use settings::{Settings, SettingsStore, update_settings_file};
use std::pin::Pin;
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashMap, sync::Arc};
use ui::{Indicator, List, prelude::*};
use ui::{ButtonLike, Indicator, List, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;

@@ -1004,63 +1004,73 @@ impl Render for ConfigurationView {
.w_full()
.justify_between()
.gap_2()
.child({
let mut buttons = h_flex()
.child(
h_flex()
.w_full()
.gap_2();
if is_authenticated {
buttons = buttons.child(
Button::new("ollama-site", "Ollama Homepage")
.gap_2()
.map(|this| {
if is_authenticated {
this.child(
Button::new("ollama-site", "Ollama")
.style(ButtonStyle::Subtle)
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE))
.into_any_element(),
)
} else {
this.child(
Button::new(
"download_ollama_button",
"Download Ollama",
)
.style(ButtonStyle::Subtle)
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _, cx| {
cx.open_url(OLLAMA_DOWNLOAD_URL)
})
.into_any_element(),
)
}
})
.child(
Button::new("view-models", "View All Models")
.style(ButtonStyle::Subtle)
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE))
.into_any_element(),
);
} else {
buttons = buttons.child(
Button::new(
"download_ollama_button",
"Download Ollama",
)
.style(ButtonStyle::Filled)
.icon(IconName::Download)
.icon_size(IconSize::XSmall)
.on_click(move |_, _, cx| {
cx.open_url(OLLAMA_DOWNLOAD_URL)
})
.into_any_element(),
);
}
buttons.child(
Button::new("view-models", "Browse Models")
.style(ButtonStyle::Subtle)
.icon(IconName::Library)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
)
})
.child(
if is_authenticated {
h_flex()
.gap_2()
.child(Indicator::dot().color(Color::Success))
.child(Label::new("Connected").size(LabelSize::Small))
.into_any_element()
} else {
Button::new("retry_ollama_models", "Connect")
.style(ButtonStyle::Filled)
.icon_position(IconPosition::Start)
.icon_size(IconSize::XSmall)
.icon(IconName::Play)
.on_click(cx.listener(move |this, _, _, cx| {
this.retry_connection(cx)
}))
.into_any_element()
}
.on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
),
)
.map(|this| {
if is_authenticated {
this.child(
ButtonLike::new("connected")
.disabled(true)
.cursor_style(gpui::CursorStyle::Arrow)
.child(
h_flex()
.gap_2()
.child(Indicator::dot().color(Color::Success))
.child(Label::new("Connected"))
.into_any_element(),
),
)
} else {
this.child(
Button::new("retry_ollama_models", "Connect")
.icon_position(IconPosition::Start)
.icon_size(IconSize::XSmall)
.icon(IconName::PlayOutlined)
.on_click(cx.listener(move |this, _, _, cx| {
this.retry_connection(cx)
})),
)
}
})
)
)
.into_any()

@@ -780,7 +780,7 @@ impl Render for ConfigurationView {
let api_key_section = if self.should_render_editor(cx) {
v_flex()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:"))
.child(Label::new("To use Zed's agent with OpenAI, you need to add an API key. Follow these steps:"))
.child(
List::new()
.child(InstructionListItem::new(
@@ -868,7 +868,7 @@ impl Render for ConfigurationView {
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _window, cx| {
cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible")
cx.open_url("https://zed.dev/docs/ai/llm-providers#openai-api-compatible")
}),
);

@@ -466,7 +466,7 @@ impl Render for ConfigurationView {
let api_key_section = if self.should_render_editor(cx) {
v_flex()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with an OpenAI compatible provider, you need to add an API key."))
.child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
.child(
div()
.pt(DynamicSpacing::Base04.rems(cx))

@@ -855,7 +855,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with OpenRouter, you need to add an API key. Follow these steps:"))
.child(Label::new("To use Zed's agent with OpenRouter, you need to add an API key. Follow these steps:"))
.child(
List::new()
.child(InstructionListItem::new(