From 1dd5da0585531c903df7f6e066e682ced90af02e Mon Sep 17 00:00:00 2001 From: Umesh Yadav Date: Wed, 9 Jul 2025 16:42:51 +0530 Subject: [PATCH 1/8] language_models: Add support for API key and API URL to ollama provider --- crates/language_models/src/provider/ollama.rs | 389 ++++++++++++++++-- crates/ollama/src/ollama.rs | 36 +- 2 files changed, 382 insertions(+), 43 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 0866cfa4c8..c1fd277459 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -1,4 +1,6 @@ -use anyhow::{Result, anyhow}; +use anyhow::{Context as _, Result, anyhow}; +use credentials_provider::CredentialsProvider; +use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use futures::{Stream, TryFutureExt, stream}; use gpui::{AnyView, App, AsyncApp, Context, Subscription, Task}; @@ -10,17 +12,19 @@ use language_model::{ LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage, }; +use menu; use ollama::{ - ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionTool, - OllamaToolCall, get_models, show_model, stream_chat_completion, + ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OLLAMA_API_URL, + OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::pin::Pin; use std::sync::atomic::{AtomicU64, Ordering}; use std::{collections::HashMap, sync::Arc}; use ui::{ButtonLike, Indicator, List, prelude::*}; +use ui_input::SingleLineInput; use util::ResultExt; use crate::AllLanguageModelSettings; @@ -29,6 +33,7 @@ use crate::ui::InstructionListItem; 
const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download"; const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library"; const OLLAMA_SITE: &str = "https://ollama.com/"; +const OLLAMA_API_KEY_VAR: &str = "OLLAMA_API_KEY"; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama"); @@ -67,21 +72,61 @@ pub struct State { available_models: Vec, fetch_model_task: Option>>, _subscription: Subscription, + api_key: Option, + api_key_from_env: bool, } impl State { fn is_authenticated(&self) -> bool { - !self.available_models.is_empty() + !self.available_models.is_empty() || self.api_key.is_some() + } + + fn reset_api_key(&self, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .ollama + .api_url + .clone(); + cx.spawn(async move |this, cx| { + credentials_provider + .delete_credentials(&api_url, &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = None; + this.api_key_from_env = false; + cx.notify(); + }) + }) + } + + fn set_api_key(&mut self, api_key: String, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .ollama + .api_url + .clone(); + cx.spawn(async move |this, cx| { + credentials_provider + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = Some(api_key); + cx.notify(); + }) + }) } fn fetch_models(&mut self, cx: &mut Context) -> Task> { let settings = &AllLanguageModelSettings::get_global(cx).ollama; let http_client = Arc::clone(&self.http_client); let api_url = settings.api_url.clone(); + let api_key = self.api_key.clone(); // As a proxy for the server being "authenticated", we'll check if its up by fetching the models cx.spawn(async move |this, cx| { - let models = 
get_models(http_client.as_ref(), &api_url, None).await?; + let models = get_models(http_client.as_ref(), &api_url, api_key.clone(), None).await?; let tasks = models .into_iter() @@ -92,9 +137,11 @@ impl State { .map(|model| { let http_client = Arc::clone(&http_client); let api_url = api_url.clone(); + let api_key = api_key.clone(); async move { let name = model.name.as_str(); - let capabilities = show_model(http_client.as_ref(), &api_url, name).await?; + let capabilities = + show_model(http_client.as_ref(), &api_url, api_key, name).await?; let ollama_model = ollama::Model::new( name, None, @@ -135,8 +182,38 @@ impl State { return Task::ready(Ok(())); } + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .ollama + .api_url + .clone(); let fetch_models_task = self.fetch_models(cx); - cx.spawn(async move |_this, _cx| Ok(fetch_models_task.await?)) + cx.spawn(async move |this, cx| { + let (api_key, from_env) = if let Ok(api_key) = std::env::var(OLLAMA_API_KEY_VAR) { + (Some(api_key), true) + } else { + match credentials_provider.read_credentials(&api_url, &cx).await { + Ok(Some((_, api_key))) => ( + Some(String::from_utf8(api_key).context("invalid Ollama API key")?), + false, + ), + Ok(None) => (None, false), + Err(_) => (None, false), + } + }; + + this.update(cx, |this, cx| { + this.api_key = api_key; + this.api_key_from_env = from_env; + cx.notify(); + })?; + + // Always try to fetch models - if no API key is needed (local Ollama), it will work + // If API key is needed and provided, it will work + // If API key is needed and not provided, it will fail gracefully + let _ = fetch_models_task.await; + Ok(()) + }) } } @@ -162,6 +239,8 @@ impl OllamaLanguageModelProvider { available_models: Default::default(), fetch_model_task: None, _subscription: subscription, + api_key: None, + api_key_from_env: false, } }), }; @@ -236,6 +315,7 @@ impl LanguageModelProvider for OllamaLanguageModelProvider { model: model.clone(), 
http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), + state: self.state.clone(), }) as Arc }) .collect::>(); @@ -258,7 +338,7 @@ impl LanguageModelProvider for OllamaLanguageModelProvider { } fn reset_credentials(&self, cx: &mut App) -> Task> { - self.state.update(cx, |state, cx| state.fetch_models(cx)) + self.state.update(cx, |state, cx| state.reset_api_key(cx)) } } @@ -267,6 +347,7 @@ pub struct OllamaLanguageModel { model: ollama::Model, http_client: Arc, request_limiter: RateLimiter, + state: gpui::Entity, } impl OllamaLanguageModel { @@ -412,15 +493,19 @@ impl LanguageModel for OllamaLanguageModel { let request = self.to_ollama_request(request); let http_client = self.http_client.clone(); - let Ok(api_url) = cx.update(|cx| { + let Ok((api_url, api_key)) = cx.update(|cx| { let settings = &AllLanguageModelSettings::get_global(cx).ollama; - settings.api_url.clone() + ( + settings.api_url.clone(), + self.state.read(cx).api_key.clone(), + ) }) else { return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed(); }; let future = self.request_limiter.stream(async move { - let stream = stream_chat_completion(http_client.as_ref(), &api_url, request).await?; + let stream = + stream_chat_completion(http_client.as_ref(), &api_url, api_key, request).await?; let stream = map_to_language_model_completion_events(stream); Ok(stream) }); @@ -529,12 +614,44 @@ fn map_to_language_model_completion_events( } struct ConfigurationView { + api_key_editor: gpui::Entity, + api_url_editor: gpui::Entity, state: gpui::Entity, loading_models_task: Option>, } impl ConfigurationView { pub fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| { + SingleLineInput::new( + window, + cx, + "ol-000000000000000000000000000000000000000000000000", + ) + .label("API key") + }); + + let api_url = AllLanguageModelSettings::get_global(cx) + .ollama + .api_url + .clone(); + + let api_url_editor = 
cx.new(|cx| { + let input = SingleLineInput::new(window, cx, OLLAMA_API_URL).label("API URL"); + + if !api_url.is_empty() { + input.editor.update(cx, |editor, cx| { + editor.set_text(&*api_url, window, cx); + }); + } + input + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); + let loading_models_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { @@ -553,6 +670,8 @@ impl ConfigurationView { })); Self { + api_key_editor, + api_url_editor, state, loading_models_task, } @@ -563,11 +682,114 @@ impl ConfigurationView { .update(cx, |state, cx| state.fetch_models(cx)) .detach_and_log_err(cx); } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self + .api_key_editor + .read(cx) + .editor() + .read(cx) + .text(cx) + .trim() + .to_string(); + + // Don't proceed if no API key is provided and we're not authenticated + if api_key.is_empty() && !self.state.read(cx).is_authenticated() { + return; + } + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + if !api_key.is_empty() { + state + .update(cx, |state, cx| state.set_api_key(api_key, cx))? 
+ .await + } else { + Ok(()) + } + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor.update(cx, |input, cx| { + input.editor.update(cx, |editor, cx| { + editor.set_text("", window, cx); + }); + }); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state.update(cx, |state, cx| state.reset_api_key(cx))?.await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn should_render_api_key_editor(&self, cx: &mut Context) -> bool { + let state = self.state.read(cx); + !state.is_authenticated() || (!state.api_key_from_env && state.api_key.is_some()) + } + + fn save_api_url(&mut self, cx: &mut Context) { + let api_url = self + .api_url_editor + .read(cx) + .editor() + .read(cx) + .text(cx) + .trim() + .to_string(); + + let current_url = AllLanguageModelSettings::get_global(cx) + .ollama + .api_url + .clone(); + + let effective_current_url = if current_url.is_empty() { + OLLAMA_API_URL + } else { + &current_url + }; + + if !api_url.is_empty() && api_url != effective_current_url { + let fs = ::global(cx); + update_settings_file::(fs, cx, move |settings, _| { + if let Some(settings) = settings.ollama.as_mut() { + settings.api_url = Some(api_url.clone()); + } else { + settings.ollama = Some(crate::settings::OllamaSettingsContent { + api_url: Some(api_url.clone()), + available_models: None, + }); + } + }); + } + } + + fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context) { + self.api_url_editor.update(cx, |input, cx| { + input.editor.update(cx, |editor, cx| { + editor.set_text(OLLAMA_API_URL, window, cx); + }); + }); + let fs = ::global(cx); + update_settings_file::(fs, cx, |settings, _cx| { + if let Some(settings) = settings.ollama.as_mut() { + settings.api_url = None; + } + }); + cx.notify(); + } + } impl Render for ConfigurationView { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let is_authenticated = 
self.state.read(cx).is_authenticated(); + let env_var_set = self.state.read(cx).api_key_from_env; let ollama_intro = "Get up & running with Llama 3.3, Mistral, Gemma 2, and other LLMs with Ollama."; @@ -583,9 +805,108 @@ impl Render for ConfigurationView { .child(InstructionListItem::text_only("Ollama must be running with at least one model installed to use it in the assistant.")) .child(InstructionListItem::text_only( "Once installed, try `ollama run llama3.2`", + )) + .child(InstructionListItem::text_only( + "For remote Ollama instances, optionally provide an API key below.", )), ), ) + .child( + if self.should_render_api_key_editor(cx) { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(self.api_key_editor.clone()) + .child( + Label::new( + if env_var_set { + "API key loaded from OLLAMA_API_KEY environment variable" + } else { + "API key is optional for local Ollama instances" + } + ) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any() + } else { + h_flex() + .mt_1() + .p_1() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + .child( + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + .child(Label::new( + if env_var_set { + "API key loaded from OLLAMA_API_KEY" + } else { + "Connected to Ollama" + } + )) + ) + .child( + Button::new("reset-api-key", "Reset API Key") + .style(ButtonStyle::Subtle) + .icon(IconName::Trash) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_api_key(window, cx) + })) + ) + .into_any() + } + ) + .child( + { + let custom_api_url_set = AllLanguageModelSettings::get_global(cx).ollama.api_url != OLLAMA_API_URL; + + if custom_api_url_set { + h_flex() + .mt_1() + .p_1() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + .child( + h_flex() + .gap_2() + 
.child(Indicator::dot().color(Color::Success)) + .child(Label::new("Custom API URL")) + ) + .child( + Button::new("reset-api-url", "Reset API URL") + .style(ButtonStyle::Subtle) + .icon(IconName::Trash) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_api_url(window, cx) + })) + ) + .into_any() + } else { + v_flex() + .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { + this.save_api_url(cx); + cx.notify(); + })) + .mt_2() + .pt_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .gap_1() + .child(Label::new("Configure a custom API URL (optional)")) + .child(self.api_url_editor.clone()) + .into_any() + } + } + ) .child( h_flex() .w_full() @@ -632,32 +953,30 @@ impl Render for ConfigurationView { .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)), ), ) - .map(|this| { + .child( if is_authenticated { - this.child( - ButtonLike::new("connected") - .disabled(true) - .cursor_style(gpui::CursorStyle::Arrow) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new("Connected")) - .into_any_element(), - ), - ) + ButtonLike::new("connected") + .disabled(true) + .cursor_style(gpui::CursorStyle::Arrow) + .child( + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + .child(Label::new("Connected")) + .into_any_element(), + ) + .into_any_element() } else { - this.child( - Button::new("retry_ollama_models", "Connect") - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon(IconName::Play) - .on_click(cx.listener(move |this, _, _, cx| { - this.retry_connection(cx) - })), - ) + Button::new("retry_ollama_models", "Connect") + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon(IconName::Play) + .on_click(cx.listener(move |this, _, _, cx| { + this.retry_connection(cx) + })) + .into_any_element() } - }) + ) ) .into_any() } diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs 
index 109fea7353..0997026d4c 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -274,14 +274,19 @@ pub async fn complete( pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, + api_key: Option, request: ChatRequest, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let request_builder = http::Request::builder() + let mut request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {api_key}")) + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -308,14 +313,19 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, + api_key: Option, _: Option, ) -> Result> { let uri = format!("{api_url}/api/tags"); - let request_builder = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) .header("Accept", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {api_key}")); + } + let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; @@ -335,15 +345,25 @@ pub async fn get_models( } /// Fetch details of a model, used to determine model capabilities -pub async fn show_model(client: &dyn HttpClient, api_url: &str, model: &str) -> Result { +pub async fn show_model( + client: &dyn HttpClient, + api_url: &str, + api_key: Option, + model: &str, +) -> Result { let uri = format!("{api_url}/api/show"); - let request = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::POST) .uri(uri) - .header("Content-Type", "application/json") - .body(AsyncBody::from( - 
serde_json::json!({ "model": model }).to_string(), - ))?; + .header("Content-Type", "application/json"); + + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {api_key}")) + } + + let request = request_builder.body(AsyncBody::from( + serde_json::json!({ "model": model }).to_string(), + ))?; let mut response = client.send(request).await?; let mut body = String::new(); From 81d4126e0a72836439031c80d79a6d17a0d8848a Mon Sep 17 00:00:00 2001 From: Umesh Yadav Date: Wed, 9 Jul 2025 18:13:35 +0530 Subject: [PATCH 2/8] Redesign Ollama configuration UI for improved clarity --- crates/language_models/src/provider/ollama.rs | 440 +++++++++++------- 1 file changed, 259 insertions(+), 181 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index c1fd277459..5d6b1e45e7 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -23,7 +23,7 @@ use settings::{Settings, SettingsStore, update_settings_file}; use std::pin::Pin; use std::sync::atomic::{AtomicU64, Ordering}; use std::{collections::HashMap, sync::Arc}; -use ui::{ButtonLike, Indicator, List, prelude::*}; +use ui::{Indicator, List, prelude::*}; use ui_input::SingleLineInput; use util::ResultExt; @@ -730,8 +730,7 @@ impl ConfigurationView { } fn should_render_api_key_editor(&self, cx: &mut Context) -> bool { - let state = self.state.read(cx); - !state.is_authenticated() || (!state.api_key_from_env && state.api_key.is_some()) + !self.state.read(cx).api_key.is_some() } fn save_api_url(&mut self, cx: &mut Context) { @@ -791,194 +790,273 @@ impl Render for ConfigurationView { let is_authenticated = self.state.read(cx).is_authenticated(); let env_var_set = self.state.read(cx).api_key_from_env; - let ollama_intro = - "Get up & running with Llama 3.3, Mistral, Gemma 2, and other LLMs with Ollama."; - if self.loading_models_task.is_some() { - 
div().child(Label::new("Loading models...")).into_any() - } else { - v_flex() - .gap_2() + div() .child( - v_flex().gap_1().child(Label::new(ollama_intro)).child( - List::new() - .child(InstructionListItem::text_only("Ollama must be running with at least one model installed to use it in the assistant.")) - .child(InstructionListItem::text_only( - "Once installed, try `ollama run llama3.2`", - )) - .child(InstructionListItem::text_only( - "For remote Ollama instances, optionally provide an API key below.", - )), - ), - ) - .child( - if self.should_render_api_key_editor(cx) { - v_flex() - .on_action(cx.listener(Self::save_api_key)) - .child(self.api_key_editor.clone()) - .child( - Label::new( - if env_var_set { - "API key loaded from OLLAMA_API_KEY environment variable" - } else { - "API key is optional for local Ollama instances" - } - ) - .size(LabelSize::Small) - .color(Color::Muted), - ) - .into_any() - } else { - h_flex() - .mt_1() - .p_1() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().background) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new( - if env_var_set { - "API key loaded from OLLAMA_API_KEY" - } else { - "Connected to Ollama" - } - )) - ) - .child( - Button::new("reset-api-key", "Reset API Key") - .style(ButtonStyle::Subtle) - .icon(IconName::Trash) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(|this, _, window, cx| { - this.reset_api_key(window, cx) - })) - ) - .into_any() - } - ) - .child( - { - let custom_api_url_set = AllLanguageModelSettings::get_global(cx).ollama.api_url != OLLAMA_API_URL; - - if custom_api_url_set { - h_flex() - .mt_1() - .p_1() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().background) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new("Custom API URL")) - ) - .child( - 
Button::new("reset-api-url", "Reset API URL") - .style(ButtonStyle::Subtle) - .icon(IconName::Trash) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(|this, _, window, cx| { - this.reset_api_url(window, cx) - })) - ) - .into_any() - } else { - v_flex() - .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { - this.save_api_url(cx); - cx.notify(); - })) - .mt_2() - .pt_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .gap_1() - .child(Label::new("Configure a custom API URL (optional)")) - .child(self.api_url_editor.clone()) - .into_any() - } - } - ) - .child( - h_flex() - .w_full() - .justify_between() + v_flex() .gap_2() .child( h_flex() - .w_full() .gap_2() - .map(|this| { - if is_authenticated { - this.child( - Button::new("ollama-site", "Ollama") - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE)) - .into_any_element(), - ) - } else { - this.child( - Button::new( - "download_ollama_button", - "Download Ollama", - ) - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .on_click(move |_, _, cx| { - cx.open_url(OLLAMA_DOWNLOAD_URL) - }) - .into_any_element(), - ) - } - }) - .child( - Button::new("view-models", "All Models") - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)), - ), + .child(Indicator::dot().color(Color::Accent)) + .child(Label::new("Connecting to Ollama...")), ) .child( - if is_authenticated { - ButtonLike::new("connected") - .disabled(true) - .cursor_style(gpui::CursorStyle::Arrow) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new("Connected")) - .into_any_element(), - ) - .into_any_element() - } else { - Button::new("retry_ollama_models", 
"Connect") - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon(IconName::Play) - .on_click(cx.listener(move |this, _, _, cx| { - this.retry_connection(cx) - })) - .into_any_element() - } - ) + Label::new("Checking for available models and server status") + .size(LabelSize::Small) + .color(Color::Muted), + ), ) .into_any() + } else { + v_flex() + .gap_4() + .child( + v_flex() + .gap_2() + .child( + Label::new("Run powerful language models locally on your machine with Ollama. Get started with Llama 3.3, Mistral, Gemma 2, and hundreds of other models.") + .size(LabelSize::Small) + .color(Color::Muted) + ) + ) + .child( + if !is_authenticated { + v_flex() + .gap_2() + .child( + Label::new("Getting Started") + .size(LabelSize::Small) + .color(Color::Default) + ) + .child( + List::new() + .child(InstructionListItem::text_only("1. Download and install Ollama from ollama.com")) + .child(InstructionListItem::text_only("2. Start Ollama and download a model: `ollama run llama3.2`")) + .child(InstructionListItem::text_only("3. 
Click 'Connect' below to start using Ollama in Zed")) + ) + .into_any() + } else { + div().into_any() + } + ) + .child( + if self.should_render_api_key_editor(cx) { + v_flex() + .gap_2() + .child( + Label::new("API Key (Optional)") + .size(LabelSize::Small) + .color(Color::Default) + ) + .child( + Label::new("Only required for remote Ollama instances or servers with authentication enabled") + .size(LabelSize::XSmall) + .color(Color::Muted) + ) + .child( + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(self.api_key_editor.clone()) + .child( + Label::new( + format!("You can also assign the {OLLAMA_API_KEY_VAR} environment variable and restart Zed.") + ) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + ) + .into_any() + } else { + v_flex() + .gap_2() + .child( + Label::new("API Key") + .size(LabelSize::Small) + .color(Color::Default) + ) + .child( + h_flex() + .p_3() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .child( + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + .child( + Label::new( + if env_var_set { + format!("API key set in {OLLAMA_API_KEY_VAR} environment variable.") + } else { + "API key configured.".to_string() + } + ) + ) + ) + .child( + Button::new("reset-api-key", "Reset API Key") + .style(ButtonStyle::Subtle) + .icon(IconName::Undo) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_api_key(window, cx) + })) + ) + ) + .into_any() + } + ) + .child({ + let custom_api_url_set = AllLanguageModelSettings::get_global(cx).ollama.api_url != OLLAMA_API_URL; + + if custom_api_url_set { + v_flex() + .gap_2() + .child( + Label::new("API URL (Optional)") + .size(LabelSize::Small) + .color(Color::Default) + ) + .child( + Label::new("Only required for remote Ollama instances or custom ports") + .size(LabelSize::XSmall) + .color(Color::Muted) + ) + .child( + v_flex() + 
.gap_2() + .child( + h_flex() + .p_3() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .child( + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + .child( + v_flex() + .gap_1() + .child(Label::new("Custom server configured")) + .child( + Label::new(&AllLanguageModelSettings::get_global(cx).ollama.api_url) + .size(LabelSize::XSmall) + .color(Color::Muted) + ) + ) + ) + .child( + Button::new("reset-api-url", "Reset") + .style(ButtonStyle::Subtle) + .icon(IconName::Undo) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_api_url(window, cx) + })) + ) + ) + ) + .into_any() + } else { + v_flex() + .gap_2() + .child( + Label::new("API URL (Optional)") + .size(LabelSize::Small) + .color(Color::Default) + ) + .child( + Label::new("Only required for remote Ollama instances or custom ports") + .size(LabelSize::XSmall) + .color(Color::Muted) + ) + .child( + v_flex() + .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { + this.save_api_url(cx); + cx.notify(); + })) + .gap_2() + .child(self.api_url_editor.clone()) + ) + .into_any() + } + }) + .child( + v_flex() + .gap_2() + .child( + h_flex() + .w_full() + .justify_between() + .gap_2() + .child({ + let mut buttons = h_flex() + .w_full() + .gap_2(); + if is_authenticated { + buttons = buttons.child( + Button::new("ollama-site", "Ollama Homepage") + .style(ButtonStyle::Subtle) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE)) + .into_any_element(), + ); + } else { + buttons = buttons.child( + Button::new( + "download_ollama_button", + "Download Ollama", + ) + .style(ButtonStyle::Filled) + .icon(IconName::Download) + .icon_size(IconSize::XSmall) + .on_click(move |_, _, cx| { + cx.open_url(OLLAMA_DOWNLOAD_URL) + }) + .into_any_element(), + ); + } + buttons.child( 
+ Button::new("view-models", "Browse Models") + .style(ButtonStyle::Subtle) + .icon(IconName::Library) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)), + ) + }) + .child( + if is_authenticated { + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + .child(Label::new("Connected").size(LabelSize::Small)) + .into_any_element() + } else { + Button::new("retry_ollama_models", "Connect") + .style(ButtonStyle::Filled) + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon(IconName::Play) + .on_click(cx.listener(move |this, _, _, cx| { + this.retry_connection(cx) + })) + .into_any_element() + } + ) + ) + ) + .into_any() } } } From 69affc952d744071882157fa832852619224e810 Mon Sep 17 00:00:00 2001 From: Umesh Yadav Date: Wed, 9 Jul 2025 18:22:13 +0530 Subject: [PATCH 3/8] fix clippy --- crates/language_models/src/provider/ollama.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 5d6b1e45e7..2889135fe2 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -730,7 +730,7 @@ impl ConfigurationView { } fn should_render_api_key_editor(&self, cx: &mut Context) -> bool { - !self.state.read(cx).api_key.is_some() + self.state.read(cx).api_key.is_none() } fn save_api_url(&mut self, cx: &mut Context) { From a95d8f284a51d7f5af81ad0700b7822abc698a41 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 25 Jul 2025 10:02:54 -0400 Subject: [PATCH 4/8] Merge fix --- Cargo.lock | 1 + crates/language_models/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 2c65131db0..de1f631045 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9120,6 +9120,7 @@ dependencies = [ "credentials_provider", "deepseek", "editor", + "fs", "futures 0.3.31", "google_ai", "gpui", diff --git 
a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 574579aaa7..6c2bf6739a 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -30,6 +30,7 @@ convert_case.workspace = true copilot.workspace = true deepseek = { workspace = true, features = ["schemars"] } editor.workspace = true +fs.workspace = true futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true From 43e6c451d9a6645ac13a27bc7ae60d7a52ece0db Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 6 Aug 2025 11:10:27 -0400 Subject: [PATCH 5/8] docs, settings text --- crates/language_models/src/provider/ollama.rs | 163 +++++++----------- docs/src/ai/llm-providers.md | 13 ++ 2 files changed, 73 insertions(+), 103 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index f4914ff91e..1f4eee8980 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -23,7 +23,7 @@ use settings::{Settings, SettingsStore, update_settings_file}; use std::pin::Pin; use std::sync::atomic::{AtomicU64, Ordering}; use std::{collections::HashMap, sync::Arc}; -use ui::{ButtonLike, Indicator, List, prelude::*}; +use ui::{ButtonLike, ElevationIndex, Indicator, List, Tooltip, prelude::*}; use ui_input::SingleLineInput; use util::ResultExt; @@ -817,19 +817,14 @@ impl Render for ConfigurationView { .into_any() } else { v_flex() - .gap_4() .child( - v_flex() - .gap_2() - .child( + if !is_authenticated { + v_flex().child( Label::new("Run powerful language models locally on your machine with Ollama. 
Get started with Llama 3.3, Mistral, Gemma 2, and hundreds of other models.") .size(LabelSize::Small) .color(Color::Muted) - ) - ) - .child( - if !is_authenticated { - v_flex() + ) + .v_flex() .gap_2() .child( Label::new("Getting Started") @@ -839,8 +834,12 @@ impl Render for ConfigurationView { .child( List::new() .child(InstructionListItem::text_only("1. Download and install Ollama from ollama.com")) - .child(InstructionListItem::text_only("2. Start Ollama and download a model: `ollama run llama3.2`")) + .child(InstructionListItem::text_only("2. Start Ollama and download a model: `ollama run gpt-oss:20b`")) .child(InstructionListItem::text_only("3. Click 'Connect' below to start using Ollama in Zed")) + ).child( + Label::new("API Keys and API URLs are optional, Zed will default to local ollama usage.") + .size(LabelSize::Small) + .color(Color::Muted) ) .into_any() } else { @@ -849,39 +848,18 @@ impl Render for ConfigurationView { ) .child( if self.should_render_api_key_editor(cx) { - v_flex() - .gap_2() - .child( - Label::new("API Key (Optional)") - .size(LabelSize::Small) - .color(Color::Default) - ) - .child( - Label::new("Only required for remote Ollama instances or servers with authentication enabled") - .size(LabelSize::XSmall) - .color(Color::Muted) - ) - .child( - v_flex() - .on_action(cx.listener(Self::save_api_key)) - .child(self.api_key_editor.clone()) - .child( - Label::new( - format!("You can also assign the {OLLAMA_API_KEY_VAR} environment variable and restart Zed.") - ) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - ) - .into_any() + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(self.api_key_editor.clone()) + .child( + Label::new( + format!("You can also assign the {OLLAMA_API_KEY_VAR} environment variable and restart Zed.") + ) + .size(LabelSize::XSmall) + .color(Color::Muted), + ).into_any() } else { v_flex() - .gap_2() - .child( - Label::new("API Key") - .size(LabelSize::Small) - .color(Color::Default) - ) .child( 
h_flex() .p_3() @@ -906,12 +884,15 @@ impl Render for ConfigurationView { ) .child( Button::new("reset-api-key", "Reset API Key") - .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) .icon(IconName::Undo) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(|this, _, window, cx| { - this.reset_api_key(window, cx) - })) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .layer(ElevationIndex::ModalSurface) + .when(env_var_set, |this| { + this.tooltip(Tooltip::text(format!("To reset your API key, unset the {OLLAMA_API_KEY_VAR} environment variable."))) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))), ) ) .into_any() @@ -924,66 +905,42 @@ impl Render for ConfigurationView { v_flex() .gap_2() .child( - Label::new("API URL (Optional)") - .size(LabelSize::Small) - .color(Color::Default) - ) - .child( - Label::new("Only required for remote Ollama instances or custom ports") - .size(LabelSize::XSmall) - .color(Color::Muted) - ) - .child( - v_flex() - .gap_2() - .child( - h_flex() - .p_3() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().elevated_surface_background) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child( - v_flex() - .gap_1() - .child(Label::new("Custom server configured")) - .child( - Label::new(&AllLanguageModelSettings::get_global(cx).ollama.api_url) - .size(LabelSize::XSmall) - .color(Color::Muted) - ) - ) - ) - .child( - Button::new("reset-api-url", "Reset") - .style(ButtonStyle::Subtle) - .icon(IconName::Undo) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(|this, _, window, cx| { - this.reset_api_url(window, cx) - })) - ) - ) + h_flex() + .p_3() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .child( + h_flex() + .gap_2() + .child(Indicator::dot().color(Color::Success)) + 
.child( + v_flex() + .gap_1() + .child( + Label::new( + format!("API URL configured. {}", &AllLanguageModelSettings::get_global(cx).ollama.api_url) + ) + ) + ) + ) + .child( + Button::new("reset-api-url", "Reset API URL") + .label_size(LabelSize::Small) + .icon(IconName::Undo) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .layer(ElevationIndex::ModalSurface) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_api_url(window, cx) + })) + ) ) .into_any() } else { v_flex() - .gap_2() - .child( - Label::new("API URL (Optional)") - .size(LabelSize::Small) - .color(Color::Default) - ) - .child( - Label::new("Only required for remote Ollama instances or custom ports") - .size(LabelSize::XSmall) - .color(Color::Muted) - ) .child( v_flex() .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 04646213e6..538b35aee0 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -393,6 +393,19 @@ If the model is tagged with `thinking` in the Ollama catalog, set this option an The `supports_images` option enables the model's vision capabilities, allowing it to process images included in the conversation context. If the model is tagged with `vision` in the Ollama catalog, set this option and you can use it in Zed. +#### Ollama Authentication + +In addition to running Ollama on your own hardware, which generally does not require authentication, Zed also supports connecting to Ollama API Keys are required for authentication. + +One such service is [Ollama Turbo])(https://ollama.com/turbo). To configure Zed to use Ollama turbo: +1. Sign in to your Ollama account and subscribe to Ollama Turbo +2. Visit [ollama.com/settings/keys](https://ollama.com/settings/keys) and create an API key +3. Open the settings view (`agent: open settings`) and go to the Ollama section +4. Paste your API key and press enter. +5. 
For the API URL enter `https://ollama.com`
+
+Zed will also use the `OLLAMA_API_KEY` environment variable if defined.
+
 ### OpenAI {#openai}
 
 > ✅ Supports tool use

From c3b53333c856511660761cdfe5f91a30577c3523 Mon Sep 17 00:00:00 2001
From: Peter Tripp
Date: Wed, 6 Aug 2025 11:41:37 -0400
Subject: [PATCH 6/8] Two space gang.

---
 docs/src/ai/llm-providers.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md
index ce2eb7eb20..f4e6819669 100644
--- a/docs/src/ai/llm-providers.md
+++ b/docs/src/ai/llm-providers.md
@@ -382,7 +382,8 @@ If the model is tagged with `vision` in the Ollama catalog, set this option and
 
-In addition to running Ollama on your own hardware, which generally does not require authentication, Zed also supports connecting to Ollama API Keys are required for authentication.
+In addition to running Ollama on your own hardware, which generally does not require authentication, Zed also supports connecting to hosted Ollama services where API keys are required for authentication.
 
-One such service is [Ollama Turbo])(https://ollama.com/turbo). To configure Zed to use Ollama turbo:
+One such service is [Ollama Turbo](https://ollama.com/turbo). To configure Zed to use Ollama Turbo:
+
 1. Sign in to your Ollama account and subscribe to Ollama Turbo
 2. Visit [ollama.com/settings/keys](https://ollama.com/settings/keys) and create an API key
 3. 
Open the settings view (`agent: open settings`) and go to the Ollama section From f15d4eea6d3f8b2fa6b027f59b46312c90ac5428 Mon Sep 17 00:00:00 2001 From: Umesh Yadav Date: Thu, 21 Aug 2025 00:00:30 +0530 Subject: [PATCH 7/8] =?UTF-8?q?=F0=9F=93=8E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Umesh Yadav --- crates/language_models/src/provider/ollama.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index fc6c1aeb88..d500524e08 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -89,7 +89,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -108,7 +108,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -192,7 +192,7 @@ impl State { let (api_key, from_env) = if let Ok(api_key) = std::env::var(OLLAMA_API_KEY_VAR) { (Some(api_key), true) } else { - match credentials_provider.read_credentials(&api_url, &cx).await { + match credentials_provider.read_credentials(&api_url, cx).await { Ok(Some((_, api_key))) => ( Some(String::from_utf8(api_key).context("invalid Ollama API key")?), false, @@ -770,10 +770,10 @@ impl ConfigurationView { let fs = ::global(cx); update_settings_file::(fs, cx, move |settings, _| { if let Some(settings) = settings.ollama.as_mut() { - settings.api_url = Some(api_url.clone()); + settings.api_url = Some(api_url); } else { settings.ollama = Some(crate::settings::OllamaSettingsContent { - api_url: Some(api_url.clone()), + api_url: Some(api_url), 
available_models: None, }); } From 9936f65cd3f47ec723ee94723a2a3dc51523ee23 Mon Sep 17 00:00:00 2001 From: Umesh Yadav Date: Tue, 26 Aug 2025 15:51:45 +0530 Subject: [PATCH 8/8] Move OLLAMA_API_KEY_VAR to ollama crate Co-authored-by: Oliver Azevedo Barnes --- crates/language_models/src/provider/ollama.rs | 6 +++--- crates/ollama/src/ollama.rs | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index d500524e08..b8b55e252e 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -14,8 +14,9 @@ use language_model::{ }; use menu; use ollama::{ - ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OLLAMA_API_URL, - OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion, + ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OLLAMA_API_KEY_VAR, + OLLAMA_API_URL, OllamaFunctionTool, OllamaToolCall, get_models, show_model, + stream_chat_completion, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -33,7 +34,6 @@ use crate::ui::InstructionListItem; const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download"; const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library"; const OLLAMA_SITE: &str = "https://ollama.com/"; -const OLLAMA_API_KEY_VAR: &str = "OLLAMA_API_KEY"; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama"); diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index f45f3f6f6f..6d17c7c4d7 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -6,6 +6,7 @@ use serde_json::Value; use std::time::Duration; pub const OLLAMA_API_URL: &str = "http://localhost:11434"; +pub const OLLAMA_API_KEY_VAR: &str = "OLLAMA_API_KEY"; #[cfg_attr(feature = "schemars", 
derive(schemars::JsonSchema))] #[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]