Rename OllamaService to State
To follow the convention used in language_models
commit 62ce840fc1
parent 8bb757ab67

4 changed files with 63 additions and 67 deletions
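
At a glance, the renamed public surface as it reads after this commit. This is a condensed sketch excerpted from the ollama hunks below, not the full file: remaining fields, the model-fetching logic, and method bodies are elided, and the import paths are the ones these files already use elsewhere in the diff.

// Sketch only: condensed from the hunks below; other members and bodies omitted.
use std::sync::Arc;

use gpui::{App, Entity, Global};
use http_client::HttpClient;

pub struct State {
    http_client: Arc<dyn HttpClient>,
    api_url: String,
    api_key: Option<String>,
    // ... remaining fields, including _settings_subscription
}

// The global wrapper is renamed alongside the struct.
struct GlobalOllamaState(Entity<State>);

impl Global for GlobalOllamaState {}

impl State {
    // Callers now reach the shared Ollama state via State::global / State::set_global.
    pub fn global(cx: &App) -> Option<Entity<Self>> {
        cx.try_global::<GlobalOllamaState>()
            .map(|service| service.0.clone())
    }

    pub fn set_global(service: Entity<Self>, cx: &mut App) {
        cx.set_global(GlobalOllamaState(service));
    }
}
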
@@ -416,7 +416,7 @@ impl EditPredictionButton {
         cx.observe_global::<SettingsStore>(move |_, cx| cx.notify())
             .detach();

-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             cx.observe(&service, |_, _, cx| cx.notify()).detach();
         }

@@ -879,7 +879,7 @@ impl EditPredictionButton {
         let mut available_models = ollama_settings.available_models.clone();

         // Add discovered models from the global Ollama service
-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             let discovered_models = service.read(cx).available_models();
             for model in discovered_models {
                 // Convert from ollama::Model to language_models AvailableModel
@@ -1059,7 +1059,7 @@ impl EditPredictionButton {
             models.insert(0, selected_model);
         } else {
             // Model not in settings - check if it's a discovered model and add it
-            if let Some(service) = ollama::OllamaService::global(cx) {
+            if let Some(service) = ollama::State::global(cx) {
                 let discovered_models = service.read(cx).available_models();
                 if let Some(discovered_model) =
                     discovered_models.iter().find(|m| m.name == model_name)
@@ -1084,7 +1084,7 @@ impl EditPredictionButton {
     }

     fn refresh_ollama_models(cx: &mut App) {
-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             service.update(cx, |service, cx| {
                 service.refresh_models(cx);
             });
@@ -1282,7 +1282,7 @@ mod tests {
     use clock::FakeSystemClock;
     use gpui::TestAppContext;
     use http_client;
-    use ollama::{OllamaService, fake::FakeHttpClient};
+    use ollama::{State, fake::FakeHttpClient};
     use settings::SettingsStore;
     use std::sync::Arc;

@@ -1358,7 +1358,7 @@ mod tests {

         // Create and set global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1367,7 +1367,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Wait for model discovery
@@ -1375,7 +1375,7 @@ mod tests {

         // Verify models are accessible through the service
         cx.update(|cx| {
-            if let Some(service) = OllamaService::global(cx) {
+            if let Some(service) = State::global(cx) {
                 let discovered_models = service.read(cx).available_models();
                 assert_eq!(discovered_models.len(), 2);

@@ -1432,7 +1432,7 @@ mod tests {

         // Create and set global service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client,
                 "http://localhost:11434".to_string(),
                 None,
@@ -1441,14 +1441,14 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();

         // Test that discovered models are accessible
         cx.update(|cx| {
-            if let Some(service) = OllamaService::global(cx) {
+            if let Some(service) = State::global(cx) {
                 let discovered_models = service.read(cx).available_models();
                 assert_eq!(discovered_models.len(), 1);
                 assert_eq!(discovered_models[0].name, "qwen2.5-coder:7b");
@@ -1468,7 +1468,7 @@ mod tests {
         fake_http_client.set_response("/api/tags", serde_json::json!({"models": []}).to_string());

         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1477,7 +1477,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();
@@ -1574,7 +1574,7 @@ mod tests {
         );

         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1583,7 +1583,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();
@@ -1680,7 +1680,7 @@ mod tests {

         // Create and set global service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1689,14 +1689,14 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();

         // Verify model is discovered by the service
         let discovered_model_exists = cx.update(|cx| {
-            if let Some(service) = OllamaService::global(cx) {
+            if let Some(service) = State::global(cx) {
                 let discovered_models = service.read(cx).available_models();
                 discovered_models
                     .iter()
@@ -1727,7 +1727,7 @@ mod tests {
         let mut available_models = ollama_settings.available_models.clone();

         // Add discovered models from the global Ollama service
-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             let discovered_models = service.read(cx).available_models();
             for model in discovered_models {
                 // Convert from ollama::Model to language_models AvailableModel
@@ -1793,7 +1793,7 @@ mod tests {

         // Create and set global service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1802,7 +1802,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();
@@ -1817,7 +1817,7 @@ mod tests {
         let mut available_models = ollama_settings.available_models.clone();

         // Add discovered models from the global Ollama service
-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             let discovered_models = service.read(cx).available_models();
             for model in discovered_models {
                 // Convert from ollama::Model to language_models AvailableModel
@@ -1844,7 +1844,7 @@ mod tests {

         // Verify that the switch_ollama_model function can find the discovered model
         // by checking it exists in the service
-        if let Some(service) = ollama::OllamaService::global(cx) {
+        if let Some(service) = ollama::State::global(cx) {
             let discovered_models = service.read(cx).available_models();
             let found_model = discovered_models
                 .iter()
@@ -1891,7 +1891,7 @@ mod tests {

         // Create and set global service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1900,14 +1900,14 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         cx.background_executor.run_until_parked();

         // Verify model is discovered by service
         let discovered = cx.update(|cx| {
-            if let Some(service) = ollama::OllamaService::global(cx) {
+            if let Some(service) = ollama::State::global(cx) {
                 let models = service.read(cx).available_models();
                 models.iter().any(|m| m.name == "test-model:latest")
             } else {
@@ -1925,7 +1925,7 @@ mod tests {
         // We test this by verifying the function doesn't panic and can access the service
         cx.update(|cx| {
             // Verify the service is accessible within the function context
-            if let Some(service) = ollama::OllamaService::global(cx) {
+            if let Some(service) = ollama::State::global(cx) {
                 let discovered_models = service.read(cx).available_models();
                 let target_model = discovered_models
                     .iter()

@@ -57,10 +57,6 @@ pub struct AvailableModel {
     pub supports_thinking: Option<bool>,
 }

-// TODO
-// - Add API key authentication support. OllamaCompletionProvider already supports it
-// - Decide whether / how to integrate the new OllamaService into here, there seems to be
-// some overlap with State here.
 pub struct OllamaLanguageModelProvider {
     http_client: Arc<dyn HttpClient>,
     state: gpui::Entity<State>,

@@ -38,7 +38,7 @@ impl SettingsModel {
 }

 // Global Ollama service for managing models across all providers
-pub struct OllamaService {
+pub struct State {
     http_client: Arc<dyn HttpClient>,
     api_url: String,
     api_key: Option<String>,
@@ -47,7 +47,7 @@ pub struct OllamaService {
     _settings_subscription: Subscription,
 }

-impl OllamaService {
+impl State {
     pub fn new(
         http_client: Arc<dyn HttpClient>,
         api_url: String,
@@ -56,7 +56,7 @@ impl OllamaService {
     ) -> Entity<Self> {
         cx.new(|cx| {
             let subscription = cx.observe_global::<SettingsStore>({
-                move |this: &mut OllamaService, cx| {
+                move |this: &mut State, cx| {
                     this.restart_fetch_models_task(cx);
                 }
             });
@@ -77,12 +77,12 @@ impl OllamaService {
     }

     pub fn global(cx: &App) -> Option<Entity<Self>> {
-        cx.try_global::<GlobalOllamaService>()
+        cx.try_global::<GlobalOllamaState>()
             .map(|service| service.0.clone())
     }

     pub fn set_global(service: Entity<Self>, cx: &mut App) {
-        cx.set_global(GlobalOllamaService(service));
+        cx.set_global(GlobalOllamaState(service));
     }

     pub fn available_models(&self) -> &[Model] {
@@ -194,9 +194,9 @@ impl OllamaService {
     }
 }

-struct GlobalOllamaService(Entity<OllamaService>);
+struct GlobalOllamaState(Entity<State>);

-impl Global for GlobalOllamaService {}
+impl Global for GlobalOllamaState {}

 // TODO refactor to OllamaEditPredictionProvider
 pub struct OllamaCompletionProvider {
@@ -212,13 +212,13 @@ pub struct OllamaCompletionProvider {
 impl OllamaCompletionProvider {
     pub fn new(model: String, api_key: Option<String>, cx: &mut Context<Self>) -> Self {
         // Update the global service with the API key if one is provided
-        if let Some(service) = OllamaService::global(cx) {
+        if let Some(service) = State::global(cx) {
             service.update(cx, |service, cx| {
                 service.set_api_key(api_key.clone(), cx);
             });
         }

-        let subscription = if let Some(service) = OllamaService::global(cx) {
+        let subscription = if let Some(service) = State::global(cx) {
             Some(cx.observe(&service, |_this, _service, cx| {
                 cx.notify();
             }))
@@ -238,7 +238,7 @@ impl OllamaCompletionProvider {
     }

     pub fn available_models(&self, cx: &App) -> Vec<Model> {
-        if let Some(service) = OllamaService::global(cx) {
+        if let Some(service) = State::global(cx) {
             service.read(cx).available_models().to_vec()
         } else {
             Vec::new()
@@ -246,7 +246,7 @@ impl OllamaCompletionProvider {
     }

     pub fn refresh_models(&self, cx: &mut App) {
-        if let Some(service) = OllamaService::global(cx) {
+        if let Some(service) = State::global(cx) {
             service.update(cx, |service, cx| {
                 service.refresh_models(cx);
             });
@@ -323,7 +323,7 @@ impl EditPredictionProvider for OllamaCompletionProvider {
         cx: &mut Context<Self>,
     ) {
         // Get API settings from the global Ollama service or fallback
-        let (http_client, api_url) = if let Some(service) = OllamaService::global(cx) {
+        let (http_client, api_url) = if let Some(service) = State::global(cx) {
             let service_ref = service.read(cx);
             (service_ref.http_client.clone(), service_ref.api_url.clone())
         } else {
@@ -588,7 +588,7 @@ mod tests {

         // Create global Ollama service for testing
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -598,7 +598,7 @@ mod tests {

         // Set it as global
         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create completion provider
@@ -631,7 +631,7 @@ mod tests {

         // Create global Ollama service that will fail
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -640,7 +640,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create completion provider
@@ -670,7 +670,7 @@ mod tests {

         // Create global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -679,7 +679,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         let provider = cx.update(|cx| {
@@ -751,7 +751,7 @@ mod tests {

         // Create global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -760,7 +760,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create provider
@@ -824,7 +824,7 @@ mod tests {

         // Create global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -833,7 +833,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create provider
@@ -876,7 +876,7 @@ mod tests {

         // Create global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -885,7 +885,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create provider
@@ -957,7 +957,7 @@ mod tests {

         // Create global Ollama service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -966,7 +966,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Create provider
@@ -1071,7 +1071,7 @@ mod tests {

         // Create service
         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1187,7 +1187,7 @@ mod tests {
         );

         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 Some("test-api-key".to_string()),
@@ -1196,7 +1196,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Wait for model fetching to complete
@@ -1227,7 +1227,7 @@ mod tests {
         );

         let service = cx.update(|cx| {
-            OllamaService::new(
+            State::new(
                 fake_http_client.clone(),
                 "http://localhost:11434".to_string(),
                 None,
@@ -1236,7 +1236,7 @@ mod tests {
         });

         cx.update(|cx| {
-            OllamaService::set_global(service.clone(), cx);
+            State::set_global(service.clone(), cx);
         });

         // Clear initial requests

@@ -6,7 +6,7 @@ use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity};

 use language::language_settings::{EditPredictionProvider, all_language_settings};
 use language_models::AllLanguageModelSettings;
-use ollama::{OllamaCompletionProvider, OllamaService, SettingsModel};
+use ollama::{OllamaCompletionProvider, SettingsModel, State};
 use settings::{Settings as _, SettingsStore};
 use std::{cell::RefCell, rc::Rc, sync::Arc};
 use supermaven::{Supermaven, SupermavenCompletionProvider};
@@ -34,13 +34,13 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
         (api_url, settings_models)
     };

-    let ollama_service = OllamaService::new(client.http_client(), api_url, None, cx);
+    let ollama_service = State::new(client.http_client(), api_url, None, cx);

     ollama_service.update(cx, |service, cx| {
         service.set_settings_models(settings_models, cx);
     });

-    OllamaService::set_global(ollama_service, cx);
+    State::set_global(ollama_service, cx);

     let editors: Rc<RefCell<HashMap<WeakEntity<Editor>, AnyWindowHandle>>> = Rc::default();
     cx.observe_new({
@@ -122,7 +122,7 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
     } else if provider == EditPredictionProvider::Ollama {
         // Update global Ollama service when settings change
         let settings = &AllLanguageModelSettings::get_global(cx).ollama;
-        if let Some(service) = OllamaService::global(cx) {
+        if let Some(service) = State::global(cx) {
             let settings_models: Vec<SettingsModel> = settings
                 .available_models
                 .iter()
@@ -287,7 +287,7 @@ fn assign_edit_prediction_provider(
     // Get model from settings or use discovered models
     let model = if let Some(first_model) = settings.available_models.first() {
         Some(first_model.name.clone())
-    } else if let Some(service) = OllamaService::global(cx) {
+    } else if let Some(service) = State::global(cx) {
         // Use first discovered model
         service
             .read(cx)