First stab at the prediction menu

This commit is contained in:
Oliver Azevedo Barnes 2025-06-30 23:09:42 -03:00
parent 7f8dc940f7
commit ae6ff310c8
No known key found for this signature in database
2 changed files with 122 additions and 12 deletions

View file

@ -20,9 +20,12 @@ editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
gpui.workspace = true
http_client.workspace = true
indoc.workspace = true
inline_completion.workspace = true
language.workspace = true
language_models.workspace = true
ollama.workspace = true
paths.workspace = true
regex.workspace = true
settings.workspace = true
@ -36,10 +39,14 @@ zed_llm_client.workspace = true
zeta.workspace = true
[dev-dependencies]
clock.workspace = true
client = { workspace = true, features = ["test-support"] }
copilot = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
futures.workspace = true
http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language_model = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
serde_json.workspace = true

View file

@ -13,11 +13,13 @@ use gpui::{
    Focusable, IntoElement, ParentElement, Render, Subscription, WeakEntity, actions, div,
    pulsating_between,
};
use indoc::indoc;
use language::{
    EditPredictionsMode, File, Language,
    language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
};
use language_models::AllLanguageModelSettings;
use regex::Regex;
use settings::{Settings, SettingsStore, update_settings_file};
use std::{
@ -845,18 +847,54 @@ impl InlineCompletionButton {
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Entity<ContextMenu> {
let fs = self.fs.clone(); ContextMenu::build(window, cx, |menu, window, cx| {
ContextMenu::build(window, cx, |menu, _window, _cx| { let settings = AllLanguageModelSettings::get_global(cx);
menu.entry("Toggle Ollama Completions", None, { let ollama_settings = &settings.ollama;
let fs = fs.clone();
move |_window, cx| { // Check if we have available models (indicates connection)
toggle_inline_completions_globally(fs.clone(), cx); let is_connected = !ollama_settings.available_models.is_empty();
} let connection_status = if is_connected {
}) "Connected"
.entry("Ollama Settings...", None, |_window, cx| { } else {
// TODO: Open Ollama-specific settings "Disconnected"
cx.open_url("http://localhost:11434"); };
}) let api_url = ollama_settings.api_url.clone();
let menu =
menu.header("Ollama Status")
.entry(connection_status, None, |_window, _cx| {
// Status display only
});
let menu = if !ollama_settings.available_models.is_empty() {
let current_model = ollama_settings
.available_models
.first()
.map(|m| m.display_name.as_ref().unwrap_or(&m.name).clone())
.unwrap_or_else(|| "No model selected".to_string());
menu.separator().header("Current Model").entry(
current_model,
None,
|_window, _cx| {
// TODO: Open model selection dialog
},
)
} else {
menu
};
// Use the common language settings menu
let menu = self.build_language_settings_menu(menu, window, cx);
// Separator and Ollama-specific actions
menu.separator()
.entry("Open Ollama Web UI", None, move |_window, cx| {
cx.open_url(&api_url);
})
.entry("Download More Models", None, |_window, cx| {
cx.open_url("https://ollama.com/library");
})
        })
    }
@ -1054,3 +1092,68 @@ fn toggle_edit_prediction_mode(fs: Arc<dyn Fs>, mode: EditPredictionsMode, cx: &
    });
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use client::{Client, UserStore};
    use clock::FakeSystemClock;
    use fs::{FakeFs, Fs};
    use gpui::TestAppContext;
    use http_client::FakeHttpClient;
    use language_model;
    use language_models::AllLanguageModelSettings;
    use settings::SettingsStore;
    use std::sync::Arc;

    /// Smoke test: an `InlineCompletionButton` can be constructed while the
    /// Ollama provider settings are registered, and both reading the button
    /// entity and accessing the Ollama settings slice complete without
    /// panicking.
    #[gpui::test]
    async fn test_ollama_context_menu_functionality(cx: &mut TestAppContext) {
        let fake_fs: Arc<dyn Fs> = FakeFs::new(cx.executor());

        cx.update(|cx| {
            // The global settings store must be installed before any
            // provider settings are registered or read.
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Minimal client/user-store scaffolding backed entirely by fakes
            // (fixed clock, HTTP client that always answers 404).
            let system_clock = Arc::new(FakeSystemClock::new());
            let fake_http = FakeHttpClient::with_404_response();
            let test_client = Client::new(system_clock, fake_http.clone(), cx);
            let users = cx.new(|cx| UserStore::new(test_client.clone(), cx));

            let menu_handle = PopoverMenuHandle::default();
            let button = cx.new(|cx| {
                InlineCompletionButton::new(fake_fs.clone(), users, menu_handle, cx)
            });

            // A non-zero entity id confirms the entity was actually created.
            assert_ne!(button.entity_id().as_u64(), 0);

            // Accessing the Ollama settings slice must not panic.
            let model_settings = AllLanguageModelSettings::get_global(cx);
            let _ollama = &model_settings.ollama;

            // Reading the entity exercises the same access path the
            // context-menu builder uses, indirectly covering the shared
            // language-settings menu plumbing.
            button.read(cx);
        });
    }

    /// Verifies the default Ollama settings are present and sensible once
    /// the settings globals are registered.
    #[gpui::test]
    async fn test_ollama_settings_access(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            let model_settings = AllLanguageModelSettings::get_global(cx);
            let ollama = &model_settings.ollama;

            // Defaults: a local endpoint and no discovered models yet.
            assert!(ollama.api_url.contains("localhost"));
            assert!(ollama.available_models.is_empty());
        });
    }
}