Remove current inline completion button tests, as they aren't very useful
This commit is contained in:
parent
2e45d30741
commit
eaae0df019
1 changed file with 0 additions and 412 deletions
|
@ -1196,415 +1196,3 @@ fn toggle_edit_prediction_mode(fs: Arc<dyn Fs>, mode: EditPredictionsMode, cx: &
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use client::{Client, UserStore};
    use clock::FakeSystemClock;
    use fs::{FakeFs, Fs};
    use gpui::TestAppContext;
    use http_client::FakeHttpClient;
    use language_model;
    use language_models::AllLanguageModelSettings;
    use settings::SettingsStore;
    use std::sync::Arc;

    #[gpui::test]
    async fn test_ollama_context_menu_functionality(cx: &mut TestAppContext) {
        // A fake filesystem suffices for button construction; no real I/O happens.
        let fs: Arc<dyn Fs> = FakeFs::new(cx.executor());

        cx.update(|cx| {
            // Install the globals the button depends on.
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Minimal client stack, backed entirely by fakes.
            let clock = Arc::new(FakeSystemClock::new());
            let http = FakeHttpClient::with_404_response();
            let client = Client::new(clock, http.clone(), cx);
            let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
            let popover_menu_handle = PopoverMenuHandle::default();

            let button = cx.new(|cx| {
                InlineCompletionButton::new(fs.clone(), user_store, popover_menu_handle, cx)
            });

            // Construction succeeded and the entity is registered.
            assert!(button.entity_id().as_u64() > 0);

            // Accessing the Ollama settings must not panic.
            let settings = AllLanguageModelSettings::get_global(cx);
            let ollama_settings = &settings.ollama;

            // With no models configured, the derived connection status is
            // "disconnected" — the expected state in the test environment.
            let is_connected = !ollama_settings.available_models.is_empty();
            assert!(!is_connected);

            // The default API URL points at a local server.
            assert!(ollama_settings.api_url.contains("localhost"));

            // Reading the button indirectly exercises the shared
            // build_language_settings_menu plumbing without rendering UI.
            button.read(cx);
        });
    }

    #[gpui::test]
    async fn test_ollama_settings_access(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // The default Ollama settings are reachable and well-formed.
            let settings = AllLanguageModelSettings::get_global(cx);
            let ollama_settings = &settings.ollama;

            assert!(ollama_settings.api_url.contains("localhost"));
            assert!(ollama_settings.available_models.is_empty());
        });
    }

    #[gpui::test]
    async fn test_ollama_menu_structure(cx: &mut TestAppContext) {
        let fs: Arc<dyn Fs> = FakeFs::new(cx.executor());

        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            let clock = Arc::new(FakeSystemClock::new());
            let http = FakeHttpClient::with_404_response();
            let client = Client::new(clock, http.clone(), cx);
            let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
            let popover_menu_handle = PopoverMenuHandle::default();

            let button = cx.new(|cx| {
                InlineCompletionButton::new(fs.clone(), user_store, popover_menu_handle, cx)
            });

            // Inspect the settings the menu would be built from.
            let settings = AllLanguageModelSettings::get_global(cx);
            let ollama_settings = &settings.ollama;

            // No models are available in the test environment, so the menu
            // should report a disconnected state.
            let is_connected = !ollama_settings.available_models.is_empty();
            assert!(!is_connected);

            // The API URL is accessible and carries its default value.
            assert!(ollama_settings.api_url.contains("localhost"));

            // The button can be created and read without panicking.
            button.read(cx);
        });
    }

    #[gpui::test]
    async fn test_ollama_default_settings(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Verify the default Ollama configuration shape.
            let settings = AllLanguageModelSettings::get_global(cx);
            let ollama_settings = &settings.ollama;

            assert!(ollama_settings.api_url.contains("localhost"));
            assert!(ollama_settings.available_models.is_empty());

            // Menu construction is deliberately not exercised here, to keep
            // the test free of UI complexity; these defaults are what it
            // would consume.
        });
    }

    #[gpui::test]
    async fn test_ollama_api_url_navigation_regex(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Fixture mirroring a user settings file with only Ollama configured.
            let test_settings_content = r#"{
    "language_models": {
        "ollama": {
            "api_url": "http://localhost:11434",
            "available_models": []
        }
    }
}"#;

            // Lazily walk "language_models" -> "ollama" -> "api_url" and
            // capture the URL value.
            let api_url_pattern =
                r#""language_models"\s*:\s*\{[\s\S]*?"ollama"\s*:\s*\{[\s\S]*?"api_url"\s*:\s*"([^"]*)"#;
            let regex = regex::Regex::new(api_url_pattern).unwrap();

            match regex.captures(test_settings_content) {
                Some(captures) => {
                    let value_capture = captures.get(1).unwrap();
                    assert_eq!(value_capture.as_str(), "http://localhost:11434");

                    // The capture span is non-empty and lies inside the document.
                    assert!(value_capture.start() > 0);
                    assert!(value_capture.end() > value_capture.start());
                }
                None => panic!("Regex should match the test content"),
            }

            // With another provider present, the pattern must still select
            // Ollama's URL and not the other provider's.
            let test_settings_with_openai = r#"{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1",
            "available_models": []
        },
        "ollama": {
            "api_url": "http://localhost:11434",
            "available_models": []
        }
    }
}"#;

            match regex.captures(test_settings_with_openai) {
                Some(captures) => {
                    let value_capture = captures.get(1).unwrap();
                    assert_eq!(value_capture.as_str(), "http://localhost:11434");
                    // Make sure we did not latch onto OpenAI's URL.
                    assert_ne!(value_capture.as_str(), "https://api.openai.com/v1");
                }
                None => {
                    panic!("Regex should match Ollama's API URL even when other providers are present")
                }
            }
        });
    }

    #[gpui::test]
    async fn test_ollama_settings_navigation_with_other_providers(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Scenario: OpenAI is configured but Ollama is not — the pattern
            // must not match OpenAI's api_url.
            let settings_with_openai_only = r#"{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1",
            "available_models": []
        }
    }
}"#;

            let api_url_pattern = r#""language_models"\s*:\s*\{[\s\S]*?"ollama"\s*:\s*\{[\s\S]*?"api_url"\s*:\s*"([^"]*)"#;
            let regex = regex::Regex::new(api_url_pattern).unwrap();

            assert!(regex.captures(settings_with_openai_only).is_none());

            // Scenario: both providers configured — only Ollama's URL matches.
            let settings_with_both = r#"{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1",
            "available_models": []
        },
        "ollama": {
            "api_url": "http://localhost:11434",
            "available_models": []
        }
    }
}"#;

            match regex.captures(settings_with_both) {
                Some(captures) => {
                    let value_capture = captures.get(1).unwrap();
                    assert_eq!(value_capture.as_str(), "http://localhost:11434");
                }
                None => panic!("Should match Ollama's API URL when it exists"),
            }
        });
    }

    #[gpui::test]
    async fn test_ollama_configure_api_url_menu_visibility(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Empty content: no settings file means no Ollama section.
            assert!(!InlineCompletionButton::ollama_settings_exist_in_content(
                ""
            ));

            // A settings file with providers but no "ollama" section is
            // correctly reported as missing.
            let settings_without_ollama = r#"{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1"
        }
    }
}"#;

            assert!(!InlineCompletionButton::ollama_settings_exist_in_content(
                settings_without_ollama
            ));

            // Once an "ollama" section is present, it is detected.
            let settings_with_ollama = r#"{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1"
        },
        "ollama": {
            "api_url": "http://localhost:11434"
        }
    }
}"#;

            assert!(InlineCompletionButton::ollama_settings_exist_in_content(
                settings_with_ollama
            ));
        });
    }

    #[gpui::test]
    async fn test_ollama_no_models_configured(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // By default no models are configured.
            let settings = AllLanguageModelSettings::get_global(cx);
            let ollama_settings = &settings.ollama;
            assert!(ollama_settings.available_models.is_empty());

            // An empty model list is the condition that surfaces the
            // "Configure Models" menu entry.
            let should_show_configure = ollama_settings.available_models.is_empty();
            assert!(should_show_configure);
        });
    }

    #[gpui::test]
    async fn test_ollama_eager_subtle_options_visibility(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            AllLanguageSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // The eager/subtle options are gated on
            // matches!(provider, EditPredictionProvider::Zed | EditPredictionProvider::Ollama).
            // Ollama should satisfy that gate…
            assert!(matches!(
                EditPredictionProvider::Ollama,
                EditPredictionProvider::Zed | EditPredictionProvider::Ollama
            ));

            // …while other providers should not.
            assert!(!matches!(
                EditPredictionProvider::Copilot,
                EditPredictionProvider::Zed | EditPredictionProvider::Ollama
            ));

            assert!(!matches!(
                EditPredictionProvider::Supermaven,
                EditPredictionProvider::Zed | EditPredictionProvider::Ollama
            ));
        });
    }

    #[gpui::test]
    async fn test_ollama_edit_predictions_mode_setting(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            AllLanguageSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // The edit-predictions mode defaults to Eager.
            let settings = AllLanguageSettings::get_global(cx);
            assert_eq!(settings.edit_predictions_mode(), EditPredictionsMode::Eager);

            // In Eager mode, previews do not require a modifier key.
            let preview_requires_modifier_eager =
                settings.edit_predictions_mode() == EditPredictionsMode::Subtle;
            assert!(!preview_requires_modifier_eager);

            // Sanity-check the comparison used when the mode is Subtle.
            let subtle_mode_check = EditPredictionsMode::Subtle == EditPredictionsMode::Subtle;
            assert!(subtle_mode_check);
        });
    }

    #[gpui::test]
    async fn test_ollama_model_switching_logic(cx: &mut TestAppContext) {
        let _fs: Arc<dyn Fs> = FakeFs::new(cx.executor());

        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            AllLanguageModelSettings::register(cx);
            language_model::LanguageModelRegistry::test(cx);

            // Exercise the data the model-switching code operates on,
            // without actually modifying any settings.
            let test_models = [
                language_models::provider::ollama::AvailableModel {
                    name: "llama3.2:3b".to_string(),
                    display_name: Some("Llama 3.2 3B".to_string()),
                    max_tokens: 4096,
                    keep_alive: None,
                    supports_tools: Some(false),
                    supports_images: Some(false),
                    supports_thinking: Some(false),
                },
                language_models::provider::ollama::AvailableModel {
                    name: "codellama:7b".to_string(),
                    display_name: Some("CodeLlama 7B".to_string()),
                    max_tokens: 8192,
                    keep_alive: None,
                    supports_tools: Some(true),
                    supports_images: Some(false),
                    supports_thinking: Some(false),
                },
            ];

            // The model data is accessible as expected.
            assert_eq!(test_models.len(), 2);
            assert_eq!(test_models[0].name, "llama3.2:3b");
            assert_eq!(test_models[1].name, "codellama:7b");

            // Display-name resolution falls back to the raw name when unset.
            let first_model_display = test_models[0]
                .display_name
                .as_ref()
                .unwrap_or(&test_models[0].name);
            assert_eq!(first_model_display, "Llama 3.2 3B");

            // switch_ollama_model itself is not invoked here, to avoid
            // filesystem writes in tests.
        });
    }
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue