crates/assistant/src/assistant.rs
Kyle Kelley d77e553466
File context for assistant panel (#9712)
Introducing the Active File Context portion of #9705. When someone is in
the assistant panel, it now includes the active file as a system message
on send, while showing them a nice little display of that context in the lower right:


![image](https://github.com/zed-industries/zed/assets/836375/9abc56e0-e8f2-45ee-9e7e-b83b28b483ea)
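
For reference, here's a rough sketch of the idea (the names and wiring below are illustrative, not the exact implementation): fold the active file into the request as a system message, using the `Role` and `LanguageModelRequestMessage` types from this crate and fencing the buffer text with the language's code fence name.

```rust
// Illustrative sketch only: `path`, `code_fence_name`, and `buffer_text` stand in
// for whatever the embedded scope actually tracks about the active editor item.
fn active_file_context_message(
    path: &str,
    code_fence_name: &str,
    buffer_text: &str,
) -> LanguageModelRequestMessage {
    LanguageModelRequestMessage {
        role: Role::System,
        content: format!(
            "The user is currently looking at `{path}`:\n\n```{code_fence_name}\n{buffer_text}\n```"
        ),
    }
}
```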

For this iteration, I'd love to see the following before we land this:

* [x] Toggleable context - the user should be able to disable sending this
context
* [x] Show nothing if there is no context coming in
* [x] Update token count as we change items
* [x] Listen for a more finely scoped event for when the active item
changes
* [x] Create a global for pulling a file icon based on a path. Zed's
main way to do this is nested within the project panel's `FileAssociation`s.
* [x] Get the code fence name for a Language for the system prompt
* [x] Update the token count when the buffer content changes

I'm seeing this PR as the foundation for providing other kinds of
context -- diagnostic summaries, failing tests, additional files, etc.

Release Notes:

- Added file context to assistant chat panel
([#9705](https://github.com/zed-industries/zed/issues/9705)).

<img width="1558" alt="image"
src="https://github.com/zed-industries/zed/assets/836375/86eb7e50-3e28-4754-9c3f-895be588616d">

---------

Co-authored-by: Conrad Irwin <conrad@zed.dev>
Co-authored-by: Nathan <nathan@zed.dev>
Co-authored-by: Antonio Scandurra <me@as-cii.com>
Co-authored-by: Mikayla Maki <mikayla@zed.dev>
2024-03-29 13:55:01 -07:00


pub mod assistant_panel;
pub mod assistant_settings;
mod codegen;
mod completion_provider;
mod prompts;
mod saved_conversation;
mod streaming_diff;
mod embedded_scope;

pub use assistant_panel::AssistantPanel;
use assistant_settings::{AssistantSettings, OpenAiModel, ZedDotDevModel};
use chrono::{DateTime, Local};
use client::{proto, Client};
use command_palette_hooks::CommandPaletteFilter;
pub(crate) use completion_provider::*;
use gpui::{actions, AppContext, BorrowAppContext, Global, SharedString};
pub(crate) use saved_conversation::*;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{
    fmt::{self, Display},
    sync::Arc,
};

actions!(
    assistant,
    [
        NewConversation,
        Assist,
        Split,
        CycleMessageRole,
        QuoteSelection,
        ToggleFocus,
        ResetKey,
        InlineAssist,
        ToggleIncludeConversation,
    ]
);

#[derive(
    Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
struct MessageId(usize);
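
/// The author of a message in a conversation: the end user, the assistant, or a
/// system-level prompt (for example, the active file context added by this change).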
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}

impl Role {
    pub fn cycle(&mut self) {
        *self = match self {
            Role::User => Role::Assistant,
            Role::Assistant => Role::System,
            Role::System => Role::User,
        }
    }
}

impl Display for Role {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Role::User => write!(f, "user"),
            Role::Assistant => write!(f, "assistant"),
            Role::System => write!(f, "system"),
        }
    }
}
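
/// The language model backing a conversation: either a model proxied through
/// zed.dev or one called directly via the OpenAI API.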
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub enum LanguageModel {
    ZedDotDev(ZedDotDevModel),
    OpenAi(OpenAiModel),
}

impl Default for LanguageModel {
    fn default() -> Self {
        LanguageModel::ZedDotDev(ZedDotDevModel::default())
    }
}

impl LanguageModel {
    pub fn telemetry_id(&self) -> String {
        match self {
            LanguageModel::OpenAi(model) => format!("openai/{}", model.id()),
            LanguageModel::ZedDotDev(model) => format!("zed.dev/{}", model.id()),
        }
    }

    pub fn display_name(&self) -> String {
        match self {
            LanguageModel::OpenAi(model) => format!("openai/{}", model.display_name()),
            LanguageModel::ZedDotDev(model) => format!("zed.dev/{}", model.display_name()),
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            LanguageModel::OpenAi(model) => model.max_token_count(),
            LanguageModel::ZedDotDev(model) => model.max_token_count(),
        }
    }

    pub fn id(&self) -> &str {
        match self {
            LanguageModel::OpenAi(model) => model.id(),
            LanguageModel::ZedDotDev(model) => model.id(),
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelRequestMessage {
    pub role: Role,
    pub content: String,
}

impl LanguageModelRequestMessage {
    pub fn to_proto(&self) -> proto::LanguageModelRequestMessage {
        proto::LanguageModelRequestMessage {
            role: match self.role {
                Role::User => proto::LanguageModelRole::LanguageModelUser,
                Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant,
                Role::System => proto::LanguageModelRole::LanguageModelSystem,
            } as i32,
            content: self.content.clone(),
        }
    }
}
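
/// A completion request for a language model: which model to use, the ordered
/// conversation messages, stop sequences, and sampling temperature. `to_proto`
/// converts it to `proto::CompleteWithLanguageModel` for requests sent over RPC.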
#[derive(Debug, Default, Serialize)]
pub struct LanguageModelRequest {
    pub model: LanguageModel,
    pub messages: Vec<LanguageModelRequestMessage>,
    pub stop: Vec<String>,
    pub temperature: f32,
}

impl LanguageModelRequest {
    pub fn to_proto(&self) -> proto::CompleteWithLanguageModel {
        proto::CompleteWithLanguageModel {
            model: self.model.id().to_string(),
            messages: self.messages.iter().map(|m| m.to_proto()).collect(),
            stop: self.stop.clone(),
            temperature: self.temperature,
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
    pub role: Option<Role>,
    pub content: Option<String>,
}

#[derive(Deserialize, Debug)]
pub struct LanguageModelUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Deserialize, Debug)]
pub struct LanguageModelChoiceDelta {
    pub index: u32,
    pub delta: LanguageModelResponseMessage,
    pub finish_reason: Option<String>,
}
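
/// Bookkeeping for a single conversation message: its role, when it was sent,
/// and whether its completion is still pending, done, or errored.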
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
    role: Role,
    sent_at: DateTime<Local>,
    status: MessageStatus,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
enum MessageStatus {
    Pending,
    Done,
    Error(SharedString),
}

/// The state pertaining to the Assistant.
#[derive(Default)]
struct Assistant {
    /// Whether the Assistant is enabled.
    enabled: bool,
}

impl Global for Assistant {}

impl Assistant {
    const NAMESPACE: &'static str = "assistant";

    fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) {
        if self.enabled == enabled {
            return;
        }

        self.enabled = enabled;

        if !enabled {
            CommandPaletteFilter::update_global(cx, |filter, _cx| {
                filter.hide_namespace(Self::NAMESPACE);
            });

            return;
        }

        CommandPaletteFilter::update_global(cx, |filter, _cx| {
            filter.show_namespace(Self::NAMESPACE);
        });
    }
}
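
/// Initializes the assistant: registers its settings, the completion provider,
/// and the assistant panel, and keeps the `assistant` command palette namespace
/// hidden or shown to match the `enabled` setting as it changes.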
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
    cx.set_global(Assistant::default());
    AssistantSettings::register(cx);
    completion_provider::init(client, cx);
    assistant_panel::init(cx);

    CommandPaletteFilter::update_global(cx, |filter, _cx| {
        filter.hide_namespace(Assistant::NAMESPACE);
    });
    cx.update_global(|assistant: &mut Assistant, cx: &mut AppContext| {
        let settings = AssistantSettings::get_global(cx);
        assistant.set_enabled(settings.enabled, cx);
    });
    cx.observe_global::<SettingsStore>(|cx| {
        cx.update_global(|assistant: &mut Assistant, cx: &mut AppContext| {
            let settings = AssistantSettings::get_global(cx);
            assistant.set_enabled(settings.enabled, cx);
        });
    })
    .detach();
}

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}
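
// Illustrative only (not part of the original file): a minimal test sketch showing
// how `Role::cycle` and the `Display` impl above behave.
#[cfg(test)]
mod role_tests {
    use super::*;

    #[test]
    fn cycle_walks_user_assistant_system() {
        let mut role = Role::User;
        role.cycle();
        assert_eq!(role, Role::Assistant);
        role.cycle();
        assert_eq!(role, Role::System);
        role.cycle();
        assert_eq!(role, Role::User);
    }

    #[test]
    fn display_matches_serde_lowercase_names() {
        assert_eq!(Role::User.to_string(), "user");
        assert_eq!(Role::Assistant.to_string(), "assistant");
        assert_eq!(Role::System.to_string(), "system");
    }
}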