Introduce recent files ambient context for assistant (#11791)
<img width="1637" alt="image" src="https://github.com/zed-industries/zed/assets/482957/5aaec657-3499-42c9-9528-c83728f2a7a1">

Release Notes:

- Added a new ambient context feature that shows the model up to three buffers (along with their diagnostics) that the user interacted with most recently.

---------

Co-authored-by: Nathan Sobo <nathan@zed.dev>
This commit is contained in:

parent e4c95b25bf
commit a13a92fbbf

9 changed files with 522 additions and 411 deletions
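For orientation before the diff, here is a minimal, dependency-free sketch of the idea described in the release notes: keep at most the three most recently used buffers and fold them (with their diagnostics) into a single system-prompt string. The names `RecentBuffer`, `RecentBuffersContext`, and `render_system_message` are assumptions for illustration and are not taken from the patch itself.

```rust
// Hypothetical sketch of a "recent buffers" ambient context; not Zed's API.
const MAX_RECENT_BUFFERS: usize = 3;

struct RecentBuffer {
    path: String,
    language: String,
    text: String,
    diagnostics: Vec<String>,
}

#[derive(Default)]
struct RecentBuffersContext {
    buffers: Vec<RecentBuffer>, // most recently used first
}

impl RecentBuffersContext {
    /// Record an interaction, keeping only the three most recent buffers.
    fn push(&mut self, buffer: RecentBuffer) {
        self.buffers.retain(|b| b.path != buffer.path);
        self.buffers.insert(0, buffer);
        self.buffers.truncate(MAX_RECENT_BUFFERS);
    }

    /// Render the ambient context as one markdown-flavored system message.
    fn render_system_message(&self) -> Option<String> {
        if self.buffers.is_empty() {
            return None;
        }
        let fence = "`".repeat(3); // markdown code fence, built here to keep the sketch readable
        let mut message = String::from("Recently viewed files:\n\n");
        for buffer in &self.buffers {
            message.push_str(&format!(
                "`{}`:\n\n{fence}{}\n{}\n{fence}\n",
                buffer.path, buffer.language, buffer.text
            ));
            for diagnostic in &buffer.diagnostics {
                message.push_str(&format!("- diagnostic: {diagnostic}\n"));
            }
            message.push('\n');
        }
        Some(message)
    }
}

fn main() {
    let mut context = RecentBuffersContext::default();
    context.push(RecentBuffer {
        path: "src/lib.rs".into(),
        language: "rust".into(),
        text: "pub fn add(a: i32, b: i32) -> i32 { a + b }".into(),
        diagnostics: vec!["warning: function `add` is never used".into()],
    });
    println!("{}", context.render_system_message().unwrap());
}
```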
@@ -6,11 +6,8 @@ mod prompts;
 mod saved_conversation;
 mod streaming_diff;
-
-mod embedded_scope;
-
 pub use assistant_panel::AssistantPanel;
 use assistant_settings::{AssistantSettings, OpenAiModel, ZedDotDevModel};
 use chrono::{DateTime, Local};
 use client::{proto, Client};
 use command_palette_hooks::CommandPaletteFilter;
 pub(crate) use completion_provider::*;
@@ -26,7 +23,6 @@ use std::{
actions!(
    assistant,
    [
        NewConversation,
        Assist,
        Split,
        CycleMessageRole,
@@ -35,6 +31,7 @@ actions!(
        ResetKey,
        InlineAssist,
        ToggleIncludeConversation,
        ToggleHistory,
    ]
);
@@ -93,8 +90,8 @@ impl LanguageModel {
     pub fn display_name(&self) -> String {
         match self {
-            LanguageModel::OpenAi(model) => format!("openai/{}", model.display_name()),
-            LanguageModel::ZedDotDev(model) => format!("zed.dev/{}", model.display_name()),
+            LanguageModel::OpenAi(model) => model.display_name().into(),
+            LanguageModel::ZedDotDev(model) => model.display_name().into(),
         }
     }
@@ -178,7 +175,6 @@ pub struct LanguageModelChoiceDelta {
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
    role: Role,
    sent_at: DateTime<Local>,
    status: MessageStatus,
}
File diff suppressed because it is too large.
@@ -1,91 +0,0 @@
use editor::MultiBuffer;
use gpui::{AppContext, Model, ModelContext, Subscription};

use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role};

#[derive(Default)]
pub struct EmbeddedScope {
    active_buffer: Option<Model<MultiBuffer>>,
    active_buffer_enabled: bool,
    active_buffer_subscription: Option<Subscription>,
}

impl EmbeddedScope {
    pub fn new() -> Self {
        Self {
            active_buffer: None,
            active_buffer_enabled: true,
            active_buffer_subscription: None,
        }
    }

    pub fn set_active_buffer(
        &mut self,
        buffer: Option<Model<MultiBuffer>>,
        cx: &mut ModelContext<Conversation>,
    ) {
        self.active_buffer_subscription.take();

        if let Some(active_buffer) = buffer.clone() {
            self.active_buffer_subscription =
                Some(cx.subscribe(&active_buffer, |conversation, _, e, cx| {
                    if let multi_buffer::Event::Edited { .. } = e {
                        conversation.count_remaining_tokens(cx)
                    }
                }));
        }

        self.active_buffer = buffer;
    }

    pub fn active_buffer(&self) -> Option<&Model<MultiBuffer>> {
        self.active_buffer.as_ref()
    }

    pub fn active_buffer_enabled(&self) -> bool {
        self.active_buffer_enabled
    }

    pub fn set_active_buffer_enabled(&mut self, enabled: bool) {
        self.active_buffer_enabled = enabled;
    }

    /// Provide a message for the language model based on the active buffer.
    pub fn message(&self, cx: &AppContext) -> Option<LanguageModelRequestMessage> {
        if !self.active_buffer_enabled {
            return None;
        }

        let active_buffer = self.active_buffer.as_ref()?;
        let buffer = active_buffer.read(cx);

        if let Some(singleton) = buffer.as_singleton() {
            let singleton = singleton.read(cx);

            let filename = singleton
                .file()
                .map(|file| file.path().to_string_lossy())
                .unwrap_or("Untitled".into());

            let text = singleton.text();

            let language = singleton
                .language()
                .map(|l| {
                    let name = l.code_fence_block_name();
                    name.to_string()
                })
                .unwrap_or_default();

            let markdown =
                format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n");

            return Some(LanguageModelRequestMessage {
                role: Role::System,
                content: markdown,
            });
        }

        None
    }
}
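As a usage note for the deleted `EmbeddedScope::message()` above: its output is a system message intended to be placed ahead of the conversation when building a request. The standalone sketch below mirrors that pattern with plain stand-in types; `RequestMessage` and `build_request` are assumptions for illustration, not Zed's actual request types.

```rust
// Minimal stand-ins for the request types; not Zed's actual definitions.
#[derive(Debug)]
enum Role {
    System,
    User,
}

#[derive(Debug)]
struct RequestMessage {
    role: Role,
    content: String,
}

/// Prepend the optional ambient-context system message (as produced by
/// something like `EmbeddedScope::message`) to the conversation messages.
fn build_request(
    ambient_context: Option<RequestMessage>,
    conversation: Vec<RequestMessage>,
) -> Vec<RequestMessage> {
    let mut messages = Vec::new();
    if let Some(context) = ambient_context {
        messages.push(context);
    }
    messages.extend(conversation);
    messages
}

fn main() {
    let ambient = Some(RequestMessage {
        role: Role::System,
        content: "User's active file `src/main.rs`: ...".into(),
    });
    let conversation = vec![RequestMessage {
        role: Role::User,
        content: "Explain this function.".into(),
    }];
    let request = build_request(ambient, conversation);
    println!("{request:#?}");
}
```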