Remove 2 suffix for assistant, journal

Co-authored-by: Mikayla <mikayla@zed.dev>

parent 789ce8dd75
commit ecbd115542

23 changed files with 909 additions and 6713 deletions
@@ -12,12 +12,28 @@ use chrono::{DateTime, Local};
 use collections::HashMap;
 use fs::Fs;
 use futures::StreamExt;
-use gpui::AppContext;
+use gpui::{actions, AppContext, SharedString};
 use regex::Regex;
 use serde::{Deserialize, Serialize};
 use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
 use util::paths::CONVERSATIONS_DIR;

+actions!(
+    assistant,
+    [
+        NewConversation,
+        Assist,
+        Split,
+        CycleMessageRole,
+        QuoteSelection,
+        ToggleFocus,
+        ResetKey,
+        InlineAssist,
+        ToggleIncludeConversation,
+        ToggleRetrieveContext,
+    ]
+);
+
 #[derive(
     Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
 )]
@@ -34,7 +50,7 @@ struct MessageMetadata {
 enum MessageStatus {
     Pending,
     Done,
-    Error(Arc<str>),
+    Error(SharedString),
 }

 #[derive(Serialize, Deserialize)]
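Besides pulling the `actions!` declarations into this file, the hunks above swap the `MessageStatus::Error` payload from `Arc<str>` to gpui's `SharedString`. A minimal sketch of what a call site might look like after the change (the surrounding error value is an assumption, not part of this diff):

    // Hypothetical call site: store a completion error in the new variant.
    // SharedString is gpui's cheaply clonable string type; a From<String>
    // conversion is assumed to be available.
    let status = MessageStatus::Error(SharedString::from(error.to_string()));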
File diff suppressed because it is too large.
@@ -1,7 +1,8 @@
 use anyhow;
+use gpui::Pixels;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::Setting;
+use settings::Settings;

 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
 pub enum OpenAIModel {
@@ -51,8 +52,8 @@ pub enum AssistantDockPosition {
 pub struct AssistantSettings {
     pub button: bool,
     pub dock: AssistantDockPosition,
-    pub default_width: f32,
-    pub default_height: f32,
+    pub default_width: Pixels,
+    pub default_height: Pixels,
     pub default_open_ai_model: OpenAIModel,
 }

@@ -65,7 +66,7 @@ pub struct AssistantSettingsContent {
     pub default_open_ai_model: Option<OpenAIModel>,
 }

-impl Setting for AssistantSettings {
+impl Settings for AssistantSettings {
     const KEY: Option<&'static str> = Some("assistant");

     type FileContent = AssistantSettingsContent;
@@ -73,7 +74,7 @@ impl Setting for AssistantSettings {
     fn load(
         default_value: &Self::FileContent,
         user_values: &[&Self::FileContent],
-        _: &gpui::AppContext,
+        _: &mut gpui::AppContext,
     ) -> anyhow::Result<Self> {
         Self::load_via_json_merge(default_value, user_values)
     }
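Taken together, the settings hunks rename the trait from `Setting` to `Settings`, give `load` a `&mut gpui::AppContext`, and switch the default size fields from raw `f32` to typed `Pixels`. A sketch of how the impl might read with all four hunks applied (`OpenAIModel`, `AssistantDockPosition`, and `AssistantSettingsContent` are defined elsewhere in this file):

    use gpui::Pixels;
    use settings::Settings;

    pub struct AssistantSettings {
        pub button: bool,
        pub dock: AssistantDockPosition,
        pub default_width: Pixels,
        pub default_height: Pixels,
        pub default_open_ai_model: OpenAIModel,
    }

    impl Settings for AssistantSettings {
        const KEY: Option<&'static str> = Some("assistant");

        type FileContent = AssistantSettingsContent;

        fn load(
            default_value: &Self::FileContent,
            user_values: &[&Self::FileContent],
            _: &mut gpui::AppContext, // was &gpui::AppContext
        ) -> anyhow::Result<Self> {
            Self::load_via_json_merge(default_value, user_values)
        }
    }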
@@ -3,7 +3,7 @@ use ai::completion::{CompletionProvider, CompletionRequest};
 use anyhow::Result;
 use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
 use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
-use gpui::{Entity, ModelContext, ModelHandle, Task};
+use gpui::{EventEmitter, Model, ModelContext, Task};
 use language::{Rope, TransactionId};
 use multi_buffer;
 use std::{cmp, future, ops::Range, sync::Arc};
@@ -21,7 +21,7 @@ pub enum CodegenKind {

 pub struct Codegen {
     provider: Arc<dyn CompletionProvider>,
-    buffer: ModelHandle<MultiBuffer>,
+    buffer: Model<MultiBuffer>,
     snapshot: MultiBufferSnapshot,
     kind: CodegenKind,
     last_equal_ranges: Vec<Range<Anchor>>,
@@ -32,13 +32,11 @@ pub struct Codegen {
     _subscription: gpui::Subscription,
 }

-impl Entity for Codegen {
-    type Event = Event;
-}
+impl EventEmitter<Event> for Codegen {}

 impl Codegen {
     pub fn new(
-        buffer: ModelHandle<MultiBuffer>,
+        buffer: Model<MultiBuffer>,
         kind: CodegenKind,
         provider: Arc<dyn CompletionProvider>,
         cx: &mut ModelContext<Self>,
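The old `impl Entity for Codegen { type Event = Event; }` becomes the marker impl `EventEmitter<Event> for Codegen {}`, and handles go from `ModelHandle<T>` to `Model<T>`. A sketch of how a caller might build a Codegen and observe its events under the new API (variable names, the `range`/`provider` values, and the exact subscribe callback shape are assumptions; this assumes a plain `&mut AppContext`):

    // Sketch: `buffer` is a Model<MultiBuffer>, `provider` an Arc<dyn CompletionProvider>.
    let codegen = cx.new_model(|cx| {
        Codegen::new(buffer.clone(), CodegenKind::Transform { range }, provider, cx)
    });

    // Events emitted with cx.emit(Event::Finished) reach subscribers because of
    // the EventEmitter<Event> impl above.
    let _subscription = cx.subscribe(&codegen, |_codegen, _event: &Event, _cx| {
        // react to Event::Finished here
    });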
@@ -60,7 +58,7 @@ impl Codegen {

     fn handle_buffer_event(
         &mut self,
-        _buffer: ModelHandle<MultiBuffer>,
+        _buffer: Model<MultiBuffer>,
         event: &multi_buffer::Event,
         cx: &mut ModelContext<Self>,
     ) {
@@ -111,13 +109,13 @@ impl Codegen {
             .unwrap_or_else(|| snapshot.indent_size_for_line(selection_start.row));

         let response = self.provider.complete(prompt);
-        self.generation = cx.spawn_weak(|this, mut cx| {
+        self.generation = cx.spawn(|this, mut cx| {
             async move {
                 let generate = async {
                     let mut edit_start = range.start.to_offset(&snapshot);

                     let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
-                    let diff = cx.background().spawn(async move {
+                    let diff = cx.background_executor().spawn(async move {
                         let chunks = strip_invalid_spans_from_codeblock(response.await?);
                         futures::pin_mut!(chunks);
                         let mut diff = StreamingDiff::new(selected_text.to_string());
@@ -183,12 +181,6 @@ impl Codegen {
                     });

                     while let Some(hunks) = hunks_rx.next().await {
-                        let this = if let Some(this) = this.upgrade(&cx) {
-                            this
-                        } else {
-                            break;
-                        };
-
                         this.update(&mut cx, |this, cx| {
                             this.last_equal_ranges.clear();

@@ -245,7 +237,7 @@ impl Codegen {
                             }

                             cx.notify();
-                        });
+                        })?;
                     }

                     diff.await?;
@@ -253,17 +245,16 @@ impl Codegen {
                 };

                 let result = generate.await;
-                if let Some(this) = this.upgrade(&cx) {
-                    this.update(&mut cx, |this, cx| {
-                        this.last_equal_ranges.clear();
-                        this.idle = true;
-                        if let Err(error) = result {
-                            this.error = Some(error);
-                        }
-                        cx.emit(Event::Finished);
-                        cx.notify();
-                    });
-                }
+                this.update(&mut cx, |this, cx| {
+                    this.last_equal_ranges.clear();
+                    this.idle = true;
+                    if let Err(error) = result {
+                        this.error = Some(error);
+                    }
+                    cx.emit(Event::Finished);
+                    cx.notify();
+                })
+                .ok();
             }
         });
         self.error.take();
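Under the new async model, `cx.spawn` hands the closure a weak handle directly (there is no separate `spawn_weak`), and `update` on that handle returns a `Result` that is `Err` once the model has been dropped, which is why the explicit `upgrade` checks above disappear in favor of `?` and `.ok()`. A minimal sketch of the pattern outside this diff (the field and the computed value are placeholders):

    // Sketch, assuming `cx: &mut ModelContext<MyModel>`; `this` is a weak handle.
    cx.spawn(|this, mut cx| async move {
        // Off-thread work via the renamed executor handle.
        let value = cx
            .background_executor()
            .spawn(async move { 40 + 2 })
            .await;

        // `update` fails gracefully if the model was released in the meantime,
        // replacing the old `if let Some(this) = this.upgrade(&cx)` dance.
        this.update(&mut cx, |this, cx| {
            this.result = Some(value); // `result` is a hypothetical field
            cx.notify();
        })
        .ok();
    })
    .detach();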
@@ -372,7 +363,7 @@ mod tests {
     use super::*;
     use ai::test::FakeCompletionProvider;
     use futures::stream::{self};
-    use gpui::{executor::Deterministic, TestAppContext};
+    use gpui::{Context, TestAppContext};
     use indoc::indoc;
     use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
     use rand::prelude::*;
@@ -391,12 +382,8 @@ mod tests {
     }

     #[gpui::test(iterations = 10)]
-    async fn test_transform_autoindent(
-        cx: &mut TestAppContext,
-        mut rng: StdRng,
-        deterministic: Arc<Deterministic>,
-    ) {
-        cx.set_global(cx.read(SettingsStore::test));
+    async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
+        cx.set_global(cx.update(SettingsStore::test));
         cx.update(language_settings::init);

         let text = indoc! {"
@@ -408,14 +395,14 @@ mod tests {
             }
         "};
         let buffer =
-            cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
-        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+            cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+        let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
         let range = buffer.read_with(cx, |buffer, cx| {
             let snapshot = buffer.snapshot(cx);
             snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
         });
         let provider = Arc::new(FakeCompletionProvider::new());
-        let codegen = cx.add_model(|cx| {
+        let codegen = cx.new_model(|cx| {
             Codegen::new(
                 buffer.clone(),
                 CodegenKind::Transform { range },
@@ -442,10 +429,10 @@ mod tests {
             println!("CHUNK: {:?}", &chunk);
             provider.send_completion(chunk);
             new_text = suffix;
-            deterministic.run_until_parked();
+            cx.background_executor.run_until_parked();
         }
         provider.finish_completion();
-        deterministic.run_until_parked();
+        cx.background_executor.run_until_parked();

         assert_eq!(
             buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
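The test harness changes mirror the runtime ones: models are built with `cx.new_model` instead of `cx.add_model`, and the separate `Arc<Deterministic>` executor parameter is gone, with parking driven through the `background_executor` that `TestAppContext` already carries. A condensed sketch of the updated shape of such a test, assembled from the hunks above (buffer contents and the code under test are elided):

    #[gpui::test]
    async fn test_example(cx: &mut TestAppContext) {
        // SettingsStore::test now runs inside cx.update instead of cx.read.
        cx.set_global(cx.update(SettingsStore::test));
        cx.update(language_settings::init);

        // cx.add_model becomes cx.new_model.
        let buffer = cx.new_model(|_cx| Buffer::new(0, 0, "fn main() {}"));
        let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));

        // The Arc<Deterministic> test parameter is gone; queued work is driven
        // to completion through the executor handle TestAppContext owns.
        cx.background_executor.run_until_parked();

        assert_eq!(
            buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
            "fn main() {}"
        );
    }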
@@ -464,9 +451,8 @@ mod tests {
     async fn test_autoindent_when_generating_past_indentation(
         cx: &mut TestAppContext,
         mut rng: StdRng,
-        deterministic: Arc<Deterministic>,
     ) {
-        cx.set_global(cx.read(SettingsStore::test));
+        cx.set_global(cx.update(SettingsStore::test));
         cx.update(language_settings::init);

         let text = indoc! {"
@@ -475,14 +461,14 @@ mod tests {
             }
         "};
         let buffer =
-            cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
-        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+            cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+        let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
         let position = buffer.read_with(cx, |buffer, cx| {
             let snapshot = buffer.snapshot(cx);
             snapshot.anchor_before(Point::new(1, 6))
         });
         let provider = Arc::new(FakeCompletionProvider::new());
-        let codegen = cx.add_model(|cx| {
+        let codegen = cx.new_model(|cx| {
             Codegen::new(
                 buffer.clone(),
                 CodegenKind::Generate { position },
@@ -508,10 +494,10 @@ mod tests {
             let (chunk, suffix) = new_text.split_at(len);
             provider.send_completion(chunk);
             new_text = suffix;
-            deterministic.run_until_parked();
+            cx.background_executor.run_until_parked();
         }
         provider.finish_completion();
-        deterministic.run_until_parked();
+        cx.background_executor.run_until_parked();

         assert_eq!(
             buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@@ -530,9 +516,8 @@ mod tests {
     async fn test_autoindent_when_generating_before_indentation(
         cx: &mut TestAppContext,
         mut rng: StdRng,
-        deterministic: Arc<Deterministic>,
     ) {
-        cx.set_global(cx.read(SettingsStore::test));
+        cx.set_global(cx.update(SettingsStore::test));
         cx.update(language_settings::init);

         let text = concat!(
@@ -541,14 +526,14 @@ mod tests {
             "}\n" //
         );
         let buffer =
-            cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
-        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+            cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+        let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
         let position = buffer.read_with(cx, |buffer, cx| {
             let snapshot = buffer.snapshot(cx);
             snapshot.anchor_before(Point::new(1, 2))
         });
         let provider = Arc::new(FakeCompletionProvider::new());
-        let codegen = cx.add_model(|cx| {
+        let codegen = cx.new_model(|cx| {
             Codegen::new(
                 buffer.clone(),
                 CodegenKind::Generate { position },
@@ -575,10 +560,10 @@ mod tests {
             println!("{:?}", &chunk);
             provider.send_completion(chunk);
             new_text = suffix;
-            deterministic.run_until_parked();
+            cx.background_executor.run_until_parked();
         }
         provider.finish_completion();
-        deterministic.run_until_parked();
+        cx.background_executor.run_until_parked();

         assert_eq!(
             buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@@ -176,7 +176,7 @@ pub(crate) mod tests {
     use super::*;
     use std::sync::Arc;

-    use gpui::AppContext;
+    use gpui::{AppContext, Context};
     use indoc::indoc;
     use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
     use settings::SettingsStore;
@@ -227,7 +227,8 @@ pub(crate) mod tests {

     #[gpui::test]
     fn test_outline_for_prompt(cx: &mut AppContext) {
-        cx.set_global(SettingsStore::test(cx));
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
         language_settings::init(cx);
         let text = indoc! {"
             struct X {
@@ -253,7 +254,7 @@ pub(crate) mod tests {
             }
         "};
         let buffer =
-            cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
+            cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
         let snapshot = buffer.read(cx).snapshot();

         assert_eq!(
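In test_outline_for_prompt, the one-liner `cx.set_global(SettingsStore::test(cx))` becomes two statements, presumably because both `SettingsStore::test` and `set_global` now need access to `cx` and the nested call would otherwise borrow it twice. The same two-step shape applies anywhere a global is constructed from the context it is installed into:

    // Sketch of the pattern as it appears in the hunk above.
    let settings_store = SettingsStore::test(cx); // finish building the store first
    cx.set_global(settings_store);                // then hand it to the context
    language_settings::init(cx);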