assistant2: Rework @mentions (#26983)

https://github.com/user-attachments/assets/167f753f-2775-4d31-bfef-55565e61e4bc

Release Notes:

- N/A
Bennet Bo Fenner 2025-03-24 19:32:52 +01:00 committed by GitHub
parent 4a5f89aded
commit 699369995b
18 changed files with 1637 additions and 485 deletions

File diff suppressed because it is too large.


@@ -81,77 +81,80 @@ impl FetchContextPickerDelegate {
             url: String::new(),
         }
     }
+}
 
-    async fn build_message(http_client: Arc<HttpClientWithUrl>, url: String) -> Result<String> {
+pub(crate) async fn fetch_url_content(
+    http_client: Arc<HttpClientWithUrl>,
+    url: String,
+) -> Result<String> {
     let url = if !url.starts_with("https://") && !url.starts_with("http://") {
         format!("https://{url}")
     } else {
         url
     };
 
     let mut response = http_client.get(&url, AsyncBody::default(), true).await?;
 
     let mut body = Vec::new();
     response
         .body_mut()
         .read_to_end(&mut body)
         .await
         .context("error reading response body")?;
 
     if response.status().is_client_error() {
         let text = String::from_utf8_lossy(body.as_slice());
         bail!(
             "status error {}, response: {text:?}",
             response.status().as_u16()
         );
     }
 
     let Some(content_type) = response.headers().get("content-type") else {
         bail!("missing Content-Type header");
     };
 
     let content_type = content_type
         .to_str()
         .context("invalid Content-Type header")?;
 
     let content_type = match content_type {
         "text/html" => ContentType::Html,
         "text/plain" => ContentType::Plaintext,
         "application/json" => ContentType::Json,
         _ => ContentType::Html,
     };
 
     match content_type {
         ContentType::Html => {
             let mut handlers: Vec<TagHandler> = vec![
                 Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
                 Rc::new(RefCell::new(markdown::ParagraphHandler)),
                 Rc::new(RefCell::new(markdown::HeadingHandler)),
                 Rc::new(RefCell::new(markdown::ListHandler)),
                 Rc::new(RefCell::new(markdown::TableHandler::new())),
                 Rc::new(RefCell::new(markdown::StyledTextHandler)),
             ];
             if url.contains("wikipedia.org") {
                 use html_to_markdown::structure::wikipedia;
 
                 handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
                 handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
                 handlers.push(Rc::new(
                     RefCell::new(wikipedia::WikipediaCodeHandler::new()),
                 ));
             } else {
                 handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
             }
 
             convert_html_to_markdown(&body[..], &mut handlers)
         }
         ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
         ContentType::Json => {
             let json: serde_json::Value = serde_json::from_slice(&body)?;
 
             Ok(format!(
                 "```json\n{}\n```",
                 serde_json::to_string_pretty(&json)?
             ))
         }
     }
 }
@@ -208,7 +211,7 @@ impl PickerDelegate for FetchContextPickerDelegate {
         let confirm_behavior = self.confirm_behavior;
         cx.spawn_in(window, async move |this, cx| {
             let text = cx
-                .background_spawn(Self::build_message(http_client, url.clone()))
+                .background_spawn(fetch_url_content(http_client, url.clone()))
                 .await?;
 
             this.update_in(cx, |this, window, cx| {

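Because the helper is now a free `pub(crate)` function instead of a method on `FetchContextPickerDelegate`, other code in the crate (such as the reworked @mention completion) can reuse the same fetch-and-convert logic directly. A minimal sketch of such a caller, mirroring the call site above; the `http_client` handle and the surrounding async `cx` are assumptions, not part of this diff:

```rust
// Hypothetical caller elsewhere in the crate (not part of this commit).
// Assumes `http_client: Arc<HttpClientWithUrl>` and a `cx` that provides
// `background_spawn`, exactly as in the picker code above.
let markdown: String = cx
    .background_spawn(fetch_url_content(http_client.clone(), "zed.dev".to_string()))
    .await?;
// `markdown` holds the page converted to Markdown, plain text, or a fenced
// JSON block, depending on the response's Content-Type.
```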

@@ -1,25 +1,15 @@
-use std::collections::BTreeSet;
-use std::ops::Range;
 use std::path::Path;
 use std::sync::atomic::AtomicBool;
 use std::sync::Arc;
 
-use editor::actions::FoldAt;
-use editor::display_map::{Crease, FoldId};
-use editor::scroll::Autoscroll;
-use editor::{Anchor, AnchorRangeExt, Editor, FoldPlaceholder, ToPoint};
-use file_icons::FileIcons;
 use fuzzy::PathMatch;
 use gpui::{
-    AnyElement, App, AppContext, DismissEvent, Empty, Entity, FocusHandle, Focusable, Stateful,
-    Task, WeakEntity,
+    App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity,
 };
-use multi_buffer::{MultiBufferPoint, MultiBufferRow};
 use picker::{Picker, PickerDelegate};
 use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
-use rope::Point;
-use text::SelectionGoal;
-use ui::{prelude::*, ButtonLike, Disclosure, ListItem, TintColor, Tooltip};
+use ui::{prelude::*, ListItem, Tooltip};
 use util::ResultExt as _;
 use workspace::{notifications::NotifyResultExt, Workspace};
@@ -34,7 +24,6 @@ impl FileContextPicker {
     pub fn new(
         context_picker: WeakEntity<ContextPicker>,
         workspace: WeakEntity<Workspace>,
-        editor: WeakEntity<Editor>,
         context_store: WeakEntity<ContextStore>,
         confirm_behavior: ConfirmBehavior,
         window: &mut Window,
@@ -43,7 +32,6 @@ impl FileContextPicker {
         let delegate = FileContextPickerDelegate::new(
             context_picker,
             workspace,
-            editor,
             context_store,
             confirm_behavior,
         );
@@ -68,7 +56,6 @@ impl Render for FileContextPicker {
 pub struct FileContextPickerDelegate {
     context_picker: WeakEntity<ContextPicker>,
     workspace: WeakEntity<Workspace>,
-    editor: WeakEntity<Editor>,
     context_store: WeakEntity<ContextStore>,
     confirm_behavior: ConfirmBehavior,
     matches: Vec<PathMatch>,
@@ -79,95 +66,18 @@ impl FileContextPickerDelegate {
     pub fn new(
         context_picker: WeakEntity<ContextPicker>,
         workspace: WeakEntity<Workspace>,
-        editor: WeakEntity<Editor>,
         context_store: WeakEntity<ContextStore>,
         confirm_behavior: ConfirmBehavior,
     ) -> Self {
         Self {
             context_picker,
             workspace,
-            editor,
             context_store,
             confirm_behavior,
             matches: Vec::new(),
             selected_index: 0,
         }
     }
-
-    fn search(
-        &mut self,
-        query: String,
-        cancellation_flag: Arc<AtomicBool>,
-        workspace: &Entity<Workspace>,
-        cx: &mut Context<Picker<Self>>,
-    ) -> Task<Vec<PathMatch>> {
-        if query.is_empty() {
-            let workspace = workspace.read(cx);
-            let project = workspace.project().read(cx);
-            let recent_matches = workspace
-                .recent_navigation_history(Some(10), cx)
-                .into_iter()
-                .filter_map(|(project_path, _)| {
-                    let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
-                    Some(PathMatch {
-                        score: 0.,
-                        positions: Vec::new(),
-                        worktree_id: project_path.worktree_id.to_usize(),
-                        path: project_path.path,
-                        path_prefix: worktree.read(cx).root_name().into(),
-                        distance_to_relative_ancestor: 0,
-                        is_dir: false,
-                    })
-                });
-
-            let file_matches = project.worktrees(cx).flat_map(|worktree| {
-                let worktree = worktree.read(cx);
-                let path_prefix: Arc<str> = worktree.root_name().into();
-                worktree.entries(false, 0).map(move |entry| PathMatch {
-                    score: 0.,
-                    positions: Vec::new(),
-                    worktree_id: worktree.id().to_usize(),
-                    path: entry.path.clone(),
-                    path_prefix: path_prefix.clone(),
-                    distance_to_relative_ancestor: 0,
-                    is_dir: entry.is_dir(),
-                })
-            });
-
-            Task::ready(recent_matches.chain(file_matches).collect())
-        } else {
-            let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
-
-            let candidate_sets = worktrees
-                .into_iter()
-                .map(|worktree| {
-                    let worktree = worktree.read(cx);
-                    PathMatchCandidateSet {
-                        snapshot: worktree.snapshot(),
-                        include_ignored: worktree
-                            .root_entry()
-                            .map_or(false, |entry| entry.is_ignored),
-                        include_root_name: true,
-                        candidates: project::Candidates::Entries,
-                    }
-                })
-                .collect::<Vec<_>>();
-
-            let executor = cx.background_executor().clone();
-            cx.foreground_executor().spawn(async move {
-                fuzzy::match_path_sets(
-                    candidate_sets.as_slice(),
-                    query.as_str(),
-                    None,
-                    false,
-                    100,
-                    &cancellation_flag,
-                    executor,
-                )
-                .await
-            })
-        }
-    }
 }
 
 impl PickerDelegate for FileContextPickerDelegate {
@@ -204,7 +114,7 @@ impl PickerDelegate for FileContextPickerDelegate {
             return Task::ready(());
         };
 
-        let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
+        let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx);
 
         cx.spawn_in(window, async move |this, cx| {
             // TODO: This should be probably be run in the background.
@@ -222,14 +132,6 @@ impl PickerDelegate for FileContextPickerDelegate {
             return;
         };
 
-        let file_name = mat
-            .path
-            .file_name()
-            .map(|os_str| os_str.to_string_lossy().into_owned())
-            .unwrap_or(mat.path_prefix.to_string());
-
-        let full_path = mat.path.display().to_string();
-
         let project_path = ProjectPath {
             worktree_id: WorktreeId::from_usize(mat.worktree_id),
             path: mat.path.clone(),
@@ -237,106 +139,13 @@ impl PickerDelegate for FileContextPickerDelegate {
         let is_directory = mat.is_dir;
 
-        let Some(editor_entity) = self.editor.upgrade() else {
-            return;
-        };
-
-        editor_entity.update(cx, |editor, cx| {
-            editor.transact(window, cx, |editor, window, cx| {
-                // Move empty selections left by 1 column to select the `@`s, so they get overwritten when we insert.
-                {
-                    let mut selections = editor.selections.all::<MultiBufferPoint>(cx);
-
-                    for selection in selections.iter_mut() {
-                        if selection.is_empty() {
-                            let old_head = selection.head();
-                            let new_head = MultiBufferPoint::new(
-                                old_head.row,
-                                old_head.column.saturating_sub(1),
-                            );
-                            selection.set_head(new_head, SelectionGoal::None);
-                        }
-                    }
-
-                    editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
-                        s.select(selections)
-                    });
-                }
-
-                let start_anchors = {
-                    let snapshot = editor.buffer().read(cx).snapshot(cx);
-                    editor
-                        .selections
-                        .all::<Point>(cx)
-                        .into_iter()
-                        .map(|selection| snapshot.anchor_before(selection.start))
-                        .collect::<Vec<_>>()
-                };
-
-                editor.insert(&full_path, window, cx);
-
-                let end_anchors = {
-                    let snapshot = editor.buffer().read(cx).snapshot(cx);
-                    editor
-                        .selections
-                        .all::<Point>(cx)
-                        .into_iter()
-                        .map(|selection| snapshot.anchor_after(selection.end))
-                        .collect::<Vec<_>>()
-                };
-
-                editor.insert("\n", window, cx); // Needed to end the fold
-
-                let file_icon = if is_directory {
-                    FileIcons::get_folder_icon(false, cx)
-                } else {
-                    FileIcons::get_icon(&Path::new(&full_path), cx)
-                }
-                .unwrap_or_else(|| SharedString::new(""));
-
-                let placeholder = FoldPlaceholder {
-                    render: render_fold_icon_button(
-                        file_icon,
-                        file_name.into(),
-                        editor_entity.downgrade(),
-                    ),
-                    ..Default::default()
-                };
-
-                let render_trailer =
-                    move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
-
-                let buffer = editor.buffer().read(cx).snapshot(cx);
-                let mut rows_to_fold = BTreeSet::new();
-                let crease_iter = start_anchors
-                    .into_iter()
-                    .zip(end_anchors)
-                    .map(|(start, end)| {
-                        rows_to_fold.insert(MultiBufferRow(start.to_point(&buffer).row));
-                        Crease::inline(
-                            start..end,
-                            placeholder.clone(),
-                            fold_toggle("tool-use"),
-                            render_trailer,
-                        )
-                    });
-
-                editor.insert_creases(crease_iter, cx);
-
-                for buffer_row in rows_to_fold {
-                    editor.fold_at(&FoldAt { buffer_row }, window, cx);
-                }
-            });
-        });
-
         let Some(task) = self
             .context_store
             .update(cx, |context_store, cx| {
                 if is_directory {
-                    context_store.add_directory(project_path, cx)
+                    context_store.add_directory(project_path, true, cx)
                 } else {
-                    context_store.add_file_from_path(project_path, cx)
+                    context_store.add_file_from_path(project_path, true, cx)
                 }
             })
             .ok()
@@ -390,6 +199,80 @@ impl PickerDelegate for FileContextPickerDelegate {
     }
 }
 
+pub(crate) fn search_paths(
+    query: String,
+    cancellation_flag: Arc<AtomicBool>,
+    workspace: &Entity<Workspace>,
+    cx: &App,
+) -> Task<Vec<PathMatch>> {
+    if query.is_empty() {
+        let workspace = workspace.read(cx);
+        let project = workspace.project().read(cx);
+        let recent_matches = workspace
+            .recent_navigation_history(Some(10), cx)
+            .into_iter()
+            .filter_map(|(project_path, _)| {
+                let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
+                Some(PathMatch {
+                    score: 0.,
+                    positions: Vec::new(),
+                    worktree_id: project_path.worktree_id.to_usize(),
+                    path: project_path.path,
+                    path_prefix: worktree.read(cx).root_name().into(),
+                    distance_to_relative_ancestor: 0,
+                    is_dir: false,
+                })
+            });
+
+        let file_matches = project.worktrees(cx).flat_map(|worktree| {
+            let worktree = worktree.read(cx);
+            let path_prefix: Arc<str> = worktree.root_name().into();
+            worktree.entries(false, 0).map(move |entry| PathMatch {
+                score: 0.,
+                positions: Vec::new(),
+                worktree_id: worktree.id().to_usize(),
+                path: entry.path.clone(),
+                path_prefix: path_prefix.clone(),
+                distance_to_relative_ancestor: 0,
+                is_dir: entry.is_dir(),
+            })
+        });
+
+        Task::ready(recent_matches.chain(file_matches).collect())
+    } else {
+        let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
+
+        let candidate_sets = worktrees
+            .into_iter()
+            .map(|worktree| {
+                let worktree = worktree.read(cx);
+                PathMatchCandidateSet {
+                    snapshot: worktree.snapshot(),
+                    include_ignored: worktree
+                        .root_entry()
+                        .map_or(false, |entry| entry.is_ignored),
+                    include_root_name: true,
+                    candidates: project::Candidates::Entries,
+                }
+            })
+            .collect::<Vec<_>>();
+
+        let executor = cx.background_executor().clone();
+        cx.foreground_executor().spawn(async move {
+            fuzzy::match_path_sets(
+                candidate_sets.as_slice(),
+                query.as_str(),
+                None,
+                false,
+                100,
+                &cancellation_flag,
+                executor,
+            )
+            .await
+        })
+    }
+}
+
 pub fn render_file_context_entry(
     id: ElementId,
     path: &Path,
@@ -484,85 +367,3 @@ pub fn render_file_context_entry(
         }
     })
 }
-
-fn render_fold_icon_button(
-    icon: SharedString,
-    label: SharedString,
-    editor: WeakEntity<Editor>,
-) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
-    Arc::new(move |fold_id, fold_range, cx| {
-        let is_in_text_selection = editor.upgrade().is_some_and(|editor| {
-            editor.update(cx, |editor, cx| {
-                let snapshot = editor
-                    .buffer()
-                    .update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx));
-
-                let is_in_pending_selection = || {
-                    editor
-                        .selections
-                        .pending
-                        .as_ref()
-                        .is_some_and(|pending_selection| {
-                            pending_selection
-                                .selection
-                                .range()
-                                .includes(&fold_range, &snapshot)
-                        })
-                };
-
-                let mut is_in_complete_selection = || {
-                    editor
-                        .selections
-                        .disjoint_in_range::<usize>(fold_range.clone(), cx)
-                        .into_iter()
-                        .any(|selection| {
-                            // This is needed to cover a corner case, if we just check for an existing
-                            // selection in the fold range, having a cursor at the start of the fold
-                            // marks it as selected. Non-empty selections don't cause this.
-                            let length = selection.end - selection.start;
-                            length > 0
-                        })
-                };
-
-                is_in_pending_selection() || is_in_complete_selection()
-            })
-        });
-
-        ButtonLike::new(fold_id)
-            .style(ButtonStyle::Filled)
-            .selected_style(ButtonStyle::Tinted(TintColor::Accent))
-            .toggle_state(is_in_text_selection)
-            .child(
-                h_flex()
-                    .gap_1()
-                    .child(
-                        Icon::from_path(icon.clone())
-                            .size(IconSize::Small)
-                            .color(Color::Muted),
-                    )
-                    .child(
-                        Label::new(label.clone())
-                            .size(LabelSize::Small)
-                            .single_line(),
-                    ),
-            )
-            .into_any_element()
-    })
-}
-
-fn fold_toggle(
-    name: &'static str,
-) -> impl Fn(
-    MultiBufferRow,
-    bool,
-    Arc<dyn Fn(bool, &mut Window, &mut App) + Send + Sync>,
-    &mut Window,
-    &mut App,
-) -> AnyElement {
-    move |row, is_folded, fold, _window, _cx| {
-        Disclosure::new((name, row.0 as u64), !is_folded)
-            .toggle_state(is_folded)
-            .on_click(move |_e, window, cx| fold(!is_folded, window, cx))
-            .into_any_element()
-    }
-}

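`search_paths` is likewise extracted as a `pub(crate)` free function, so the path lookup that used to live on `FileContextPickerDelegate::search` can be driven from outside the file picker as well. A rough sketch of a caller, following the `update_matches` call site above; the `workspace`, `query`, and `cx` bindings are assumed to exist in the caller:

```rust
// Hypothetical caller (not part of this commit). Mirrors the picker's
// update_matches call: an empty query yields recent items plus worktree
// entries, otherwise a fuzzy search over visible worktrees runs in the
// background via the returned Task.
let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx);
// Later, inside an async context spawned from `cx`:
let matches: Vec<PathMatch> = search_task.await;
// ...turn `matches` into @mention completion entries...
```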

@@ -110,45 +110,11 @@ impl PickerDelegate for ThreadContextPickerDelegate {
         window: &mut Window,
         cx: &mut Context<Picker<Self>>,
     ) -> Task<()> {
-        let Ok(threads) = self.thread_store.update(cx, |this, _cx| {
-            this.threads()
-                .into_iter()
-                .map(|thread| ThreadContextEntry {
-                    id: thread.id,
-                    summary: thread.summary,
-                })
-                .collect::<Vec<_>>()
-        }) else {
+        let Some(threads) = self.thread_store.upgrade() else {
             return Task::ready(());
         };
 
-        let executor = cx.background_executor().clone();
-        let search_task = cx.background_spawn(async move {
-            if query.is_empty() {
-                threads
-            } else {
-                let candidates = threads
-                    .iter()
-                    .enumerate()
-                    .map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
-                    .collect::<Vec<_>>();
-                let matches = fuzzy::match_strings(
-                    &candidates,
-                    &query,
-                    false,
-                    100,
-                    &Default::default(),
-                    executor,
-                )
-                .await;
-
-                matches
-                    .into_iter()
-                    .map(|mat| threads[mat.candidate_id].clone())
-                    .collect()
-            }
-        });
+        let search_task = search_threads(query, threads, cx);
 
         cx.spawn_in(window, async move |this, cx| {
             let matches = search_task.await;
             this.update(cx, |this, cx| {
@@ -176,7 +142,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
         this.update_in(cx, |this, window, cx| {
             this.delegate
                 .context_store
-                .update(cx, |context_store, cx| context_store.add_thread(thread, cx))
+                .update(cx, |context_store, cx| {
+                    context_store.add_thread(thread, true, cx)
+                })
                 .ok();
 
             match this.delegate.confirm_behavior {
@@ -248,3 +216,46 @@ pub fn render_thread_context_entry(
         )
     })
 }
+
+pub(crate) fn search_threads(
+    query: String,
+    thread_store: Entity<ThreadStore>,
+    cx: &mut App,
+) -> Task<Vec<ThreadContextEntry>> {
+    let threads = thread_store.update(cx, |this, _cx| {
+        this.threads()
+            .into_iter()
+            .map(|thread| ThreadContextEntry {
+                id: thread.id,
+                summary: thread.summary,
+            })
+            .collect::<Vec<_>>()
+    });
+
+    let executor = cx.background_executor().clone();
+    cx.background_spawn(async move {
+        if query.is_empty() {
+            threads
+        } else {
+            let candidates = threads
+                .iter()
+                .enumerate()
+                .map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
+                .collect::<Vec<_>>();
+            let matches = fuzzy::match_strings(
+                &candidates,
+                &query,
+                false,
+                100,
+                &Default::default(),
+                executor,
+            )
+            .await;
+
+            matches
+                .into_iter()
+                .map(|mat| threads[mat.candidate_id].clone())
+                .collect()
+        }
+    })
+}
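The thread lookup follows the same pattern: `search_threads` takes the query plus a `ThreadStore` entity and returns a `Task` resolving to matching `ThreadContextEntry` values, so the @mention completion can reuse it outside the thread picker. A sketch of a caller, assuming a `thread_store: Entity<ThreadStore>` (for example, obtained by upgrading the delegate's `WeakEntity<ThreadStore>` as in `update_matches` above):

```rust
// Hypothetical caller (not part of this commit). `thread_store` is assumed
// to be an Entity<ThreadStore>; `query` and `cx` come from the caller.
let search_task = search_threads(query, thread_store.clone(), cx);
// Later, in an async context:
let thread_entries: Vec<ThreadContextEntry> = search_task.await;
// ...render `thread_entries` as @mention completion items...
```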