catch up with main

This commit is contained in:
KCaverly 2023-08-02 16:48:11 -04:00
commit a125e318fe
59 changed files with 2619 additions and 618 deletions

View file

@ -6,14 +6,23 @@ jobs:
discord_release: discord_release:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Get appropriate URL
id: get-appropriate-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
fi
echo "::set-output name=URL::$URL"
- name: Discord Webhook Action - name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0 uses: tsickert/discord-webhook@v5.3.0
if: ${{ ! github.event.release.prerelease }}
with: with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: | content: |
📣 Zed ${{ github.event.release.tag_name }} was just released! 📣 Zed ${{ github.event.release.tag_name }} was just released!
Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it. Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
${{ github.event.release.body }} ${{ github.event.release.body }}

5
.zed/settings.json Normal file
View file

@ -0,0 +1,5 @@
{
"JSON": {
"tab_size": 4
}
}

569
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -109,9 +109,9 @@ pretty_assertions = "1.3.0"
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" } tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
tree-sitter-c = "0.20.1" tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0" tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" } tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" } tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"} tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-embedded-template = "0.20.0" tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" } tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
@ -131,6 +131,7 @@ tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", r
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"} tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"} tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14" tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
[patch.crates-io] [patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" } tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }

View file

@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2 # syntax = docker/dockerfile:1.2
FROM rust:1.70-bullseye as builder FROM rust:1.71-bullseye as builder
WORKDIR app WORKDIR app
COPY . . COPY . .

View file

@ -1,159 +1,179 @@
{ {
"suffixes": { "suffixes": {
"aac": "audio", "aac": "audio",
"bash": "terminal", "accdb": "storage",
"bmp": "image", "bak": "backup",
"c": "code", "bash": "terminal",
"conf": "settings", "bash_aliases": "terminal",
"cpp": "code", "bash_logout": "terminal",
"cc": "code", "bash_profile": "terminal",
"css": "code", "bashrc": "terminal",
"doc": "document", "bmp": "image",
"docx": "document", "c": "code",
"eslintrc": "eslint", "cc": "code",
"eslintrc.js": "eslint", "conf": "settings",
"eslintrc.json": "eslint", "cpp": "code",
"flac": "audio", "css": "code",
"fish": "terminal", "csv": "storage",
"gitattributes": "vcs", "dat": "storage",
"gitignore": "vcs", "db": "storage",
"gitmodules": "vcs", "dbf": "storage",
"gif": "image", "dll": "storage",
"go": "code", "doc": "document",
"h": "code", "docx": "document",
"handlebars": "code", "eslintrc": "eslint",
"hbs": "template", "eslintrc.js": "eslint",
"htm": "template", "eslintrc.json": "eslint",
"html": "template", "fmp": "storage",
"svelte": "template", "fp7": "storage",
"hpp": "code", "flac": "audio",
"ico": "image", "fish": "terminal",
"ini": "settings", "frm": "storage",
"java": "code", "gdb": "storage",
"jpeg": "image", "gitattributes": "vcs",
"jpg": "image", "gitignore": "vcs",
"js": "code", "gitmodules": "vcs",
"json": "storage", "gif": "image",
"lock": "lock", "go": "code",
"log": "log", "h": "code",
"md": "document", "handlebars": "code",
"mdx": "document", "hbs": "template",
"mp3": "audio", "htm": "template",
"mp4": "video", "html": "template",
"ods": "document", "ib": "storage",
"odp": "document", "ico": "image",
"odt": "document", "ini": "settings",
"ogg": "video", "java": "code",
"pdf": "document", "jpeg": "image",
"php": "code", "jpg": "image",
"png": "image", "js": "code",
"ppt": "document", "json": "storage",
"pptx": "document", "ldf": "storage",
"prettierrc": "prettier", "lock": "lock",
"prettierignore": "prettier", "log": "log",
"ps1": "terminal", "mdb": "storage",
"psd": "image", "md": "document",
"py": "code", "mdf": "storage",
"rb": "code", "mdx": "document",
"rkt": "code", "mp3": "audio",
"rs": "rust", "mp4": "video",
"rtf": "document", "myd": "storage",
"scm": "code", "myi": "storage",
"sh": "terminal", "ods": "document",
"bashrc": "terminal", "odp": "document",
"bash_profile": "terminal", "odt": "document",
"bash_aliases": "terminal", "ogg": "video",
"bash_logout": "terminal", "pdb": "storage",
"profile": "terminal", "pdf": "document",
"zshrc": "terminal", "php": "code",
"zshenv": "terminal", "png": "image",
"zsh_profile": "terminal", "ppt": "document",
"zsh_aliases": "terminal", "pptx": "document",
"zsh_histfile": "terminal", "prettierignore": "prettier",
"zlogin": "terminal", "prettierrc": "prettier",
"sql": "code", "profile": "terminal",
"svg": "image", "ps1": "terminal",
"swift": "code", "psd": "image",
"tiff": "image", "py": "code",
"toml": "toml", "rb": "code",
"ts": "typescript", "rkt": "code",
"tsx": "code", "rs": "rust",
"txt": "document", "rtf": "document",
"wav": "audio", "sav": "storage",
"webm": "video", "scm": "code",
"xls": "document", "sh": "terminal",
"xlsx": "document", "sqlite": "storage",
"xml": "template", "sdf": "storage",
"yaml": "settings", "svelte": "template",
"yml": "settings", "svg": "image",
"zsh": "terminal" "swift": "code",
}, "ts": "typescript",
"types": { "tsx": "code",
"audio": { "tiff": "image",
"icon": "icons/file_icons/audio.svg" "toml": "toml",
"tsv": "storage",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
"zshenv": "terminal",
"zsh_histfile": "terminal",
"zsh_profile": "terminal",
"zshrc": "terminal"
}, },
"code": { "types": {
"icon": "icons/file_icons/code.svg" "audio": {
}, "icon": "icons/file_icons/audio.svg"
"collapsed_chevron": { },
"icon": "icons/file_icons/chevron_right.svg" "code": {
}, "icon": "icons/file_icons/code.svg"
"collapsed_folder": { },
"icon": "icons/file_icons/folder.svg" "collapsed_chevron": {
}, "icon": "icons/file_icons/chevron_right.svg"
"default": { },
"icon": "icons/file_icons/file.svg" "collapsed_folder": {
}, "icon": "icons/file_icons/folder.svg"
"document": { },
"icon": "icons/file_icons/book.svg" "default": {
}, "icon": "icons/file_icons/file.svg"
"eslint": { },
"icon": "icons/file_icons/eslint.svg" "document": {
}, "icon": "icons/file_icons/book.svg"
"expanded_chevron": { },
"icon": "icons/file_icons/chevron_down.svg" "eslint": {
}, "icon": "icons/file_icons/eslint.svg"
"expanded_folder": { },
"icon": "icons/file_icons/folder_open.svg" "expanded_chevron": {
}, "icon": "icons/file_icons/chevron_down.svg"
"image": { },
"icon": "icons/file_icons/image.svg" "expanded_folder": {
}, "icon": "icons/file_icons/folder_open.svg"
"lock": { },
"icon": "icons/file_icons/lock.svg" "image": {
}, "icon": "icons/file_icons/image.svg"
"log": { },
"icon": "icons/file_icons/info.svg" "lock": {
}, "icon": "icons/file_icons/lock.svg"
"prettier": { },
"icon": "icons/file_icons/prettier.svg" "log": {
}, "icon": "icons/file_icons/info.svg"
"rust": { },
"icon": "icons/file_icons/rust.svg" "prettier": {
}, "icon": "icons/file_icons/prettier.svg"
"settings": { },
"icon": "icons/file_icons/settings.svg" "rust": {
}, "icon": "icons/file_icons/rust.svg"
"storage": { },
"icon": "icons/file_icons/database.svg" "settings": {
}, "icon": "icons/file_icons/settings.svg"
"template": { },
"icon": "icons/file_icons/html.svg" "storage": {
}, "icon": "icons/file_icons/database.svg"
"terminal": { },
"icon": "icons/file_icons/terminal.svg" "template": {
}, "icon": "icons/file_icons/html.svg"
"toml": { },
"icon": "icons/file_icons/toml.svg" "terminal": {
}, "icon": "icons/file_icons/terminal.svg"
"typescript": { },
"icon": "icons/file_icons/typescript.svg" "toml": {
}, "icon": "icons/file_icons/toml.svg"
"vcs": { },
"icon": "icons/file_icons/git.svg" "typescript": {
}, "icon": "icons/file_icons/typescript.svg"
"video": { },
"icon": "icons/file_icons/video.svg" "vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
}
} }
}
} }

View file

@ -22,6 +22,7 @@
"alt-cmd-right": "pane::ActivateNextItem", "alt-cmd-right": "pane::ActivateNextItem",
"cmd-w": "pane::CloseActiveItem", "cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": "pane::CloseInactiveItems", "alt-cmd-t": "pane::CloseInactiveItems",
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k u": "pane::CloseCleanItems", "cmd-k u": "pane::CloseCleanItems",
"cmd-k cmd-w": "pane::CloseAllItems", "cmd-k cmd-w": "pane::CloseAllItems",
"cmd-shift-w": "workspace::CloseWindow", "cmd-shift-w": "workspace::CloseWindow",
@ -226,6 +227,13 @@
"alt-enter": "search::SelectAllMatches" "alt-enter": "search::SelectAllMatches"
} }
}, },
{
"context": "BufferSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{ {
"context": "ProjectSearchBar", "context": "ProjectSearchBar",
"bindings": { "bindings": {
@ -233,6 +241,13 @@
"alt-tab": "project_search::CycleMode", "alt-tab": "project_search::CycleMode",
} }
}, },
{
"context": "ProjectSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{ {
"context": "ProjectSearchView", "context": "ProjectSearchView",
"bindings": { "bindings": {

View file

@ -1637,6 +1637,7 @@ impl ConversationEditor {
let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx); let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx); editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);
editor editor
}); });

View file

@ -183,7 +183,7 @@ async fn apply_server_operation(
let username; let username;
{ {
let mut plan = plan.lock(); let mut plan = plan.lock();
let mut user = plan.user(user_id); let user = plan.user(user_id);
if user.online { if user.online {
return false; return false;
} }

View file

@ -374,7 +374,7 @@ impl CollabTitlebarItem {
"Share Feedback", "Share Feedback",
feedback::feedback_editor::GiveFeedback, feedback::feedback_editor::GiveFeedback,
), ),
ContextMenuItem::action("Sign out", SignOut), ContextMenuItem::action("Sign Out", SignOut),
] ]
} else { } else {
vec![ vec![

View file

@ -338,9 +338,9 @@ impl Copilot {
let (server, fake_server) = let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async()); LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() }); let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
let this = cx.add_model(|cx| Self { let this = cx.add_model(|_| Self {
http: http.clone(), http: http.clone(),
node_runtime: NodeRuntime::instance(http, cx.background().clone()), node_runtime: NodeRuntime::instance(http),
server: CopilotServer::Running(RunningCopilotServer { server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server), lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized, sign_in_status: SignInStatus::Authorized,

View file

@ -397,7 +397,7 @@ impl InlayMap {
buffer_snapshot: MultiBufferSnapshot, buffer_snapshot: MultiBufferSnapshot,
mut buffer_edits: Vec<text::Edit<usize>>, mut buffer_edits: Vec<text::Edit<usize>>,
) -> (InlaySnapshot, Vec<InlayEdit>) { ) -> (InlaySnapshot, Vec<InlayEdit>) {
let mut snapshot = &mut self.snapshot; let snapshot = &mut self.snapshot;
if buffer_edits.is_empty() { if buffer_edits.is_empty() {
if snapshot.buffer.trailing_excerpt_update_count() if snapshot.buffer.trailing_excerpt_update_count()
@ -572,7 +572,6 @@ impl InlayMap {
}) })
.collect(); .collect();
let buffer_snapshot = snapshot.buffer.clone(); let buffer_snapshot = snapshot.buffer.clone();
drop(snapshot);
let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits); let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits);
(snapshot, edits) (snapshot, edits)
} }
@ -635,7 +634,6 @@ impl InlayMap {
} }
log::info!("removing inlays: {:?}", to_remove); log::info!("removing inlays: {:?}", to_remove);
drop(snapshot);
let (snapshot, edits) = self.splice(to_remove, to_insert); let (snapshot, edits) = self.splice(to_remove, to_insert);
(snapshot, edits) (snapshot, edits)
} }

View file

@ -543,6 +543,7 @@ pub struct Editor {
show_local_selections: bool, show_local_selections: bool,
mode: EditorMode, mode: EditorMode,
show_gutter: bool, show_gutter: bool,
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>, placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>, highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
@ -1375,6 +1376,7 @@ impl Editor {
show_local_selections: true, show_local_selections: true,
mode, mode,
show_gutter: mode == EditorMode::Full, show_gutter: mode == EditorMode::Full,
show_wrap_guides: None,
placeholder_text: None, placeholder_text: None,
highlighted_rows: None, highlighted_rows: None,
background_highlights: Default::default(), background_highlights: Default::default(),
@ -1537,7 +1539,7 @@ impl Editor {
self.collapse_matches = collapse_matches; self.collapse_matches = collapse_matches;
} }
fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> { pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches { if self.collapse_matches {
return range.start..range.start; return range.start..range.start;
} }
@ -4219,7 +4221,7 @@ impl Editor {
_: &SortLinesCaseSensitive, _: &SortLinesCaseSensitive,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) { ) {
self.manipulate_lines(cx, |text| text.sort()) self.manipulate_lines(cx, |lines| lines.sort())
} }
pub fn sort_lines_case_insensitive( pub fn sort_lines_case_insensitive(
@ -4227,7 +4229,7 @@ impl Editor {
_: &SortLinesCaseInsensitive, _: &SortLinesCaseInsensitive,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) { ) {
self.manipulate_lines(cx, |text| text.sort_by_key(|line| line.to_lowercase())) self.manipulate_lines(cx, |lines| lines.sort_by_key(|line| line.to_lowercase()))
} }
pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) { pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) {
@ -4265,19 +4267,19 @@ impl Editor {
let text = buffer let text = buffer
.text_for_range(start_point..end_point) .text_for_range(start_point..end_point)
.collect::<String>(); .collect::<String>();
let mut text = text.split("\n").collect_vec(); let mut lines = text.split("\n").collect_vec();
let text_len = text.len(); let lines_len = lines.len();
callback(&mut text); callback(&mut lines);
// This is a current limitation with selections. // This is a current limitation with selections.
// If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections. // If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections.
debug_assert!( debug_assert!(
text.len() == text_len, lines.len() == lines_len,
"callback should not change the number of lines" "callback should not change the number of lines"
); );
edits.push((start_point..end_point, text.join("\n"))); edits.push((start_point..end_point, lines.join("\n")));
let start_anchor = buffer.anchor_after(start_point); let start_anchor = buffer.anchor_after(start_point);
let end_anchor = buffer.anchor_before(end_point); let end_anchor = buffer.anchor_before(end_point);
@ -6374,8 +6376,8 @@ impl Editor {
.range .range
.to_offset(definition.target.buffer.read(cx)); .to_offset(definition.target.buffer.read(cx));
let range = self.range_for_match(&range);
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() { if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
let range = self.range_for_match(&range);
self.change_selections(Some(Autoscroll::fit()), cx, |s| { self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]); s.select_ranges([range]);
}); });
@ -6392,7 +6394,6 @@ impl Editor {
// When selecting a definition in a different buffer, disable the nav history // When selecting a definition in a different buffer, disable the nav history
// to avoid creating a history entry at the previous cursor location. // to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history()); pane.update(cx, |pane, _| pane.disable_history());
let range = target_editor.range_for_match(&range);
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| { target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]); s.select_ranges([range]);
}); });
@ -7188,6 +7189,10 @@ impl Editor {
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> { pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
let mut wrap_guides = smallvec::smallvec![]; let mut wrap_guides = smallvec::smallvec![];
if self.show_wrap_guides == Some(false) {
return wrap_guides;
}
let settings = self.buffer.read(cx).settings_at(0, cx); let settings = self.buffer.read(cx).settings_at(0, cx);
if settings.show_wrap_guides { if settings.show_wrap_guides {
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) { if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
@ -7245,6 +7250,11 @@ impl Editor {
cx.notify(); cx.notify();
} }
pub fn set_show_wrap_guides(&mut self, show_gutter: bool, cx: &mut ViewContext<Self>) {
self.show_wrap_guides = Some(show_gutter);
cx.notify();
}
pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) { pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) {
if let Some(buffer) = self.buffer().read(cx).as_singleton() { if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {

View file

@ -546,8 +546,20 @@ impl EditorElement {
}); });
} }
let scroll_left =
layout.position_map.snapshot.scroll_position().x() * layout.position_map.em_width;
for (wrap_position, active) in layout.wrap_guides.iter() { for (wrap_position, active) in layout.wrap_guides.iter() {
let x = text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.; let x =
(text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.)
- scroll_left;
if x < text_bounds.origin_x()
|| (layout.show_scrollbars && x > self.scrollbar_left(&bounds))
{
continue;
}
let color = if *active { let color = if *active {
self.style.active_wrap_guide self.style.active_wrap_guide
} else { } else {
@ -1036,6 +1048,10 @@ impl EditorElement {
scene.pop_layer(); scene.pop_layer();
} }
fn scrollbar_left(&self, bounds: &RectF) -> f32 {
bounds.max_x() - self.style.theme.scrollbar.width
}
fn paint_scrollbar( fn paint_scrollbar(
&mut self, &mut self,
scene: &mut SceneBuilder, scene: &mut SceneBuilder,
@ -1054,7 +1070,7 @@ impl EditorElement {
let top = bounds.min_y(); let top = bounds.min_y();
let bottom = bounds.max_y(); let bottom = bounds.max_y();
let right = bounds.max_x(); let right = bounds.max_x();
let left = right - style.width; let left = self.scrollbar_left(&bounds);
let row_range = &layout.scrollbar_row_range; let row_range = &layout.scrollbar_row_range;
let max_row = layout.max_row as f32 + (row_range.end - row_range.start); let max_row = layout.max_row as f32 + (row_range.end - row_range.start);

View file

@ -571,7 +571,6 @@ fn new_update_task(
if let Some(buffer) = if let Some(buffer) =
refresh_multi_buffer.buffer(pending_refresh_query.buffer_id) refresh_multi_buffer.buffer(pending_refresh_query.buffer_id)
{ {
drop(refresh_multi_buffer);
editor.inlay_hint_cache.update_tasks.insert( editor.inlay_hint_cache.update_tasks.insert(
pending_refresh_query.excerpt_id, pending_refresh_query.excerpt_id,
UpdateTask { UpdateTask {

View file

@ -1128,6 +1128,12 @@ impl AppContext {
self.keystroke_matcher.clear_bindings(); self.keystroke_matcher.clear_bindings();
} }
pub fn binding_for_action(&self, action: &dyn Action) -> Option<&Binding> {
self.keystroke_matcher
.bindings_for_action(action.id())
.find(|binding| binding.action().eq(action))
}
pub fn default_global<T: 'static + Default>(&mut self) -> &T { pub fn default_global<T: 'static + Default>(&mut self) -> &T {
let type_id = TypeId::of::<T>(); let type_id = TypeId::of::<T>();
self.update(|this| { self.update(|this| {

View file

@ -844,8 +844,8 @@ impl LanguageRegistry {
} }
} }
} }
Err(err) => { Err(e) => {
log::error!("failed to load language {name} - {err}"); log::error!("failed to load language {name}:\n{:?}", e);
let mut state = this.state.write(); let mut state = this.state.write();
state.mark_language_loaded(id); state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) { if let Some(mut txs) = state.loading_languages.remove(&id) {
@ -853,7 +853,7 @@ impl LanguageRegistry {
let _ = tx.send(Err(anyhow!( let _ = tx.send(Err(anyhow!(
"failed to load language {}: {}", "failed to load language {}: {}",
name, name,
err e
))); )));
} }
} }
@ -1188,25 +1188,39 @@ impl Language {
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> { pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights { if let Some(query) = queries.highlights {
self = self.with_highlights_query(query.as_ref())?; self = self
.with_highlights_query(query.as_ref())
.context("Error loading highlights query")?;
} }
if let Some(query) = queries.brackets { if let Some(query) = queries.brackets {
self = self.with_brackets_query(query.as_ref())?; self = self
.with_brackets_query(query.as_ref())
.context("Error loading brackets query")?;
} }
if let Some(query) = queries.indents { if let Some(query) = queries.indents {
self = self.with_indents_query(query.as_ref())?; self = self
.with_indents_query(query.as_ref())
.context("Error loading indents query")?;
} }
if let Some(query) = queries.outline { if let Some(query) = queries.outline {
self = self.with_outline_query(query.as_ref())?; self = self
.with_outline_query(query.as_ref())
.context("Error loading outline query")?;
} }
if let Some(query) = queries.embedding { if let Some(query) = queries.embedding {
self = self.with_embedding_query(query.as_ref())?; self = self
.with_embedding_query(query.as_ref())
.context("Error loading embedding query")?;
} }
if let Some(query) = queries.injections { if let Some(query) = queries.injections {
self = self.with_injection_query(query.as_ref())?; self = self
.with_injection_query(query.as_ref())
.context("Error loading injection query")?;
} }
if let Some(query) = queries.overrides { if let Some(query) = queries.overrides {
self = self.with_override_query(query.as_ref())?; self = self
.with_override_query(query.as_ref())
.context("Error loading override query")?;
} }
Ok(self) Ok(self)
} }

View file

@ -58,11 +58,14 @@ fn build_bridge(swift_target: &SwiftTarget) {
"cargo:rerun-if-changed={}/Package.resolved", "cargo:rerun-if-changed={}/Package.resolved",
SWIFT_PACKAGE_NAME SWIFT_PACKAGE_NAME
); );
let swift_package_root = swift_package_root(); let swift_package_root = swift_package_root();
let swift_target_folder = swift_target_folder();
if !Command::new("swift") if !Command::new("swift")
.arg("build") .arg("build")
.args(["--configuration", &env::var("PROFILE").unwrap()]) .args(["--configuration", &env::var("PROFILE").unwrap()])
.args(["--triple", &swift_target.target.triple]) .args(["--triple", &swift_target.target.triple])
.args(["--build-path".into(), swift_target_folder])
.current_dir(&swift_package_root) .current_dir(&swift_package_root)
.status() .status()
.unwrap() .unwrap()
@ -128,6 +131,12 @@ fn swift_package_root() -> PathBuf {
env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME) env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
} }
fn swift_target_folder() -> PathBuf {
env::current_dir()
.unwrap()
.join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
}
fn copy_dir(source: &Path, destination: &Path) { fn copy_dir(source: &Path, destination: &Path) {
assert!( assert!(
Command::new("rm") Command::new("rm")
@ -155,8 +164,7 @@ fn copy_dir(source: &Path, destination: &Path) {
impl SwiftTarget { impl SwiftTarget {
fn out_dir_path(&self) -> PathBuf { fn out_dir_path(&self) -> PathBuf {
swift_package_root() swift_target_folder()
.join(".build")
.join(&self.target.unversioned_triple) .join(&self.target.unversioned_triple)
.join(env::var("PROFILE").unwrap()) .join(env::var("PROFILE").unwrap())
} }

View file

@ -1,9 +1,6 @@
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder; use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive; use async_tar::Archive;
use futures::lock::Mutex;
use futures::{future::Shared, FutureExt};
use gpui::{executor::Background, Task};
use serde::Deserialize; use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command}; use smol::{fs, io::BufReader, process::Command};
use std::process::{Output, Stdio}; use std::process::{Output, Stdio};
@ -33,20 +30,12 @@ pub struct NpmInfoDistTags {
pub struct NodeRuntime { pub struct NodeRuntime {
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
background: Arc<Background>,
installation_path: Mutex<Option<Shared<Task<Result<PathBuf, Arc<anyhow::Error>>>>>>,
} }
impl NodeRuntime { impl NodeRuntime {
pub fn instance(http: Arc<dyn HttpClient>, background: Arc<Background>) -> Arc<NodeRuntime> { pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE RUNTIME_INSTANCE
.get_or_init(|| { .get_or_init(|| Arc::new(NodeRuntime { http }))
Arc::new(NodeRuntime {
http,
background,
installation_path: Mutex::new(None),
})
})
.clone() .clone()
} }
@ -61,7 +50,9 @@ impl NodeRuntime {
subcommand: &str, subcommand: &str,
args: &[&str], args: &[&str],
) -> Result<Output> { ) -> Result<Output> {
let attempt = |installation_path: PathBuf| async move { let attempt = || async move {
let installation_path = self.install_if_needed().await?;
let mut env_path = installation_path.join("bin").into_os_string(); let mut env_path = installation_path.join("bin").into_os_string();
if let Some(existing_path) = std::env::var_os("PATH") { if let Some(existing_path) = std::env::var_os("PATH") {
if !existing_path.is_empty() { if !existing_path.is_empty() {
@ -92,10 +83,9 @@ impl NodeRuntime {
command.output().await.map_err(|e| anyhow!("{e}")) command.output().await.map_err(|e| anyhow!("{e}"))
}; };
let installation_path = self.install_if_needed().await?; let mut output = attempt().await;
let mut output = attempt(installation_path.clone()).await;
if output.is_err() { if output.is_err() {
output = attempt(installation_path).await; output = attempt().await;
if output.is_err() { if output.is_err() {
return Err(anyhow!( return Err(anyhow!(
"failed to launch npm subcommand {subcommand} subcommand" "failed to launch npm subcommand {subcommand} subcommand"
@ -167,23 +157,8 @@ impl NodeRuntime {
} }
async fn install_if_needed(&self) -> Result<PathBuf> { async fn install_if_needed(&self) -> Result<PathBuf> {
let task = self log::info!("Node runtime install_if_needed");
.installation_path
.lock()
.await
.get_or_insert_with(|| {
let http = self.http.clone();
self.background
.spawn(async move { Self::install(http).await.map_err(Arc::new) })
.shared()
})
.clone();
task.await.map_err(|e| anyhow!("{}", e))
}
async fn install(http: Arc<dyn HttpClient>) -> Result<PathBuf> {
log::info!("installing Node runtime");
let arch = match consts::ARCH { let arch = match consts::ARCH {
"x86_64" => "x64", "x86_64" => "x64",
"aarch64" => "arm64", "aarch64" => "arm64",
@ -214,7 +189,8 @@ impl NodeRuntime {
let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz"); let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}"); let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = http let mut response = self
.http
.get(&url, Default::default(), true) .get(&url, Default::default(), true)
.await .await
.context("error downloading Node binary tarball")?; .context("error downloading Node binary tarball")?;

View file

@ -1,7 +1,6 @@
use crate::{worktree::WorktreeHandle, Event, *}; use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs}; use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext}; use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{ use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent}, language_settings::{AllLanguageSettings, LanguageSettingsContent},
@ -3641,7 +3640,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
Vec::new() Vec::new()
), ),
cx cx
@ -3659,7 +3658,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.rs").unwrap().compile_matcher()], vec![PathMatcher::new("*.rs").unwrap()],
Vec::new() Vec::new()
), ),
cx cx
@ -3681,8 +3680,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
Vec::new() Vec::new()
), ),
@ -3705,9 +3704,9 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
Vec::new() Vec::new()
), ),
@ -3752,7 +3751,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
Vec::new(), Vec::new(),
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
), ),
cx cx
) )
@ -3775,7 +3774,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
Vec::new(), Vec::new(),
vec![Glob::new("*.rs").unwrap().compile_matcher()], vec![PathMatcher::new("*.rs").unwrap()],
), ),
cx cx
) )
@ -3797,8 +3796,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true, true,
Vec::new(), Vec::new(),
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
), ),
cx cx
@ -3821,9 +3820,9 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true, true,
Vec::new(), Vec::new(),
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
), ),
cx cx
@ -3860,8 +3859,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
), ),
cx cx
) )
@ -3878,8 +3877,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.ts").unwrap().compile_matcher()], vec![PathMatcher::new("*.ts").unwrap()],
vec![Glob::new("*.ts").unwrap().compile_matcher()], vec![PathMatcher::new("*.ts").unwrap()],
), ),
cx cx
) )
@ -3897,12 +3896,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
), ),
cx cx
@ -3921,12 +3920,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
), ),
cx cx

View file

@ -1,5 +1,5 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::Result; use anyhow::{Context, Result};
use client::proto; use client::proto;
use globset::{Glob, GlobMatcher}; use globset::{Glob, GlobMatcher};
use itertools::Itertools; use itertools::Itertools;
@ -9,7 +9,7 @@ use smol::future::yield_now;
use std::{ use std::{
io::{BufRead, BufReader, Read}, io::{BufRead, BufReader, Read},
ops::Range, ops::Range,
path::Path, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
@ -20,8 +20,8 @@ pub enum SearchQuery {
query: Arc<str>, query: Arc<str>,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
}, },
Regex { Regex {
regex: Regex, regex: Regex,
@ -29,18 +29,43 @@ pub enum SearchQuery {
multiline: bool, multiline: bool,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
}, },
} }
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery { impl SearchQuery {
pub fn text( pub fn text(
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Self { ) -> Self {
let query = query.to_string(); let query = query.to_string();
let search = AhoCorasickBuilder::new() let search = AhoCorasickBuilder::new()
@ -61,8 +86,8 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
let mut query = query.to_string(); let mut query = query.to_string();
let initial_query = Arc::from(query.as_str()); let initial_query = Arc::from(query.as_str());
@ -96,16 +121,16 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
deserialize_globs(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
} else { } else {
Ok(Self::text( Ok(Self::text(
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
deserialize_globs(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
)) ))
} }
} }
@ -120,12 +145,12 @@ impl SearchQuery {
files_to_include: self files_to_include: self
.files_to_include() .files_to_include()
.iter() .iter()
.map(|g| g.glob().to_string()) .map(|matcher| matcher.to_string())
.join(","), .join(","),
files_to_exclude: self files_to_exclude: self
.files_to_exclude() .files_to_exclude()
.iter() .iter()
.map(|g| g.glob().to_string()) .map(|matcher| matcher.to_string())
.join(","), .join(","),
} }
} }
@ -266,7 +291,7 @@ impl SearchQuery {
matches!(self, Self::Regex { .. }) matches!(self, Self::Regex { .. })
} }
pub fn files_to_include(&self) -> &[GlobMatcher] { pub fn files_to_include(&self) -> &[PathMatcher] {
match self { match self {
Self::Text { Self::Text {
files_to_include, .. files_to_include, ..
@ -277,7 +302,7 @@ impl SearchQuery {
} }
} }
pub fn files_to_exclude(&self) -> &[GlobMatcher] { pub fn files_to_exclude(&self) -> &[PathMatcher] {
match self { match self {
Self::Text { Self::Text {
files_to_exclude, .. files_to_exclude, ..
@ -306,11 +331,63 @@ impl SearchQuery {
} }
} }
fn deserialize_globs(glob_set: &str) -> Result<Vec<GlobMatcher>> { fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
glob_set glob_set
.split(',') .split(',')
.map(str::trim) .map(str::trim)
.filter(|glob_str| !glob_str.is_empty()) .filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| Ok(Glob::new(glob_str)?.compile_matcher())) .map(|glob_str| {
PathMatcher::new(glob_str)
.with_context(|| format!("deserializing path match glob {glob_str}"))
})
.collect() .collect()
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn path_matcher_creation_for_valid_paths() {
for valid_path in [
"file",
"Cargo.toml",
".DS_Store",
"~/dir/another_dir/",
"./dir/file",
"dir/[a-z].txt",
"../dir/filé",
] {
let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
panic!("Valid path {valid_path} should be accepted, but got: {e}")
});
assert!(
path_matcher.is_match(valid_path),
"Path matcher for valid path {valid_path} should match itself"
)
}
}
#[test]
fn path_matcher_creation_for_globs() {
for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
match PathMatcher::new(invalid_glob) {
Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
Err(_expected) => {}
}
}
for valid_glob in [
"dir/?ile",
"dir/*.txt",
"dir/**/file",
"dir/[a-z].txt",
"{dir,file}",
] {
match PathMatcher::new(valid_glob) {
Ok(_expected) => {}
Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
}
}
}
}

View file

@ -2369,7 +2369,7 @@ impl BackgroundScannerState {
} }
// Remove any git repositories whose .git entry no longer exists. // Remove any git repositories whose .git entry no longer exists.
let mut snapshot = &mut self.snapshot; let snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories); let mut repositories = mem::take(&mut snapshot.git_repositories);
let mut repository_entries = mem::take(&mut snapshot.repository_entries); let mut repository_entries = mem::take(&mut snapshot.repository_entries);
repositories.retain(|work_directory_id, _| { repositories.retain(|work_directory_id, _| {

View file

@ -115,6 +115,7 @@ actions!(
[ [
ExpandSelectedEntry, ExpandSelectedEntry,
CollapseSelectedEntry, CollapseSelectedEntry,
CollapseAllEntries,
NewDirectory, NewDirectory,
NewFile, NewFile,
Copy, Copy,
@ -140,6 +141,7 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
file_associations::init(assets, cx); file_associations::init(assets, cx);
cx.add_action(ProjectPanel::expand_selected_entry); cx.add_action(ProjectPanel::expand_selected_entry);
cx.add_action(ProjectPanel::collapse_selected_entry); cx.add_action(ProjectPanel::collapse_selected_entry);
cx.add_action(ProjectPanel::collapse_all_entries);
cx.add_action(ProjectPanel::select_prev); cx.add_action(ProjectPanel::select_prev);
cx.add_action(ProjectPanel::select_next); cx.add_action(ProjectPanel::select_next);
cx.add_action(ProjectPanel::new_file); cx.add_action(ProjectPanel::new_file);
@ -514,6 +516,12 @@ impl ProjectPanel {
} }
} }
pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
self.expanded_dir_ids.clear();
self.update_visible_entries(None, cx);
cx.notify();
}
fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) { fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) { if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
@ -2678,6 +2686,63 @@ mod tests {
); );
} }
#[gpui::test]
async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/project_root",
json!({
"dir_1": {
"nested_dir": {
"file_a.py": "# File contents",
"file_b.py": "# File contents",
"file_c.py": "# File contents",
},
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
},
"dir_2": {
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
panel.update(cx, |panel, cx| {
panel.collapse_all_entries(&CollapseAllEntries, cx)
});
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&["v project_root", " > dir_1", " > dir_2",]
);
// Open dir_1 and make sure nested_dir was collapsed when running collapse_all_entries
toggle_expand_dir(&panel, "project_root/dir_1", cx);
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v project_root",
" v dir_1 <== selected",
" > nested_dir",
" file_1.py",
" file_2.py",
" file_3.py",
" > dir_2",
]
);
}
fn toggle_expand_dir( fn toggle_expand_dir(
panel: &ViewHandle<ProjectPanel>, panel: &ViewHandle<ProjectPanel>,
path: impl AsRef<Path>, path: impl AsRef<Path>,
@ -2878,3 +2943,4 @@ mod tests {
}); });
} }
} }
// TODO - a workspace command?

View file

@ -1,6 +1,6 @@
use crate::{ use crate::{
SearchOptions, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectAllMatches,
ToggleRegex, ToggleWholeWord, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
}; };
use collections::HashMap; use collections::HashMap;
use editor::Editor; use editor::Editor;
@ -46,6 +46,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::select_prev_match_on_pane); cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane); cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel); cx.add_action(BufferSearchBar::handle_editor_cancel);
cx.add_action(BufferSearchBar::next_history_query);
cx.add_action(BufferSearchBar::previous_history_query);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx); add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx); add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx);
@ -65,7 +67,7 @@ fn add_toggle_option_action<A: Action>(option: SearchOptions, cx: &mut AppContex
} }
pub struct BufferSearchBar { pub struct BufferSearchBar {
pub query_editor: ViewHandle<Editor>, query_editor: ViewHandle<Editor>,
active_searchable_item: Option<Box<dyn SearchableItemHandle>>, active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>, active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>, active_searchable_item_subscription: Option<Subscription>,
@ -76,6 +78,7 @@ pub struct BufferSearchBar {
default_options: SearchOptions, default_options: SearchOptions,
query_contains_error: bool, query_contains_error: bool,
dismissed: bool, dismissed: bool,
search_history: SearchHistory,
} }
impl Entity for BufferSearchBar { impl Entity for BufferSearchBar {
@ -106,6 +109,48 @@ impl View for BufferSearchBar {
.map(|active_searchable_item| active_searchable_item.supported_options()) .map(|active_searchable_item| active_searchable_item.supported_options())
.unwrap_or_default(); .unwrap_or_default();
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes = cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
Flex::row() Flex::row()
.with_child( .with_child(
Flex::row() Flex::row()
@ -258,6 +303,7 @@ impl BufferSearchBar {
pending_search: None, pending_search: None,
query_contains_error: false, query_contains_error: false,
dismissed: true, dismissed: true,
search_history: SearchHistory::default(),
} }
} }
@ -341,7 +387,7 @@ impl BufferSearchBar {
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> oneshot::Receiver<()> { ) -> oneshot::Receiver<()> {
let options = options.unwrap_or(self.default_options); let options = options.unwrap_or(self.default_options);
if query != self.query_editor.read(cx).text(cx) || self.search_options != options { if query != self.query(cx) || self.search_options != options {
self.query_editor.update(cx, |query_editor, cx| { self.query_editor.update(cx, |query_editor, cx| {
query_editor.buffer().update(cx, |query_buffer, cx| { query_editor.buffer().update(cx, |query_buffer, cx| {
let len = query_buffer.len(cx); let len = query_buffer.len(cx);
@ -674,7 +720,7 @@ impl BufferSearchBar {
fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> { fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> {
let (done_tx, done_rx) = oneshot::channel(); let (done_tx, done_rx) = oneshot::channel();
let query = self.query_editor.read(cx).text(cx); let query = self.query(cx);
self.pending_search.take(); self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() { if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
if query.is_empty() { if query.is_empty() {
@ -707,6 +753,7 @@ impl BufferSearchBar {
) )
}; };
let query_text = query.as_str().to_string();
let matches = active_searchable_item.find_matches(query, cx); let matches = active_searchable_item.find_matches(query, cx);
let active_searchable_item = active_searchable_item.downgrade(); let active_searchable_item = active_searchable_item.downgrade();
@ -720,6 +767,7 @@ impl BufferSearchBar {
.insert(active_searchable_item.downgrade(), matches); .insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx); this.update_match_index(cx);
this.search_history.add(query_text);
if !this.dismissed { if !this.dismissed {
let matches = this let matches = this
.searchable_items_with_matches .searchable_items_with_matches
@ -753,6 +801,28 @@ impl BufferSearchBar {
cx.notify(); cx.notify();
} }
} }
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(new_query) = self.search_history.next().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
} else {
self.search_history.reset_selection();
let _ = self.search("", Some(self.search_options), cx);
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if self.query(cx).is_empty() {
if let Some(new_query) = self.search_history.current().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
return;
}
}
if let Some(new_query) = self.search_history.previous().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
}
}
} }
#[cfg(test)] #[cfg(test)]
@ -1333,4 +1403,154 @@ mod tests {
); );
}); });
} }
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
crate::project_search::tests::init_test(cx);
let buffer_text = r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
pattern in text. Usually such patterns are used by string-searching algorithms
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = BufferSearchBar::new(cx);
search_bar.set_active_pane_item(Some(&editor), cx);
search_bar.show(cx);
search_bar
});
// Add 3 search items into the history.
search_bar
.update(cx, |search_bar, cx| search_bar.search("a", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| search_bar.search("b", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| {
search_bar.search("c", Some(SearchOptions::CASE_SENSITIVE), cx)
})
.await
.unwrap();
// Ensure that the latest search is active.
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go behind the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar
.update(cx, |search_bar, cx| search_bar.search("ba", None, cx))
.await
.unwrap();
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
}
} }

View file

@ -1,14 +1,14 @@
use crate::{ use crate::{
SearchOptions, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord, NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectNextMatch,
SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
}; };
use anyhow::Result; use anyhow::Context;
use collections::HashMap; use collections::HashMap;
use editor::{ use editor::{
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer, items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
SelectAll, MAX_TAB_TITLE_LEN, SelectAll, MAX_TAB_TITLE_LEN,
}; };
use futures::StreamExt; use futures::StreamExt;
use globset::{Glob, GlobMatcher};
use gpui::color::Color; use gpui::color::Color;
use gpui::geometry::rect::RectF; use gpui::geometry::rect::RectF;
use gpui::geometry::vector::IntoVector2F; use gpui::geometry::vector::IntoVector2F;
@ -24,7 +24,10 @@ use gpui::{
use gpui::{scene::Path, LayoutContext}; use gpui::{scene::Path, LayoutContext};
use menu::Confirm; use menu::Confirm;
use postage::stream::Stream; use postage::stream::Stream;
use project::{search::SearchQuery, Entry, Project}; use project::{
search::{PathMatcher, SearchQuery},
Entry, Project,
};
use semantic_index::SemanticIndex; use semantic_index::SemanticIndex;
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{ use std::{
@ -69,6 +72,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(ProjectSearchBar::select_next_match); cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match); cx.add_action(ProjectSearchBar::select_prev_match);
cx.add_action(ProjectSearchBar::cycle_mode); cx.add_action(ProjectSearchBar::cycle_mode);
cx.add_action(ProjectSearchBar::next_history_query);
cx.add_action(ProjectSearchBar::previous_history_query);
cx.capture_action(ProjectSearchBar::tab); cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous); cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
@ -107,6 +112,7 @@ struct ProjectSearch {
match_ranges: Vec<Range<Anchor>>, match_ranges: Vec<Range<Anchor>>,
active_query: Option<SearchQuery>, active_query: Option<SearchQuery>,
search_id: usize, search_id: usize,
search_history: SearchHistory,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -228,6 +234,7 @@ impl ProjectSearch {
match_ranges: Default::default(), match_ranges: Default::default(),
active_query: None, active_query: None,
search_id: 0, search_id: 0,
search_history: SearchHistory::default(),
} }
} }
@ -241,6 +248,7 @@ impl ProjectSearch {
match_ranges: self.match_ranges.clone(), match_ranges: self.match_ranges.clone(),
active_query: self.active_query.clone(), active_query: self.active_query.clone(),
search_id: self.search_id, search_id: self.search_id,
search_history: self.search_history.clone(),
}) })
} }
@ -255,6 +263,7 @@ impl ProjectSearch {
.project .project
.update(cx, |project, cx| project.search(query.clone(), cx)); .update(cx, |project, cx| project.search(query.clone(), cx));
self.search_id += 1; self.search_id += 1;
self.search_history.add(query.as_str().to_string());
self.active_query = Some(query); self.active_query = Some(query);
self.match_ranges.clear(); self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move { self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
@ -290,27 +299,22 @@ impl ProjectSearch {
cx.notify(); cx.notify();
} }
fn semantic_search( fn semantic_search(&mut self, query: SearchQuery, cx: &mut ModelContext<Self>) {
&mut self,
query: String,
include_files: Vec<GlobMatcher>,
exclude_files: Vec<GlobMatcher>,
cx: &mut ModelContext<Self>,
) {
let search = SemanticIndex::global(cx).map(|index| { let search = SemanticIndex::global(cx).map(|index| {
index.update(cx, |semantic_index, cx| { index.update(cx, |semantic_index, cx| {
semantic_index.search_project( semantic_index.search_project(
self.project.clone(), self.project.clone(),
query.clone(), query.as_str().to_owned(),
10, 10,
include_files, query.files_to_include().to_vec(),
exclude_files, query.files_to_exclude().to_vec(),
cx, cx,
) )
}) })
}); });
self.search_id += 1; self.search_id += 1;
self.match_ranges.clear(); self.match_ranges.clear();
self.search_history.add(query.as_str().to_string());
self.pending_search = Some(cx.spawn(|this, mut cx| async move { self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?; let results = search?.await.log_err()?;
@ -415,6 +419,49 @@ impl View for ProjectSearchView {
], ],
}; };
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes =
cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
MouseEventHandler::<Status, _>::new(0, cx, |_, _| { MouseEventHandler::<Status, _>::new(0, cx, |_, _| {
Flex::column() Flex::column()
.with_child(Flex::column().contained().flex(1., true)) .with_child(Flex::column().contained().flex(1., true))
@ -641,6 +688,9 @@ impl Item for ProjectSearchView {
} }
impl ProjectSearchView { impl ProjectSearchView {
fn toggle_search_option(&mut self, option: SearchOptions) {
self.search_options.toggle(option);
}
fn activate_search_mode(&mut self, mode: SearchMode, cx: &mut ViewContext<Self>) { fn activate_search_mode(&mut self, mode: SearchMode, cx: &mut ViewContext<Self>) {
self.model.update(cx, |model, _| model.kill_search()); self.model.update(cx, |model, _| model.kill_search());
self.current_mode = mode; self.current_mode = mode;
@ -815,8 +865,7 @@ impl ProjectSearchView {
if !dir_entry.is_dir() { if !dir_entry.is_dir() {
return; return;
} }
let filter_path = dir_entry.path.join("**"); let Some(filter_str) = dir_entry.path.to_str() else { return; };
let Some(filter_str) = filter_path.to_str() else { return; };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx)); let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx)); let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
@ -891,16 +940,13 @@ impl ProjectSearchView {
return; return;
} }
let query = self.query_editor.read(cx).text(cx); if let Some(query) = self.build_search_query(cx) {
if let Some((included_files, exclude_files)) = self.model
self.get_included_and_excluded_globsets(cx) .update(cx, |model, cx| model.semantic_search(query, cx));
{
self.model.update(cx, |model, cx| {
model.semantic_search(query, included_files, exclude_files, cx)
});
} }
} }
} }
_ => { _ => {
if let Some(query) = self.build_search_query(cx) { if let Some(query) = self.build_search_query(cx) {
self.model.update(cx, |model, cx| model.search(query, cx)); self.model.update(cx, |model, cx| model.search(query, cx));
@ -909,45 +955,10 @@ impl ProjectSearchView {
} }
} }
fn get_included_and_excluded_globsets(
&mut self,
cx: &mut ViewContext<Self>,
) -> Option<(Vec<GlobMatcher>, Vec<GlobMatcher>)> {
let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Include);
cx.notify();
return None;
}
};
let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Exclude);
cx.notify();
return None;
}
};
Some((included_files, excluded_files))
}
fn toggle_search_option(&mut self, option: SearchOptions) {
self.search_options.toggle(option);
self.semantic = None;
}
fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> { fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> {
let text = self.query_editor.read(cx).text(cx); let text = self.query_editor.read(cx).text(cx);
let included_files = let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) { match Self::parse_path_matches(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => { Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include); self.panels_with_errors.remove(&InputPanel::Include);
included_files included_files
@ -959,7 +970,7 @@ impl ProjectSearchView {
} }
}; };
let excluded_files = let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) { match Self::parse_path_matches(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => { Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude); self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files excluded_files
@ -999,11 +1010,14 @@ impl ProjectSearchView {
} }
} }
fn load_glob_set(text: &str) -> Result<Vec<GlobMatcher>> { fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
text.split(',') text.split(',')
.map(str::trim) .map(str::trim)
.filter(|glob_str| !glob_str.is_empty()) .filter(|maybe_glob_str| !maybe_glob_str.is_empty())
.map(|glob_str| anyhow::Ok(Glob::new(glob_str)?.compile_matcher())) .map(|maybe_glob_str| {
PathMatcher::new(maybe_glob_str)
.with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
})
.collect() .collect()
} }
@ -1016,6 +1030,7 @@ impl ProjectSearchView {
let range_to_select = match_ranges[new_index].clone(); let range_to_select = match_ranges[new_index].clone();
self.results_editor.update(cx, |editor, cx| { self.results_editor.update(cx, |editor, cx| {
let range_to_select = editor.range_for_match(&range_to_select);
editor.unfold_ranges([range_to_select.clone()], false, true, cx); editor.unfold_ranges([range_to_select.clone()], false, true, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| { editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range_to_select]) s.select_ranges([range_to_select])
@ -1057,8 +1072,12 @@ impl ProjectSearchView {
let is_new_search = self.search_id != prev_search_id; let is_new_search = self.search_id != prev_search_id;
self.results_editor.update(cx, |editor, cx| { self.results_editor.update(cx, |editor, cx| {
if is_new_search { if is_new_search {
let range_to_select = match_ranges
.first()
.clone()
.map(|range| editor.range_for_match(range));
editor.change_selections(Some(Autoscroll::fit()), cx, |s| { editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges(match_ranges.first().cloned()) s.select_ranges(range_to_select)
}); });
} }
editor.highlight_background::<Self>( editor.highlight_background::<Self>(
@ -1597,6 +1616,47 @@ impl ProjectSearchBar {
false false
} }
} }
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
let new_query = search_view.model.update(cx, |model, _| {
if let Some(new_query) = model.search_history.next().map(str::to_string) {
new_query
} else {
model.search_history.reset_selection();
String::new()
}
});
search_view.set_query(&new_query, cx);
});
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
if search_view.query_editor.read(cx).text(cx).is_empty() {
if let Some(new_query) = search_view
.model
.read(cx)
.search_history
.current()
.map(str::to_string)
{
search_view.set_query(&new_query, cx);
return;
}
}
if let Some(new_query) = search_view.model.update(cx, |model, _| {
model.search_history.previous().map(str::to_string)
}) {
search_view.set_query(&new_query, cx);
}
});
}
}
} }
impl Entity for ProjectSearchBar { impl Entity for ProjectSearchBar {
@ -1869,6 +1929,7 @@ pub mod tests {
use editor::DisplayPoint; use editor::DisplayPoint;
use gpui::{color::Color, executor::Deterministic, TestAppContext}; use gpui::{color::Color, executor::Deterministic, TestAppContext};
use project::FakeFs; use project::FakeFs;
use semantic_index::semantic_index_settings::SemanticIndexSettings;
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use std::sync::Arc; use std::sync::Arc;
@ -2270,7 +2331,7 @@ pub mod tests {
search_view.included_files_editor.update(cx, |editor, cx| { search_view.included_files_editor.update(cx, |editor, cx| {
assert_eq!( assert_eq!(
editor.display_text(cx), editor.display_text(cx),
a_dir_entry.path.join("**").display().to_string(), a_dir_entry.path.to_str().unwrap(),
"New search in directory should have included dir entry path" "New search in directory should have included dir entry path"
); );
}); });
@ -2294,6 +2355,192 @@ pub mod tests {
}); });
} }
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
let search_view = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
.expect("Search view expected to appear after new search event trigger")
});
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = ProjectSearchBar::new();
search_bar.set_active_pane_item(Some(&search_view), cx);
// search_bar.show(cx);
search_bar
});
// Add 3 search items into the history + another unsubmitted one.
search_view.update(cx, |search_view, cx| {
search_view.search_options = SearchOptions::CASE_SENSITIVE;
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("ONE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("THREE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view.query_editor.update(cx, |query_editor, cx| {
query_editor.set_text("JUST_TEXT_INPUT", cx)
});
});
cx.foreground().run_until_parked();
// Ensure that the latest input with search settings is active.
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view.query_editor.read(cx).text(cx),
"JUST_TEXT_INPUT"
);
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest submitted one.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go behind the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO_NEW", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
}
pub fn init_test(cx: &mut TestAppContext) { pub fn init_test(cx: &mut TestAppContext) {
cx.foreground().forbid_parking(); cx.foreground().forbid_parking();
let fonts = cx.font_cache(); let fonts = cx.font_cache();
@ -2303,6 +2550,7 @@ pub mod tests {
cx.update(|cx| { cx.update(|cx| {
cx.set_global(SettingsStore::test(cx)); cx.set_global(SettingsStore::test(cx));
cx.set_global(ActiveSearches::default()); cx.set_global(ActiveSearches::default());
settings::register::<SemanticIndexSettings>(cx);
theme::init((), cx); theme::init((), cx);
cx.update_global::<SettingsStore, _, _>(|store, _| { cx.update_global::<SettingsStore, _, _>(|store, _| {

View file

@ -3,6 +3,7 @@ pub use buffer_search::BufferSearchBar;
use gpui::{actions, Action, AppContext}; use gpui::{actions, Action, AppContext};
use project::search::SearchQuery; use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView}; pub use project_search::{ProjectSearchBar, ProjectSearchView};
use smallvec::SmallVec;
pub mod buffer_search; pub mod buffer_search;
pub mod project_search; pub mod project_search;
@ -21,6 +22,8 @@ actions!(
SelectNextMatch, SelectNextMatch,
SelectPrevMatch, SelectPrevMatch,
SelectAllMatches, SelectAllMatches,
NextHistoryQuery,
PreviousHistoryQuery,
] ]
); );
@ -65,3 +68,187 @@ impl SearchOptions {
options options
} }
} }
const SEARCH_HISTORY_LIMIT: usize = 20;
#[derive(Default, Debug, Clone)]
pub struct SearchHistory {
history: SmallVec<[String; SEARCH_HISTORY_LIMIT]>,
selected: Option<usize>,
}
impl SearchHistory {
pub fn add(&mut self, search_string: String) {
if let Some(i) = self.selected {
if search_string == self.history[i] {
return;
}
}
if let Some(previously_searched) = self.history.last_mut() {
if search_string.find(previously_searched.as_str()).is_some() {
*previously_searched = search_string;
self.selected = Some(self.history.len() - 1);
return;
}
}
self.history.push(search_string);
if self.history.len() > SEARCH_HISTORY_LIMIT {
self.history.remove(0);
}
self.selected = Some(self.history.len() - 1);
}
pub fn next(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let selected = self.selected?;
if selected == history_size - 1 {
return None;
}
let next_index = selected + 1;
self.selected = Some(next_index);
Some(&self.history[next_index])
}
pub fn current(&self) -> Option<&str> {
Some(&self.history[self.selected?])
}
pub fn previous(&mut self) -> Option<&str> {
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let prev_index = match self.selected {
Some(selected_index) => {
if selected_index == 0 {
return None;
} else {
selected_index - 1
}
}
None => history_size - 1,
};
self.selected = Some(prev_index);
Some(&self.history[prev_index])
}
pub fn reset_selection(&mut self) {
self.selected = None;
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.current(),
None,
"No current selection should be set fo the default search history"
);
search_history.add("rust".to_string());
assert_eq!(
search_history.current(),
Some("rust"),
"Newly added item should be selected"
);
// check if duplicates are not added
search_history.add("rust".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should not add a duplicate"
);
assert_eq!(search_history.current(), Some("rust"));
// check if new string containing the previous string replaces it
search_history.add("rustlang".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should replace previous item if it's a substring"
);
assert_eq!(search_history.current(), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..SEARCH_HISTORY_LIMIT * 2 {
search_history.add(format!("item{i}"));
}
assert!(search_history.history.len() <= SEARCH_HISTORY_LIMIT);
}
#[test]
fn test_next_and_previous() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.next(),
None,
"Default search history should not have a next item"
);
search_history.add("Rust".to_string());
assert_eq!(search_history.next(), None);
search_history.add("JavaScript".to_string());
assert_eq!(search_history.next(), None);
search_history.add("TypeScript".to_string());
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.previous(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.previous(), Some("Rust"));
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.previous(), None);
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.next(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.next(), Some("TypeScript"));
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
}
#[test]
fn test_reset_selection() {
let mut search_history = SearchHistory::default();
search_history.add("Rust".to_string());
search_history.add("JavaScript".to_string());
search_history.add("TypeScript".to_string());
assert_eq!(search_history.current(), Some("TypeScript"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
assert_eq!(
search_history.previous(),
Some("TypeScript"),
"Should start from the end after reset on previous item query"
);
search_history.previous();
assert_eq!(search_history.current(), Some("JavaScript"));
search_history.previous();
assert_eq!(search_history.current(), Some("Rust"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
}
}

View file

@ -54,9 +54,12 @@ tempdir.workspace = true
ctor.workspace = true ctor.workspace = true
env_logger.workspace = true env_logger.workspace = true
tree-sitter-typescript = "*" tree-sitter-typescript.workspace = true
tree-sitter-json = "*" tree-sitter-json.workspace = true
tree-sitter-rust = "*" tree-sitter-rust.workspace = true
tree-sitter-toml = "*" tree-sitter-toml.workspace = true
tree-sitter-cpp = "*" tree-sitter-cpp.workspace = true
tree-sitter-elixir = "*" tree-sitter-elixir.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-php.workspace = true

View file

@ -1,7 +1,6 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION}; use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use globset::GlobMatcher; use project::{search::PathMatcher, Fs};
use project::Fs;
use rpc::proto::Timestamp; use rpc::proto::Timestamp;
use rusqlite::{ use rusqlite::{
params, params,
@ -290,8 +289,8 @@ impl VectorDatabase {
pub fn retrieve_included_file_ids( pub fn retrieve_included_file_ids(
&self, &self,
worktree_ids: &[i64], worktree_ids: &[i64],
include_globs: Vec<GlobMatcher>, includes: &[PathMatcher],
exclude_globs: Vec<GlobMatcher>, excludes: &[PathMatcher],
) -> Result<Vec<i64>> { ) -> Result<Vec<i64>> {
let mut file_query = self.db.prepare( let mut file_query = self.db.prepare(
" "
@ -310,13 +309,9 @@ impl VectorDatabase {
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let file_id = row.get(0)?; let file_id = row.get(0)?;
let relative_path = row.get_ref(1)?.as_str()?; let relative_path = row.get_ref(1)?.as_str()?;
let included = include_globs.is_empty() let included =
|| include_globs includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
.iter() let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
.any(|glob| glob.is_match(relative_path));
let excluded = exclude_globs
.iter()
.any(|glob| glob.is_match(relative_path));
if included && !excluded { if included && !excluded {
file_ids.push(file_id); file_ids.push(file_id);
} }

View file

@ -21,7 +21,9 @@ const CODE_CONTEXT_TEMPLATE: &str =
"The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```"; "The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const ENTIRE_FILE_TEMPLATE: &str = const ENTIRE_FILE_TEMPLATE: &str =
"The below snippet is from file '<path>'\n\n```<language>\n<item>\n```"; "The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &["TOML", "YAML", "CSS"]; const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] =
&["TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML"];
pub struct CodeContextRetriever { pub struct CodeContextRetriever {
pub parser: Parser, pub parser: Parser,
@ -59,7 +61,7 @@ impl CodeContextRetriever {
let document_span = ENTIRE_FILE_TEMPLATE let document_span = ENTIRE_FILE_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref()) .replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<language>", language_name.as_ref()) .replace("<language>", language_name.as_ref())
.replace("item", &content); .replace("<item>", &content);
Ok(vec![Document { Ok(vec![Document {
range: 0..content.len(), range: 0..content.len(),
@ -69,6 +71,19 @@ impl CodeContextRetriever {
}]) }])
} }
fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Document>> {
let document_span = MARKDOWN_CONTEXT_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: "Markdown".to_string(),
}])
}
fn get_matches_in_file( fn get_matches_in_file(
&mut self, &mut self,
content: &str, content: &str,
@ -135,6 +150,8 @@ impl CodeContextRetriever {
if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) { if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
return self.parse_entire_file(relative_path, language_name, &content); return self.parse_entire_file(relative_path, language_name, &content);
} else if &language_name.to_string() == &"Markdown".to_string() {
return self.parse_markdown_file(relative_path, &content);
} }
let mut documents = self.parse_file(content, language)?; let mut documents = self.parse_file(content, language)?;
@ -200,7 +217,12 @@ impl CodeContextRetriever {
let mut document_content = String::new(); let mut document_content = String::new();
for context_range in &context_match.context_ranges { for context_range in &context_match.context_ranges {
document_content.push_str(&content[context_range.clone()]); add_content_from_range(
&mut document_content,
content,
context_range.clone(),
context_match.start_col,
);
document_content.push_str("\n"); document_content.push_str("\n");
} }

View file

@ -11,13 +11,12 @@ use anyhow::{anyhow, Result};
use db::VectorDatabase; use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings}; use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future}; use futures::{channel::oneshot, Future};
use globset::GlobMatcher;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle}; use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use language::{Anchor, Buffer, Language, LanguageRegistry}; use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex; use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES}; use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch; use postage::watch;
use project::{Fs, Project, WorktreeId}; use project::{search::PathMatcher, Fs, Project, WorktreeId};
use smol::channel; use smol::channel;
use std::{ use std::{
cmp::Ordering, cmp::Ordering,
@ -613,6 +612,7 @@ impl SemanticIndex {
.await .await
{ {
if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref()) if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
&& &language.name().as_ref() != &"Markdown"
&& language && language
.grammar() .grammar()
.and_then(|grammar| grammar.embedding_config.as_ref()) .and_then(|grammar| grammar.embedding_config.as_ref())
@ -682,8 +682,8 @@ impl SemanticIndex {
project: ModelHandle<Project>, project: ModelHandle<Project>,
phrase: String, phrase: String,
limit: usize, limit: usize,
include_globs: Vec<GlobMatcher>, includes: Vec<PathMatcher>,
exclude_globs: Vec<GlobMatcher>, excludes: Vec<PathMatcher>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> { ) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) { let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
@ -714,11 +714,8 @@ impl SemanticIndex {
.next() .next()
.unwrap(); .unwrap();
let file_ids = database.retrieve_included_file_ids( let file_ids =
&worktree_db_ids, database.retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)?;
include_globs,
exclude_globs,
)?;
let batch_n = cx.background().num_cpus(); let batch_n = cx.background().num_cpus();
let ids_len = file_ids.clone().len(); let ids_len = file_ids.clone().len();

View file

@ -7,11 +7,10 @@ use crate::{
}; };
use anyhow::Result; use anyhow::Result;
use async_trait::async_trait; use async_trait::async_trait;
use globset::Glob;
use gpui::{Task, TestAppContext}; use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset}; use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, FakeFs, Fs, Project}; use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng}; use rand::{rngs::StdRng, Rng};
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
@ -121,8 +120,8 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
); );
// Test Include Files Functonality // Test Include Files Functonality
let include_files = vec![Glob::new("*.rs").unwrap().compile_matcher()]; let include_files = vec![PathMatcher::new("*.rs").unwrap()];
let exclude_files = vec![Glob::new("*.rs").unwrap().compile_matcher()]; let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
let rust_only_search_results = store let rust_only_search_results = store
.update(cx, |store, cx| { .update(cx, |store, cx| {
store.search_project( store.search_project(
@ -486,6 +485,79 @@ async fn test_code_context_retrieval_javascript() {
) )
} }
#[gpui::test]
async fn test_code_context_retrieval_lua() {
let language = lua_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
-- Creates a new class
-- @param baseclass The Baseclass of this class, or nil.
-- @return A new class reference.
function classes.class(baseclass)
-- Create the class definition and metatable.
local classdef = {}
-- Find the super class, either Object or user-defined.
baseclass = baseclass or classes.Object
-- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
setmetatable(classdef, { __index = baseclass })
-- All class instances have a reference to the class object.
classdef.class = classdef
--- Recursivly allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
-- All class instances have a reference to a superclass object.
local instance = { super = baseclass.alloc(mastertable) }
-- Any functions this instance does not know of will 'look up' to the superclass definition.
setmetatable(instance, { __index = classdef, __newindex = mastertable })
return instance
end
end
"#.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(r#"
-- Creates a new class
-- @param baseclass The Baseclass of this class, or nil.
-- @return A new class reference.
function classes.class(baseclass)
-- Create the class definition and metatable.
local classdef = {}
-- Find the super class, either Object or user-defined.
baseclass = baseclass or classes.Object
-- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
setmetatable(classdef, { __index = baseclass })
-- All class instances have a reference to the class object.
classdef.class = classdef
--- Recursivly allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
--[ ... ]--
--[ ... ]--
end
end"#.unindent(),
114),
(r#"
--- Recursivly allocates the inheritance tree of the instance.
-- @param mastertable The 'root' of the inheritance tree.
-- @return Returns the instance with the allocated inheritance tree.
function classdef.alloc(mastertable)
-- All class instances have a reference to a superclass object.
local instance = { super = baseclass.alloc(mastertable) }
-- Any functions this instance does not know of will 'look up' to the superclass definition.
setmetatable(instance, { __index = classdef, __newindex = mastertable })
return instance
end"#.unindent(), 809),
]
);
}
#[gpui::test] #[gpui::test]
async fn test_code_context_retrieval_elixir() { async fn test_code_context_retrieval_elixir() {
let language = elixir_lang(); let language = elixir_lang();
@ -754,6 +826,346 @@ async fn test_code_context_retrieval_cpp() {
); );
} }
#[gpui::test]
async fn test_code_context_retrieval_ruby() {
let language = ruby_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
# This concern is inspired by "sudo mode" on GitHub. It
# is a way to re-authenticate a user before allowing them
# to see or perform an action.
#
# Add `before_action :require_challenge!` to actions you
# want to protect.
#
# The user will be shown a page to enter the challenge (which
# is either the password, or just the username when no
# password exists). Upon passing, there is a grace period
# during which no challenge will be asked from the user.
#
# Accessing challenge-protected resources during the grace
# period will refresh the grace period.
module ChallengableConcern
extend ActiveSupport::Concern
CHALLENGE_TIMEOUT = 1.hour.freeze
def require_challenge!
return if skip_challenge?
if challenge_passed_recently?
session[:challenge_passed_at] = Time.now.utc
return
end
@challenge = Form::Challenge.new(return_to: request.url)
if params.key?(:form_challenge)
if challenge_passed?
session[:challenge_passed_at] = Time.now.utc
else
flash.now[:alert] = I18n.t('challenge.invalid_password')
render_challenge
end
else
render_challenge
end
end
def challenge_passed?
current_user.valid_password?(challenge_params[:current_password])
end
end
class Animal
include Comparable
attr_reader :legs
def initialize(name, legs)
@name, @legs = name, legs
end
def <=>(other)
legs <=> other.legs
end
end
# Singleton method for car object
def car.wheels
puts "There are four wheels"
end"#
.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(
r#"
# This concern is inspired by "sudo mode" on GitHub. It
# is a way to re-authenticate a user before allowing them
# to see or perform an action.
#
# Add `before_action :require_challenge!` to actions you
# want to protect.
#
# The user will be shown a page to enter the challenge (which
# is either the password, or just the username when no
# password exists). Upon passing, there is a grace period
# during which no challenge will be asked from the user.
#
# Accessing challenge-protected resources during the grace
# period will refresh the grace period.
module ChallengableConcern
extend ActiveSupport::Concern
CHALLENGE_TIMEOUT = 1.hour.freeze
def require_challenge!
# ...
end
def challenge_passed?
# ...
end
end"#
.unindent(),
558,
),
(
r#"
def require_challenge!
return if skip_challenge?
if challenge_passed_recently?
session[:challenge_passed_at] = Time.now.utc
return
end
@challenge = Form::Challenge.new(return_to: request.url)
if params.key?(:form_challenge)
if challenge_passed?
session[:challenge_passed_at] = Time.now.utc
else
flash.now[:alert] = I18n.t('challenge.invalid_password')
render_challenge
end
else
render_challenge
end
end"#
.unindent(),
663,
),
(
r#"
def challenge_passed?
current_user.valid_password?(challenge_params[:current_password])
end"#
.unindent(),
1254,
),
(
r#"
class Animal
include Comparable
attr_reader :legs
def initialize(name, legs)
# ...
end
def <=>(other)
# ...
end
end"#
.unindent(),
1363,
),
(
r#"
def initialize(name, legs)
@name, @legs = name, legs
end"#
.unindent(),
1427,
),
(
r#"
def <=>(other)
legs <=> other.legs
end"#
.unindent(),
1501,
),
(
r#"
# Singleton method for car object
def car.wheels
puts "There are four wheels"
end"#
.unindent(),
1591,
),
],
);
}
#[gpui::test]
async fn test_code_context_retrieval_php() {
let language = php_lang();
let mut retriever = CodeContextRetriever::new();
let text = r#"
<?php
namespace LevelUp\Experience\Concerns;
/*
This is a multiple-lines comment block
that spans over multiple
lines
*/
function functionName() {
echo "Hello world!";
}
trait HasAchievements
{
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{
if ($progress > 100) {
throw new Exception(message: 'Progress cannot be greater than 100');
}
if ($this->achievements()->find($achievement->id)) {
throw new Exception(message: 'User already has this Achievement');
}
$this->achievements()->attach($achievement, [
'progress' => $progress ?? null,
]);
$this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
}
public function achievements(): BelongsToMany
{
return $this->belongsToMany(related: Achievement::class)
->withPivot(columns: 'progress')
->where('is_secret', false)
->using(AchievementUser::class);
}
}
interface Multiplier
{
public function qualifies(array $data): bool;
public function setMultiplier(): int;
}
enum AuditType: string
{
case Add = 'add';
case Remove = 'remove';
case Reset = 'reset';
case LevelUp = 'level_up';
}
?>"#
.unindent();
let documents = retriever.parse_file(&text, language.clone()).unwrap();
assert_documents_eq(
&documents,
&[
(
r#"
/*
This is a multiple-lines comment block
that spans over multiple
lines
*/
function functionName() {
echo "Hello world!";
}"#
.unindent(),
123,
),
(
r#"
trait HasAchievements
{
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{/* ... */}
public function achievements(): BelongsToMany
{/* ... */}
}"#
.unindent(),
177,
),
(r#"
/**
* @throws \Exception
*/
public function grantAchievement(Achievement $achievement, $progress = null): void
{
if ($progress > 100) {
throw new Exception(message: 'Progress cannot be greater than 100');
}
if ($this->achievements()->find($achievement->id)) {
throw new Exception(message: 'User already has this Achievement');
}
$this->achievements()->attach($achievement, [
'progress' => $progress ?? null,
]);
$this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
}"#.unindent(), 245),
(r#"
public function achievements(): BelongsToMany
{
return $this->belongsToMany(related: Achievement::class)
->withPivot(columns: 'progress')
->where('is_secret', false)
->using(AchievementUser::class);
}"#.unindent(), 902),
(r#"
interface Multiplier
{
public function qualifies(array $data): bool;
public function setMultiplier(): int;
}"#.unindent(),
1146),
(r#"
enum AuditType: string
{
case Add = 'add';
case Remove = 'remove';
case Reset = 'reset';
case LevelUp = 'level_up';
}"#.unindent(), 1265)
],
);
}
#[gpui::test] #[gpui::test]
fn test_dot_product(mut rng: StdRng) { fn test_dot_product(mut rng: StdRng) {
assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.); assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
@ -1084,6 +1496,131 @@ fn cpp_lang() -> Arc<Language> {
) )
} }
fn lua_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "Lua".into(),
path_suffixes: vec!["lua".into()],
collapsed_placeholder: "--[ ... ]--".to_string(),
..Default::default()
},
Some(tree_sitter_lua::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
(function_declaration
"function" @name
name: (_) @name
(comment)* @collapse
body: (block) @collapse
) @item
)
"#,
)
.unwrap(),
)
}
fn php_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "PHP".into(),
path_suffixes: vec!["php".into()],
collapsed_placeholder: "/* ... */".into(),
..Default::default()
},
Some(tree_sitter_php::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
[
(function_definition
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(trait_declaration
"trait" @name
name: (_) @name)
(method_declaration
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(interface_declaration
"interface" @name
name: (_) @name
)
(enum_declaration
"enum" @name
name: (_) @name
)
] @item
)
"#,
)
.unwrap(),
)
}
fn ruby_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "Ruby".into(),
path_suffixes: vec!["rb".into()],
collapsed_placeholder: "# ...".to_string(),
..Default::default()
},
Some(tree_sitter_ruby::language()),
)
.with_embedding_query(
r#"
(
(comment)* @context
.
[
(module
"module" @name
name: (_) @name)
(method
"def" @name
name: (_) @name
body: (body_statement) @collapse)
(class
"class" @name
name: (_) @name)
(singleton_method
"def" @name
object: (_) @name
"." @name
name: (_) @name
body: (body_statement) @collapse)
] @item
)
"#,
)
.unwrap(),
)
}
fn elixir_lang() -> Arc<Language> { fn elixir_lang() -> Arc<Language> {
Arc::new( Arc::new(
Language::new( Language::new(

View file

@ -202,7 +202,7 @@ where
self.position = D::default(); self.position = D::default();
} }
let mut entry = self.stack.last_mut().unwrap(); let entry = self.stack.last_mut().unwrap();
if !descending { if !descending {
if entry.index == 0 { if entry.index == 0 {
self.stack.pop(); self.stack.pop();
@ -438,6 +438,7 @@ where
} => { } => {
if ascending { if ascending {
entry.index += 1; entry.index += 1;
entry.position = self.position.clone();
} }
for (child_tree, child_summary) in child_trees[entry.index..] for (child_tree, child_summary) in child_trees[entry.index..]

View file

@ -738,7 +738,7 @@ mod tests {
for _ in 0..num_operations { for _ in 0..num_operations {
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1); let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let splice_start = rng.gen_range(0..splice_end + 1); let splice_start = rng.gen_range(0..splice_end + 1);
let count = rng.gen_range(0..3); let count = rng.gen_range(0..10);
let tree_end = tree.extent::<Count>(&()); let tree_end = tree.extent::<Count>(&());
let new_items = rng let new_items = rng
.sample_iter(distributions::Standard) .sample_iter(distributions::Standard)
@ -805,10 +805,12 @@ mod tests {
} }
assert_eq!(filter_cursor.item(), None); assert_eq!(filter_cursor.item(), None);
let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let mut before_start = false; let mut before_start = false;
let mut cursor = tree.cursor::<Count>(); let mut cursor = tree.cursor::<Count>();
cursor.seek(&Count(pos), Bias::Right, &()); let start_pos = rng.gen_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right, &());
let mut pos = rng.gen_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right, &());
for i in 0..10 { for i in 0..10 {
assert_eq!(cursor.start().0, pos); assert_eq!(cursor.start().0, pos);

View file

@ -16,7 +16,7 @@ db = { path = "../db" }
theme = { path = "../theme" } theme = { path = "../theme" }
util = { path = "../util" } util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" } alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "7b9f32300ee0a249c0872302c97635b460e45ba5" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false } procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
smallvec.workspace = true smallvec.workspace = true
smol.workspace = true smol.workspace = true

View file

@ -114,11 +114,7 @@ fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
//Convenience method to convert from a GPUI color to an alacritty Rgb //Convenience method to convert from a GPUI color to an alacritty Rgb
pub fn to_alac_rgb(color: Color) -> AlacRgb { pub fn to_alac_rgb(color: Color) -> AlacRgb {
AlacRgb { AlacRgb::new(color.r, color.g, color.g)
r: color.r,
g: color.g,
b: color.g,
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,20 +1,64 @@
use gpui::{elements::Label, AnyElement, Element, Entity, View, ViewContext}; use gpui::{
elements::{Empty, Label},
AnyElement, Element, Entity, Subscription, View, ViewContext,
};
use settings::SettingsStore;
use workspace::{item::ItemHandle, StatusItemView}; use workspace::{item::ItemHandle, StatusItemView};
use crate::state::Mode; use crate::{state::Mode, Vim, VimEvent, VimModeSetting};
pub struct ModeIndicator { pub struct ModeIndicator {
pub mode: Mode, pub mode: Option<Mode>,
_subscription: Subscription,
} }
impl ModeIndicator { impl ModeIndicator {
pub fn new(mode: Mode) -> Self { pub fn new(cx: &mut ViewContext<Self>) -> Self {
Self { mode } let handle = cx.handle().downgrade();
let _subscription = cx.subscribe_global::<VimEvent, _>(move |&event, cx| {
if let Some(mode_indicator) = handle.upgrade(cx) {
match event {
VimEvent::ModeChanged { mode } => {
cx.update_window(mode_indicator.window_id(), |cx| {
mode_indicator.update(cx, move |mode_indicator, cx| {
mode_indicator.set_mode(mode, cx);
})
});
}
}
}
});
cx.observe_global::<SettingsStore, _>(move |mode_indicator, cx| {
if settings::get::<VimModeSetting>(cx).0 {
mode_indicator.mode = cx
.has_global::<Vim>()
.then(|| cx.global::<Vim>().state.mode);
} else {
mode_indicator.mode.take();
}
})
.detach();
// Vim doesn't exist in some tests
let mode = cx
.has_global::<Vim>()
.then(|| {
let vim = cx.global::<Vim>();
vim.enabled.then(|| vim.state.mode)
})
.flatten();
Self {
mode,
_subscription,
}
} }
pub fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext<Self>) { pub fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext<Self>) {
if mode != self.mode { if self.mode != Some(mode) {
self.mode = mode; self.mode = Some(mode);
cx.notify(); cx.notify();
} }
} }
@ -30,11 +74,16 @@ impl View for ModeIndicator {
} }
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> { fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let Some(mode) = self.mode.as_ref() else {
return Empty::new().into_any();
};
let theme = &theme::current(cx).workspace.status_bar; let theme = &theme::current(cx).workspace.status_bar;
// we always choose text to be 12 monospace characters // we always choose text to be 12 monospace characters
// so that as the mode indicator changes, the rest of the // so that as the mode indicator changes, the rest of the
// UI stays still. // UI stays still.
let text = match self.mode { let text = match mode {
Mode::Normal => "-- NORMAL --", Mode::Normal => "-- NORMAL --",
Mode::Insert => "-- INSERT --", Mode::Insert => "-- INSERT --",
Mode::Visual { line: false } => "-- VISUAL --", Mode::Visual { line: false } => "-- VISUAL --",

View file

@ -93,7 +93,7 @@ fn search_submit(workspace: &mut Workspace, _: &SearchSubmit, cx: &mut ViewConte
pane.update(cx, |pane, cx| { pane.update(cx, |pane, cx| {
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
search_bar.update(cx, |search_bar, cx| { search_bar.update(cx, |search_bar, cx| {
let mut state = &mut vim.state.search; let state = &mut vim.state.search;
let mut count = state.count; let mut count = state.count;
// in the case that the query has changed, the search bar // in the case that the query has changed, the search bar
@ -222,7 +222,7 @@ mod test {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc"); assert_eq!(bar.query(cx), "cc");
}); });
deterministic.run_until_parked(); deterministic.run_until_parked();

View file

@ -99,7 +99,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), ""); assert_eq!(bar.query(cx), "");
}) })
} }
@ -175,7 +175,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc"); assert_eq!(bar.query(cx), "cc");
}); });
// wait for the query editor change event to fire. // wait for the query editor change event to fire.
@ -215,7 +215,7 @@ async fn test_status_indicator(
assert_eq!( assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode), cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Normal Some(Mode::Normal)
); );
// shows the correct mode // shows the correct mode
@ -223,7 +223,7 @@ async fn test_status_indicator(
deterministic.run_until_parked(); deterministic.run_until_parked();
assert_eq!( assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode), cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Insert Some(Mode::Insert)
); );
// shows even in search // shows even in search
@ -231,7 +231,7 @@ async fn test_status_indicator(
deterministic.run_until_parked(); deterministic.run_until_parked();
assert_eq!( assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode), cx.workspace(|_, cx| mode_indicator.read(cx).mode),
Mode::Visual { line: false } Some(Mode::Visual { line: false })
); );
// hides if vim mode is disabled // hides if vim mode is disabled
@ -239,15 +239,15 @@ async fn test_status_indicator(
deterministic.run_until_parked(); deterministic.run_until_parked();
cx.workspace(|workspace, cx| { cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx); let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>(); let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.is_none()); assert!(mode_indicator.read(cx).mode.is_none());
}); });
cx.enable_vim(); cx.enable_vim();
deterministic.run_until_parked(); deterministic.run_until_parked();
cx.workspace(|workspace, cx| { cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx); let status_bar = workspace.status_bar().read(cx);
let mode_indicator = status_bar.item_of_type::<ModeIndicator>(); let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
assert!(mode_indicator.is_some()); assert!(mode_indicator.read(cx).mode.is_some());
}); });
} }

View file

@ -43,6 +43,10 @@ impl<'a> VimTestContext<'a> {
toolbar.add_item(project_search_bar, cx); toolbar.add_item(project_search_bar, cx);
}) })
}); });
workspace.status_bar().update(cx, |status_bar, cx| {
let vim_mode_indicator = cx.add_view(ModeIndicator::new);
status_bar.add_right_item(vim_mode_indicator, cx);
});
}); });
Self { cx } Self { cx }

View file

@ -43,6 +43,11 @@ struct Number(u8);
actions!(vim, [Tab, Enter]); actions!(vim, [Tab, Enter]);
impl_actions!(vim, [Number, SwitchMode, PushOperator]); impl_actions!(vim, [Number, SwitchMode, PushOperator]);
#[derive(Copy, Clone, Debug)]
enum VimEvent {
ModeChanged { mode: Mode },
}
pub fn init(cx: &mut AppContext) { pub fn init(cx: &mut AppContext) {
settings::register::<VimModeSetting>(cx); settings::register::<VimModeSetting>(cx);
@ -121,8 +126,6 @@ pub fn observe_keystrokes(cx: &mut WindowContext) {
pub struct Vim { pub struct Vim {
active_editor: Option<WeakViewHandle<Editor>>, active_editor: Option<WeakViewHandle<Editor>>,
editor_subscription: Option<Subscription>, editor_subscription: Option<Subscription>,
mode_indicator: Option<ViewHandle<ModeIndicator>>,
enabled: bool, enabled: bool,
state: VimState, state: VimState,
} }
@ -181,9 +184,7 @@ impl Vim {
self.state.mode = mode; self.state.mode = mode;
self.state.operator_stack.clear(); self.state.operator_stack.clear();
if let Some(mode_indicator) = &self.mode_indicator { cx.emit_global(VimEvent::ModeChanged { mode });
mode_indicator.update(cx, |mode_indicator, cx| mode_indicator.set_mode(mode, cx))
}
// Sync editor settings like clip mode // Sync editor settings like clip mode
self.sync_vim_settings(cx); self.sync_vim_settings(cx);
@ -271,44 +272,6 @@ impl Vim {
} }
} }
fn sync_mode_indicator(cx: &mut WindowContext) {
let Some(workspace) = cx.root_view()
.downcast_ref::<Workspace>()
.map(|workspace| workspace.downgrade()) else {
return;
};
cx.spawn(|mut cx| async move {
workspace.update(&mut cx, |workspace, cx| {
Vim::update(cx, |vim, cx| {
workspace.status_bar().update(cx, |status_bar, cx| {
let current_position = status_bar.position_of_item::<ModeIndicator>();
if vim.enabled && current_position.is_none() {
if vim.mode_indicator.is_none() {
vim.mode_indicator =
Some(cx.add_view(|_| ModeIndicator::new(vim.state.mode)));
};
let mode_indicator = vim.mode_indicator.as_ref().unwrap();
let position = status_bar
.position_of_item::<language_selector::ActiveBufferLanguage>();
if let Some(position) = position {
status_bar.insert_item_after(position, mode_indicator.clone(), cx)
} else {
status_bar.add_left_item(mode_indicator.clone(), cx)
}
} else if !vim.enabled {
if let Some(position) = current_position {
status_bar.remove_item_at(position, cx)
}
}
})
})
})
})
.detach_and_log_err(cx);
}
fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) { fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) {
if self.enabled != enabled { if self.enabled != enabled {
self.enabled = enabled; self.enabled = enabled;
@ -359,8 +322,6 @@ impl Vim {
self.unhook_vim_settings(editor, cx); self.unhook_vim_settings(editor, cx);
} }
}); });
Vim::sync_mode_indicator(cx);
} }
fn unhook_vim_settings(&self, editor: &mut Editor, cx: &mut ViewContext<Editor>) { fn unhook_vim_settings(&self, editor: &mut Editor, cx: &mut ViewContext<Editor>) {

View file

@ -746,6 +746,10 @@ impl Pane {
_: &CloseAllItems, _: &CloseAllItems,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> { ) -> Option<Task<Result<()>>> {
if self.items.is_empty() {
return None;
}
Some(self.close_items(cx, move |_| true)) Some(self.close_items(cx, move |_| true))
} }

View file

@ -122,6 +122,7 @@ actions!(
NewFile, NewFile,
NewWindow, NewWindow,
CloseWindow, CloseWindow,
CloseInactiveTabsAndPanes,
AddFolderToProject, AddFolderToProject,
Unfollow, Unfollow,
Save, Save,
@ -240,6 +241,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
cx.add_async_action(Workspace::follow_next_collaborator); cx.add_async_action(Workspace::follow_next_collaborator);
cx.add_async_action(Workspace::close); cx.add_async_action(Workspace::close);
cx.add_async_action(Workspace::close_inactive_items_and_panes);
cx.add_global_action(Workspace::close_global); cx.add_global_action(Workspace::close_global);
cx.add_global_action(restart); cx.add_global_action(restart);
cx.add_async_action(Workspace::save_all); cx.add_async_action(Workspace::save_all);
@ -1671,6 +1673,45 @@ impl Workspace {
} }
} }
pub fn close_inactive_items_and_panes(
&mut self,
_: &CloseInactiveTabsAndPanes,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
let current_pane = self.active_pane();
let mut tasks = Vec::new();
if let Some(current_pane_close) = current_pane.update(cx, |pane, cx| {
pane.close_inactive_items(&CloseInactiveItems, cx)
}) {
tasks.push(current_pane_close);
};
for pane in self.panes() {
if pane.id() == current_pane.id() {
continue;
}
if let Some(close_pane_items) = pane.update(cx, |pane: &mut Pane, cx| {
pane.close_all_items(&CloseAllItems, cx)
}) {
tasks.push(close_pane_items)
}
}
if tasks.is_empty() {
None
} else {
Some(cx.spawn(|_, _| async move {
for task in tasks {
task.await?
}
Ok(())
}))
}
}
pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) { pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) {
let dock = match dock_side { let dock = match dock_side {
DockPosition::Left => &self.left_dock, DockPosition::Left => &self.left_dock,

View file

@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor." description = "The fast, collaborative code editor."
edition = "2021" edition = "2021"
name = "zed" name = "zed"
version = "0.98.0" version = "0.99.0"
publish = false publish = false
[lib] [lib]
@ -128,6 +128,7 @@ tree-sitter-svelte.workspace = true
tree-sitter-racket.workspace = true tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true tree-sitter-lua.workspace = true
tree-sitter-nix.workspace = true
url = "2.2" url = "2.2"
urlencoding = "2.1.2" urlencoding = "2.1.2"

View file

@ -152,8 +152,10 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
tree_sitter_php::language(), tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))], vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
); );
language("elm", tree_sitter_elm::language(), vec![]); language("elm", tree_sitter_elm::language(), vec![]);
language("glsl", tree_sitter_glsl::language(), vec![]); language("glsl", tree_sitter_glsl::language(), vec![]);
language("nix", tree_sitter_nix::language(), vec![]);
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]

View file

@ -7,3 +7,4 @@ brackets = [
{ start = "[", end = "]", close = true, newline = true }, { start = "[", end = "]", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
] ]
collapsed_placeholder = "--[ ... ]--"

View file

@ -0,0 +1,10 @@
(
(comment)* @context
.
(function_declaration
"function" @name
name: (_) @name
(comment)* @collapse
body: (block) @collapse
) @item
)

View file

@ -0,0 +1,11 @@
name = "Nix"
path_suffixes = ["nix"]
line_comment = "# "
block_comment = ["/* ", " */"]
autoclose_before = ";:.,=}])>` \n\t\""
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = true, newline = true },
]

View file

@ -0,0 +1,95 @@
(comment) @comment
[
"if"
"then"
"else"
"let"
"inherit"
"in"
"rec"
"with"
"assert"
"or"
] @keyword
[
(string_expression)
(indented_string_expression)
] @string
[
(path_expression)
(hpath_expression)
(spath_expression)
] @string.special.path
(uri_expression) @link_uri
[
(integer_expression)
(float_expression)
] @number
(interpolation
"${" @punctuation.special
"}" @punctuation.special) @embedded
(escape_sequence) @escape
(dollar_escape) @escape
(function_expression
universal: (identifier) @parameter
)
(formal
name: (identifier) @parameter
"?"? @punctuation.delimiter)
(select_expression
attrpath: (attrpath (identifier)) @property)
(apply_expression
function: [
(variable_expression (identifier)) @function
(select_expression
attrpath: (attrpath
attr: (identifier) @function .))])
(unary_expression
operator: _ @operator)
(binary_expression
operator: _ @operator)
(variable_expression (identifier) @variable)
(binding
attrpath: (attrpath (identifier)) @property)
"=" @operator
[
";"
"."
","
] @punctuation.delimiter
[
"("
")"
"["
"]"
"{"
"}"
] @punctuation.bracket
(identifier) @variable
((identifier) @function.builtin
(#match? @function.builtin "^(__add|__addErrorContext|__all|__any|__appendContext|__attrNames|__attrValues|__bitAnd|__bitOr|__bitXor|__catAttrs|__compareVersions|__concatLists|__concatMap|__concatStringsSep|__deepSeq|__div|__elem|__elemAt|__fetchurl|__filter|__filterSource|__findFile|__foldl'|__fromJSON|__functionArgs|__genList|__genericClosure|__getAttr|__getContext|__getEnv|__hasAttr|__hasContext|__hashFile|__hashString|__head|__intersectAttrs|__isAttrs|__isBool|__isFloat|__isFunction|__isInt|__isList|__isPath|__isString|__langVersion|__length|__lessThan|__listToAttrs|__mapAttrs|__match|__mul|__parseDrvName|__partition|__path|__pathExists|__readDir|__readFile|__replaceStrings|__seq|__sort|__split|__splitVersion|__storePath|__stringLength|__sub|__substring|__tail|__toFile|__toJSON|__toPath|__toXML|__trace|__tryEval|__typeOf|__unsafeDiscardOutputDependency|__unsafeDiscardStringContext|__unsafeGetAttrPos|__valueSize|abort|baseNameOf|derivation|derivationStrict|dirOf|fetchGit|fetchMercurial|fetchTarball|fromTOML|import|isNull|map|placeholder|removeAttrs|scopedImport|throw|toString)$")
(#is-not? local))
((identifier) @variable.builtin
(#match? @variable.builtin "^(__currentSystem|__currentTime|__nixPath|__nixVersion|__storeDir|builtins|false|null|true)$")
(#is-not? local))

View file

@ -9,3 +9,4 @@ brackets = [
{ start = "(", end = ")", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
] ]
collapsed_placeholder = "/* ... */"

View file

@ -0,0 +1,36 @@
(
(comment)* @context
.
[
(function_definition
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(trait_declaration
"trait" @name
name: (_) @name)
(method_declaration
"function" @name
name: (_) @name
body: (_
"{" @keep
"}" @keep) @collapse
)
(interface_declaration
"interface" @name
name: (_) @name
)
(enum_declaration
"enum" @name
name: (_) @name
)
] @item
)

View file

@ -8,8 +8,6 @@
name: (_) @name name: (_) @name
) @item ) @item
(method_declaration (method_declaration
"function" @context "function" @context
name: (_) @name name: (_) @name
@ -24,3 +22,8 @@
"enum" @context "enum" @context
name: (_) @name name: (_) @name
) @item ) @item
(trait_declaration
"trait" @context
name: (_) @name
) @item

View file

@ -10,3 +10,4 @@ brackets = [
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
] ]
collapsed_placeholder = "# ..."

View file

@ -0,0 +1,22 @@
(
(comment)* @context
.
[
(module
"module" @name
name: (_) @name)
(method
"def" @name
name: (_) @name
body: (body_statement) @collapse)
(class
"class" @name
name: (_) @name)
(singleton_method
"def" @name
object: (_) @name
"." @name
name: (_) @name
body: (body_statement) @collapse)
] @item
)

View file

@ -45,6 +45,7 @@ use std::{
use sum_tree::Bias; use sum_tree::Bias;
use terminal_view::{get_working_directory, TerminalSettings, TerminalView}; use terminal_view::{get_working_directory, TerminalSettings, TerminalView};
use util::{ use util::{
channel::ReleaseChannel,
http::{self, HttpClient}, http::{self, HttpClient},
paths::PathLikeWithPosition, paths::PathLikeWithPosition,
}; };
@ -136,7 +137,7 @@ fn main() {
languages.set_executor(cx.background().clone()); languages.set_executor(cx.background().clone());
languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone()); languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
let languages = Arc::new(languages); let languages = Arc::new(languages);
let node_runtime = NodeRuntime::instance(http.clone(), cx.background().to_owned()); let node_runtime = NodeRuntime::instance(http.clone());
languages::init(languages.clone(), node_runtime.clone()); languages::init(languages.clone(), node_runtime.clone());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx)); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
@ -415,22 +416,41 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
panic::set_hook(Box::new(move |info| { panic::set_hook(Box::new(move |info| {
let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst); let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst);
if prior_panic_count > 0 { if prior_panic_count > 0 {
std::panic::resume_unwind(Box::new(())); // Give the panic-ing thread time to write the panic file
loop {
std::thread::yield_now();
}
}
let thread = thread::current();
let thread_name = thread.name().unwrap_or("<unnamed>");
let payload = info
.payload()
.downcast_ref::<&str>()
.map(|s| s.to_string())
.or_else(|| info.payload().downcast_ref::<String>().map(|s| s.clone()))
.unwrap_or_else(|| "Box<Any>".to_string());
if *util::channel::RELEASE_CHANNEL == ReleaseChannel::Dev {
let location = info.location().unwrap();
let backtrace = Backtrace::new();
eprintln!(
"Thread {:?} panicked with {:?} at {}:{}:{}\n{:?}",
thread_name,
payload,
location.file(),
location.line(),
location.column(),
backtrace,
);
std::process::exit(-1);
} }
let app_version = ZED_APP_VERSION let app_version = ZED_APP_VERSION
.or_else(|| platform.app_version().ok()) .or_else(|| platform.app_version().ok())
.map_or("dev".to_string(), |v| v.to_string()); .map_or("dev".to_string(), |v| v.to_string());
let thread = thread::current();
let thread = thread.name().unwrap_or("<unnamed>");
let payload = info.payload();
let payload = None
.or_else(|| payload.downcast_ref::<&str>().map(|s| s.to_string()))
.or_else(|| payload.downcast_ref::<String>().map(|s| s.clone()))
.unwrap_or_else(|| "Box<Any>".to_string());
let backtrace = Backtrace::new(); let backtrace = Backtrace::new();
let mut backtrace = backtrace let mut backtrace = backtrace
.frames() .frames()
@ -447,7 +467,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
} }
let panic_data = Panic { let panic_data = Panic {
thread: thread.into(), thread: thread_name.into(),
payload: payload.into(), payload: payload.into(),
location_data: info.location().map(|location| LocationData { location_data: info.location().map(|location| LocationData {
file: location.file().into(), file: location.file().into(),
@ -717,7 +737,7 @@ async fn watch_languages(_: Arc<dyn Fs>, _: Arc<LanguageRegistry>) -> Option<()>
} }
#[cfg(not(debug_assertions))] #[cfg(not(debug_assertions))]
fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {} fn watch_file_types(_fs: Arc<dyn Fs>, _cx: &mut AppContext) {}
fn connect_to_cli( fn connect_to_cli(
server_name: &str, server_name: &str,

View file

@ -308,6 +308,7 @@ pub fn initialize_workspace(
); );
let active_buffer_language = let active_buffer_language =
cx.add_view(|_| language_selector::ActiveBufferLanguage::new(workspace)); cx.add_view(|_| language_selector::ActiveBufferLanguage::new(workspace));
let vim_mode_indicator = cx.add_view(|cx| vim::ModeIndicator::new(cx));
let feedback_button = cx.add_view(|_| { let feedback_button = cx.add_view(|_| {
feedback::deploy_feedback_button::DeployFeedbackButton::new(workspace) feedback::deploy_feedback_button::DeployFeedbackButton::new(workspace)
}); });
@ -319,6 +320,7 @@ pub fn initialize_workspace(
status_bar.add_right_item(feedback_button, cx); status_bar.add_right_item(feedback_button, cx);
status_bar.add_right_item(copilot, cx); status_bar.add_right_item(copilot, cx);
status_bar.add_right_item(active_buffer_language, cx); status_bar.add_right_item(active_buffer_language, cx);
status_bar.add_right_item(vim_mode_indicator, cx);
status_bar.add_right_item(cursor_position, cx); status_bar.add_right_item(cursor_position, cx);
}); });
@ -543,7 +545,6 @@ pub fn handle_keymap_file_changes(
reload_keymaps(cx, &keymap_content); reload_keymaps(cx, &keymap_content);
} }
}) })
.detach();
})); }));
} }
} }
@ -2362,7 +2363,7 @@ mod tests {
languages.set_executor(cx.background().clone()); languages.set_executor(cx.background().clone());
let languages = Arc::new(languages); let languages = Arc::new(languages);
let http = FakeHttpClient::with_404_response(); let http = FakeHttpClient::with_404_response();
let node_runtime = NodeRuntime::instance(http, cx.background().to_owned()); let node_runtime = NodeRuntime::instance(http);
languages::init(languages.clone(), node_runtime); languages::init(languages.clone(), node_runtime);
for name in languages.language_names() { for name in languages.language_names() {
languages.language_for_name(&name); languages.language_for_name(&name);

29
docs/theme/generating-theme-types.md vendored Normal file
View file

@ -0,0 +1,29 @@
[⬅ Back to Index](../index.md)
# Generating Theme Types
## How to generate theme types:
Run the script:
```bash
./script/build-theme-types
```
Types are generated in `styles/src/types/zed.ts`
## How it works:
1. Rust types
The `crates/theme` crate contains the theme types.
The `schemars` crate is used to generate a JSON schema from the theme structs.
Every struct that represents a theme type has a `#[derive(JsonSchema)]` attribute.
The task located at `crates/xtask/src/main.rs` generates a JSON schema from the theme structs.
2. TypeScript types
Script `npm run build-types` from `styles` package generates TypeScript types from the JSON schema and saves them to `styles/src/types/zed.ts`.

View file

@ -1,4 +1,4 @@
[toolchain] [toolchain]
channel = "1.70" channel = "1.71"
components = [ "rustfmt" ] components = [ "rustfmt" ]
targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ] targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ]

View file

@ -170,8 +170,8 @@ export default function editor(): any {
line_number: with_opacity(foreground(layer), 0.35), line_number: with_opacity(foreground(layer), 0.35),
line_number_active: foreground(layer), line_number_active: foreground(layer),
rename_fade: 0.6, rename_fade: 0.6,
wrap_guide: with_opacity(foreground(layer), 0.1), wrap_guide: with_opacity(foreground(layer), 0.05),
active_wrap_guide: with_opacity(foreground(layer), 0.2), active_wrap_guide: with_opacity(foreground(layer), 0.1),
unnecessary_code_fade: 0.5, unnecessary_code_fade: 0.5,
selection: theme.players[0], selection: theme.players[0],
whitespace: theme.ramps.neutral(0.5).hex(), whitespace: theme.ramps.neutral(0.5).hex(),

View file

@ -44,7 +44,7 @@ export default function status_bar(): any {
icon_spacing: 4, icon_spacing: 4,
icon_width: 14, icon_width: 14,
height: 18, height: 18,
message: text(layer, "sans"), message: text(layer, "sans", { size: "xs" }),
icon_color: foreground(layer), icon_color: foreground(layer),
}, },
state: { state: {