catch up with main

Commit b9fdfd60f0 by KCaverly, 2023-07-18 10:26:28 -04:00
93 changed files with 3016 additions and 760 deletions

View file

@ -148,8 +148,8 @@ jobs:
- name: Create app bundle - name: Create app bundle
run: script/bundle run: script/bundle
- name: Upload app bundle to workflow run if main branch or specifi label - name: Upload app bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v3
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }} if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
with: with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg

Cargo.lock (generated, 48 changed lines)
View file

@ -1243,9 +1243,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.3.14" version = "4.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98330784c494e49850cb23b8e2afcca13587d2500b2e3f1f78ae20248059c9be" checksum = "8f644d0dac522c8b05ddc39aaaccc5b136d5dc4ff216610c5641e3be5becf56c"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive 4.3.12", "clap_derive 4.3.12",
@ -1254,9 +1254,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.3.14" version = "4.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e182eb5f2562a67dda37e2c57af64d720a9e010c5e860ed87c056586aeafa52e" checksum = "af410122b9778e024f9e0fb35682cc09cc3f85cad5e8d3ba8f47a9702df6e73d"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@ -2253,7 +2253,6 @@ dependencies = [
"theme", "theme",
"tree-sitter", "tree-sitter",
"tree-sitter-html", "tree-sitter-html",
"tree-sitter-javascript",
"tree-sitter-rust", "tree-sitter-rust",
"tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)", "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
"unindent", "unindent",
@ -3750,15 +3749,16 @@ dependencies = [
"text", "text",
"theme", "theme",
"tree-sitter", "tree-sitter",
"tree-sitter-elixir 0.1.0 (git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e)",
"tree-sitter-embedded-template", "tree-sitter-embedded-template",
"tree-sitter-heex",
"tree-sitter-html", "tree-sitter-html",
"tree-sitter-javascript", "tree-sitter-json 0.20.0",
"tree-sitter-json 0.19.0",
"tree-sitter-markdown", "tree-sitter-markdown",
"tree-sitter-python", "tree-sitter-python",
"tree-sitter-ruby", "tree-sitter-ruby",
"tree-sitter-rust", "tree-sitter-rust",
"tree-sitter-typescript 0.20.2 (registry+https://github.com/rust-lang/crates.io-index)", "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
"unicase", "unicase",
"unindent", "unindent",
"util", "util",
@ -8083,16 +8083,6 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-javascript"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-json" name = "tree-sitter-json"
version = "0.19.0" version = "0.19.0"
@ -8131,6 +8121,15 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-php"
version = "0.19.1"
source = "git+https://github.com/tree-sitter/tree-sitter-php?rev=d43130fd1525301e9826f420c5393a4d169819fc#d43130fd1525301e9826f420c5393a4d169819fc"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-python" name = "tree-sitter-python"
version = "0.20.2" version = "0.20.2"
@ -8179,6 +8178,15 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-svelte"
version = "0.10.2"
source = "git+https://github.com/Himujjal/tree-sitter-svelte?rev=697bb515471871e85ff799ea57a76298a71a9cca#697bb515471871e85ff799ea57a76298a71a9cca"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-toml" name = "tree-sitter-toml"
version = "0.5.1" version = "0.5.1"
@ -9449,7 +9457,7 @@ name = "xtask"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"clap 4.3.14", "clap 4.3.15",
"schemars", "schemars",
"serde_json", "serde_json",
"theme", "theme",
@ -9580,11 +9588,13 @@ dependencies = [
"tree-sitter-json 0.20.0", "tree-sitter-json 0.20.0",
"tree-sitter-lua", "tree-sitter-lua",
"tree-sitter-markdown", "tree-sitter-markdown",
"tree-sitter-php",
"tree-sitter-python", "tree-sitter-python",
"tree-sitter-racket", "tree-sitter-racket",
"tree-sitter-ruby", "tree-sitter-ruby",
"tree-sitter-rust", "tree-sitter-rust",
"tree-sitter-scheme", "tree-sitter-scheme",
"tree-sitter-svelte",
"tree-sitter-toml 0.5.1", "tree-sitter-toml 0.5.1",
"tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)", "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
"tree-sitter-yaml", "tree-sitter-yaml",

View file

@ -107,6 +107,28 @@ tree-sitter = "0.20"
unindent = { version = "0.1.7" } unindent = { version = "0.1.7" }
pretty_assertions = "1.3.0" pretty_assertions = "1.3.0"
tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0"
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
tree-sitter-rust = "0.20.3"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-php = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "d43130fd1525301e9826f420c5393a4d169819fc" }
tree-sitter-python = "0.20.2"
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-ruby = "0.20.0"
tree-sitter-html = "0.19.0"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"}
tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "697bb515471871e85ff799ea57a76298a71a9cca"}
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
[patch.crates-io] [patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" } tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" } async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }

View file

@ -9,6 +9,7 @@
"context": "Editor", "context": "Editor",
"bindings": { "bindings": {
"cmd-b": "editor::GoToDefinition", "cmd-b": "editor::GoToDefinition",
"alt-cmd-b": "editor::GoToDefinitionSplit",
"cmd-<": "editor::ScrollCursorCenter", "cmd-<": "editor::ScrollCursorCenter",
"cmd-g": [ "cmd-g": [
"editor::SelectNext", "editor::SelectNext",

View file

@ -13,6 +13,7 @@
"cmd-up": "menu::SelectFirst", "cmd-up": "menu::SelectFirst",
"cmd-down": "menu::SelectLast", "cmd-down": "menu::SelectLast",
"enter": "menu::Confirm", "enter": "menu::Confirm",
"cmd-enter": "menu::SecondaryConfirm",
"escape": "menu::Cancel", "escape": "menu::Cancel",
"ctrl-c": "menu::Cancel", "ctrl-c": "menu::Cancel",
"cmd-{": "pane::ActivatePrevItem", "cmd-{": "pane::ActivatePrevItem",
@ -194,8 +195,8 @@
{ {
"context": "Editor && mode == auto_height", "context": "Editor && mode == auto_height",
"bindings": { "bindings": {
"alt-enter": "editor::Newline", "shift-enter": "editor::Newline",
"cmd-alt-enter": "editor::NewlineBelow" "cmd-shift-enter": "editor::NewlineBelow"
} }
}, },
{ {
@ -221,7 +222,8 @@
"escape": "buffer_search::Dismiss", "escape": "buffer_search::Dismiss",
"tab": "buffer_search::FocusEditor", "tab": "buffer_search::FocusEditor",
"enter": "search::SelectNextMatch", "enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch" "shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches"
} }
}, },
{ {
@ -242,6 +244,7 @@
"cmd-f": "project_search::ToggleFocus", "cmd-f": "project_search::ToggleFocus",
"cmd-g": "search::SelectNextMatch", "cmd-g": "search::SelectNextMatch",
"cmd-shift-g": "search::SelectPrevMatch", "cmd-shift-g": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"alt-cmd-c": "search::ToggleCaseSensitive", "alt-cmd-c": "search::ToggleCaseSensitive",
"alt-cmd-w": "search::ToggleWholeWord", "alt-cmd-w": "search::ToggleWholeWord",
"alt-cmd-r": "search::ToggleRegex" "alt-cmd-r": "search::ToggleRegex"
@ -296,7 +299,9 @@
"shift-f8": "editor::GoToPrevDiagnostic", "shift-f8": "editor::GoToPrevDiagnostic",
"f2": "editor::Rename", "f2": "editor::Rename",
"f12": "editor::GoToDefinition", "f12": "editor::GoToDefinition",
"alt-f12": "editor::GoToDefinitionSplit",
"cmd-f12": "editor::GoToTypeDefinition", "cmd-f12": "editor::GoToTypeDefinition",
"alt-cmd-f12": "editor::GoToTypeDefinitionSplit",
"alt-shift-f12": "editor::FindAllReferences", "alt-shift-f12": "editor::FindAllReferences",
"ctrl-m": "editor::MoveToEnclosingBracket", "ctrl-m": "editor::MoveToEnclosingBracket",
"alt-cmd-[": "editor::Fold", "alt-cmd-[": "editor::Fold",

View file

@ -46,8 +46,9 @@
"alt-f7": "editor::FindAllReferences", "alt-f7": "editor::FindAllReferences",
"cmd-alt-f7": "editor::FindAllReferences", "cmd-alt-f7": "editor::FindAllReferences",
"cmd-b": "editor::GoToDefinition", "cmd-b": "editor::GoToDefinition",
"cmd-alt-b": "editor::GoToDefinition", "cmd-alt-b": "editor::GoToDefinitionSplit",
"cmd-shift-b": "editor::GoToTypeDefinition", "cmd-shift-b": "editor::GoToTypeDefinition",
"cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit",
"alt-enter": "editor::ToggleCodeActions", "alt-enter": "editor::ToggleCodeActions",
"f2": "editor::GoToDiagnostic", "f2": "editor::GoToDiagnostic",
"cmd-f2": "editor::GoToPrevDiagnostic", "cmd-f2": "editor::GoToPrevDiagnostic",

View file

@ -20,6 +20,7 @@
"cmd-shift-a": "editor::SelectLargerSyntaxNode", "cmd-shift-a": "editor::SelectLargerSyntaxNode",
"shift-f12": "editor::FindAllReferences", "shift-f12": "editor::FindAllReferences",
"alt-cmd-down": "editor::GoToDefinition", "alt-cmd-down": "editor::GoToDefinition",
"ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",
"alt-shift-cmd-down": "editor::FindAllReferences", "alt-shift-cmd-down": "editor::FindAllReferences",
"ctrl-.": "editor::GoToHunk", "ctrl-.": "editor::GoToHunk",
"ctrl-,": "editor::GoToPrevHunk", "ctrl-,": "editor::GoToPrevHunk",

View file

@ -12,6 +12,7 @@
"cmd-l": "go_to_line::Toggle", "cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLine", "ctrl-shift-d": "editor::DuplicateLine",
"cmd-b": "editor::GoToDefinition", "cmd-b": "editor::GoToDefinition",
"alt-cmd-b": "editor::GoToDefinition",
"cmd-j": "editor::ScrollCursorCenter", "cmd-j": "editor::ScrollCursorCenter",
"cmd-shift-l": "editor::SelectLine", "cmd-shift-l": "editor::SelectLine",
"cmd-shift-t": "outline::Toggle", "cmd-shift-t": "outline::Toggle",

View file

@ -99,6 +99,10 @@
"vim::SwitchMode", "vim::SwitchMode",
"Normal" "Normal"
], ],
"ctrl+[": [
"vim::SwitchMode",
"Normal"
],
"0": "vim::StartOfLine", // When no number operator present, use start of line motion "0": "vim::StartOfLine", // When no number operator present, use start of line motion
"1": [ "1": [
"vim::Number", "vim::Number",
@ -234,10 +238,6 @@
"h": "editor::Hover", "h": "editor::Hover",
"t": "pane::ActivateNextItem", "t": "pane::ActivateNextItem",
"shift-t": "pane::ActivatePrevItem", "shift-t": "pane::ActivatePrevItem",
"escape": [
"vim::SwitchMode",
"Normal"
],
"d": "editor::GoToDefinition" "d": "editor::GoToDefinition"
} }
}, },
@ -265,10 +265,6 @@
"t": "editor::ScrollCursorTop", "t": "editor::ScrollCursorTop",
"z": "editor::ScrollCursorCenter", "z": "editor::ScrollCursorCenter",
"b": "editor::ScrollCursorBottom", "b": "editor::ScrollCursorBottom",
"escape": [
"vim::SwitchMode",
"Normal"
]
} }
}, },
{ {
@ -322,7 +318,8 @@
"context": "Editor && vim_mode == insert", "context": "Editor && vim_mode == insert",
"bindings": { "bindings": {
"escape": "vim::NormalBefore", "escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore" "ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
} }
}, },
{ {
@ -333,6 +330,10 @@
"escape": [ "escape": [
"vim::SwitchMode", "vim::SwitchMode",
"Normal" "Normal"
],
"ctrl+[": [
"vim::SwitchMode",
"Normal"
] ]
} }
} }

View file

@ -24,6 +24,17 @@
}, },
// The default font size for text in the editor // The default font size for text in the editor
"buffer_font_size": 15, "buffer_font_size": 15,
// Set the buffer's line height.
// May take 3 values:
// 1. Use a line height that's comfortable for reading (1.618)
// "line_height": "comfortable"
// 2. Use a standard line height, (1.3)
// "line_height": "standard",
// 3. Use a custom line height
// "line_height": {
// "custom": 2
// },
"buffer_line_height": "comfortable",
// The factor to grow the active pane by. Defaults to 1.0 // The factor to grow the active pane by. Defaults to 1.0
// which gives the same size as all other panes. // which gives the same size as all other panes.
"active_pane_magnification": 1.0, "active_pane_magnification": 1.0,
@ -117,6 +128,13 @@
// 4. Save when idle for a certain amount of time: // 4. Save when idle for a certain amount of time:
// "autosave": { "after_delay": {"milliseconds": 500} }, // "autosave": { "after_delay": {"milliseconds": 500} },
"autosave": "off", "autosave": "off",
// Settings related to the editor's tabs
"tabs": {
// Show git status colors in the editor tabs.
"git_status": false,
// Position of the close button on the editor tabs.
"close_position": "right"
},
// Whether or not to remove any trailing whitespace from lines of a buffer // Whether or not to remove any trailing whitespace from lines of a buffer
// before saving it. // before saving it.
"remove_trailing_whitespace_on_save": true, "remove_trailing_whitespace_on_save": true,
@ -282,7 +300,6 @@
// "line_height": { // "line_height": {
// "custom": 2 // "custom": 2
// }, // },
//
"line_height": "comfortable" "line_height": "comfortable"
// Set the terminal's font size. If this option is not included, // Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size. // the terminal will default to matching the buffer's font size.
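
For illustration, the new options documented in the comments above could appear in a user settings.json as follows; this is a minimal sketch and the specific values are examples only, not defaults from this commit:

{
  // Use a custom buffer line height instead of the "comfortable" default (1.618).
  "buffer_line_height": {
    "custom": 1.8
  },
  "tabs": {
    // Color tab labels by git status.
    "git_status": true,
    // Move the close button to the other side of each tab (example value).
    "close_position": "left"
  }
}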

View file

@ -7217,7 +7217,7 @@ async fn test_peers_following_each_other(
// Clients A and B follow each other in split panes // Clients A and B follow each other in split panes
workspace_a.update(cx_a, |workspace, cx| { workspace_a.update(cx_a, |workspace, cx| {
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
}); });
workspace_a workspace_a
.update(cx_a, |workspace, cx| { .update(cx_a, |workspace, cx| {
@ -7228,7 +7228,7 @@ async fn test_peers_following_each_other(
.await .await
.unwrap(); .unwrap();
workspace_b.update(cx_b, |workspace, cx| { workspace_b.update(cx_b, |workspace, cx| {
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
}); });
workspace_b workspace_b
.update(cx_b, |workspace, cx| { .update(cx_b, |workspace, cx| {
@ -7455,7 +7455,7 @@ async fn test_auto_unfollowing(
// When client B activates a different pane, it continues following client A in the original pane. // When client B activates a different pane, it continues following client A in the original pane.
workspace_b.update(cx_b, |workspace, cx| { workspace_b.update(cx_b, |workspace, cx| {
workspace.split_pane(pane_b.clone(), SplitDirection::Right, cx) workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
}); });
assert_eq!( assert_eq!(
workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),

View file

@ -67,7 +67,7 @@ impl PickerDelegate for ContactFinderDelegate {
}) })
} }
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
if let Some(user) = self.potential_contacts.get(self.selected_index) { if let Some(user) = self.potential_contacts.get(self.selected_index) {
let user_store = self.user_store.read(cx); let user_store = self.user_store.read(cx);
match user_store.contact_request_status(user) { match user_store.contact_request_status(user) {

View file

@ -160,7 +160,7 @@ impl PickerDelegate for CommandPaletteDelegate {
fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {} fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
if !self.matches.is_empty() { if !self.matches.is_empty() {
let window_id = cx.window_id(); let window_id = cx.window_id();
let focused_view_id = self.focused_view_id; let focused_view_id = self.focused_view_id;

View file

@ -7,7 +7,6 @@ use anyhow::Context;
use gpui::AppContext; use gpui::AppContext;
pub use indoc::indoc; pub use indoc::indoc;
pub use lazy_static; pub use lazy_static;
use parking_lot::{Mutex, RwLock};
pub use smol; pub use smol;
pub use sqlez; pub use sqlez;
pub use sqlez_macros; pub use sqlez_macros;
@ -17,11 +16,9 @@ pub use util::paths::DB_DIR;
use sqlez::domain::Migrator; use sqlez::domain::Migrator;
use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql; use sqlez_macros::sql;
use std::fs::create_dir_all;
use std::future::Future; use std::future::Future;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{SystemTime, UNIX_EPOCH};
use util::channel::ReleaseChannel; use util::channel::ReleaseChannel;
use util::{async_iife, ResultExt}; use util::{async_iife, ResultExt};
@ -42,10 +39,8 @@ const DB_FILE_NAME: &'static str = "db.sqlite";
lazy_static::lazy_static! { lazy_static::lazy_static! {
pub static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty()); pub static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false); pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
} }
static DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
/// Open or create a database at the given directory path. /// Open or create a database at the given directory path.
/// This will retry a couple times if there are failures. If opening fails once, the db directory /// This will retry a couple times if there are failures. If opening fails once, the db directory
@ -63,66 +58,14 @@ pub async fn open_db<M: Migrator + 'static>(
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
let connection = async_iife!({ let connection = async_iife!({
// Note: This still has a race condition where 1 set of migrations succeeds smol::fs::create_dir_all(&main_db_dir)
// (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal)) .await
// This will cause the first connection to have the database taken out .context("Could not create db directory")
// from under it. This *should* be fine though. The second dabatase failure will .log_err()?;
// cause errors in the log and so should be observed by developers while writing
// soon-to-be good migrations. If user databases are corrupted, we toss them out
// and try again from a blank. As long as running all migrations from start to end
// on a blank database is ok, this race condition will never be triggered.
//
// Basically: Don't ever push invalid migrations to stable or everyone will have
// a bad time.
// If no db folder, create one at 0-{channel}
create_dir_all(&main_db_dir).context("Could not create db directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME)); let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
open_main_db(&db_path).await
// Optimistically open databases in parallel })
if !DB_FILE_OPERATIONS.is_locked() { .await;
// Try building a connection
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
}
// Take a lock in the failure case so that we move the db once per process instead
// of potentially multiple times from different threads. This shouldn't happen in the
// normal path
let _lock = DB_FILE_OPERATIONS.lock();
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
let backup_timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
.as_millis();
// If failed, move 0-{channel} to {current unix timestamp}-{channel}
let backup_db_dir = db_dir.join(Path::new(&format!(
"{}-{}",
backup_timestamp,
release_channel_name,
)));
std::fs::rename(&main_db_dir, &backup_db_dir)
.context("Failed clean up corrupted database, panicking.")?;
// Set a static ref with the failed timestamp and error so we can notify the user
{
let mut guard = BACKUP_DB_PATH.write();
*guard = Some(backup_db_dir);
}
// Create a new 0-{channel}
create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
// Try again
open_main_db(&db_path).await.context("Could not newly created db")
}).await.log_err();
if let Some(connection) = connection { if let Some(connection) = connection {
return connection; return connection;
@ -249,13 +192,13 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::{fs, thread}; use std::thread;
use sqlez::{connection::Connection, domain::Domain}; use sqlez::domain::Domain;
use sqlez_macros::sql; use sqlez_macros::sql;
use tempdir::TempDir; use tempdir::TempDir;
use crate::{open_db, DB_FILE_NAME}; use crate::open_db;
// Test bad migration panics // Test bad migration panics
#[gpui::test] #[gpui::test]
@ -321,31 +264,10 @@ mod tests {
.unwrap() .unwrap()
.is_none() .is_none()
); );
let mut corrupted_backup_dir = fs::read_dir(tempdir.path())
.unwrap()
.find(|entry| {
!entry
.as_ref()
.unwrap()
.file_name()
.to_str()
.unwrap()
.starts_with("0")
})
.unwrap()
.unwrap()
.path();
corrupted_backup_dir.push(DB_FILE_NAME);
let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()()
.unwrap()
.is_none());
} }
/// Test that DB exists but corrupted (causing recreate) /// Test that DB exists but corrupted (causing recreate)
#[gpui::test] #[gpui::test(iterations = 30)]
async fn test_simultaneous_db_corruption() { async fn test_simultaneous_db_corruption() {
enum CorruptedDB {} enum CorruptedDB {}

View file

@ -57,16 +57,16 @@ ordered-float.workspace = true
parking_lot.workspace = true parking_lot.workspace = true
postage.workspace = true postage.workspace = true
pulldown-cmark = { version = "0.9.2", default-features = false } pulldown-cmark = { version = "0.9.2", default-features = false }
rand = { workspace = true, optional = true }
schemars.workspace = true schemars.workspace = true
serde.workspace = true serde.workspace = true
serde_derive.workspace = true serde_derive.workspace = true
smallvec.workspace = true smallvec.workspace = true
smol.workspace = true smol.workspace = true
tree-sitter-rust = { version = "*", optional = true }
tree-sitter-html = { version = "*", optional = true } rand = { workspace = true, optional = true }
tree-sitter-javascript = { version = "*", optional = true } tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259", optional = true } tree-sitter-html = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies] [dev-dependencies]
copilot = { path = "../copilot", features = ["test-support"] } copilot = { path = "../copilot", features = ["test-support"] }
@ -84,7 +84,6 @@ env_logger.workspace = true
rand.workspace = true rand.workspace = true
unindent.workspace = true unindent.workspace = true
tree-sitter.workspace = true tree-sitter.workspace = true
tree-sitter-rust = "0.20" tree-sitter-rust.workspace = true
tree-sitter-html = "0.19" tree-sitter-html.workspace = true
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" } tree-sitter-typescript.workspace = true
tree-sitter-javascript = "0.20"

View file

@ -271,7 +271,9 @@ actions!(
SelectLargerSyntaxNode, SelectLargerSyntaxNode,
SelectSmallerSyntaxNode, SelectSmallerSyntaxNode,
GoToDefinition, GoToDefinition,
GoToDefinitionSplit,
GoToTypeDefinition, GoToTypeDefinition,
GoToTypeDefinitionSplit,
MoveToEnclosingBracket, MoveToEnclosingBracket,
UndoSelection, UndoSelection,
RedoSelection, RedoSelection,
@ -407,7 +409,9 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(Editor::go_to_hunk); cx.add_action(Editor::go_to_hunk);
cx.add_action(Editor::go_to_prev_hunk); cx.add_action(Editor::go_to_prev_hunk);
cx.add_action(Editor::go_to_definition); cx.add_action(Editor::go_to_definition);
cx.add_action(Editor::go_to_definition_split);
cx.add_action(Editor::go_to_type_definition); cx.add_action(Editor::go_to_type_definition);
cx.add_action(Editor::go_to_type_definition_split);
cx.add_action(Editor::fold); cx.add_action(Editor::fold);
cx.add_action(Editor::fold_at); cx.add_action(Editor::fold_at);
cx.add_action(Editor::unfold_lines); cx.add_action(Editor::unfold_lines);
@ -494,6 +498,7 @@ pub enum SoftWrap {
#[derive(Clone)] #[derive(Clone)]
pub struct EditorStyle { pub struct EditorStyle {
pub text: TextStyle, pub text: TextStyle,
pub line_height_scalar: f32,
pub placeholder_text: Option<TextStyle>, pub placeholder_text: Option<TextStyle>,
pub theme: theme::Editor, pub theme: theme::Editor,
pub theme_id: usize, pub theme_id: usize,
@ -6184,14 +6189,31 @@ impl Editor {
} }
pub fn go_to_definition(&mut self, _: &GoToDefinition, cx: &mut ViewContext<Self>) { pub fn go_to_definition(&mut self, _: &GoToDefinition, cx: &mut ViewContext<Self>) {
self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, cx); self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, cx);
} }
pub fn go_to_type_definition(&mut self, _: &GoToTypeDefinition, cx: &mut ViewContext<Self>) { pub fn go_to_type_definition(&mut self, _: &GoToTypeDefinition, cx: &mut ViewContext<Self>) {
self.go_to_definition_of_kind(GotoDefinitionKind::Type, cx); self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, cx);
} }
fn go_to_definition_of_kind(&mut self, kind: GotoDefinitionKind, cx: &mut ViewContext<Self>) { pub fn go_to_definition_split(&mut self, _: &GoToDefinitionSplit, cx: &mut ViewContext<Self>) {
self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, cx);
}
pub fn go_to_type_definition_split(
&mut self,
_: &GoToTypeDefinitionSplit,
cx: &mut ViewContext<Self>,
) {
self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, cx);
}
fn go_to_definition_of_kind(
&mut self,
kind: GotoDefinitionKind,
split: bool,
cx: &mut ViewContext<Self>,
) {
let Some(workspace) = self.workspace(cx) else { return }; let Some(workspace) = self.workspace(cx) else { return };
let buffer = self.buffer.read(cx); let buffer = self.buffer.read(cx);
let head = self.selections.newest::<usize>(cx).head(); let head = self.selections.newest::<usize>(cx).head();
@ -6210,7 +6232,7 @@ impl Editor {
cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move { cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move {
let definitions = definitions.await?; let definitions = definitions.await?;
editor.update(&mut cx, |editor, cx| { editor.update(&mut cx, |editor, cx| {
editor.navigate_to_definitions(definitions, cx); editor.navigate_to_definitions(definitions, split, cx);
})?; })?;
Ok::<(), anyhow::Error>(()) Ok::<(), anyhow::Error>(())
}) })
@ -6220,6 +6242,7 @@ impl Editor {
pub fn navigate_to_definitions( pub fn navigate_to_definitions(
&mut self, &mut self,
mut definitions: Vec<LocationLink>, mut definitions: Vec<LocationLink>,
split: bool,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) { ) {
let Some(workspace) = self.workspace(cx) else { return }; let Some(workspace) = self.workspace(cx) else { return };
@ -6239,7 +6262,11 @@ impl Editor {
} else { } else {
cx.window_context().defer(move |cx| { cx.window_context().defer(move |cx| {
let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| { let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| {
if split {
workspace.split_project_item(definition.target.buffer.clone(), cx)
} else {
workspace.open_project_item(definition.target.buffer.clone(), cx) workspace.open_project_item(definition.target.buffer.clone(), cx)
}
}); });
target_editor.update(cx, |target_editor, cx| { target_editor.update(cx, |target_editor, cx| {
// When selecting a definition in a different buffer, disable the nav history // When selecting a definition in a different buffer, disable the nav history
@ -6275,7 +6302,9 @@ impl Editor {
.map(|definition| definition.target) .map(|definition| definition.target)
.collect(); .collect();
workspace.update(cx, |workspace, cx| { workspace.update(cx, |workspace, cx| {
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx) Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, split, cx,
)
}); });
}); });
} }
@ -6320,7 +6349,7 @@ impl Editor {
}) })
.unwrap(); .unwrap();
Self::open_locations_in_multibuffer( Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, cx, workspace, locations, replica_id, title, false, cx,
); );
})?; })?;
@ -6335,6 +6364,7 @@ impl Editor {
mut locations: Vec<Location>, mut locations: Vec<Location>,
replica_id: ReplicaId, replica_id: ReplicaId,
title: String, title: String,
split: bool,
cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
) { ) {
// If there are multiple definitions, open them in a multibuffer // If there are multiple definitions, open them in a multibuffer
@ -6381,8 +6411,12 @@ impl Editor {
cx, cx,
); );
}); });
if split {
workspace.split_item(Box::new(editor), cx);
} else {
workspace.add_item(Box::new(editor), cx); workspace.add_item(Box::new(editor), cx);
} }
}
pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> { pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
use language::ToOffset as _; use language::ToOffset as _;
@ -8101,7 +8135,7 @@ fn build_style(
cx: &AppContext, cx: &AppContext,
) -> EditorStyle { ) -> EditorStyle {
let font_cache = cx.font_cache(); let font_cache = cx.font_cache();
let line_height_scalar = settings.line_height();
let theme_id = settings.theme.meta.id; let theme_id = settings.theme.meta.id;
let mut theme = settings.theme.editor.clone(); let mut theme = settings.theme.editor.clone();
let mut style = if let Some(get_field_editor_theme) = get_field_editor_theme { let mut style = if let Some(get_field_editor_theme) = get_field_editor_theme {
@ -8115,6 +8149,7 @@ fn build_style(
EditorStyle { EditorStyle {
text: field_editor_theme.text, text: field_editor_theme.text,
placeholder_text: field_editor_theme.placeholder_text, placeholder_text: field_editor_theme.placeholder_text,
line_height_scalar,
theme, theme,
theme_id, theme_id,
} }
@ -8137,6 +8172,7 @@ fn build_style(
underline: Default::default(), underline: Default::default(),
}, },
placeholder_text: None, placeholder_text: None,
line_height_scalar,
theme, theme,
theme_id, theme_id,
} }

View file

@ -3836,7 +3836,7 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) {
autoclose_before: "})]>".into(), autoclose_before: "})]>".into(),
..Default::default() ..Default::default()
}, },
Some(tree_sitter_javascript::language()), Some(tree_sitter_typescript::language_tsx()),
)); ));
let registry = Arc::new(LanguageRegistry::test()); let registry = Arc::new(LanguageRegistry::test());
@ -5383,7 +5383,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
line_comment: Some("// ".into()), line_comment: Some("// ".into()),
..Default::default() ..Default::default()
}, },
Some(tree_sitter_javascript::language()), Some(tree_sitter_typescript::language_tsx()),
)); ));
let registry = Arc::new(LanguageRegistry::test()); let registry = Arc::new(LanguageRegistry::test());

View file

@ -156,6 +156,7 @@ impl EditorElement {
event.position, event.position,
event.cmd, event.cmd,
event.shift, event.shift,
event.alt,
position_map.as_ref(), position_map.as_ref(),
text_bounds, text_bounds,
cx, cx,
@ -308,6 +309,7 @@ impl EditorElement {
position: Vector2F, position: Vector2F,
cmd: bool, cmd: bool,
shift: bool, shift: bool,
alt: bool,
position_map: &PositionMap, position_map: &PositionMap,
text_bounds: RectF, text_bounds: RectF,
cx: &mut EventContext<Editor>, cx: &mut EventContext<Editor>,
@ -324,9 +326,9 @@ impl EditorElement {
if point == target_point { if point == target_point {
if shift { if shift {
go_to_fetched_type_definition(editor, point, cx); go_to_fetched_type_definition(editor, point, alt, cx);
} else { } else {
go_to_fetched_definition(editor, point, cx); go_to_fetched_definition(editor, point, alt, cx);
} }
return true; return true;
@ -1182,8 +1184,10 @@ impl EditorElement {
}); });
scene.push_mouse_region( scene.push_mouse_region(
MouseRegion::new::<ScrollbarMouseHandlers>(cx.view_id(), cx.view_id(), track_bounds) MouseRegion::new::<ScrollbarMouseHandlers>(cx.view_id(), cx.view_id(), track_bounds)
.on_move(move |_, editor: &mut Editor, cx| { .on_move(move |event, editor: &mut Editor, cx| {
if event.pressed_button.is_none() {
editor.scroll_manager.show_scrollbar(cx); editor.scroll_manager.show_scrollbar(cx);
}
}) })
.on_down(MouseButton::Left, { .on_down(MouseButton::Left, {
let row_range = row_range.clone(); let row_range = row_range.clone();
@ -1973,7 +1977,7 @@ impl Element<Editor> for EditorElement {
let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
let style = self.style.clone(); let style = self.style.clone();
let line_height = style.text.line_height(cx.font_cache()); let line_height = (style.text.font_size * style.line_height_scalar).round();
let gutter_padding; let gutter_padding;
let gutter_width; let gutter_width;

View file

@ -887,10 +887,20 @@ pub(crate) enum BufferSearchHighlights {}
impl SearchableItem for Editor { impl SearchableItem for Editor {
type Match = Range<Anchor>; type Match = Range<Anchor>;
fn to_search_event(event: &Self::Event) -> Option<SearchEvent> { fn to_search_event(
&mut self,
event: &Self::Event,
_: &mut ViewContext<Self>,
) -> Option<SearchEvent> {
match event { match event {
Event::BufferEdited => Some(SearchEvent::MatchesInvalidated), Event::BufferEdited => Some(SearchEvent::MatchesInvalidated),
Event::SelectionsChanged { .. } => Some(SearchEvent::ActiveMatchChanged), Event::SelectionsChanged { .. } => {
if self.selections.disjoint_anchors().len() == 1 {
Some(SearchEvent::ActiveMatchChanged)
} else {
None
}
}
_ => None, _ => None,
} }
} }
@ -941,6 +951,11 @@ impl SearchableItem for Editor {
}); });
} }
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
self.unfold_ranges(matches.clone(), false, false, cx);
self.change_selections(None, cx, |s| s.select_ranges(matches));
}
fn match_index_for_direction( fn match_index_for_direction(
&mut self, &mut self,
matches: &Vec<Range<Anchor>>, matches: &Vec<Range<Anchor>>,
@ -949,8 +964,16 @@ impl SearchableItem for Editor {
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> usize { ) -> usize {
let buffer = self.buffer().read(cx).snapshot(cx); let buffer = self.buffer().read(cx).snapshot(cx);
let cursor = self.selections.newest_anchor().head(); let current_index_position = if self.selections.disjoint_anchors().len() == 1 {
if matches[current_index].start.cmp(&cursor, &buffer).is_gt() { self.selections.newest_anchor().head()
} else {
matches[current_index].start
};
if matches[current_index]
.start
.cmp(&current_index_position, &buffer)
.is_gt()
{
if direction == Direction::Prev { if direction == Direction::Prev {
if current_index == 0 { if current_index == 0 {
current_index = matches.len() - 1; current_index = matches.len() - 1;
@ -958,7 +981,11 @@ impl SearchableItem for Editor {
current_index -= 1; current_index -= 1;
} }
} }
} else if matches[current_index].end.cmp(&cursor, &buffer).is_lt() { } else if matches[current_index]
.end
.cmp(&current_index_position, &buffer)
.is_lt()
{
if direction == Direction::Next { if direction == Direction::Next {
current_index = 0; current_index = 0;
} }

View file

@ -246,23 +246,26 @@ pub fn hide_link_definition(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
pub fn go_to_fetched_definition( pub fn go_to_fetched_definition(
editor: &mut Editor, editor: &mut Editor,
point: DisplayPoint, point: DisplayPoint,
split: bool,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) { ) {
go_to_fetched_definition_of_kind(LinkDefinitionKind::Symbol, editor, point, cx); go_to_fetched_definition_of_kind(LinkDefinitionKind::Symbol, editor, point, split, cx);
} }
pub fn go_to_fetched_type_definition( pub fn go_to_fetched_type_definition(
editor: &mut Editor, editor: &mut Editor,
point: DisplayPoint, point: DisplayPoint,
split: bool,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) { ) {
go_to_fetched_definition_of_kind(LinkDefinitionKind::Type, editor, point, cx); go_to_fetched_definition_of_kind(LinkDefinitionKind::Type, editor, point, split, cx);
} }
fn go_to_fetched_definition_of_kind( fn go_to_fetched_definition_of_kind(
kind: LinkDefinitionKind, kind: LinkDefinitionKind,
editor: &mut Editor, editor: &mut Editor,
point: DisplayPoint, point: DisplayPoint,
split: bool,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) { ) {
let cached_definitions = editor.link_go_to_definition_state.definitions.clone(); let cached_definitions = editor.link_go_to_definition_state.definitions.clone();
@ -275,7 +278,7 @@ fn go_to_fetched_definition_of_kind(
cx.focus_self(); cx.focus_self();
} }
editor.navigate_to_definitions(cached_definitions, cx); editor.navigate_to_definitions(cached_definitions, split, cx);
} else { } else {
editor.select( editor.select(
SelectPhase::Begin { SelectPhase::Begin {
@ -403,7 +406,7 @@ mod tests {
}); });
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
go_to_fetched_type_definition(editor, hover_point, cx); go_to_fetched_type_definition(editor, hover_point, false, cx);
}); });
requests.next().await; requests.next().await;
cx.foreground().run_until_parked(); cx.foreground().run_until_parked();
@ -614,7 +617,7 @@ mod tests {
// Cmd click with existing definition doesn't re-request and dismisses highlight // Cmd click with existing definition doesn't re-request and dismisses highlight
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
go_to_fetched_definition(editor, hover_point, cx); go_to_fetched_definition(editor, hover_point, false, cx);
}); });
// Assert selection moved to to definition // Assert selection moved to to definition
cx.lsp cx.lsp
@ -655,7 +658,7 @@ mod tests {
]))) ])))
}); });
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
go_to_fetched_definition(editor, hover_point, cx); go_to_fetched_definition(editor, hover_point, false, cx);
}); });
requests.next().await; requests.next().await;
cx.foreground().run_until_parked(); cx.foreground().run_until_parked();

View file

@ -16,13 +16,13 @@ use crate::{
Anchor, DisplayPoint, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, ToOffset, Anchor, DisplayPoint, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, ToOffset,
}; };
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct PendingSelection { pub struct PendingSelection {
pub selection: Selection<Anchor>, pub selection: Selection<Anchor>,
pub mode: SelectMode, pub mode: SelectMode,
} }
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct SelectionsCollection { pub struct SelectionsCollection {
display_map: ModelHandle<DisplayMap>, display_map: ModelHandle<DisplayMap>,
buffer: ModelHandle<MultiBuffer>, buffer: ModelHandle<MultiBuffer>,

View file

@ -60,6 +60,7 @@ pub(crate) struct FeedbackEditor {
system_specs: SystemSpecs, system_specs: SystemSpecs,
editor: ViewHandle<Editor>, editor: ViewHandle<Editor>,
project: ModelHandle<Project>, project: ModelHandle<Project>,
pub allow_submission: bool,
} }
impl FeedbackEditor { impl FeedbackEditor {
@ -82,10 +83,15 @@ impl FeedbackEditor {
system_specs: system_specs.clone(), system_specs: system_specs.clone(),
editor, editor,
project, project,
allow_submission: true,
} }
} }
pub fn submit(&mut self, cx: &mut ViewContext<Self>) -> Task<anyhow::Result<()>> { pub fn submit(&mut self, cx: &mut ViewContext<Self>) -> Task<anyhow::Result<()>> {
if !self.allow_submission {
return Task::ready(Ok(()));
}
let feedback_text = self.editor.read(cx).text(cx); let feedback_text = self.editor.read(cx).text(cx);
let feedback_char_count = feedback_text.chars().count(); let feedback_char_count = feedback_text.chars().count();
let feedback_text = feedback_text.trim().to_string(); let feedback_text = feedback_text.trim().to_string();
@ -122,19 +128,26 @@ impl FeedbackEditor {
let answer = answer.recv().await; let answer = answer.recv().await;
if answer == Some(0) { if answer == Some(0) {
this.update(&mut cx, |feedback_editor, cx| {
feedback_editor.set_allow_submission(false, cx);
})
.log_err();
match FeedbackEditor::submit_feedback(&feedback_text, client, specs).await { match FeedbackEditor::submit_feedback(&feedback_text, client, specs).await {
Ok(_) => { Ok(_) => {
this.update(&mut cx, |_, cx| cx.emit(editor::Event::Closed)) this.update(&mut cx, |_, cx| cx.emit(editor::Event::Closed))
.log_err(); .log_err();
} }
Err(error) => { Err(error) => {
log::error!("{}", error); log::error!("{}", error);
this.update(&mut cx, |_, cx| { this.update(&mut cx, |feedback_editor, cx| {
cx.prompt( cx.prompt(
PromptLevel::Critical, PromptLevel::Critical,
FEEDBACK_SUBMISSION_ERROR_TEXT, FEEDBACK_SUBMISSION_ERROR_TEXT,
&["OK"], &["OK"],
); );
feedback_editor.set_allow_submission(true, cx);
}) })
.log_err(); .log_err();
} }
@ -146,6 +159,11 @@ impl FeedbackEditor {
Task::ready(Ok(())) Task::ready(Ok(()))
} }
fn set_allow_submission(&mut self, allow_submission: bool, cx: &mut ViewContext<Self>) {
self.allow_submission = allow_submission;
cx.notify();
}
async fn submit_feedback( async fn submit_feedback(
feedback_text: &str, feedback_text: &str,
zed_client: Arc<Client>, zed_client: Arc<Client>,
@ -362,8 +380,13 @@ impl Item for FeedbackEditor {
impl SearchableItem for FeedbackEditor { impl SearchableItem for FeedbackEditor {
type Match = Range<Anchor>; type Match = Range<Anchor>;
fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> { fn to_search_event(
Editor::to_search_event(event) &mut self,
event: &Self::Event,
cx: &mut ViewContext<Self>,
) -> Option<workspace::searchable::SearchEvent> {
self.editor
.update(cx, |editor, cx| editor.to_search_event(event, cx))
} }
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) { fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
@ -391,6 +414,11 @@ impl SearchableItem for FeedbackEditor {
.update(cx, |editor, cx| editor.activate_match(index, matches, cx)) .update(cx, |editor, cx| editor.activate_match(index, matches, cx))
} }
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |e, cx| e.select_matches(matches, cx))
}
fn find_matches( fn find_matches(
&mut self, &mut self,
query: project::search::SearchQuery, query: project::search::SearchQuery,

View file

@ -46,10 +46,28 @@ impl View for SubmitFeedbackButton {
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> { fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let theme = theme::current(cx).clone(); let theme = theme::current(cx).clone();
let allow_submission = self
.active_item
.as_ref()
.map_or(true, |i| i.read(cx).allow_submission);
enum SubmitFeedbackButton {} enum SubmitFeedbackButton {}
MouseEventHandler::<SubmitFeedbackButton, Self>::new(0, cx, |state, _| { MouseEventHandler::<SubmitFeedbackButton, Self>::new(0, cx, |state, _| {
let style = theme.feedback.submit_button.style_for(state); let text;
Label::new("Submit as Markdown", style.text.clone()) let style = if allow_submission {
text = "Submit as Markdown";
theme.feedback.submit_button.style_for(state)
} else {
text = "Submitting...";
theme
.feedback
.submit_button
.disabled
.as_ref()
.unwrap_or(&theme.feedback.submit_button.default)
};
Label::new(text, style.text.clone())
.contained() .contained()
.with_style(style.container) .with_style(style.container)
}) })

View file

@ -442,10 +442,18 @@ impl PickerDelegate for FileFinderDelegate {
} }
} }
fn confirm(&mut self, cx: &mut ViewContext<FileFinder>) { fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<FileFinder>) {
if let Some(m) = self.matches.get(self.selected_index()) { if let Some(m) = self.matches.get(self.selected_index()) {
if let Some(workspace) = self.workspace.upgrade(cx) { if let Some(workspace) = self.workspace.upgrade(cx) {
let open_task = workspace.update(cx, |workspace, cx| match m { let open_task = workspace.update(cx, move |workspace, cx| {
let split_or_open = |workspace: &mut Workspace, project_path, cx| {
if secondary {
workspace.split_path(project_path, cx)
} else {
workspace.open_path(project_path, None, true, cx)
}
};
match m {
Match::History(history_match) => { Match::History(history_match) => {
let worktree_id = history_match.project.worktree_id; let worktree_id = history_match.project.worktree_id;
if workspace if workspace
@ -454,41 +462,51 @@ impl PickerDelegate for FileFinderDelegate {
.worktree_for_id(worktree_id, cx) .worktree_for_id(worktree_id, cx)
.is_some() .is_some()
{ {
workspace.open_path( split_or_open(
workspace,
ProjectPath { ProjectPath {
worktree_id, worktree_id,
path: Arc::clone(&history_match.project.path), path: Arc::clone(&history_match.project.path),
}, },
None,
true,
cx, cx,
) )
} else { } else {
match history_match.absolute.as_ref() { match history_match.absolute.as_ref() {
Some(abs_path) => { Some(abs_path) => {
workspace.open_abs_path(abs_path.to_path_buf(), false, cx) if secondary {
workspace.split_abs_path(
abs_path.to_path_buf(),
false,
cx,
)
} else {
workspace.open_abs_path(
abs_path.to_path_buf(),
false,
cx,
)
} }
None => workspace.open_path( }
None => split_or_open(
workspace,
ProjectPath { ProjectPath {
worktree_id, worktree_id,
path: Arc::clone(&history_match.project.path), path: Arc::clone(&history_match.project.path),
}, },
None,
true,
cx, cx,
), ),
} }
} }
} }
Match::Search(m) => workspace.open_path( Match::Search(m) => split_or_open(
workspace,
ProjectPath { ProjectPath {
worktree_id: WorktreeId::from_usize(m.worktree_id), worktree_id: WorktreeId::from_usize(m.worktree_id),
path: m.path.clone(), path: m.path.clone(),
}, },
None,
true,
cx, cx,
), ),
}
}); });
let row = self let row = self

View file

@ -33,6 +33,7 @@ pub trait GitRepository: Send {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>; fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>; fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>;
fn branches(&self) -> Result<Vec<Branch>> { fn branches(&self) -> Result<Vec<Branch>> {
Ok(vec![]) Ok(vec![])
} }

View file

@ -1073,7 +1073,7 @@ impl AppContext {
pub fn is_action_available(&self, action: &dyn Action) -> bool { pub fn is_action_available(&self, action: &dyn Action) -> bool {
let mut available_in_window = false; let mut available_in_window = false;
let action_type = action.as_any().type_id(); let action_id = action.id();
if let Some(window_id) = self.platform.main_window_id() { if let Some(window_id) = self.platform.main_window_id() {
available_in_window = self available_in_window = self
.read_window(window_id, |cx| { .read_window(window_id, |cx| {
@ -1083,7 +1083,7 @@ impl AppContext {
cx.views_metadata.get(&(window_id, view_id)) cx.views_metadata.get(&(window_id, view_id))
{ {
if let Some(actions) = cx.actions.get(&view_metadata.type_id) { if let Some(actions) = cx.actions.get(&view_metadata.type_id) {
if actions.contains_key(&action_type) { if actions.contains_key(&action_id) {
return true; return true;
} }
} }
@ -1094,7 +1094,7 @@ impl AppContext {
}) })
.unwrap_or(false); .unwrap_or(false);
} }
available_in_window || self.global_actions.contains_key(&action_type) available_in_window || self.global_actions.contains_key(&action_id)
} }
fn actions_mut( fn actions_mut(
@ -3399,7 +3399,7 @@ impl<'a, 'b, 'c, V: View> LayoutContext<'a, 'b, 'c, V> {
for (i, view_id) in self.ancestors(view_id).enumerate() { for (i, view_id) in self.ancestors(view_id).enumerate() {
if let Some(view_metadata) = self.views_metadata.get(&(window_id, view_id)) { if let Some(view_metadata) = self.views_metadata.get(&(window_id, view_id)) {
if let Some(actions) = self.actions.get(&view_metadata.type_id) { if let Some(actions) = self.actions.get(&view_metadata.type_id) {
if actions.contains_key(&action.as_any().type_id()) { if actions.contains_key(&action.id()) {
handler_depth = Some(i); handler_depth = Some(i);
} }
} }
@ -3407,12 +3407,12 @@ impl<'a, 'b, 'c, V: View> LayoutContext<'a, 'b, 'c, V> {
} }
} }
if self.global_actions.contains_key(&action.as_any().type_id()) { if self.global_actions.contains_key(&action.id()) {
handler_depth = Some(contexts.len()) handler_depth = Some(contexts.len())
} }
self.keystroke_matcher self.keystroke_matcher
.bindings_for_action_type(action.as_any().type_id()) .bindings_for_action(action.id())
.find_map(|b| { .find_map(|b| {
let highest_handler = handler_depth?; let highest_handler = handler_depth?;
if action.eq(b.action()) if action.eq(b.action())

View file

@ -14,8 +14,8 @@ use crate::{
text_layout::TextLayoutCache, text_layout::TextLayoutCache,
util::post_inc, util::post_inc,
Action, AnyView, AnyViewHandle, AppContext, BorrowAppContext, BorrowWindowContext, Effect, Action, AnyView, AnyViewHandle, AppContext, BorrowAppContext, BorrowWindowContext, Effect,
Element, Entity, Handle, LayoutContext, MouseRegion, MouseRegionId, NoAction, SceneBuilder, Element, Entity, Handle, LayoutContext, MouseRegion, MouseRegionId, SceneBuilder, Subscription,
Subscription, View, ViewContext, ViewHandle, WindowInvalidation, View, ViewContext, ViewHandle, WindowInvalidation,
}; };
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, Result};
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
@ -363,17 +363,13 @@ impl<'a> WindowContext<'a> {
) -> Vec<(&'static str, Box<dyn Action>, SmallVec<[Binding; 1]>)> { ) -> Vec<(&'static str, Box<dyn Action>, SmallVec<[Binding; 1]>)> {
let window_id = self.window_id; let window_id = self.window_id;
let mut contexts = Vec::new(); let mut contexts = Vec::new();
let mut handler_depths_by_action_type = HashMap::<TypeId, usize>::default(); let mut handler_depths_by_action_id = HashMap::<TypeId, usize>::default();
for (depth, view_id) in self.ancestors(view_id).enumerate() { for (depth, view_id) in self.ancestors(view_id).enumerate() {
if let Some(view_metadata) = self.views_metadata.get(&(window_id, view_id)) { if let Some(view_metadata) = self.views_metadata.get(&(window_id, view_id)) {
contexts.push(view_metadata.keymap_context.clone()); contexts.push(view_metadata.keymap_context.clone());
if let Some(actions) = self.actions.get(&view_metadata.type_id) { if let Some(actions) = self.actions.get(&view_metadata.type_id) {
handler_depths_by_action_type.extend( handler_depths_by_action_id
actions .extend(actions.keys().copied().map(|action_id| (action_id, depth)));
.keys()
.copied()
.map(|action_type| (action_type, depth)),
);
} }
} else { } else {
log::error!( log::error!(
@ -383,21 +379,21 @@ impl<'a> WindowContext<'a> {
} }
} }
handler_depths_by_action_type.extend( handler_depths_by_action_id.extend(
self.global_actions self.global_actions
.keys() .keys()
.copied() .copied()
.map(|action_type| (action_type, contexts.len())), .map(|action_id| (action_id, contexts.len())),
); );
self.action_deserializers self.action_deserializers
.iter() .iter()
.filter_map(move |(name, (type_id, deserialize))| { .filter_map(move |(name, (action_id, deserialize))| {
if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() { if let Some(action_depth) = handler_depths_by_action_id.get(action_id).copied() {
let action = deserialize(serde_json::Value::Object(Default::default())).ok()?; let action = deserialize(serde_json::Value::Object(Default::default())).ok()?;
let bindings = self let bindings = self
.keystroke_matcher .keystroke_matcher
.bindings_for_action_type(*type_id) .bindings_for_action(*action_id)
.filter(|b| { .filter(|b| {
action.eq(b.action()) action.eq(b.action())
&& (0..=action_depth) && (0..=action_depth)
@ -434,11 +430,7 @@ impl<'a> WindowContext<'a> {
MatchResult::None => false, MatchResult::None => false,
MatchResult::Pending => true, MatchResult::Pending => true,
MatchResult::Matches(matches) => { MatchResult::Matches(matches) => {
let no_action_id = (NoAction {}).id();
for (view_id, action) in matches { for (view_id, action) in matches {
if action.id() == no_action_id {
return false;
}
if self.dispatch_action(Some(*view_id), action.as_ref()) { if self.dispatch_action(Some(*view_id), action.as_ref()) {
self.keystroke_matcher.clear_pending(); self.keystroke_matcher.clear_pending();
handled_by = Some(action.boxed_clone()); handled_by = Some(action.boxed_clone());
@ -1268,6 +1260,19 @@ impl Vector2FExt for Vector2F {
} }
} }
pub trait RectFExt {
fn length_along(self, axis: Axis) -> f32;
}
impl RectFExt for RectF {
fn length_along(self, axis: Axis) -> f32 {
match axis {
Axis::Horizontal => self.width(),
Axis::Vertical => self.height(),
}
}
}
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub struct SizeConstraint { pub struct SizeConstraint {
pub min: Vector2F, pub min: Vector2F,

View file

@ -27,7 +27,7 @@ pub mod json;
pub mod keymap_matcher; pub mod keymap_matcher;
pub mod platform; pub mod platform;
pub use gpui_macros::{test, Element}; pub use gpui_macros::{test, Element};
pub use window::{Axis, SizeConstraint, Vector2FExt, WindowContext}; pub use window::{Axis, RectFExt, SizeConstraint, Vector2FExt, WindowContext};
pub use anyhow; pub use anyhow;
pub use serde_json; pub use serde_json;

View file

@ -8,7 +8,7 @@ use std::{any::TypeId, fmt::Debug};
use collections::HashMap; use collections::HashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use crate::Action; use crate::{Action, NoAction};
pub use binding::{Binding, BindingMatchResult}; pub use binding::{Binding, BindingMatchResult};
pub use keymap::Keymap; pub use keymap::Keymap;
@ -47,8 +47,8 @@ impl KeymapMatcher {
self.keymap.clear(); self.keymap.clear();
} }
pub fn bindings_for_action_type(&self, action_type: TypeId) -> impl Iterator<Item = &Binding> { pub fn bindings_for_action(&self, action_id: TypeId) -> impl Iterator<Item = &Binding> {
self.keymap.bindings_for_action_type(action_type) self.keymap.bindings_for_action(action_id)
} }
pub fn clear_pending(&mut self) { pub fn clear_pending(&mut self) {
@ -81,6 +81,7 @@ impl KeymapMatcher {
// The key is the reverse position of the binding in the bindings list so that later bindings // The key is the reverse position of the binding in the bindings list so that later bindings
// match before earlier ones in the user's config // match before earlier ones in the user's config
let mut matched_bindings: Vec<(usize, Box<dyn Action>)> = Default::default(); let mut matched_bindings: Vec<(usize, Box<dyn Action>)> = Default::default();
let no_action_id = (NoAction {}).id();
let first_keystroke = self.pending_keystrokes.is_empty(); let first_keystroke = self.pending_keystrokes.is_empty();
self.pending_keystrokes.push(keystroke.clone()); self.pending_keystrokes.push(keystroke.clone());
@ -108,8 +109,10 @@ impl KeymapMatcher {
match binding.match_keys_and_context(&self.pending_keystrokes, &self.contexts[i..]) match binding.match_keys_and_context(&self.pending_keystrokes, &self.contexts[i..])
{ {
BindingMatchResult::Complete(action) => { BindingMatchResult::Complete(action) => {
if action.id() != no_action_id {
matched_bindings.push((*view_id, action)); matched_bindings.push((*view_id, action));
} }
}
BindingMatchResult::Partial => { BindingMatchResult::Partial => {
self.pending_views self.pending_views
.insert(*view_id, self.contexts[i].clone()); .insert(*view_id, self.contexts[i].clone());
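Aside: the "reverse position" comment above can be illustrated without gpui at all. If each match is keyed by its reverse index, sorting puts bindings declared later (e.g. from the user's keymap) ahead of earlier ones. A tiny self-contained sketch of that idea, not the commit's actual types or data shapes:

fn main() {
    // Two bindings for the same keystroke: the first from the default keymap,
    // the second from the user's keymap. Keying each match by its reverse
    // position makes the later (user) binding sort first.
    let bindings = ["default ctrl-s binding", "user ctrl-s binding"];
    let mut matched: Vec<(usize, &str)> = bindings
        .iter()
        .enumerate()
        .map(|(ix, binding)| (bindings.len() - ix - 1, *binding))
        .collect();
    matched.sort_by_key(|(reverse_ix, _)| *reverse_ix);
    assert_eq!(matched[0].1, "user ctrl-s binding");
}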

View file

@ -7,8 +7,8 @@ use super::{KeymapContext, KeymapContextPredicate, Keystroke};
pub struct Binding { pub struct Binding {
action: Box<dyn Action>, action: Box<dyn Action>,
keystrokes: SmallVec<[Keystroke; 2]>, pub(super) keystrokes: SmallVec<[Keystroke; 2]>,
context_predicate: Option<KeymapContextPredicate>, pub(super) context_predicate: Option<KeymapContextPredicate>,
} }
impl std::fmt::Debug for Binding { impl std::fmt::Debug for Binding {

View file

@ -1,61 +1,388 @@
use collections::HashSet;
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{ use std::{any::TypeId, collections::HashMap};
any::{Any, TypeId},
collections::HashMap,
};
use super::Binding; use crate::{Action, NoAction};
use super::{Binding, KeymapContextPredicate, Keystroke};
#[derive(Default)] #[derive(Default)]
pub struct Keymap { pub struct Keymap {
bindings: Vec<Binding>, bindings: Vec<Binding>,
binding_indices_by_action_type: HashMap<TypeId, SmallVec<[usize; 3]>>, binding_indices_by_action_id: HashMap<TypeId, SmallVec<[usize; 3]>>,
disabled_keystrokes: HashMap<SmallVec<[Keystroke; 2]>, HashSet<Option<KeymapContextPredicate>>>,
} }
impl Keymap { impl Keymap {
pub fn new(bindings: Vec<Binding>) -> Self { #[cfg(test)]
let mut binding_indices_by_action_type = HashMap::new(); pub(super) fn new(bindings: Vec<Binding>) -> Self {
for (ix, binding) in bindings.iter().enumerate() { let mut this = Self::default();
binding_indices_by_action_type this.add_bindings(bindings);
.entry(binding.action().type_id()) this
.or_insert_with(SmallVec::new)
.push(ix);
} }
Self { pub(crate) fn bindings_for_action(
binding_indices_by_action_type,
bindings,
}
}
pub(crate) fn bindings_for_action_type(
&self, &self,
action_type: TypeId, action_id: TypeId,
) -> impl Iterator<Item = &'_ Binding> { ) -> impl Iterator<Item = &'_ Binding> {
self.binding_indices_by_action_type self.binding_indices_by_action_id
.get(&action_type) .get(&action_id)
.map(SmallVec::as_slice) .map(SmallVec::as_slice)
.unwrap_or(&[]) .unwrap_or(&[])
.iter() .iter()
.map(|ix| &self.bindings[*ix]) .map(|ix| &self.bindings[*ix])
.filter(|binding| !self.binding_disabled(binding))
} }
pub(crate) fn add_bindings<T: IntoIterator<Item = Binding>>(&mut self, bindings: T) { pub(crate) fn add_bindings<T: IntoIterator<Item = Binding>>(&mut self, bindings: T) {
let no_action_id = (NoAction {}).id();
let mut new_bindings = Vec::new();
let mut has_new_disabled_keystrokes = false;
for binding in bindings { for binding in bindings {
self.binding_indices_by_action_type if binding.action().id() == no_action_id {
.entry(binding.action().as_any().type_id()) has_new_disabled_keystrokes |= self
.disabled_keystrokes
.entry(binding.keystrokes)
.or_default()
.insert(binding.context_predicate);
} else {
new_bindings.push(binding);
}
}
if has_new_disabled_keystrokes {
self.binding_indices_by_action_id.retain(|_, indices| {
indices.retain(|ix| {
let binding = &self.bindings[*ix];
match self.disabled_keystrokes.get(&binding.keystrokes) {
Some(disabled_predicates) => {
!disabled_predicates.contains(&binding.context_predicate)
}
None => true,
}
});
!indices.is_empty()
});
}
for new_binding in new_bindings {
if !self.binding_disabled(&new_binding) {
self.binding_indices_by_action_id
.entry(new_binding.action().id())
.or_default() .or_default()
.push(self.bindings.len()); .push(self.bindings.len());
self.bindings.push(binding); self.bindings.push(new_binding);
}
} }
} }
pub(crate) fn clear(&mut self) { pub(crate) fn clear(&mut self) {
self.bindings.clear(); self.bindings.clear();
self.binding_indices_by_action_type.clear(); self.binding_indices_by_action_id.clear();
self.disabled_keystrokes.clear();
} }
pub fn bindings(&self) -> &Vec<Binding> { pub fn bindings(&self) -> Vec<&Binding> {
&self.bindings self.bindings
.iter()
.filter(|binding| !self.binding_disabled(binding))
.collect()
}
fn binding_disabled(&self, binding: &Binding) -> bool {
match self.disabled_keystrokes.get(&binding.keystrokes) {
Some(disabled_predicates) => disabled_predicates.contains(&binding.context_predicate),
None => false,
}
}
}
#[cfg(test)]
mod tests {
use crate::actions;
use super::*;
actions!(
keymap_test,
[Present1, Present2, Present3, Duplicate, Missing]
);
#[test]
fn regular_keymap() {
let present_1 = Binding::new("ctrl-q", Present1 {}, None);
let present_2 = Binding::new("ctrl-w", Present2 {}, Some("pane"));
let present_3 = Binding::new("ctrl-e", Present3 {}, Some("editor"));
let keystroke_duplicate_to_1 = Binding::new("ctrl-q", Duplicate {}, None);
let full_duplicate_to_2 = Binding::new("ctrl-w", Present2 {}, Some("pane"));
let missing = Binding::new("ctrl-r", Missing {}, None);
let all_bindings = [
&present_1,
&present_2,
&present_3,
&keystroke_duplicate_to_1,
&full_duplicate_to_2,
&missing,
];
let mut keymap = Keymap::default();
assert_absent(&keymap, &all_bindings);
assert!(keymap.bindings().is_empty());
keymap.add_bindings([present_1.clone(), present_2.clone(), present_3.clone()]);
assert_absent(&keymap, &[&keystroke_duplicate_to_1, &missing]);
assert_present(
&keymap,
&[(&present_1, "q"), (&present_2, "w"), (&present_3, "e")],
);
keymap.add_bindings([
keystroke_duplicate_to_1.clone(),
full_duplicate_to_2.clone(),
]);
assert_absent(&keymap, &[&missing]);
assert!(
!keymap.binding_disabled(&keystroke_duplicate_to_1),
"Duplicate binding 1 was added and should not be disabled"
);
assert!(
!keymap.binding_disabled(&full_duplicate_to_2),
"Duplicate binding 2 was added and should not be disabled"
);
assert_eq!(
keymap
.bindings_for_action(keystroke_duplicate_to_1.action().id())
.map(|binding| &binding.keystrokes)
.flatten()
.collect::<Vec<_>>(),
vec![&Keystroke {
ctrl: true,
alt: false,
shift: false,
cmd: false,
function: false,
key: "q".to_string()
}],
"{keystroke_duplicate_to_1:?} should have the expected keystroke in the keymap"
);
assert_eq!(
keymap
.bindings_for_action(full_duplicate_to_2.action().id())
.map(|binding| &binding.keystrokes)
.flatten()
.collect::<Vec<_>>(),
vec![
&Keystroke {
ctrl: true,
alt: false,
shift: false,
cmd: false,
function: false,
key: "w".to_string()
},
&Keystroke {
ctrl: true,
alt: false,
shift: false,
cmd: false,
function: false,
key: "w".to_string()
}
],
"{full_duplicate_to_2:?} should have a duplicated keystroke in the keymap"
);
let updated_bindings = keymap.bindings();
let expected_updated_bindings = vec![
&present_1,
&present_2,
&present_3,
&keystroke_duplicate_to_1,
&full_duplicate_to_2,
];
assert_eq!(
updated_bindings.len(),
expected_updated_bindings.len(),
"Unexpected updated keymap bindings {updated_bindings:?}"
);
for (i, expected) in expected_updated_bindings.iter().enumerate() {
let keymap_binding = &updated_bindings[i];
assert_eq!(
keymap_binding.context_predicate, expected.context_predicate,
"Unexpected context predicate for keymap {i} element: {keymap_binding:?}"
);
assert_eq!(
keymap_binding.keystrokes, expected.keystrokes,
"Unexpected keystrokes for keymap {i} element: {keymap_binding:?}"
);
}
keymap.clear();
assert_absent(&keymap, &all_bindings);
assert!(keymap.bindings().is_empty());
}
#[test]
fn keymap_with_ignored() {
let present_1 = Binding::new("ctrl-q", Present1 {}, None);
let present_2 = Binding::new("ctrl-w", Present2 {}, Some("pane"));
let present_3 = Binding::new("ctrl-e", Present3 {}, Some("editor"));
let keystroke_duplicate_to_1 = Binding::new("ctrl-q", Duplicate {}, None);
let full_duplicate_to_2 = Binding::new("ctrl-w", Present2 {}, Some("pane"));
let ignored_1 = Binding::new("ctrl-q", NoAction {}, None);
let ignored_2 = Binding::new("ctrl-w", NoAction {}, Some("pane"));
let ignored_3_with_other_context =
Binding::new("ctrl-e", NoAction {}, Some("other_context"));
let mut keymap = Keymap::default();
keymap.add_bindings([
ignored_1.clone(),
ignored_2.clone(),
ignored_3_with_other_context.clone(),
]);
assert_absent(&keymap, &[&present_3]);
assert_disabled(
&keymap,
&[
&present_1,
&present_2,
&ignored_1,
&ignored_2,
&ignored_3_with_other_context,
],
);
assert!(keymap.bindings().is_empty());
keymap.clear();
keymap.add_bindings([
present_1.clone(),
present_2.clone(),
present_3.clone(),
ignored_1.clone(),
ignored_2.clone(),
ignored_3_with_other_context.clone(),
]);
assert_present(&keymap, &[(&present_3, "e")]);
assert_disabled(
&keymap,
&[
&present_1,
&present_2,
&ignored_1,
&ignored_2,
&ignored_3_with_other_context,
],
);
keymap.clear();
keymap.add_bindings([
present_1.clone(),
present_2.clone(),
present_3.clone(),
ignored_1.clone(),
]);
assert_present(&keymap, &[(&present_2, "w"), (&present_3, "e")]);
assert_disabled(&keymap, &[&present_1, &ignored_1]);
assert_absent(&keymap, &[&ignored_2, &ignored_3_with_other_context]);
keymap.clear();
keymap.add_bindings([
present_1.clone(),
present_2.clone(),
present_3.clone(),
keystroke_duplicate_to_1.clone(),
full_duplicate_to_2.clone(),
ignored_1.clone(),
ignored_2.clone(),
ignored_3_with_other_context.clone(),
]);
assert_present(&keymap, &[(&present_3, "e")]);
assert_disabled(
&keymap,
&[
&present_1,
&present_2,
&keystroke_duplicate_to_1,
&full_duplicate_to_2,
&ignored_1,
&ignored_2,
&ignored_3_with_other_context,
],
);
keymap.clear();
}
#[track_caller]
fn assert_present(keymap: &Keymap, expected_bindings: &[(&Binding, &str)]) {
let keymap_bindings = keymap.bindings();
assert_eq!(
expected_bindings.len(),
keymap_bindings.len(),
"Unexpected keymap bindings {keymap_bindings:?}"
);
for (i, (expected, expected_key)) in expected_bindings.iter().enumerate() {
assert!(
!keymap.binding_disabled(expected),
"{expected:?} should not be disabled as it was added into keymap for element {i}"
);
assert_eq!(
keymap
.bindings_for_action(expected.action().id())
.map(|binding| &binding.keystrokes)
.flatten()
.collect::<Vec<_>>(),
vec![&Keystroke {
ctrl: true,
alt: false,
shift: false,
cmd: false,
function: false,
key: expected_key.to_string()
}],
"{expected:?} should have the expected keystroke with key '{expected_key}' in the keymap for element {i}"
);
let keymap_binding = &keymap_bindings[i];
assert_eq!(
keymap_binding.context_predicate, expected.context_predicate,
"Unexpected context predicate for keymap {i} element: {keymap_binding:?}"
);
assert_eq!(
keymap_binding.keystrokes, expected.keystrokes,
"Unexpected keystrokes for keymap {i} element: {keymap_binding:?}"
);
}
}
#[track_caller]
fn assert_absent(keymap: &Keymap, bindings: &[&Binding]) {
for binding in bindings.iter() {
assert!(
!keymap.binding_disabled(binding),
"{binding:?} should not be disabled in the keymap where was not added"
);
assert_eq!(
keymap.bindings_for_action(binding.action().id()).count(),
0,
"{binding:?} should have no actions in the keymap where was not added"
);
}
}
#[track_caller]
fn assert_disabled(keymap: &Keymap, bindings: &[&Binding]) {
for binding in bindings.iter() {
assert!(
keymap.binding_disabled(binding),
"{binding:?} should be disabled in the keymap"
);
assert_eq!(
keymap.bindings_for_action(binding.action().id()).count(),
0,
"{binding:?} should have no actions in the keymap where it was disabled"
);
}
} }
} }
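Aside: a minimal standalone sketch of the keystroke-disabling idea introduced in this keymap change, using plain strings in place of gpui's Binding, Keystroke, and NoAction types. All names here are illustrative, not the crate's API; the real implementation also re-filters previously added bindings when new disabling entries arrive.

use std::collections::{HashMap, HashSet};

// A simplified stand-in for a binding: a keystroke sequence, an optional
// context predicate, and an action name. "NoAction" is the disabling sentinel.
#[derive(Clone, Debug)]
struct MiniBinding {
    keystrokes: Vec<String>,
    context: Option<String>,
    action: String,
}

#[derive(Default)]
struct MiniKeymap {
    bindings: Vec<MiniBinding>,
    // Keystrokes bound to "NoAction", keyed by the context under which
    // they are disabled.
    disabled: HashMap<Vec<String>, HashSet<Option<String>>>,
}

impl MiniKeymap {
    fn add_bindings(&mut self, bindings: impl IntoIterator<Item = MiniBinding>) {
        let mut real_bindings = Vec::new();
        // First pass: record NoAction entries as disabled keystrokes.
        for binding in bindings {
            if binding.action == "NoAction" {
                self.disabled
                    .entry(binding.keystrokes)
                    .or_default()
                    .insert(binding.context);
            } else {
                real_bindings.push(binding);
            }
        }
        // Second pass: keep only bindings whose keystrokes + context
        // combination has not been disabled.
        for binding in real_bindings {
            if !self.binding_disabled(&binding) {
                self.bindings.push(binding);
            }
        }
    }

    fn binding_disabled(&self, binding: &MiniBinding) -> bool {
        self.disabled
            .get(&binding.keystrokes)
            .map_or(false, |contexts| contexts.contains(&binding.context))
    }
}

fn main() {
    let mut keymap = MiniKeymap::default();
    keymap.add_bindings([
        MiniBinding { keystrokes: vec!["ctrl-q".into()], context: None, action: "Quit".into() },
        MiniBinding { keystrokes: vec!["ctrl-q".into()], context: None, action: "NoAction".into() },
        MiniBinding { keystrokes: vec!["ctrl-w".into()], context: Some("pane".into()), action: "CloseItem".into() },
    ]);
    // ctrl-q is suppressed by the NoAction entry; ctrl-w survives.
    assert_eq!(keymap.bindings.len(), 1);
    assert_eq!(keymap.bindings[0].action, "CloseItem");
}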

View file

@ -44,7 +44,7 @@ impl KeymapContext {
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum KeymapContextPredicate { pub enum KeymapContextPredicate {
Identifier(String), Identifier(String),
Equal(String, String), Equal(String, String),

View file

@ -3,7 +3,7 @@ use std::fmt::Write;
use anyhow::anyhow; use anyhow::anyhow;
use serde::Deserialize; use serde::Deserialize;
#[derive(Clone, Debug, Eq, PartialEq, Default, Deserialize)] #[derive(Clone, Debug, Eq, PartialEq, Default, Deserialize, Hash)]
pub struct Keystroke { pub struct Keystroke {
pub ctrl: bool, pub ctrl: bool,
pub alt: bool, pub alt: bool,

View file

@ -231,7 +231,7 @@ impl MacForegroundPlatform {
} => { } => {
// TODO // TODO
let keystrokes = keystroke_matcher let keystrokes = keystroke_matcher
.bindings_for_action_type(action.as_any().type_id()) .bindings_for_action(action.id())
.find(|binding| binding.action().eq(action.as_ref())) .find(|binding| binding.action().eq(action.as_ref()))
.map(|binding| binding.keystrokes()); .map(|binding| binding.keystrokes());
let selector = match os_action { let selector = match os_action {

View file

@ -46,7 +46,6 @@ lazy_static.workspace = true
log.workspace = true log.workspace = true
parking_lot.workspace = true parking_lot.workspace = true
postage.workspace = true postage.workspace = true
rand = { workspace = true, optional = true }
regex.workspace = true regex.workspace = true
schemars.workspace = true schemars.workspace = true
serde.workspace = true serde.workspace = true
@ -56,10 +55,12 @@ similar = "1.3"
smallvec.workspace = true smallvec.workspace = true
smol.workspace = true smol.workspace = true
tree-sitter.workspace = true tree-sitter.workspace = true
tree-sitter-rust = { version = "*", optional = true }
tree-sitter-typescript = { version = "*", optional = true }
unicase = "2.6" unicase = "2.6"
rand = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies] [dev-dependencies]
client = { path = "../client", features = ["test-support"] } client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] }
@ -74,12 +75,13 @@ indoc.workspace = true
rand.workspace = true rand.workspace = true
unindent.workspace = true unindent.workspace = true
tree-sitter-embedded-template = "*" tree-sitter-embedded-template.workspace = true
tree-sitter-html = "*" tree-sitter-html.workspace = true
tree-sitter-javascript = "*" tree-sitter-json.workspace = true
tree-sitter-json = "*" tree-sitter-markdown.workspace = true
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" } tree-sitter-rust.workspace = true
tree-sitter-rust = "*" tree-sitter-python.workspace = true
tree-sitter-python = "*" tree-sitter-typescript.workspace = true
tree-sitter-typescript = "*" tree-sitter-ruby.workspace = true
tree-sitter-ruby = "*" tree-sitter-elixir.workspace = true
tree-sitter-heex.workspace = true

View file

@ -2145,25 +2145,29 @@ impl BufferSnapshot {
pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> { pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
let offset = position.to_offset(self); let offset = position.to_offset(self);
let mut range = 0..self.len();
if let Some(layer_info) = self let mut scope = self.language.clone().map(|language| LanguageScope {
.syntax
.layers_for_range(offset..offset, &self.text)
.filter(|l| l.node().end_byte() > offset)
.last()
{
Some(LanguageScope {
language: layer_info.language.clone(),
override_id: layer_info.override_id(offset, &self.text),
})
} else {
self.language.clone().map(|language| LanguageScope {
language, language,
override_id: None, override_id: None,
}) });
// Use the layer that has the smallest node intersecting the given point.
for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
let mut cursor = layer.node().walk();
while cursor.goto_first_child_for_byte(offset).is_some() {}
let node_range = cursor.node().byte_range();
if node_range.to_inclusive().contains(&offset) && node_range.len() < range.len() {
range = node_range;
scope = Some(LanguageScope {
language: layer.language.clone(),
override_id: layer.override_id(offset, &self.text),
});
} }
} }
scope
}
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) { pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
let mut start = start.to_offset(self); let mut start = start.to_offset(self);
let mut end = start; let mut end = start;
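Aside: the layer-selection rule used by the reworked language_scope_at above (prefer the layer whose innermost node around the offset spans the fewest bytes) can be stated in a few lines without tree-sitter. A rough standalone sketch; the names and data shapes are illustrative only:

use std::ops::Range;

// Pick the layer whose innermost node around `offset` spans the fewest bytes,
// mirroring the "smallest node wins" rule.
fn smallest_layer_at<'a>(offset: usize, layers: &'a [(&'a str, Range<usize>)]) -> Option<&'a str> {
    layers
        .iter()
        .filter(|(_, node_range)| node_range.start <= offset && offset <= node_range.end)
        .min_by_key(|(_, node_range)| node_range.len())
        .map(|(name, _)| *name)
}

fn main() {
    // An HTML buffer whose injected JavaScript layer covers bytes 40..80.
    let layers = [("HTML", 0..120), ("JavaScript", 40..80)];
    // Inside the script region the narrower JavaScript layer wins;
    // outside it, only the HTML layer matches.
    assert_eq!(smallest_layer_at(60, &layers), Some("JavaScript"));
    assert_eq!(smallest_layer_at(10, &layers), Some("HTML"));
}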

View file

@ -1533,47 +1533,9 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
]) ])
}); });
let html_language = Arc::new( let html_language = Arc::new(html_lang());
Language::new(
LanguageConfig {
name: "HTML".into(),
..Default::default()
},
Some(tree_sitter_html::language()),
)
.with_indents_query(
"
(element
(start_tag) @start
(end_tag)? @end) @indent
",
)
.unwrap()
.with_injection_query(
r#"
(script_element
(raw_text) @content
(#set! "language" "javascript"))
"#,
)
.unwrap(),
);
let javascript_language = Arc::new( let javascript_language = Arc::new(javascript_lang());
Language::new(
LanguageConfig {
name: "JavaScript".into(),
..Default::default()
},
Some(tree_sitter_javascript::language()),
)
.with_indents_query(
r#"
(object "}" @end) @indent
"#,
)
.unwrap(),
);
let language_registry = Arc::new(LanguageRegistry::test()); let language_registry = Arc::new(LanguageRegistry::test());
language_registry.add(html_language.clone()); language_registry.add(html_language.clone());
@ -1669,7 +1631,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
} }
#[gpui::test] #[gpui::test]
fn test_language_config_at(cx: &mut AppContext) { fn test_language_scope_at(cx: &mut AppContext) {
init_settings(cx, |_| {}); init_settings(cx, |_| {});
cx.add_model(|cx| { cx.add_model(|cx| {
@ -1709,7 +1671,7 @@ fn test_language_config_at(cx: &mut AppContext) {
.collect(), .collect(),
..Default::default() ..Default::default()
}, },
Some(tree_sitter_javascript::language()), Some(tree_sitter_typescript::language_tsx()),
) )
.with_override_query( .with_override_query(
r#" r#"
@ -1756,6 +1718,54 @@ fn test_language_config_at(cx: &mut AppContext) {
}); });
} }
#[gpui::test]
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
init_settings(cx, |_| {});
cx.add_model(|cx| {
let text = r#"
<ol>
<% people.each do |person| %>
<li>
<%= person.name %>
</li>
<% end %>
</ol>
"#
.unindent();
let language_registry = Arc::new(LanguageRegistry::test());
language_registry.add(Arc::new(ruby_lang()));
language_registry.add(Arc::new(html_lang()));
language_registry.add(Arc::new(erb_lang()));
let mut buffer = Buffer::new(0, text, cx);
buffer.set_language_registry(language_registry.clone());
buffer.set_language(
language_registry
.language_for_name("ERB")
.now_or_never()
.unwrap()
.ok(),
cx,
);
let snapshot = buffer.snapshot();
let html_config = snapshot.language_scope_at(Point::new(2, 4)).unwrap();
assert_eq!(html_config.line_comment_prefix(), None);
assert_eq!(
html_config.block_comment_delimiters(),
Some((&"<!--".into(), &"-->".into()))
);
let ruby_config = snapshot.language_scope_at(Point::new(3, 12)).unwrap();
assert_eq!(ruby_config.line_comment_prefix().unwrap().as_ref(), "# ");
assert_eq!(ruby_config.block_comment_delimiters(), None);
buffer
});
}
#[gpui::test] #[gpui::test]
fn test_serialization(cx: &mut gpui::AppContext) { fn test_serialization(cx: &mut gpui::AppContext) {
let mut now = Instant::now(); let mut now = Instant::now();
@ -2143,6 +2153,7 @@ fn ruby_lang() -> Language {
LanguageConfig { LanguageConfig {
name: "Ruby".into(), name: "Ruby".into(),
path_suffixes: vec!["rb".to_string()], path_suffixes: vec!["rb".to_string()],
line_comment: Some("# ".into()),
..Default::default() ..Default::default()
}, },
Some(tree_sitter_ruby::language()), Some(tree_sitter_ruby::language()),
@ -2158,6 +2169,61 @@ fn ruby_lang() -> Language {
.unwrap() .unwrap()
} }
fn html_lang() -> Language {
Language::new(
LanguageConfig {
name: "HTML".into(),
block_comment: Some(("<!--".into(), "-->".into())),
..Default::default()
},
Some(tree_sitter_html::language()),
)
.with_indents_query(
"
(element
(start_tag) @start
(end_tag)? @end) @indent
",
)
.unwrap()
.with_injection_query(
r#"
(script_element
(raw_text) @content
(#set! "language" "javascript"))
"#,
)
.unwrap()
}
fn erb_lang() -> Language {
Language::new(
LanguageConfig {
name: "ERB".into(),
path_suffixes: vec!["erb".to_string()],
block_comment: Some(("<%#".into(), "%>".into())),
..Default::default()
},
Some(tree_sitter_embedded_template::language()),
)
.with_injection_query(
r#"
(
(code) @content
(#set! "language" "ruby")
(#set! "combined")
)
(
(content) @content
(#set! "language" "html")
(#set! "combined")
)
"#,
)
.unwrap()
}
fn rust_lang() -> Language { fn rust_lang() -> Language {
Language::new( Language::new(
LanguageConfig { LanguageConfig {
@ -2227,7 +2293,7 @@ fn javascript_lang() -> Language {
name: "JavaScript".into(), name: "JavaScript".into(),
..Default::default() ..Default::default()
}, },
Some(tree_sitter_javascript::language()), Some(tree_sitter_typescript::language_tsx()),
) )
.with_brackets_query( .with_brackets_query(
r#" r#"
@ -2236,6 +2302,12 @@ fn javascript_lang() -> Language {
"#, "#,
) )
.unwrap() .unwrap()
.with_indents_query(
r#"
(object "}" @end) @indent
"#,
)
.unwrap()
} }
fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String { fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {

View file

@ -830,6 +830,7 @@ impl LanguageRegistry {
Ok(language) => { Ok(language) => {
let language = Arc::new(language); let language = Arc::new(language);
let mut state = this.state.write(); let mut state = this.state.write();
state.add(language.clone()); state.add(language.clone());
state.mark_language_loaded(id); state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) { if let Some(mut txs) = state.loading_languages.remove(&id) {
@ -1787,7 +1788,7 @@ mod tests {
first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()), first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()),
..Default::default() ..Default::default()
}, },
tree_sitter_javascript::language(), tree_sitter_typescript::language_tsx(),
vec![], vec![],
|_| Default::default(), |_| Default::default(),
); );

View file

@ -569,11 +569,19 @@ impl SyntaxSnapshot {
range.end = range.end.saturating_sub(step_start_byte); range.end = range.end.saturating_sub(step_start_byte);
} }
included_ranges = splice_included_ranges( let changed_indices;
(included_ranges, changed_indices) = splice_included_ranges(
old_tree.included_ranges(), old_tree.included_ranges(),
&parent_layer_changed_ranges, &parent_layer_changed_ranges,
&included_ranges, &included_ranges,
); );
insert_newlines_between_ranges(
changed_indices,
&mut included_ranges,
&text,
step_start_byte,
step_start_point,
);
} }
if included_ranges.is_empty() { if included_ranges.is_empty() {
@ -586,7 +594,7 @@ impl SyntaxSnapshot {
} }
log::trace!( log::trace!(
"update layer. language:{}, start:{:?}, ranges:{:?}", "update layer. language:{}, start:{:?}, included_ranges:{:?}",
language.name(), language.name(),
LogAnchorRange(&step.range, text), LogAnchorRange(&step.range, text),
LogIncludedRanges(&included_ranges), LogIncludedRanges(&included_ranges),
@ -608,6 +616,16 @@ impl SyntaxSnapshot {
}), }),
); );
} else { } else {
if matches!(step.mode, ParseMode::Combined { .. }) {
insert_newlines_between_ranges(
0..included_ranges.len(),
&mut included_ranges,
text,
step_start_byte,
step_start_point,
);
}
if included_ranges.is_empty() { if included_ranges.is_empty() {
included_ranges.push(tree_sitter::Range { included_ranges.push(tree_sitter::Range {
start_byte: 0, start_byte: 0,
@ -771,8 +789,10 @@ impl SyntaxSnapshot {
range: Range<T>, range: Range<T>,
buffer: &'a BufferSnapshot, buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = SyntaxLayerInfo> { ) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
let start = buffer.anchor_before(range.start.to_offset(buffer)); let start_offset = range.start.to_offset(buffer);
let end = buffer.anchor_after(range.end.to_offset(buffer)); let end_offset = range.end.to_offset(buffer);
let start = buffer.anchor_before(start_offset);
let end = buffer.anchor_after(end_offset);
let mut cursor = self.layers.filter::<_, ()>(move |summary| { let mut cursor = self.layers.filter::<_, ()>(move |summary| {
if summary.max_depth > summary.min_depth { if summary.max_depth > summary.min_depth {
@ -787,20 +807,21 @@ impl SyntaxSnapshot {
cursor.next(buffer); cursor.next(buffer);
iter::from_fn(move || { iter::from_fn(move || {
while let Some(layer) = cursor.item() { while let Some(layer) = cursor.item() {
let mut info = None;
if let SyntaxLayerContent::Parsed { tree, language } = &layer.content { if let SyntaxLayerContent::Parsed { tree, language } = &layer.content {
let info = SyntaxLayerInfo { let layer_start_offset = layer.range.start.to_offset(buffer);
let layer_start_point = layer.range.start.to_point(buffer).to_ts_point();
info = Some(SyntaxLayerInfo {
tree, tree,
language, language,
depth: layer.depth, depth: layer.depth,
offset: ( offset: (layer_start_offset, layer_start_point),
layer.range.start.to_offset(buffer), });
layer.range.start.to_point(buffer).to_ts_point(), }
),
};
cursor.next(buffer);
return Some(info);
} else {
cursor.next(buffer); cursor.next(buffer);
if info.is_some() {
return info;
} }
} }
None None
@ -1272,14 +1293,20 @@ fn get_injections(
} }
} }
/// Update the given list of included `ranges`, removing any ranges that intersect
/// `removed_ranges`, and inserting the given `new_ranges`.
///
/// Returns a new vector of ranges, and the range of the vector that was changed,
/// from the previous `ranges` vector.
pub(crate) fn splice_included_ranges( pub(crate) fn splice_included_ranges(
mut ranges: Vec<tree_sitter::Range>, mut ranges: Vec<tree_sitter::Range>,
removed_ranges: &[Range<usize>], removed_ranges: &[Range<usize>],
new_ranges: &[tree_sitter::Range], new_ranges: &[tree_sitter::Range],
) -> Vec<tree_sitter::Range> { ) -> (Vec<tree_sitter::Range>, Range<usize>) {
let mut removed_ranges = removed_ranges.iter().cloned().peekable(); let mut removed_ranges = removed_ranges.iter().cloned().peekable();
let mut new_ranges = new_ranges.into_iter().cloned().peekable(); let mut new_ranges = new_ranges.into_iter().cloned().peekable();
let mut ranges_ix = 0; let mut ranges_ix = 0;
let mut changed_portion = usize::MAX..0;
loop { loop {
let next_new_range = new_ranges.peek(); let next_new_range = new_ranges.peek();
let next_removed_range = removed_ranges.peek(); let next_removed_range = removed_ranges.peek();
@ -1341,11 +1368,69 @@ pub(crate) fn splice_included_ranges(
} }
} }
changed_portion.start = changed_portion.start.min(start_ix);
changed_portion.end = changed_portion.end.max(if insert.is_some() {
start_ix + 1
} else {
start_ix
});
ranges.splice(start_ix..end_ix, insert); ranges.splice(start_ix..end_ix, insert);
ranges_ix = start_ix; ranges_ix = start_ix;
} }
ranges if changed_portion.end < changed_portion.start {
changed_portion = 0..0;
}
(ranges, changed_portion)
}
/// Ensure there are newline ranges in between content ranges that appear on
/// different lines. For performance, only iterate through the given range of
/// indices. All of the ranges in the array are relative to a given start byte
/// and point.
fn insert_newlines_between_ranges(
indices: Range<usize>,
ranges: &mut Vec<tree_sitter::Range>,
text: &text::BufferSnapshot,
start_byte: usize,
start_point: Point,
) {
let mut ix = indices.end + 1;
while ix > indices.start {
ix -= 1;
if 0 == ix || ix == ranges.len() {
continue;
}
let range_b = ranges[ix].clone();
let range_a = &mut ranges[ix - 1];
if range_a.end_point.column == 0 {
continue;
}
if range_a.end_point.row < range_b.start_point.row {
let end_point = start_point + Point::from_ts_point(range_a.end_point);
let line_end = Point::new(end_point.row, text.line_len(end_point.row));
if end_point.column as u32 >= line_end.column {
range_a.end_byte += 1;
range_a.end_point.row += 1;
range_a.end_point.column = 0;
} else {
let newline_offset = text.point_to_offset(line_end);
ranges.insert(
ix,
tree_sitter::Range {
start_byte: newline_offset - start_byte,
end_byte: newline_offset - start_byte + 1,
start_point: (line_end - start_point).to_ts_point(),
end_point: ((line_end - start_point) + Point::new(1, 0)).to_ts_point(),
},
)
}
}
}
} }
impl OwnedSyntaxLayerInfo { impl OwnedSyntaxLayerInfo {
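Aside: the changed-index range that splice_included_ranges now returns is what lets callers, such as the newline-insertion pass above, limit their work to the affected slice of the vector. A rough, self-contained sketch of that contract over plain byte ranges; the splicing details are simplified and the names are illustrative, not the crate's API:

use std::ops::Range;

// Replace every range that intersects `removed` with `new_ranges`, and report
// which indices of the resulting vector were touched.
fn splice_ranges(
    mut ranges: Vec<Range<usize>>,
    removed: Range<usize>,
    new_ranges: &[Range<usize>],
) -> (Vec<Range<usize>>, Range<usize>) {
    let start_ix = ranges
        .iter()
        .position(|r| r.end > removed.start)
        .unwrap_or(ranges.len());
    let end_ix = ranges
        .iter()
        .position(|r| r.start >= removed.end)
        .unwrap_or(ranges.len());
    ranges.splice(start_ix..end_ix, new_ranges.iter().cloned());
    let changed = start_ix..start_ix + new_ranges.len();
    (ranges, changed)
}

fn main() {
    let ranges = vec![20..30, 50..60, 80..90];
    let (ranges, changed) = splice_ranges(ranges, 54..68, &[50..54, 69..75]);
    assert_eq!(ranges, vec![20..30, 50..54, 69..75, 80..90]);
    // Only indices 1..3 changed, so a follow-up pass can restrict itself
    // to that window instead of re-scanning every range.
    assert_eq!(changed, 1..3);
}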

View file

@ -11,7 +11,7 @@ use util::test::marked_text_ranges;
fn test_splice_included_ranges() { fn test_splice_included_ranges() {
let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)]; let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];
let new_ranges = splice_included_ranges( let (new_ranges, change) = splice_included_ranges(
ranges.clone(), ranges.clone(),
&[54..56, 58..68], &[54..56, 58..68],
&[ts_range(50..54), ts_range(59..67)], &[ts_range(50..54), ts_range(59..67)],
@ -25,14 +25,16 @@ fn test_splice_included_ranges() {
ts_range(80..90), ts_range(80..90),
] ]
); );
assert_eq!(change, 1..3);
let new_ranges = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]); let (new_ranges, change) = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
assert_eq!( assert_eq!(
new_ranges, new_ranges,
&[ts_range(20..30), ts_range(50..60), ts_range(80..90)] &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
); );
assert_eq!(change, 2..3);
let new_ranges = let (new_ranges, change) =
splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]); splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
assert_eq!( assert_eq!(
new_ranges, new_ranges,
@ -44,16 +46,21 @@ fn test_splice_included_ranges() {
ts_range(80..90) ts_range(80..90)
] ]
); );
assert_eq!(change, 0..4);
let new_ranges = splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]); let (new_ranges, change) =
splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]); assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
assert_eq!(change, 0..1);
// does not create overlapping ranges // does not create overlapping ranges
let new_ranges = splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]); let (new_ranges, change) =
splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
assert_eq!( assert_eq!(
new_ranges, new_ranges,
&[ts_range(20..32), ts_range(50..60), ts_range(80..90)] &[ts_range(20..32), ts_range(50..60), ts_range(80..90)]
); );
assert_eq!(change, 0..1);
fn ts_range(range: Range<usize>) -> tree_sitter::Range { fn ts_range(range: Range<usize>) -> tree_sitter::Range {
tree_sitter::Range { tree_sitter::Range {
@ -511,7 +518,7 @@ fn test_removing_injection_by_replacing_across_boundary() {
} }
#[gpui::test] #[gpui::test]
fn test_combined_injections() { fn test_combined_injections_simple() {
let (buffer, syntax_map) = test_edit_sequence( let (buffer, syntax_map) = test_edit_sequence(
"ERB", "ERB",
&[ &[
@ -653,33 +660,78 @@ fn test_combined_injections_editing_after_last_injection() {
#[gpui::test] #[gpui::test]
fn test_combined_injections_inside_injections() { fn test_combined_injections_inside_injections() {
let (_buffer, _syntax_map) = test_edit_sequence( let (buffer, syntax_map) = test_edit_sequence(
"Markdown", "Markdown",
&[ &[
r#" r#"
here is some ERB code: here is
some
ERB code:
```erb ```erb
<ul> <ul>
<% people.each do |person| %> <% people.each do |person| %>
<li><%= person.name %></li> <li><%= person.name %></li>
<li><%= person.age %></li>
<% end %> <% end %>
</ul> </ul>
``` ```
"#, "#,
r#" r#"
here is some ERB code: here is
some
ERB code:
```erb ```erb
<ul> <ul>
<% people«2».each do |person| %> <% people«2».each do |person| %>
<li><%= person.name %></li> <li><%= person.name %></li>
<li><%= person.age %></li>
<% end %>
</ul>
```
"#,
// Inserting a comment character inside one code directive
// does not cause the other code directive to become a comment,
// because newlines are included in between each injection range.
r#"
here is
some
ERB code:
```erb
<ul>
<% people2.each do |person| %>
<li><%= «# »person.name %></li>
<li><%= person.age %></li>
<% end %> <% end %>
</ul> </ul>
``` ```
"#, "#,
], ],
); );
// Check that the code directive below the ruby comment is
// not parsed as a comment.
assert_capture_ranges(
&syntax_map,
&buffer,
&["method"],
"
here is
some
ERB code:
```erb
<ul>
<% people2.«each» do |person| %>
<li><%= # person.name %></li>
<li><%= person.«age» %></li>
<% end %>
</ul>
```
",
);
} }
#[gpui::test] #[gpui::test]
@ -711,11 +763,7 @@ fn test_empty_combined_injections_inside_injections() {
} }
#[gpui::test(iterations = 50)] #[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits(mut rng: StdRng) { fn test_random_syntax_map_edits_rust_macros(rng: StdRng) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let text = r#" let text = r#"
fn test_something() { fn test_something() {
let vec = vec![5, 1, 3, 8]; let vec = vec![5, 1, 3, 8];
@ -736,68 +784,12 @@ fn test_random_syntax_map_edits(mut rng: StdRng) {
let registry = Arc::new(LanguageRegistry::test()); let registry = Arc::new(LanguageRegistry::test());
let language = Arc::new(rust_lang()); let language = Arc::new(rust_lang());
registry.add(language.clone()); registry.add(language.clone());
let mut buffer = Buffer::new(0, 0, text);
let mut syntax_map = SyntaxMap::new(); test_random_edits(text, registry, language, rng);
syntax_map.set_language_registry(registry.clone());
syntax_map.reparse(language.clone(), &buffer);
let mut reference_syntax_map = SyntaxMap::new();
reference_syntax_map.set_language_registry(registry.clone());
log::info!("initial text:\n{}", buffer.text());
for _ in 0..operations {
let prev_buffer = buffer.snapshot();
let prev_syntax_map = syntax_map.snapshot();
buffer.randomly_edit(&mut rng, 3);
log::info!("text:\n{}", buffer.text());
syntax_map.interpolate(&buffer);
check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
syntax_map.reparse(language.clone(), &buffer);
reference_syntax_map.clear();
reference_syntax_map.reparse(language.clone(), &buffer);
}
for i in 0..operations {
let i = operations - i - 1;
buffer.undo();
log::info!("undoing operation {}", i);
log::info!("text:\n{}", buffer.text());
syntax_map.interpolate(&buffer);
syntax_map.reparse(language.clone(), &buffer);
reference_syntax_map.clear();
reference_syntax_map.reparse(language.clone(), &buffer);
assert_eq!(
syntax_map.layers(&buffer).len(),
reference_syntax_map.layers(&buffer).len(),
"wrong number of layers after undoing edit {i}"
);
}
let layers = syntax_map.layers(&buffer);
let reference_layers = reference_syntax_map.layers(&buffer);
for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
assert_eq!(
edited_layer.node().to_sexp(),
reference_layer.node().to_sexp()
);
assert_eq!(edited_layer.node().range(), reference_layer.node().range());
}
} }
#[gpui::test(iterations = 50)] #[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) { fn test_random_syntax_map_edits_with_erb(rng: StdRng) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let text = r#" let text = r#"
<div id="main"> <div id="main">
<% if one?(:two) %> <% if one?(:two) %>
@ -814,13 +806,60 @@ fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) {
</div> </div>
"# "#
.unindent() .unindent()
.repeat(8); .repeat(5);
let registry = Arc::new(LanguageRegistry::test()); let registry = Arc::new(LanguageRegistry::test());
let language = Arc::new(erb_lang()); let language = Arc::new(erb_lang());
registry.add(language.clone()); registry.add(language.clone());
registry.add(Arc::new(ruby_lang())); registry.add(Arc::new(ruby_lang()));
registry.add(Arc::new(html_lang())); registry.add(Arc::new(html_lang()));
test_random_edits(text, registry, language, rng);
}
#[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits_with_heex(rng: StdRng) {
let text = r#"
defmodule TheModule do
def the_method(assigns) do
~H"""
<%= if @empty do %>
<div class="h-4"></div>
<% else %>
<div class="max-w-2xl w-full animate-pulse">
<div class="flex-1 space-y-4">
<div class={[@bg_class, "h-4 rounded-lg w-3/4"]}></div>
<div class={[@bg_class, "h-4 rounded-lg"]}></div>
<div class={[@bg_class, "h-4 rounded-lg w-5/6"]}></div>
</div>
</div>
<% end %>
"""
end
end
"#
.unindent()
.repeat(3);
let registry = Arc::new(LanguageRegistry::test());
let language = Arc::new(elixir_lang());
registry.add(language.clone());
registry.add(Arc::new(heex_lang()));
registry.add(Arc::new(html_lang()));
test_random_edits(text, registry, language, rng);
}
fn test_random_edits(
text: String,
registry: Arc<LanguageRegistry>,
language: Arc<Language>,
mut rng: StdRng,
) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let mut buffer = Buffer::new(0, 0, text); let mut buffer = Buffer::new(0, 0, text);
let mut syntax_map = SyntaxMap::new(); let mut syntax_map = SyntaxMap::new();
@ -984,11 +1023,14 @@ fn check_interpolation(
fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) { fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
let registry = Arc::new(LanguageRegistry::test()); let registry = Arc::new(LanguageRegistry::test());
registry.add(Arc::new(elixir_lang()));
registry.add(Arc::new(heex_lang()));
registry.add(Arc::new(rust_lang())); registry.add(Arc::new(rust_lang()));
registry.add(Arc::new(ruby_lang())); registry.add(Arc::new(ruby_lang()));
registry.add(Arc::new(html_lang())); registry.add(Arc::new(html_lang()));
registry.add(Arc::new(erb_lang())); registry.add(Arc::new(erb_lang()));
registry.add(Arc::new(markdown_lang())); registry.add(Arc::new(markdown_lang()));
let language = registry let language = registry
.language_for_name(language_name) .language_for_name(language_name)
.now_or_never() .now_or_never()
@ -1074,6 +1116,7 @@ fn ruby_lang() -> Language {
r#" r#"
["if" "do" "else" "end"] @keyword ["if" "do" "else" "end"] @keyword
(instance_variable) @ivar (instance_variable) @ivar
(call method: (identifier) @method)
"#, "#,
) )
.unwrap() .unwrap()
@ -1158,6 +1201,52 @@ fn markdown_lang() -> Language {
.unwrap() .unwrap()
} }
fn elixir_lang() -> Language {
Language::new(
LanguageConfig {
name: "Elixir".into(),
path_suffixes: vec!["ex".into()],
..Default::default()
},
Some(tree_sitter_elixir::language()),
)
.with_highlights_query(
r#"
"#,
)
.unwrap()
}
fn heex_lang() -> Language {
Language::new(
LanguageConfig {
name: "HEEx".into(),
path_suffixes: vec!["heex".into()],
..Default::default()
},
Some(tree_sitter_heex::language()),
)
.with_injection_query(
r#"
(
(directive
[
(partial_expression_value)
(expression_value)
(ending_expression_value)
] @content)
(#set! language "elixir")
(#set! combined)
)
((expression (expression_value) @content)
(#set! language "elixir"))
"#,
)
.unwrap()
}
fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> { fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
let start = buffer.as_rope().to_string().find(text).unwrap(); let start = buffer.as_rope().to_string().find(text).unwrap();
start..start + text.len() start..start + text.len()

View file

@ -93,7 +93,7 @@ impl PickerDelegate for LanguageSelectorDelegate {
self.matches.len() self.matches.len()
} }
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
if let Some(mat) = self.matches.get(self.selected_index) { if let Some(mat) = self.matches.get(self.selected_index) {
let language_name = &self.candidates[mat.candidate_id].string; let language_name = &self.candidates[mat.candidate_id].string;
let language = self.language_registry.language_for_name(language_name); let language = self.language_registry.language_for_name(language_name);

View file

@ -467,8 +467,13 @@ impl Item for LspLogView {
impl SearchableItem for LspLogView { impl SearchableItem for LspLogView {
type Match = <Editor as SearchableItem>::Match; type Match = <Editor as SearchableItem>::Match;
fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> { fn to_search_event(
Editor::to_search_event(event) &mut self,
event: &Self::Event,
cx: &mut ViewContext<Self>,
) -> Option<workspace::searchable::SearchEvent> {
self.editor
.update(cx, |editor, cx| editor.to_search_event(event, cx))
} }
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) { fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
@ -494,6 +499,11 @@ impl SearchableItem for LspLogView {
.update(cx, |e, cx| e.activate_match(index, matches, cx)) .update(cx, |e, cx| e.activate_match(index, matches, cx))
} }
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |e, cx| e.select_matches(matches, cx))
}
fn find_matches( fn find_matches(
&mut self, &mut self,
query: project::search::SearchQuery, query: project::search::SearchQuery,

View file

@ -151,16 +151,17 @@ impl LanguageServer {
let stdin = server.stdin.take().unwrap(); let stdin = server.stdin.take().unwrap();
let stout = server.stdout.take().unwrap(); let stout = server.stdout.take().unwrap();
let mut server = Self::new_internal( let mut server = Self::new_internal(
server_id, server_id.clone(),
stdin, stdin,
stout, stout,
Some(server), Some(server),
root_path, root_path,
code_action_kinds, code_action_kinds,
cx, cx,
|notification| { move |notification| {
log::info!( log::info!(
"unhandled notification {}:\n{}", "{} unhandled notification {}:\n{}",
server_id,
notification.method, notification.method,
serde_json::to_string_pretty( serde_json::to_string_pretty(
&notification &notification

View file

@ -3,6 +3,7 @@ gpui::actions!(
[ [
Cancel, Cancel,
Confirm, Confirm,
SecondaryConfirm,
SelectPrev, SelectPrev,
SelectNext, SelectNext,
SelectFirst, SelectFirst,

View file

@ -6,13 +6,13 @@ use futures::{future::Shared, FutureExt};
use gpui::{executor::Background, Task}; use gpui::{executor::Background, Task};
use serde::Deserialize; use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command}; use smol::{fs, io::BufReader, process::Command};
use std::process::Output; use std::process::{Output, Stdio};
use std::{ use std::{
env::consts, env::consts,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::{Arc, OnceLock}, sync::{Arc, OnceLock},
}; };
use util::{http::HttpClient, ResultExt}; use util::http::HttpClient;
const VERSION: &str = "v18.15.0"; const VERSION: &str = "v18.15.0";
@ -84,9 +84,8 @@ impl NodeRuntime {
}; };
let installation_path = self.install_if_needed().await?; let installation_path = self.install_if_needed().await?;
let mut output = attempt(installation_path).await; let mut output = attempt(installation_path.clone()).await;
if output.is_err() { if output.is_err() {
let installation_path = self.reinstall().await?;
output = attempt(installation_path).await; output = attempt(installation_path).await;
if output.is_err() { if output.is_err() {
return Err(anyhow!( return Err(anyhow!(
@ -158,29 +157,6 @@ impl NodeRuntime {
Ok(()) Ok(())
} }
async fn reinstall(&self) -> Result<PathBuf> {
log::info!("beginnning to reinstall Node runtime");
let mut installation_path = self.installation_path.lock().await;
if let Some(task) = installation_path.as_ref().cloned() {
if let Ok(installation_path) = task.await {
smol::fs::remove_dir_all(&installation_path)
.await
.context("node dir removal")
.log_err();
}
}
let http = self.http.clone();
let task = self
.background
.spawn(async move { Self::install(http).await.map_err(Arc::new) })
.shared();
*installation_path = Some(task.clone());
task.await.map_err(|e| anyhow!("{}", e))
}
async fn install_if_needed(&self) -> Result<PathBuf> { async fn install_if_needed(&self) -> Result<PathBuf> {
let task = self let task = self
.installation_path .installation_path
@ -209,8 +185,19 @@ impl NodeRuntime {
let node_containing_dir = util::paths::SUPPORT_DIR.join("node"); let node_containing_dir = util::paths::SUPPORT_DIR.join("node");
let node_dir = node_containing_dir.join(folder_name); let node_dir = node_containing_dir.join(folder_name);
let node_binary = node_dir.join("bin/node"); let node_binary = node_dir.join("bin/node");
let npm_file = node_dir.join("bin/npm");
if fs::metadata(&node_binary).await.is_err() { let result = Command::new(&node_binary)
.arg(npm_file)
.arg("--version")
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.await;
let valid = matches!(result, Ok(status) if status.success());
if !valid {
_ = fs::remove_dir_all(&node_containing_dir).await; _ = fs::remove_dir_all(&node_containing_dir).await;
fs::create_dir(&node_containing_dir) fs::create_dir(&node_containing_dir)
.await .await
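Aside: the install check in this hunk replaces a bare "does the binary exist" test with "run node against npm and see whether it succeeds". A synchronous, self-contained sketch of that probe using std::process; the paths in main are made up for illustration, and the commit itself uses smol's async Command:

use std::path::Path;
use std::process::{Command, Stdio};

// Returns true if `node <npm> --version` runs successfully, i.e. the existing
// installation is usable and does not need to be recreated.
fn node_install_is_valid(node_binary: &Path, npm_file: &Path) -> bool {
    Command::new(node_binary)
        .arg(npm_file)
        .arg("--version")
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .map(|status| status.success())
        .unwrap_or(false)
}

fn main() {
    let node = Path::new("/tmp/example-node/bin/node");
    let npm = Path::new("/tmp/example-node/bin/npm");
    if !node_install_is_valid(node, npm) {
        // This is the point at which the directory would be removed
        // and the runtime re-downloaded.
        println!("node installation missing or broken; would reinstall");
    }
}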

View file

@ -177,7 +177,7 @@ impl PickerDelegate for OutlineViewDelegate {
Task::ready(()) Task::ready(())
} }
fn confirm(&mut self, cx: &mut ViewContext<OutlineView>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<OutlineView>) {
self.prev_scroll_position.take(); self.prev_scroll_position.take();
self.active_editor.update(cx, |active_editor, cx| { self.active_editor.update(cx, |active_editor, cx| {
if let Some(rows) = active_editor.highlighted_rows() { if let Some(rows) = active_editor.highlighted_rows() {

View file

@ -7,7 +7,7 @@ use gpui::{
AnyElement, AnyViewHandle, AppContext, Axis, Entity, MouseState, Task, View, ViewContext, AnyElement, AnyViewHandle, AppContext, Axis, Entity, MouseState, Task, View, ViewContext,
ViewHandle, ViewHandle,
}; };
use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev}; use menu::{Cancel, Confirm, SecondaryConfirm, SelectFirst, SelectLast, SelectNext, SelectPrev};
use parking_lot::Mutex; use parking_lot::Mutex;
use std::{cmp, sync::Arc}; use std::{cmp, sync::Arc};
use util::ResultExt; use util::ResultExt;
@ -34,7 +34,7 @@ pub trait PickerDelegate: Sized + 'static {
fn selected_index(&self) -> usize; fn selected_index(&self) -> usize;
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>); fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>);
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()>; fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()>;
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>); fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<Picker<Self>>);
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>); fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>);
fn render_match( fn render_match(
&self, &self,
@ -118,8 +118,8 @@ impl<D: PickerDelegate> View for Picker<D> {
// Capture mouse events // Capture mouse events
.on_down(MouseButton::Left, |_, _, _| {}) .on_down(MouseButton::Left, |_, _, _| {})
.on_up(MouseButton::Left, |_, _, _| {}) .on_up(MouseButton::Left, |_, _, _| {})
.on_click(MouseButton::Left, move |_, picker, cx| { .on_click(MouseButton::Left, move |click, picker, cx| {
picker.select_index(ix, cx); picker.select_index(ix, click.cmd, cx);
}) })
.with_cursor_style(CursorStyle::PointingHand) .with_cursor_style(CursorStyle::PointingHand)
.into_any() .into_any()
@ -175,6 +175,7 @@ impl<D: PickerDelegate> Picker<D> {
cx.add_action(Self::select_next); cx.add_action(Self::select_next);
cx.add_action(Self::select_prev); cx.add_action(Self::select_prev);
cx.add_action(Self::confirm); cx.add_action(Self::confirm);
cx.add_action(Self::secondary_confirm);
cx.add_action(Self::cancel); cx.add_action(Self::cancel);
} }
@ -288,11 +289,11 @@ impl<D: PickerDelegate> Picker<D> {
cx.notify(); cx.notify();
} }
pub fn select_index(&mut self, index: usize, cx: &mut ViewContext<Self>) { pub fn select_index(&mut self, index: usize, cmd: bool, cx: &mut ViewContext<Self>) {
if self.delegate.match_count() > 0 { if self.delegate.match_count() > 0 {
self.confirmed = true; self.confirmed = true;
self.delegate.set_selected_index(index, cx); self.delegate.set_selected_index(index, cx);
self.delegate.confirm(cx); self.delegate.confirm(cmd, cx);
} }
} }
@ -330,7 +331,12 @@ impl<D: PickerDelegate> Picker<D> {
pub fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) { pub fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
self.confirmed = true; self.confirmed = true;
self.delegate.confirm(cx); self.delegate.confirm(false, cx);
}
pub fn secondary_confirm(&mut self, _: &SecondaryConfirm, cx: &mut ViewContext<Self>) {
self.confirmed = true;
self.delegate.confirm(true, cx);
} }
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) { fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {

View file

@ -2709,7 +2709,6 @@ impl Project {
Some(language_server) => language_server, Some(language_server) => language_server,
None => return Ok(None), None => return Ok(None),
}; };
let this = match this.upgrade(cx) { let this = match this.upgrade(cx) {
Some(this) => this, Some(this) => this,
None => return Err(anyhow!("failed to upgrade project handle")), None => return Err(anyhow!("failed to upgrade project handle")),
@ -3045,6 +3044,8 @@ impl Project {
) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> { ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
let key = (worktree_id, adapter_name); let key = (worktree_id, adapter_name);
if let Some(server_id) = self.language_server_ids.remove(&key) { if let Some(server_id) = self.language_server_ids.remove(&key) {
log::info!("stopping language server {}", key.1 .0);
// Remove other entries for this language server as well // Remove other entries for this language server as well
let mut orphaned_worktrees = vec![worktree_id]; let mut orphaned_worktrees = vec![worktree_id];
let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>(); let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();

View file

@ -397,6 +397,7 @@ impl Worktree {
})) }))
} }
// abcdefghi
pub fn remote( pub fn remote(
project_remote_id: u64, project_remote_id: u64,
replica_id: ReplicaId, replica_id: ReplicaId,
@ -2022,6 +2023,9 @@ impl LocalSnapshot {
) -> Vec<Arc<Path>> { ) -> Vec<Arc<Path>> {
let mut changes = vec![]; let mut changes = vec![];
let mut edits = vec![]; let mut edits = vec![];
let statuses = repo_ptr.statuses();
for mut entry in self for mut entry in self
.descendent_entries(false, false, &work_directory.0) .descendent_entries(false, false, &work_directory.0)
.cloned() .cloned()
@ -2029,10 +2033,8 @@ impl LocalSnapshot {
let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else { let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
continue; continue;
}; };
let git_file_status = repo_ptr let repo_path = RepoPath(repo_path.to_path_buf());
.status(&RepoPath(repo_path.into())) let git_file_status = statuses.as_ref().and_then(|s| s.get(&repo_path).copied());
.log_err()
.flatten();
if entry.git_status != git_file_status { if entry.git_status != git_file_status {
entry.git_status = git_file_status; entry.git_status = git_file_status;
changes.push(entry.path.clone()); changes.push(entry.path.clone());

View file

@ -159,6 +159,9 @@ pub enum Event {
entry_id: ProjectEntryId, entry_id: ProjectEntryId,
focus_opened_item: bool, focus_opened_item: bool,
}, },
SplitEntry {
entry_id: ProjectEntryId,
},
DockPositionChanged, DockPositionChanged,
Focus, Focus,
} }
@ -290,6 +293,21 @@ impl ProjectPanel {
} }
} }
} }
&Event::SplitEntry { entry_id } => {
if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) {
if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) {
workspace
.split_path(
ProjectPath {
worktree_id: worktree.read(cx).id(),
path: entry.path.clone(),
},
cx,
)
.detach_and_log_err(cx);
}
}
}
_ => {} _ => {}
} }
}) })
@ -620,6 +638,10 @@ impl ProjectPanel {
}); });
} }
fn split_entry(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
cx.emit(Event::SplitEntry { entry_id });
}
fn new_file(&mut self, _: &NewFile, cx: &mut ViewContext<Self>) { fn new_file(&mut self, _: &NewFile, cx: &mut ViewContext<Self>) {
self.add_entry(false, cx) self.add_entry(false, cx)
} }
@ -1333,9 +1355,13 @@ impl ProjectPanel {
if kind.is_dir() { if kind.is_dir() {
this.toggle_expanded(entry_id, cx); this.toggle_expanded(entry_id, cx);
} else { } else {
if event.cmd {
this.split_entry(entry_id, cx);
} else if !event.cmd {
this.open_entry(entry_id, event.click_count > 1, cx); this.open_entry(entry_id, event.click_count > 1, cx);
} }
} }
}
}) })
.on_down(MouseButton::Right, move |event, this, cx| { .on_down(MouseButton::Right, move |event, this, cx| {
this.deploy_context_menu(event.position, entry_id, cx); this.deploy_context_menu(event.position, entry_id, cx);

View file

@ -104,7 +104,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
"Search project symbols...".into() "Search project symbols...".into()
} }
fn confirm(&mut self, cx: &mut ViewContext<ProjectSymbols>) { fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<ProjectSymbols>) {
if let Some(symbol) = self if let Some(symbol) = self
.matches .matches
.get(self.selected_match_index) .get(self.selected_match_index)
@ -122,7 +122,12 @@ impl PickerDelegate for ProjectSymbolsDelegate {
.read(cx) .read(cx)
.clip_point_utf16(symbol.range.start, Bias::Left); .clip_point_utf16(symbol.range.start, Bias::Left);
let editor = workspace.open_project_item::<Editor>(buffer, cx); let editor = if secondary {
workspace.split_project_item::<Editor>(buffer, cx)
} else {
workspace.open_project_item::<Editor>(buffer, cx)
};
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::center()), cx, |s| { editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([position..position]) s.select_ranges([position..position])

View file

@ -161,7 +161,7 @@ impl PickerDelegate for RecentProjectsDelegate {
Task::ready(()) Task::ready(())
} }
fn confirm(&mut self, cx: &mut ViewContext<RecentProjects>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<RecentProjects>) {
if let Some((selected_match, workspace)) = self if let Some((selected_match, workspace)) = self
.matches .matches
.get(self.selected_index()) .get(self.selected_index())

View file

@ -1,6 +1,6 @@
use crate::{ use crate::{
SearchOption, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, SearchOption, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive,
ToggleWholeWord, ToggleRegex, ToggleWholeWord,
}; };
use collections::HashMap; use collections::HashMap;
use editor::Editor; use editor::Editor;
@ -39,8 +39,10 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::focus_editor); cx.add_action(BufferSearchBar::focus_editor);
cx.add_action(BufferSearchBar::select_next_match); cx.add_action(BufferSearchBar::select_next_match);
cx.add_action(BufferSearchBar::select_prev_match); cx.add_action(BufferSearchBar::select_prev_match);
cx.add_action(BufferSearchBar::select_all_matches);
cx.add_action(BufferSearchBar::select_next_match_on_pane); cx.add_action(BufferSearchBar::select_next_match_on_pane);
cx.add_action(BufferSearchBar::select_prev_match_on_pane); cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel); cx.add_action(BufferSearchBar::handle_editor_cancel);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOption::CaseSensitive, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOption::CaseSensitive, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOption::WholeWord, cx); add_toggle_option_action::<ToggleWholeWord>(SearchOption::WholeWord, cx);
@ -66,7 +68,7 @@ pub struct BufferSearchBar {
active_searchable_item: Option<Box<dyn SearchableItemHandle>>, active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>, active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>, active_searchable_item_subscription: Option<Subscription>,
seachable_items_with_matches: searchable_items_with_matches:
HashMap<Box<dyn WeakSearchableItemHandle>, Vec<Box<dyn Any + Send>>>, HashMap<Box<dyn WeakSearchableItemHandle>, Vec<Box<dyn Any + Send>>>,
pending_search: Option<Task<()>>, pending_search: Option<Task<()>>,
case_sensitive: bool, case_sensitive: bool,
@ -118,7 +120,7 @@ impl View for BufferSearchBar {
.with_children(self.active_searchable_item.as_ref().and_then( .with_children(self.active_searchable_item.as_ref().and_then(
|searchable_item| { |searchable_item| {
let matches = self let matches = self
.seachable_items_with_matches .searchable_items_with_matches
.get(&searchable_item.downgrade())?; .get(&searchable_item.downgrade())?;
let message = if let Some(match_ix) = self.active_match_index { let message = if let Some(match_ix) = self.active_match_index {
format!("{}/{}", match_ix + 1, matches.len()) format!("{}/{}", match_ix + 1, matches.len())
@ -146,6 +148,7 @@ impl View for BufferSearchBar {
Flex::row() Flex::row()
.with_child(self.render_nav_button("<", Direction::Prev, cx)) .with_child(self.render_nav_button("<", Direction::Prev, cx))
.with_child(self.render_nav_button(">", Direction::Next, cx)) .with_child(self.render_nav_button(">", Direction::Next, cx))
.with_child(self.render_action_button("Select All", cx))
.aligned(), .aligned(),
) )
.with_child( .with_child(
@ -249,7 +252,7 @@ impl BufferSearchBar {
active_searchable_item: None, active_searchable_item: None,
active_searchable_item_subscription: None, active_searchable_item_subscription: None,
active_match_index: None, active_match_index: None,
seachable_items_with_matches: Default::default(), searchable_items_with_matches: Default::default(),
case_sensitive: false, case_sensitive: false,
whole_word: false, whole_word: false,
regex: false, regex: false,
@ -265,7 +268,7 @@ impl BufferSearchBar {
pub fn dismiss(&mut self, _: &Dismiss, cx: &mut ViewContext<Self>) { pub fn dismiss(&mut self, _: &Dismiss, cx: &mut ViewContext<Self>) {
self.dismissed = true; self.dismissed = true;
for searchable_item in self.seachable_items_with_matches.keys() { for searchable_item in self.searchable_items_with_matches.keys() {
if let Some(searchable_item) = if let Some(searchable_item) =
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx) WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
{ {
@ -401,6 +404,37 @@ impl BufferSearchBar {
.into_any() .into_any()
} }
fn render_action_button(
&self,
icon: &'static str,
cx: &mut ViewContext<Self>,
) -> AnyElement<Self> {
let tooltip = "Select All Matches";
let tooltip_style = theme::current(cx).tooltip.clone();
let action_type_id = 0_usize;
enum ActionButton {}
MouseEventHandler::<ActionButton, _>::new(action_type_id, cx, |state, cx| {
let theme = theme::current(cx);
let style = theme.search.action_button.style_for(state);
Label::new(icon, style.text.clone())
.contained()
.with_style(style.container)
})
.on_click(MouseButton::Left, move |_, this, cx| {
this.select_all_matches(&SelectAllMatches, cx)
})
.with_cursor_style(CursorStyle::PointingHand)
.with_tooltip::<ActionButton>(
action_type_id,
tooltip.to_string(),
Some(Box::new(SelectAllMatches)),
tooltip_style,
cx,
)
.into_any()
}
fn render_close_button( fn render_close_button(
&self, &self,
theme: &theme::Search, theme: &theme::Search,
@ -488,11 +522,25 @@ impl BufferSearchBar {
self.select_match(Direction::Prev, cx); self.select_match(Direction::Prev, cx);
} }
fn select_all_matches(&mut self, _: &SelectAllMatches, cx: &mut ViewContext<Self>) {
if !self.dismissed {
if let Some(searchable_item) = self.active_searchable_item.as_ref() {
if let Some(matches) = self
.searchable_items_with_matches
.get(&searchable_item.downgrade())
{
searchable_item.select_matches(matches, cx);
self.focus_editor(&FocusEditor, cx);
}
}
}
}
pub fn select_match(&mut self, direction: Direction, cx: &mut ViewContext<Self>) { pub fn select_match(&mut self, direction: Direction, cx: &mut ViewContext<Self>) {
if let Some(index) = self.active_match_index { if let Some(index) = self.active_match_index {
if let Some(searchable_item) = self.active_searchable_item.as_ref() { if let Some(searchable_item) = self.active_searchable_item.as_ref() {
if let Some(matches) = self if let Some(matches) = self
.seachable_items_with_matches .searchable_items_with_matches
.get(&searchable_item.downgrade()) .get(&searchable_item.downgrade())
{ {
let new_match_index = let new_match_index =
@ -524,6 +572,16 @@ impl BufferSearchBar {
} }
} }
fn select_all_matches_on_pane(
pane: &mut Pane,
action: &SelectAllMatches,
cx: &mut ViewContext<Pane>,
) {
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
search_bar.update(cx, |bar, cx| bar.select_all_matches(action, cx));
}
}
fn on_query_editor_event( fn on_query_editor_event(
&mut self, &mut self,
_: ViewHandle<Editor>, _: ViewHandle<Editor>,
@ -547,7 +605,7 @@ impl BufferSearchBar {
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) { fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
let mut active_item_matches = None; let mut active_item_matches = None;
for (searchable_item, matches) in self.seachable_items_with_matches.drain() { for (searchable_item, matches) in self.searchable_items_with_matches.drain() {
if let Some(searchable_item) = if let Some(searchable_item) =
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx) WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
{ {
@ -559,7 +617,7 @@ impl BufferSearchBar {
} }
} }
self.seachable_items_with_matches self.searchable_items_with_matches
.extend(active_item_matches); .extend(active_item_matches);
} }
@ -605,13 +663,13 @@ impl BufferSearchBar {
if let Some(active_searchable_item) = if let Some(active_searchable_item) =
WeakSearchableItemHandle::upgrade(active_searchable_item.as_ref(), cx) WeakSearchableItemHandle::upgrade(active_searchable_item.as_ref(), cx)
{ {
this.seachable_items_with_matches this.searchable_items_with_matches
.insert(active_searchable_item.downgrade(), matches); .insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx); this.update_match_index(cx);
if !this.dismissed { if !this.dismissed {
let matches = this let matches = this
.seachable_items_with_matches .searchable_items_with_matches
.get(&active_searchable_item.downgrade()) .get(&active_searchable_item.downgrade())
.unwrap(); .unwrap();
active_searchable_item.update_matches(matches, cx); active_searchable_item.update_matches(matches, cx);
@ -637,7 +695,7 @@ impl BufferSearchBar {
.as_ref() .as_ref()
.and_then(|searchable_item| { .and_then(|searchable_item| {
let matches = self let matches = self
.seachable_items_with_matches .searchable_items_with_matches
.get(&searchable_item.downgrade())?; .get(&searchable_item.downgrade())?;
searchable_item.active_match_index(matches, cx) searchable_item.active_match_index(matches, cx)
}); });
@ -966,4 +1024,133 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(2)); assert_eq!(search_bar.active_match_index, Some(2));
}); });
} }
#[gpui::test]
async fn test_search_select_all_matches(cx: &mut TestAppContext) {
crate::project_search::tests::init_test(cx);
let buffer_text = r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
pattern in text. Usually such patterns are used by string-searching algorithms
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
let expected_query_matches_count = buffer_text
.chars()
.filter(|c| c.to_ascii_lowercase() == 'a')
.count();
assert!(
expected_query_matches_count > 1,
"Should pick a query with multiple results"
);
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = BufferSearchBar::new(cx);
search_bar.set_active_pane_item(Some(&editor), cx);
search_bar.show(false, true, cx);
search_bar
});
search_bar.update(cx, |search_bar, cx| {
search_bar.set_query("a", cx);
});
editor.next_notification(cx).await;
let initial_selections = editor.update(cx, |editor, cx| {
let initial_selections = editor.selections.display_ranges(cx);
assert_eq!(
initial_selections.len(), 1,
"Expected to have only one selection before adding carets to all matches, but got: {initial_selections:?}",
);
initial_selections
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(search_bar.active_match_index, Some(0));
});
search_bar.update(cx, |search_bar, cx| {
search_bar.select_all_matches(&SelectAllMatches, cx);
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
all_selections.len(),
expected_query_matches_count,
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(0),
"Match index should not change after selecting all matches"
);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.select_next_match(&SelectNextMatch, cx);
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
all_selections.len(),
1,
"On next match, should deselect items and select the next match"
);
assert_ne!(
all_selections, initial_selections,
"Next match should be different from the first selection"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(1),
"Match index should be updated to the next one"
);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.select_all_matches(&SelectAllMatches, cx);
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
all_selections.len(),
expected_query_matches_count,
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(1),
"Match index should not change after selecting all matches"
);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.select_prev_match(&SelectPrevMatch, cx);
let all_selections =
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
assert_eq!(
all_selections.len(),
1,
"On previous match, should deselect items and select the previous item"
);
assert_eq!(
all_selections, initial_selections,
"Previous match should be the same as the first selection"
);
});
search_bar.update(cx, |search_bar, _| {
assert_eq!(
search_bar.active_match_index,
Some(0),
"Match index should be updated to the previous one"
);
});
}
} }

View file

@ -17,7 +17,8 @@ actions!(
ToggleCaseSensitive, ToggleCaseSensitive,
ToggleRegex, ToggleRegex,
SelectNextMatch, SelectNextMatch,
SelectPrevMatch SelectPrevMatch,
SelectAllMatches,
] ]
); );

View file

@ -67,11 +67,13 @@ impl EmbeddingProvider for DummyEmbeddings {
} }
} }
const INPUT_LIMIT: usize = 8190;
impl OpenAIEmbeddings { impl OpenAIEmbeddings {
async fn truncate(span: String) -> String { fn truncate(span: String) -> String {
let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span.as_ref()); let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span.as_ref());
if tokens.len() > 8190 { if tokens.len() > INPUT_LIMIT {
tokens.truncate(8190); tokens.truncate(INPUT_LIMIT);
let result = OPENAI_BPE_TOKENIZER.decode(tokens.clone()); let result = OPENAI_BPE_TOKENIZER.decode(tokens.clone());
if result.is_ok() { if result.is_ok() {
let transformed = result.unwrap(); let transformed = result.unwrap();
@ -80,7 +82,7 @@ impl OpenAIEmbeddings {
} }
} }
return span.to_string(); span
} }
async fn send_request(&self, api_key: &str, spans: Vec<&str>) -> Result<Response<AsyncBody>> { async fn send_request(&self, api_key: &str, spans: Vec<&str>) -> Result<Response<AsyncBody>> {
@ -142,7 +144,7 @@ impl EmbeddingProvider for OpenAIEmbeddings {
// Don't worry about delaying bad request, as we can assume // Don't worry about delaying bad request, as we can assume
// we haven't been rate limited yet. // we haven't been rate limited yet.
for span in spans.iter_mut() { for span in spans.iter_mut() {
*span = Self::truncate(span.to_string()).await; *span = Self::truncate(span.to_string());
} }
} }
StatusCode::OK => { StatusCode::OK => {
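To see the truncation strategy in isolation, here is a hedged sketch of the now-synchronous truncate with a hypothetical Tokenizer trait substituted for the real tiktoken-based OPENAI_BPE_TOKENIZER; only the control flow is meant to match the code above.

    const INPUT_LIMIT: usize = 8190;

    // Hypothetical tokenizer interface, standing in for OPENAI_BPE_TOKENIZER.
    trait Tokenizer {
        fn encode(&self, text: &str) -> Vec<u32>;
        fn decode(&self, tokens: Vec<u32>) -> Result<String, String>;
    }

    fn truncate(tokenizer: &dyn Tokenizer, span: String) -> String {
        let mut tokens = tokenizer.encode(&span);
        if tokens.len() > INPUT_LIMIT {
            tokens.truncate(INPUT_LIMIT);
            // Keep the decoded prefix when it round-trips cleanly; otherwise fall back
            // to the original span, as the real implementation does.
            if let Ok(truncated) = tokenizer.decode(tokens) {
                return truncated;
            }
        }
        span
    }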

View file

@ -198,7 +198,7 @@ impl TerminalLineHeight {
match self { match self {
TerminalLineHeight::Comfortable => 1.618, TerminalLineHeight::Comfortable => 1.618,
TerminalLineHeight::Standard => 1.3, TerminalLineHeight::Standard => 1.3,
TerminalLineHeight::Custom(line_height) => *line_height, TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
} }
} }
} }
@ -908,6 +908,21 @@ impl Terminal {
} }
} }
pub fn select_matches(&mut self, matches: Vec<RangeInclusive<Point>>) {
let matches_to_select = self
.matches
.iter()
.filter(|self_match| matches.contains(self_match))
.cloned()
.collect::<Vec<_>>();
for match_to_select in matches_to_select {
self.set_selection(Some((
make_selection(&match_to_select),
*match_to_select.end(),
)));
}
}
fn set_selection(&mut self, selection: Option<(Selection, Point)>) { fn set_selection(&mut self, selection: Option<(Selection, Point)>) {
self.events self.events
.push_back(InternalEvent::SetSelection(selection)); .push_back(InternalEvent::SetSelection(selection));

View file

@ -647,7 +647,11 @@ impl SearchableItem for TerminalView {
} }
/// Convert events raised by this item into search-relevant events (if applicable) /// Convert events raised by this item into search-relevant events (if applicable)
fn to_search_event(event: &Self::Event) -> Option<SearchEvent> { fn to_search_event(
&mut self,
event: &Self::Event,
_: &mut ViewContext<Self>,
) -> Option<SearchEvent> {
match event { match event {
Event::Wakeup => Some(SearchEvent::MatchesInvalidated), Event::Wakeup => Some(SearchEvent::MatchesInvalidated),
Event::SelectionsChanged => Some(SearchEvent::ActiveMatchChanged), Event::SelectionsChanged => Some(SearchEvent::ActiveMatchChanged),
@ -682,6 +686,13 @@ impl SearchableItem for TerminalView {
cx.notify(); cx.notify();
} }
/// Add selections for all matches given.
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
self.terminal()
.update(cx, |term, _| term.select_matches(matches));
cx.notify();
}
/// Get all of the matches for this query, should be done on the background /// Get all of the matches for this query, should be done on the background
fn find_matches( fn find_matches(
&mut self, &mut self,

View file

@ -350,6 +350,7 @@ pub struct Tab {
pub icon_close_active: Color, pub icon_close_active: Color,
pub icon_dirty: Color, pub icon_dirty: Color,
pub icon_conflict: Color, pub icon_conflict: Color,
pub git: GitProjectStatus,
} }
#[derive(Clone, Deserialize, Default, JsonSchema)] #[derive(Clone, Deserialize, Default, JsonSchema)]
@ -379,6 +380,7 @@ pub struct Search {
pub invalid_include_exclude_editor: ContainerStyle, pub invalid_include_exclude_editor: ContainerStyle,
pub include_exclude_inputs: ContainedText, pub include_exclude_inputs: ContainedText,
pub option_button: Toggleable<Interactive<ContainedText>>, pub option_button: Toggleable<Interactive<ContainedText>>,
pub action_button: Interactive<ContainedText>,
pub match_background: Color, pub match_background: Color,
pub match_index: ContainedText, pub match_index: ContainedText,
pub results_status: TextStyle, pub results_status: TextStyle,
@ -721,12 +723,12 @@ pub struct Scrollbar {
pub thumb: ContainerStyle, pub thumb: ContainerStyle,
pub width: f32, pub width: f32,
pub min_height_factor: f32, pub min_height_factor: f32,
pub git: GitDiffColors, pub git: BufferGitDiffColors,
pub selections: Color, pub selections: Color,
} }
#[derive(Clone, Deserialize, Default, JsonSchema)] #[derive(Clone, Deserialize, Default, JsonSchema)]
pub struct GitDiffColors { pub struct BufferGitDiffColors {
pub inserted: Color, pub inserted: Color,
pub modified: Color, pub modified: Color,
pub deleted: Color, pub deleted: Color,

View file

@ -5,6 +5,7 @@ use parking_lot::Mutex;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
use std::{ use std::{
borrow::Cow,
collections::HashMap, collections::HashMap,
sync::{ sync::{
atomic::{AtomicUsize, Ordering::SeqCst}, atomic::{AtomicUsize, Ordering::SeqCst},
@ -43,7 +44,7 @@ impl ThemeRegistry {
this this
} }
pub fn list(&self, staff: bool) -> impl Iterator<Item = ThemeMeta> + '_ { pub fn list_names(&self, staff: bool) -> impl Iterator<Item = Cow<str>> + '_ {
let mut dirs = self.assets.list("themes/"); let mut dirs = self.assets.list("themes/");
if !staff { if !staff {
@ -53,10 +54,21 @@ impl ThemeRegistry {
.collect() .collect()
} }
dirs.into_iter().filter_map(|path| { fn get_name(path: &str) -> Option<&str> {
let filename = path.strip_prefix("themes/")?; path.strip_prefix("themes/")?.strip_suffix(".json")
let theme_name = filename.strip_suffix(".json")?; }
self.get(theme_name).ok().map(|theme| theme.meta.clone())
dirs.into_iter().filter_map(|path| match path {
Cow::Borrowed(path) => Some(Cow::Borrowed(get_name(path)?)),
Cow::Owned(path) => Some(Cow::Owned(get_name(&path)?.to_string())),
})
}
pub fn list(&self, staff: bool) -> impl Iterator<Item = ThemeMeta> + '_ {
self.list_names(staff).filter_map(|theme_name| {
self.get(theme_name.as_ref())
.ok()
.map(|theme| theme.meta.clone())
}) })
} }
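The new list_names path avoids loading every theme just to read its name; the name extraction reduces to the get_name helper shown above. A standalone check of its behaviour (file names are illustrative):

    fn get_name(path: &str) -> Option<&str> {
        path.strip_prefix("themes/")?.strip_suffix(".json")
    }

    fn main() {
        assert_eq!(get_name("themes/One Dark.json"), Some("One Dark"));
        // Anything outside themes/ or without a .json suffix is skipped by the filter_map.
        assert_eq!(get_name("themes/notes.txt"), None);
        assert_eq!(get_name("keymaps/default.json"), None);
    }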

View file

@ -13,6 +13,7 @@ use std::sync::Arc;
use util::ResultExt as _; use util::ResultExt as _;
const MIN_FONT_SIZE: f32 = 6.0; const MIN_FONT_SIZE: f32 = 6.0;
const MIN_LINE_HEIGHT: f32 = 1.0;
#[derive(Clone, JsonSchema)] #[derive(Clone, JsonSchema)]
pub struct ThemeSettings { pub struct ThemeSettings {
@ -20,6 +21,7 @@ pub struct ThemeSettings {
pub buffer_font_features: fonts::Features, pub buffer_font_features: fonts::Features,
pub buffer_font_family: FamilyId, pub buffer_font_family: FamilyId,
pub(crate) buffer_font_size: f32, pub(crate) buffer_font_size: f32,
pub(crate) buffer_line_height: BufferLineHeight,
#[serde(skip)] #[serde(skip)]
pub theme: Arc<Theme>, pub theme: Arc<Theme>,
} }
@ -33,11 +35,32 @@ pub struct ThemeSettingsContent {
#[serde(default)] #[serde(default)]
pub buffer_font_size: Option<f32>, pub buffer_font_size: Option<f32>,
#[serde(default)] #[serde(default)]
pub buffer_line_height: Option<BufferLineHeight>,
#[serde(default)]
pub buffer_font_features: Option<fonts::Features>, pub buffer_font_features: Option<fonts::Features>,
#[serde(default)] #[serde(default)]
pub theme: Option<String>, pub theme: Option<String>,
} }
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum BufferLineHeight {
#[default]
Comfortable,
Standard,
Custom(f32),
}
impl BufferLineHeight {
pub fn value(&self) -> f32 {
match self {
BufferLineHeight::Comfortable => 1.618,
BufferLineHeight::Standard => 1.3,
BufferLineHeight::Custom(line_height) => *line_height,
}
}
}
impl ThemeSettings { impl ThemeSettings {
pub fn buffer_font_size(&self, cx: &AppContext) -> f32 { pub fn buffer_font_size(&self, cx: &AppContext) -> f32 {
if cx.has_global::<AdjustedBufferFontSize>() { if cx.has_global::<AdjustedBufferFontSize>() {
@ -47,6 +70,10 @@ impl ThemeSettings {
} }
.max(MIN_FONT_SIZE) .max(MIN_FONT_SIZE)
} }
pub fn line_height(&self) -> f32 {
f32::max(self.buffer_line_height.value(), MIN_LINE_HEIGHT)
}
} }
pub fn adjusted_font_size(size: f32, cx: &AppContext) -> f32 { pub fn adjusted_font_size(size: f32, cx: &AppContext) -> f32 {
@ -106,6 +133,7 @@ impl settings::Setting for ThemeSettings {
buffer_font_family_name: defaults.buffer_font_family.clone().unwrap(), buffer_font_family_name: defaults.buffer_font_family.clone().unwrap(),
buffer_font_features, buffer_font_features,
buffer_font_size: defaults.buffer_font_size.unwrap(), buffer_font_size: defaults.buffer_font_size.unwrap(),
buffer_line_height: defaults.buffer_line_height.unwrap(),
theme: themes.get(defaults.theme.as_ref().unwrap()).unwrap(), theme: themes.get(defaults.theme.as_ref().unwrap()).unwrap(),
}; };
@ -136,6 +164,7 @@ impl settings::Setting for ThemeSettings {
} }
merge(&mut this.buffer_font_size, value.buffer_font_size); merge(&mut this.buffer_font_size, value.buffer_font_size);
merge(&mut this.buffer_line_height, value.buffer_line_height);
} }
Ok(this) Ok(this)
@ -149,8 +178,8 @@ impl settings::Setting for ThemeSettings {
let mut root_schema = generator.root_schema_for::<ThemeSettingsContent>(); let mut root_schema = generator.root_schema_for::<ThemeSettingsContent>();
let theme_names = cx let theme_names = cx
.global::<Arc<ThemeRegistry>>() .global::<Arc<ThemeRegistry>>()
.list(params.staff_mode) .list_names(params.staff_mode)
.map(|theme| Value::String(theme.name.clone())) .map(|theme_name| Value::String(theme_name.to_string()))
.collect(); .collect();
let theme_name_schema = SchemaObject { let theme_name_schema = SchemaObject {
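To make the new buffer_line_height values concrete, a trimmed sketch (JsonSchema derive and settings plumbing omitted, serde_json assumed) of how the three variants deserialize and what effective line height they yield once clamped to MIN_LINE_HEIGHT:

    use serde::Deserialize;

    #[derive(Clone, Copy, Debug, Deserialize, PartialEq, Default)]
    #[serde(rename_all = "snake_case")]
    enum BufferLineHeight {
        #[default]
        Comfortable,
        Standard,
        Custom(f32),
    }

    impl BufferLineHeight {
        fn value(&self) -> f32 {
            match self {
                BufferLineHeight::Comfortable => 1.618,
                BufferLineHeight::Standard => 1.3,
                BufferLineHeight::Custom(line_height) => *line_height,
            }
        }
    }

    fn main() {
        const MIN_LINE_HEIGHT: f32 = 1.0;
        for raw in [r#""comfortable""#, r#""standard""#, r#"{"custom": 0.5}"#] {
            let parsed: BufferLineHeight = serde_json::from_str(raw).unwrap();
            // Values below MIN_LINE_HEIGHT are clamped, mirroring ThemeSettings::line_height.
            println!("{raw} -> {}", parsed.value().max(MIN_LINE_HEIGHT));
        }
    }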

View file

@ -120,7 +120,7 @@ impl PickerDelegate for ThemeSelectorDelegate {
self.matches.len() self.matches.len()
} }
fn confirm(&mut self, cx: &mut ViewContext<ThemeSelector>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<ThemeSelector>) {
self.selection_completed = true; self.selection_completed = true;
let theme_name = theme::current(cx).meta.name.clone(); let theme_name = theme::current(cx).meta.name.clone();

View file

@ -106,12 +106,14 @@ impl PickerDelegate for BranchListDelegate {
.read_with(&mut cx, |view, cx| { .read_with(&mut cx, |view, cx| {
let delegate = view.delegate(); let delegate = view.delegate();
let project = delegate.workspace.read(cx).project().read(&cx); let project = delegate.workspace.read(cx).project().read(&cx);
let mut cwd =
project let Some(worktree) = project
.visible_worktrees(cx) .visible_worktrees(cx)
.next() .next()
.unwrap() else {
.read(cx) bail!("Cannot update branch list as there are no visible worktrees")
};
let mut cwd = worktree .read(cx)
.abs_path() .abs_path()
.to_path_buf(); .to_path_buf();
cwd.push(".git"); cwd.push(".git");
@ -180,9 +182,11 @@ impl PickerDelegate for BranchListDelegate {
}) })
} }
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
let current_pick = self.selected_index(); let current_pick = self.selected_index();
let current_pick = self.matches[current_pick].string.clone(); let Some(current_pick) = self.matches.get(current_pick).map(|pick| pick.string.clone()) else {
return;
};
cx.spawn(|picker, mut cx| async move { cx.spawn(|picker, mut cx| async move {
picker picker
.update(&mut cx, |this, cx| { .update(&mut cx, |this, cx| {

View file

@ -120,7 +120,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate {
}) })
} }
fn confirm(&mut self, cx: &mut ViewContext<BaseKeymapSelector>) { fn confirm(&mut self, _: bool, cx: &mut ViewContext<BaseKeymapSelector>) {
if let Some(selection) = self.matches.get(self.selected_index) { if let Some(selection) = self.matches.get(self.selected_index) {
let base_keymap = BaseKeymap::from_names(&selection.string); let base_keymap = BaseKeymap::from_names(&selection.string);
update_settings_file::<BaseKeymap>(self.fs.clone(), cx, move |setting| { update_settings_file::<BaseKeymap>(self.fs.clone(), cx, move |setting| {

View file

@ -10,6 +10,9 @@ use gpui::{
ViewContext, ViewHandle, WeakViewHandle, WindowContext, ViewContext, ViewHandle, WeakViewHandle, WindowContext,
}; };
use project::{Project, ProjectEntryId, ProjectPath}; use project::{Project, ProjectEntryId, ProjectPath};
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::Setting;
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{ use std::{
any::{Any, TypeId}, any::{Any, TypeId},
@ -27,6 +30,49 @@ use std::{
}; };
use theme::Theme; use theme::Theme;
#[derive(Deserialize)]
pub struct ItemSettings {
pub git_status: bool,
pub close_position: ClosePosition,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum ClosePosition {
Left,
#[default]
Right,
}
impl ClosePosition {
pub fn right(&self) -> bool {
match self {
ClosePosition::Left => false,
ClosePosition::Right => true,
}
}
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ItemSettingsContent {
git_status: Option<bool>,
close_position: Option<ClosePosition>,
}
impl Setting for ItemSettings {
const KEY: Option<&'static str> = Some("tabs");
type FileContent = ItemSettingsContent;
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
}
#[derive(Eq, PartialEq, Hash, Debug)] #[derive(Eq, PartialEq, Hash, Debug)]
pub enum ItemEvent { pub enum ItemEvent {
CloseItem, CloseItem,
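A hedged sketch of the user-facing side of the new "tabs" setting: the field names come from ItemSettingsContent above, while the JSON value and the plain serde_json parse stand in for the real load_via_json_merge machinery.

    use serde::Deserialize;

    #[derive(Clone, Debug, Default, Deserialize, PartialEq)]
    #[serde(rename_all = "lowercase")]
    enum ClosePosition {
        Left,
        #[default]
        Right,
    }

    #[derive(Debug, Default, Deserialize)]
    struct ItemSettingsContent {
        git_status: Option<bool>,
        close_position: Option<ClosePosition>,
    }

    fn main() {
        // What a user might put under the "tabs" key in their settings (illustrative).
        let user_value = r#"{ "git_status": true, "close_position": "left" }"#;
        let content: ItemSettingsContent = serde_json::from_str(user_value).unwrap();
        assert_eq!(content.git_status, Some(true));
        assert_eq!(content.close_position, Some(ClosePosition::Left));
    }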

View file

@ -3,14 +3,16 @@ mod dragged_item_receiver;
use super::{ItemHandle, SplitDirection}; use super::{ItemHandle, SplitDirection};
pub use crate::toolbar::Toolbar; pub use crate::toolbar::Toolbar;
use crate::{ use crate::{
item::WeakItemHandle, notify_of_new_dock, AutosaveSetting, Item, NewCenterTerminal, NewFile, item::{ItemSettings, WeakItemHandle},
NewSearch, ToggleZoom, Workspace, WorkspaceSettings, notify_of_new_dock, AutosaveSetting, Item, NewCenterTerminal, NewFile, NewSearch, ToggleZoom,
Workspace, WorkspaceSettings,
}; };
use anyhow::Result; use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque}; use collections::{HashMap, HashSet, VecDeque};
use context_menu::{ContextMenu, ContextMenuItem}; use context_menu::{ContextMenu, ContextMenuItem};
use drag_and_drop::{DragAndDrop, Draggable}; use drag_and_drop::{DragAndDrop, Draggable};
use dragged_item_receiver::dragged_item_receiver; use dragged_item_receiver::dragged_item_receiver;
use fs::repository::GitFileStatus;
use futures::StreamExt; use futures::StreamExt;
use gpui::{ use gpui::{
actions, actions,
@ -866,6 +868,7 @@ impl Pane {
.paths_by_item .paths_by_item
.get(&item.id()) .get(&item.id())
.and_then(|(_, abs_path)| abs_path.clone()); .and_then(|(_, abs_path)| abs_path.clone());
self.nav_history self.nav_history
.0 .0
.borrow_mut() .borrow_mut()
@ -1157,6 +1160,11 @@ impl Pane {
.zip(self.tab_details(cx)) .zip(self.tab_details(cx))
.enumerate() .enumerate()
{ {
let git_status = item
.project_path(cx)
.and_then(|path| self.project.read(cx).entry_for_path(&path, cx))
.and_then(|entry| entry.git_status());
let detail = if detail == 0 { None } else { Some(detail) }; let detail = if detail == 0 { None } else { Some(detail) };
let tab_active = ix == self.active_item_index; let tab_active = ix == self.active_item_index;
@ -1174,9 +1182,21 @@ impl Pane {
let tab_tooltip_text = let tab_tooltip_text =
item.tab_tooltip_text(cx).map(|text| text.into_owned()); item.tab_tooltip_text(cx).map(|text| text.into_owned());
let mut tab_style = theme
.workspace
.tab_bar
.tab_style(pane_active, tab_active)
.clone();
let should_show_status = settings::get::<ItemSettings>(cx).git_status;
if should_show_status && git_status != None {
tab_style.label.text.color = match git_status.unwrap() {
GitFileStatus::Added => tab_style.git.inserted,
GitFileStatus::Modified => tab_style.git.modified,
GitFileStatus::Conflict => tab_style.git.conflict,
};
}
move |mouse_state, cx| { move |mouse_state, cx| {
let tab_style =
theme.workspace.tab_bar.tab_style(pane_active, tab_active);
let hovered = mouse_state.hovered(); let hovered = mouse_state.hovered();
enum Tab {} enum Tab {}
@ -1188,7 +1208,7 @@ impl Pane {
ix == 0, ix == 0,
detail, detail,
hovered, hovered,
tab_style, &tab_style,
cx, cx,
) )
}) })
@ -1350,8 +1370,7 @@ impl Pane {
container.border.left = false; container.border.left = false;
} }
Flex::row() let buffer_jewel_element = {
.with_child({
let diameter = 7.0; let diameter = 7.0;
let icon_color = if item.has_conflict(cx) { let icon_color = if item.has_conflict(cx) {
Some(tab_style.icon_conflict) Some(tab_style.icon_conflict)
@ -1376,17 +1395,18 @@ impl Pane {
.with_width(diameter) .with_width(diameter)
.with_height(diameter) .with_height(diameter)
.aligned() .aligned()
}) };
.with_child(title.aligned().contained().with_style(ContainerStyle {
let title_element = title.aligned().contained().with_style(ContainerStyle {
margin: Margin { margin: Margin {
left: tab_style.spacing, left: tab_style.spacing,
right: tab_style.spacing, right: tab_style.spacing,
..Default::default() ..Default::default()
}, },
..Default::default() ..Default::default()
})) });
.with_child(
if hovered { let close_element = if hovered {
let item_id = item.id(); let item_id = item.id();
enum TabCloseButton {} enum TabCloseButton {}
let icon = Svg::new("icons/x_mark_8.svg"); let icon = Svg::new("icons/x_mark_8.svg");
@ -1418,8 +1438,21 @@ impl Pane {
Empty::new().constrained() Empty::new().constrained()
} }
.with_width(tab_style.close_icon_width) .with_width(tab_style.close_icon_width)
.aligned(), .aligned();
)
let close_right = settings::get::<ItemSettings>(cx).close_position.right();
if close_right {
Flex::row()
.with_child(buffer_jewel_element)
.with_child(title_element)
.with_child(close_element)
} else {
Flex::row()
.with_child(close_element)
.with_child(title_element)
.with_child(buffer_jewel_element)
}
.contained() .contained()
.with_style(container) .with_style(container)
.constrained() .constrained()

View file

@ -1,6 +1,8 @@
use std::sync::Arc; use std::{cell::RefCell, rc::Rc, sync::Arc};
use crate::{AppState, FollowerStatesByLeader, Pane, Workspace, WorkspaceSettings}; use crate::{
pane_group::element::PaneAxisElement, AppState, FollowerStatesByLeader, Pane, Workspace,
};
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use call::{ActiveCall, ParticipantLocation}; use call::{ActiveCall, ParticipantLocation};
use gpui::{ use gpui::{
@ -13,7 +15,11 @@ use project::Project;
use serde::Deserialize; use serde::Deserialize;
use theme::Theme; use theme::Theme;
#[derive(Clone, Debug, Eq, PartialEq)] const HANDLE_HITBOX_SIZE: f32 = 4.0;
const HORIZONTAL_MIN_SIZE: f32 = 80.;
const VERTICAL_MIN_SIZE: f32 = 100.;
#[derive(Clone, Debug, PartialEq)]
pub struct PaneGroup { pub struct PaneGroup {
pub(crate) root: Member, pub(crate) root: Member,
} }
@ -77,6 +83,7 @@ impl PaneGroup {
) -> AnyElement<Workspace> { ) -> AnyElement<Workspace> {
self.root.render( self.root.render(
project, project,
0,
theme, theme,
follower_states, follower_states,
active_call, active_call,
@ -94,7 +101,7 @@ impl PaneGroup {
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub(crate) enum Member { pub(crate) enum Member {
Axis(PaneAxis), Axis(PaneAxis),
Pane(ViewHandle<Pane>), Pane(ViewHandle<Pane>),
@ -119,7 +126,7 @@ impl Member {
Down | Right => vec![Member::Pane(old_pane), Member::Pane(new_pane)], Down | Right => vec![Member::Pane(old_pane), Member::Pane(new_pane)],
}; };
Member::Axis(PaneAxis { axis, members }) Member::Axis(PaneAxis::new(axis, members))
} }
fn contains(&self, needle: &ViewHandle<Pane>) -> bool { fn contains(&self, needle: &ViewHandle<Pane>) -> bool {
@ -132,6 +139,7 @@ impl Member {
pub fn render( pub fn render(
&self, &self,
project: &ModelHandle<Project>, project: &ModelHandle<Project>,
basis: usize,
theme: &Theme, theme: &Theme,
follower_states: &FollowerStatesByLeader, follower_states: &FollowerStatesByLeader,
active_call: Option<&ModelHandle<ActiveCall>>, active_call: Option<&ModelHandle<ActiveCall>>,
@ -272,6 +280,7 @@ impl Member {
} }
Member::Axis(axis) => axis.render( Member::Axis(axis) => axis.render(
project, project,
basis + 1,
theme, theme,
follower_states, follower_states,
active_call, active_call,
@ -295,13 +304,35 @@ impl Member {
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub(crate) struct PaneAxis { pub(crate) struct PaneAxis {
pub axis: Axis, pub axis: Axis,
pub members: Vec<Member>, pub members: Vec<Member>,
pub flexes: Rc<RefCell<Vec<f32>>>,
} }
impl PaneAxis { impl PaneAxis {
pub fn new(axis: Axis, members: Vec<Member>) -> Self {
let flexes = Rc::new(RefCell::new(vec![1.; members.len()]));
Self {
axis,
members,
flexes,
}
}
pub fn load(axis: Axis, members: Vec<Member>, flexes: Option<Vec<f32>>) -> Self {
let flexes = flexes.unwrap_or_else(|| vec![1.; members.len()]);
debug_assert!(members.len() == flexes.len());
let flexes = Rc::new(RefCell::new(flexes));
Self {
axis,
members,
flexes,
}
}
fn split( fn split(
&mut self, &mut self,
old_pane: &ViewHandle<Pane>, old_pane: &ViewHandle<Pane>,
@ -323,6 +354,7 @@ impl PaneAxis {
} }
self.members.insert(idx, Member::Pane(new_pane.clone())); self.members.insert(idx, Member::Pane(new_pane.clone()));
*self.flexes.borrow_mut() = vec![1.; self.members.len()];
} else { } else {
*member = *member =
Member::new_axis(old_pane.clone(), new_pane.clone(), direction); Member::new_axis(old_pane.clone(), new_pane.clone(), direction);
@ -362,10 +394,13 @@ impl PaneAxis {
if found_pane { if found_pane {
if let Some(idx) = remove_member { if let Some(idx) = remove_member {
self.members.remove(idx); self.members.remove(idx);
*self.flexes.borrow_mut() = vec![1.; self.members.len()];
} }
if self.members.len() == 1 { if self.members.len() == 1 {
Ok(self.members.pop()) let result = self.members.pop();
*self.flexes.borrow_mut() = vec![1.; self.members.len()];
Ok(result)
} else { } else {
Ok(None) Ok(None)
} }
@ -377,6 +412,7 @@ impl PaneAxis {
fn render( fn render(
&self, &self,
project: &ModelHandle<Project>, project: &ModelHandle<Project>,
basis: usize,
theme: &Theme, theme: &Theme,
follower_state: &FollowerStatesByLeader, follower_state: &FollowerStatesByLeader,
active_call: Option<&ModelHandle<ActiveCall>>, active_call: Option<&ModelHandle<ActiveCall>>,
@ -385,16 +421,22 @@ impl PaneAxis {
app_state: &Arc<AppState>, app_state: &Arc<AppState>,
cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
) -> AnyElement<Workspace> { ) -> AnyElement<Workspace> {
let last_member_ix = self.members.len() - 1; debug_assert!(self.members.len() == self.flexes.borrow().len());
Flex::new(self.axis)
.with_children(self.members.iter().enumerate().map(|(ix, member)| { let mut pane_axis = PaneAxisElement::new(self.axis, basis, self.flexes.clone());
let mut flex = 1.0; let mut active_pane_ix = None;
let mut members = self.members.iter().enumerate().peekable();
while let Some((ix, member)) = members.next() {
let last = members.peek().is_none();
if member.contains(active_pane) { if member.contains(active_pane) {
flex = settings::get::<WorkspaceSettings>(cx).active_pane_magnification; active_pane_ix = Some(ix);
} }
let mut member = member.render( let mut member = member.render(
project, project,
(basis + ix) * 10,
theme, theme,
follower_state, follower_state,
active_call, active_call,
@ -403,22 +445,26 @@ impl PaneAxis {
app_state, app_state,
cx, cx,
); );
if ix < last_member_ix {
if !last {
let mut border = theme.workspace.pane_divider; let mut border = theme.workspace.pane_divider;
border.left = false; border.left = false;
border.right = false; border.right = false;
border.top = false; border.top = false;
border.bottom = false; border.bottom = false;
match self.axis { match self.axis {
Axis::Vertical => border.bottom = true, Axis::Vertical => border.bottom = true,
Axis::Horizontal => border.right = true, Axis::Horizontal => border.right = true,
} }
member = member.contained().with_border(border).into_any(); member = member.contained().with_border(border).into_any();
} }
FlexItem::new(member).flex(flex, true) pane_axis = pane_axis.with_child(member.into_any());
})) }
.into_any() pane_axis.set_active_pane(active_pane_ix);
pane_axis.into_any()
} }
} }
@ -474,3 +520,336 @@ impl SplitDirection {
} }
} }
} }
mod element {
use std::{cell::RefCell, ops::Range, rc::Rc};
use gpui::{
geometry::{
rect::RectF,
vector::{vec2f, Vector2F},
},
json::{self, ToJson},
platform::{CursorStyle, MouseButton},
AnyElement, Axis, CursorRegion, Element, LayoutContext, MouseRegion, RectFExt,
SceneBuilder, SizeConstraint, Vector2FExt, ViewContext,
};
use crate::{
pane_group::{HANDLE_HITBOX_SIZE, HORIZONTAL_MIN_SIZE, VERTICAL_MIN_SIZE},
Workspace, WorkspaceSettings,
};
pub struct PaneAxisElement {
axis: Axis,
basis: usize,
active_pane_ix: Option<usize>,
flexes: Rc<RefCell<Vec<f32>>>,
children: Vec<AnyElement<Workspace>>,
}
impl PaneAxisElement {
pub fn new(axis: Axis, basis: usize, flexes: Rc<RefCell<Vec<f32>>>) -> Self {
Self {
axis,
basis,
flexes,
active_pane_ix: None,
children: Default::default(),
}
}
pub fn set_active_pane(&mut self, active_pane_ix: Option<usize>) {
self.active_pane_ix = active_pane_ix;
}
fn layout_children(
&mut self,
active_pane_magnification: f32,
constraint: SizeConstraint,
remaining_space: &mut f32,
remaining_flex: &mut f32,
cross_axis_max: &mut f32,
view: &mut Workspace,
cx: &mut LayoutContext<Workspace>,
) {
let flexes = self.flexes.borrow();
let cross_axis = self.axis.invert();
for (ix, child) in self.children.iter_mut().enumerate() {
let flex = if active_pane_magnification != 1. {
if let Some(active_pane_ix) = self.active_pane_ix {
if ix == active_pane_ix {
active_pane_magnification
} else {
1.
}
} else {
1.
}
} else {
flexes[ix]
};
let child_size = if *remaining_flex == 0.0 {
*remaining_space
} else {
let space_per_flex = *remaining_space / *remaining_flex;
space_per_flex * flex
};
let child_constraint = match self.axis {
Axis::Horizontal => SizeConstraint::new(
vec2f(child_size, constraint.min.y()),
vec2f(child_size, constraint.max.y()),
),
Axis::Vertical => SizeConstraint::new(
vec2f(constraint.min.x(), child_size),
vec2f(constraint.max.x(), child_size),
),
};
let child_size = child.layout(child_constraint, view, cx);
*remaining_space -= child_size.along(self.axis);
*remaining_flex -= flex;
*cross_axis_max = cross_axis_max.max(child_size.along(cross_axis));
}
}
}
impl Extend<AnyElement<Workspace>> for PaneAxisElement {
fn extend<T: IntoIterator<Item = AnyElement<Workspace>>>(&mut self, children: T) {
self.children.extend(children);
}
}
impl Element<Workspace> for PaneAxisElement {
type LayoutState = f32;
type PaintState = ();
fn layout(
&mut self,
constraint: SizeConstraint,
view: &mut Workspace,
cx: &mut LayoutContext<Workspace>,
) -> (Vector2F, Self::LayoutState) {
debug_assert!(self.children.len() == self.flexes.borrow().len());
let active_pane_magnification =
settings::get::<WorkspaceSettings>(cx).active_pane_magnification;
let mut remaining_flex = 0.;
if active_pane_magnification != 1. {
let active_pane_flex = self
.active_pane_ix
.map(|_| active_pane_magnification)
.unwrap_or(1.);
remaining_flex += self.children.len() as f32 - 1. + active_pane_flex;
} else {
for flex in self.flexes.borrow().iter() {
remaining_flex += flex;
}
}
let mut cross_axis_max: f32 = 0.0;
let mut remaining_space = constraint.max_along(self.axis);
if remaining_space.is_infinite() {
panic!("flex contains flexible children but has an infinite constraint along the flex axis");
}
self.layout_children(
active_pane_magnification,
constraint,
&mut remaining_space,
&mut remaining_flex,
&mut cross_axis_max,
view,
cx,
);
let mut size = match self.axis {
Axis::Horizontal => vec2f(constraint.max.x() - remaining_space, cross_axis_max),
Axis::Vertical => vec2f(cross_axis_max, constraint.max.y() - remaining_space),
};
if constraint.min.x().is_finite() {
size.set_x(size.x().max(constraint.min.x()));
}
if constraint.min.y().is_finite() {
size.set_y(size.y().max(constraint.min.y()));
}
if size.x() > constraint.max.x() {
size.set_x(constraint.max.x());
}
if size.y() > constraint.max.y() {
size.set_y(constraint.max.y());
}
(size, remaining_space)
}
fn paint(
&mut self,
scene: &mut SceneBuilder,
bounds: RectF,
visible_bounds: RectF,
remaining_space: &mut Self::LayoutState,
view: &mut Workspace,
cx: &mut ViewContext<Workspace>,
) -> Self::PaintState {
let can_resize = settings::get::<WorkspaceSettings>(cx).active_pane_magnification == 1.;
let visible_bounds = bounds.intersection(visible_bounds).unwrap_or_default();
let overflowing = *remaining_space < 0.;
if overflowing {
scene.push_layer(Some(visible_bounds));
}
let mut child_origin = bounds.origin();
let mut children_iter = self.children.iter_mut().enumerate().peekable();
while let Some((ix, child)) = children_iter.next() {
let child_start = child_origin.clone();
child.paint(scene, child_origin, visible_bounds, view, cx);
match self.axis {
Axis::Horizontal => child_origin += vec2f(child.size().x(), 0.0),
Axis::Vertical => child_origin += vec2f(0.0, child.size().y()),
}
if let Some(Some((next_ix, next_child))) = can_resize.then(|| children_iter.peek())
{
scene.push_stacking_context(None, None);
let handle_origin = match self.axis {
Axis::Horizontal => child_origin - vec2f(HANDLE_HITBOX_SIZE / 2., 0.0),
Axis::Vertical => child_origin - vec2f(0.0, HANDLE_HITBOX_SIZE / 2.),
};
let handle_bounds = match self.axis {
Axis::Horizontal => RectF::new(
handle_origin,
vec2f(HANDLE_HITBOX_SIZE, visible_bounds.height()),
),
Axis::Vertical => RectF::new(
handle_origin,
vec2f(visible_bounds.width(), HANDLE_HITBOX_SIZE),
),
};
let style = match self.axis {
Axis::Horizontal => CursorStyle::ResizeLeftRight,
Axis::Vertical => CursorStyle::ResizeUpDown,
};
scene.push_cursor_region(CursorRegion {
bounds: handle_bounds,
style,
});
let axis = self.axis;
let child_size = child.size();
let next_child_size = next_child.size();
let drag_bounds = visible_bounds.clone();
let flexes = self.flexes.clone();
let current_flex = flexes.borrow()[ix];
let next_ix = *next_ix;
let next_flex = flexes.borrow()[next_ix];
enum ResizeHandle {}
let mut mouse_region = MouseRegion::new::<ResizeHandle>(
cx.view_id(),
self.basis + ix,
handle_bounds,
);
mouse_region = mouse_region.on_drag(
MouseButton::Left,
move |drag, workspace: &mut Workspace, cx| {
let min_size = match axis {
Axis::Horizontal => HORIZONTAL_MIN_SIZE,
Axis::Vertical => VERTICAL_MIN_SIZE,
};
// Don't allow resizing to less than the minimum size, if elements are already too small
if min_size - 1. > child_size.along(axis)
|| min_size - 1. > next_child_size.along(axis)
{
return;
}
let mut current_target_size = (drag.position - child_start).along(axis);
let proposed_current_pixel_change =
current_target_size - child_size.along(axis);
if proposed_current_pixel_change < 0. {
current_target_size = f32::max(current_target_size, min_size);
} else if proposed_current_pixel_change > 0. {
// TODO: cascade this change to other children if current item is at min size
let next_target_size = f32::max(
next_child_size.along(axis) - proposed_current_pixel_change,
min_size,
);
current_target_size = f32::min(
current_target_size,
child_size.along(axis) + next_child_size.along(axis)
- next_target_size,
);
}
let current_pixel_change = current_target_size - child_size.along(axis);
let flex_change = current_pixel_change / drag_bounds.length_along(axis);
let current_target_flex = current_flex + flex_change;
let next_target_flex = next_flex - flex_change;
let mut borrow = flexes.borrow_mut();
*borrow.get_mut(ix).unwrap() = current_target_flex;
*borrow.get_mut(next_ix).unwrap() = next_target_flex;
workspace.schedule_serialize(cx);
cx.notify();
},
);
scene.push_mouse_region(mouse_region);
scene.pop_stacking_context();
}
}
if overflowing {
scene.pop_layer();
}
}
fn rect_for_text_range(
&self,
range_utf16: Range<usize>,
_: RectF,
_: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
view: &Workspace,
cx: &ViewContext<Workspace>,
) -> Option<RectF> {
self.children
.iter()
.find_map(|child| child.rect_for_text_range(range_utf16.clone(), view, cx))
}
fn debug(
&self,
bounds: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
view: &Workspace,
cx: &ViewContext<Workspace>,
) -> json::Value {
serde_json::json!({
"type": "PaneAxis",
"bounds": bounds.to_json(),
"axis": self.axis.to_json(),
"flexes": *self.flexes.borrow(),
"children": self.children.iter().map(|child| child.debug(view, cx)).collect::<Vec<json::Value>>()
})
}
}
}
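The drag handler above is the densest part of this change, so here is a self-contained sketch of the same arithmetic with the GPUI plumbing stripped away; names and clamping follow the handler, and all inputs are measured along the drag axis.

    /// Sketch of the pixel-drag -> flex-transfer arithmetic in the resize handler above.
    fn resize(
        flexes: &mut [f32],
        ix: usize,            // index of the pane whose trailing edge is being dragged
        child_size: f32,      // current pixel size of pane `ix`
        next_child_size: f32, // current pixel size of pane `ix + 1`
        drag_position: f32,   // pointer position relative to the start of pane `ix`
        axis_length: f32,     // total pixel length of the pane group along the axis
        min_size: f32,        // HORIZONTAL_MIN_SIZE or VERTICAL_MIN_SIZE
    ) {
        // Don't allow resizing if either pane is already below the minimum size.
        if min_size - 1. > child_size || min_size - 1. > next_child_size {
            return;
        }
        let mut current_target_size = drag_position;
        let proposed_change = current_target_size - child_size;
        if proposed_change < 0. {
            // Shrinking pane `ix`: never below the minimum size.
            current_target_size = current_target_size.max(min_size);
        } else if proposed_change > 0. {
            // Growing pane `ix`: the neighbour gives up space but stays at or above the minimum.
            let next_target_size = (next_child_size - proposed_change).max(min_size);
            current_target_size =
                current_target_size.min(child_size + next_child_size - next_target_size);
        }
        // Convert the accepted pixel change into flex units relative to the whole axis, then
        // move that much flex from the neighbour to pane `ix`, keeping the total flex constant.
        let flex_change = (current_target_size - child_size) / axis_length;
        flexes[ix] += flex_change;
        flexes[ix + 1] -= flex_change;
    }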

View file

@ -45,6 +45,7 @@ define_connection! {
// parent_group_id: Option<usize>, // None indicates that this is the root node // parent_group_id: Option<usize>, // None indicates that this is the root node
// position: Option<usize>, // None indicates that this is the root node // position: Option<usize>, // None indicates that this is the root node
// axis: Option<Axis>, // 'Vertical', 'Horizontal' // axis: Option<Axis>, // 'Vertical', 'Horizontal'
// flexes: Option<Vec<f32>>, // A JSON array of floats
// ) // )
// //
// panes( // panes(
@ -168,7 +169,12 @@ define_connection! {
ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
)]; ),
// Add pane group flex data
sql!(
ALTER TABLE pane_groups ADD COLUMN flexes TEXT;
)
];
} }
impl WorkspaceDb { impl WorkspaceDb {
@ -359,9 +365,15 @@ impl WorkspaceDb {
group_id: Option<GroupId>, group_id: Option<GroupId>,
) -> Result<Vec<SerializedPaneGroup>> { ) -> Result<Vec<SerializedPaneGroup>> {
type GroupKey = (Option<GroupId>, WorkspaceId); type GroupKey = (Option<GroupId>, WorkspaceId);
type GroupOrPane = (Option<GroupId>, Option<Axis>, Option<PaneId>, Option<bool>); type GroupOrPane = (
Option<GroupId>,
Option<Axis>,
Option<PaneId>,
Option<bool>,
Option<String>,
);
self.select_bound::<GroupKey, GroupOrPane>(sql!( self.select_bound::<GroupKey, GroupOrPane>(sql!(
SELECT group_id, axis, pane_id, active SELECT group_id, axis, pane_id, active, flexes
FROM (SELECT FROM (SELECT
group_id, group_id,
axis, axis,
@ -369,7 +381,8 @@ impl WorkspaceDb {
NULL as active, NULL as active,
position, position,
parent_group_id, parent_group_id,
workspace_id workspace_id,
flexes
FROM pane_groups FROM pane_groups
UNION UNION
SELECT SELECT
@ -379,18 +392,24 @@ impl WorkspaceDb {
panes.active as active, panes.active as active,
position, position,
parent_group_id, parent_group_id,
panes.workspace_id as workspace_id panes.workspace_id as workspace_id,
NULL
FROM center_panes FROM center_panes
JOIN panes ON center_panes.pane_id = panes.pane_id) JOIN panes ON center_panes.pane_id = panes.pane_id)
WHERE parent_group_id IS ? AND workspace_id = ? WHERE parent_group_id IS ? AND workspace_id = ?
ORDER BY position ORDER BY position
))?((group_id, workspace_id))? ))?((group_id, workspace_id))?
.into_iter() .into_iter()
.map(|(group_id, axis, pane_id, active)| { .map(|(group_id, axis, pane_id, active, flexes)| {
if let Some((group_id, axis)) = group_id.zip(axis) { if let Some((group_id, axis)) = group_id.zip(axis) {
let flexes = flexes
.map(|flexes| serde_json::from_str::<Vec<f32>>(&flexes))
.transpose()?;
Ok(SerializedPaneGroup::Group { Ok(SerializedPaneGroup::Group {
axis, axis,
children: self.get_pane_group(workspace_id, Some(group_id))?, children: self.get_pane_group(workspace_id, Some(group_id))?,
flexes,
}) })
} else if let Some((pane_id, active)) = pane_id.zip(active) { } else if let Some((pane_id, active)) = pane_id.zip(active) {
Ok(SerializedPaneGroup::Pane(SerializedPane::new( Ok(SerializedPaneGroup::Pane(SerializedPane::new(
@ -417,14 +436,34 @@ impl WorkspaceDb {
parent: Option<(GroupId, usize)>, parent: Option<(GroupId, usize)>,
) -> Result<()> { ) -> Result<()> {
match pane_group { match pane_group {
SerializedPaneGroup::Group { axis, children } => { SerializedPaneGroup::Group {
axis,
children,
flexes,
} => {
let (parent_id, position) = unzip_option(parent); let (parent_id, position) = unzip_option(parent);
let flex_string = flexes
.as_ref()
.map(|flexes| serde_json::json!(flexes).to_string());
let group_id = conn.select_row_bound::<_, i64>(sql!( let group_id = conn.select_row_bound::<_, i64>(sql!(
INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) INSERT INTO pane_groups(
VALUES (?, ?, ?, ?) workspace_id,
parent_group_id,
position,
axis,
flexes
)
VALUES (?, ?, ?, ?, ?)
RETURNING group_id RETURNING group_id
))?((workspace_id, parent_id, position, *axis))? ))?((
workspace_id,
parent_id,
position,
*axis,
flex_string,
))?
.ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?; .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;
for (position, group) in children.iter().enumerate() { for (position, group) in children.iter().enumerate() {
@ -641,6 +680,14 @@ mod tests {
assert_eq!(test_text_1, "test-text-1"); assert_eq!(test_text_1, "test-text-1");
} }
fn group(axis: gpui::Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
SerializedPaneGroup::Group {
axis,
flexes: None,
children,
}
}
#[gpui::test] #[gpui::test]
async fn test_full_workspace_serialization() { async fn test_full_workspace_serialization() {
env_logger::try_init().ok(); env_logger::try_init().ok();
@ -652,12 +699,12 @@ mod tests {
// | - - - | | // | - - - | |
// | 3,4 | | // | 3,4 | |
// ----------------- // -----------------
let center_group = SerializedPaneGroup::Group { let center_group = group(
axis: gpui::Axis::Horizontal, gpui::Axis::Horizontal,
children: vec![ vec![
SerializedPaneGroup::Group { group(
axis: gpui::Axis::Vertical, gpui::Axis::Vertical,
children: vec![ vec![
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 5, false), SerializedItem::new("Terminal", 5, false),
@ -673,7 +720,7 @@ mod tests {
false, false,
)), )),
], ],
}, ),
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 9, false), SerializedItem::new("Terminal", 9, false),
@ -682,7 +729,7 @@ mod tests {
false, false,
)), )),
], ],
}; );
let workspace = SerializedWorkspace { let workspace = SerializedWorkspace {
id: 5, id: 5,
@ -811,12 +858,12 @@ mod tests {
// | - - - | | // | - - - | |
// | 3,4 | | // | 3,4 | |
// ----------------- // -----------------
let center_pane = SerializedPaneGroup::Group { let center_pane = group(
axis: gpui::Axis::Horizontal, gpui::Axis::Horizontal,
children: vec![ vec![
SerializedPaneGroup::Group { group(
axis: gpui::Axis::Vertical, gpui::Axis::Vertical,
children: vec![ vec![
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
@ -832,7 +879,7 @@ mod tests {
true, true,
)), )),
], ],
}, ),
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 5, true), SerializedItem::new("Terminal", 5, true),
@ -841,7 +888,7 @@ mod tests {
false, false,
)), )),
], ],
}; );
let workspace = default_workspace(&["/tmp"], &center_pane); let workspace = default_workspace(&["/tmp"], &center_pane);
@ -858,12 +905,12 @@ mod tests {
let db = WorkspaceDb(open_test_db("test_cleanup_panes").await); let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
let center_pane = SerializedPaneGroup::Group { let center_pane = group(
axis: gpui::Axis::Horizontal, gpui::Axis::Horizontal,
children: vec![ vec![
SerializedPaneGroup::Group { group(
axis: gpui::Axis::Vertical, gpui::Axis::Vertical,
children: vec![ vec![
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
@ -879,7 +926,7 @@ mod tests {
true, true,
)), )),
], ],
}, ),
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 5, false), SerializedItem::new("Terminal", 5, false),
@ -888,7 +935,7 @@ mod tests {
false, false,
)), )),
], ],
}; );
let id = &["/tmp"]; let id = &["/tmp"];
@ -896,9 +943,9 @@ mod tests {
db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
workspace.center_group = SerializedPaneGroup::Group { workspace.center_group = group(
axis: gpui::Axis::Vertical, gpui::Axis::Vertical,
children: vec![ vec![
SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
vec![ vec![
SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
@ -914,7 +961,7 @@ mod tests {
true, true,
)), )),
], ],
}; );
db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
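The flexes column stores the Vec<f32> as a JSON array in TEXT; a small, self-contained round-trip sketch of the same serialize/parse calls used in save_pane_group and get_pane_group above (values are illustrative):

    fn main() {
        let flexes: Option<Vec<f32>> = Some(vec![1.5, 0.5, 1.0]);

        // Writing: the value bound to the `flexes` column.
        let flex_string = flexes
            .as_ref()
            .map(|flexes| serde_json::json!(flexes).to_string());
        assert_eq!(flex_string.as_deref(), Some("[1.5,0.5,1.0]"));

        // Reading: the TEXT column comes back as Option<String> and is parsed, with parse
        // errors surfaced to the caller rather than silently dropped.
        let restored = flex_string
            .map(|s| serde_json::from_str::<Vec<f32>>(&s))
            .transpose()
            .unwrap();
        assert_eq!(restored, Some(vec![1.5, 0.5, 1.0]));
    }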

View file

@ -127,10 +127,11 @@ impl Bind for DockData {
} }
} }
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum SerializedPaneGroup { pub enum SerializedPaneGroup {
Group { Group {
axis: Axis, axis: Axis,
flexes: Option<Vec<f32>>,
children: Vec<SerializedPaneGroup>, children: Vec<SerializedPaneGroup>,
}, },
Pane(SerializedPane), Pane(SerializedPane),
@ -149,7 +150,7 @@ impl Default for SerializedPaneGroup {
impl SerializedPaneGroup { impl SerializedPaneGroup {
#[async_recursion(?Send)] #[async_recursion(?Send)]
pub(crate) async fn deserialize( pub(crate) async fn deserialize(
&self, self,
project: &ModelHandle<Project>, project: &ModelHandle<Project>,
workspace_id: WorkspaceId, workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>, workspace: &WeakViewHandle<Workspace>,
@ -160,7 +161,11 @@ impl SerializedPaneGroup {
Vec<Option<Box<dyn ItemHandle>>>, Vec<Option<Box<dyn ItemHandle>>>,
)> { )> {
match self { match self {
SerializedPaneGroup::Group { axis, children } => { SerializedPaneGroup::Group {
axis,
children,
flexes,
} => {
let mut current_active_pane = None; let mut current_active_pane = None;
let mut members = Vec::new(); let mut members = Vec::new();
let mut items = Vec::new(); let mut items = Vec::new();
@ -184,10 +189,7 @@ impl SerializedPaneGroup {
} }
Some(( Some((
Member::Axis(PaneAxis { Member::Axis(PaneAxis::load(axis, members, flexes)),
axis: *axis,
members,
}),
current_active_pane, current_active_pane,
items, items,
)) ))
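Older workspaces were saved before the flexes column existed, so deserialization has to tolerate a missing value; a sketch of the defaulting behaviour PaneAxis::load provides, with a member count standing in for the real Vec<Member>:

    // Sketch of PaneAxis::load's flex handling: a NULL/absent flexes column falls back to an
    // even split of 1.0 per member; a stored vector is used as-is (lengths are expected to match).
    fn load_flexes(member_count: usize, flexes: Option<Vec<f32>>) -> Vec<f32> {
        let flexes = flexes.unwrap_or_else(|| vec![1.; member_count]);
        debug_assert!(flexes.len() == member_count);
        flexes
    }

    fn main() {
        assert_eq!(load_flexes(3, None), vec![1., 1., 1.]);
        assert_eq!(load_flexes(2, Some(vec![1.4, 0.6])), vec![1.4, 0.6]);
    }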

View file

@ -37,7 +37,11 @@ pub trait SearchableItem: Item {
regex: true, regex: true,
} }
} }
fn to_search_event(event: &Self::Event) -> Option<SearchEvent>; fn to_search_event(
&mut self,
event: &Self::Event,
cx: &mut ViewContext<Self>,
) -> Option<SearchEvent>;
fn clear_matches(&mut self, cx: &mut ViewContext<Self>); fn clear_matches(&mut self, cx: &mut ViewContext<Self>);
fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>); fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>);
fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String; fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String;
@ -47,6 +51,7 @@ pub trait SearchableItem: Item {
matches: Vec<Self::Match>, matches: Vec<Self::Match>,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
); );
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>);
fn match_index_for_direction( fn match_index_for_direction(
&mut self, &mut self,
matches: &Vec<Self::Match>, matches: &Vec<Self::Match>,
@ -102,6 +107,7 @@ pub trait SearchableItemHandle: ItemHandle {
matches: &Vec<Box<dyn Any + Send>>, matches: &Vec<Box<dyn Any + Send>>,
cx: &mut WindowContext, cx: &mut WindowContext,
); );
fn select_matches(&self, matches: &Vec<Box<dyn Any + Send>>, cx: &mut WindowContext);
fn match_index_for_direction( fn match_index_for_direction(
&self, &self,
matches: &Vec<Box<dyn Any + Send>>, matches: &Vec<Box<dyn Any + Send>>,
@ -139,8 +145,9 @@ impl<T: SearchableItem> SearchableItemHandle for ViewHandle<T> {
cx: &mut WindowContext, cx: &mut WindowContext,
handler: Box<dyn Fn(SearchEvent, &mut WindowContext)>, handler: Box<dyn Fn(SearchEvent, &mut WindowContext)>,
) -> Subscription { ) -> Subscription {
cx.subscribe(self, move |_, event, cx| { cx.subscribe(self, move |handle, event, cx| {
if let Some(search_event) = T::to_search_event(event) { let search_event = handle.update(cx, |handle, cx| handle.to_search_event(event, cx));
if let Some(search_event) = search_event {
handler(search_event, cx) handler(search_event, cx)
} }
}) })
@@ -165,6 +172,12 @@ impl<T: SearchableItem> SearchableItemHandle for ViewHandle<T> {
let matches = downcast_matches(matches); let matches = downcast_matches(matches);
self.update(cx, |this, cx| this.activate_match(index, matches, cx)); self.update(cx, |this, cx| this.activate_match(index, matches, cx));
} }
fn select_matches(&self, matches: &Vec<Box<dyn Any + Send>>, cx: &mut WindowContext) {
let matches = downcast_matches(matches);
self.update(cx, |this, cx| this.select_matches(matches, cx));
}
fn match_index_for_direction( fn match_index_for_direction(
&self, &self,
matches: &Vec<Box<dyn Any + Send>>, matches: &Vec<Box<dyn Any + Send>>,
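
In the hunk above, `to_search_event` changes from an associated function into a `&mut self` method that also receives a `ViewContext`, which is why the subscription closure now re-enters the item via `handle.update(...)` instead of calling `T::to_search_event(event)`. A loose sketch of the same shift, using plain `Rc<RefCell<_>>` in place of Zed's view handles (all names below are invented for illustration):

// Hedged sketch: once event conversion takes `&mut self`, the subscriber must
// re-borrow the item mutably rather than call an associated function on the type.
use std::{cell::RefCell, rc::Rc};

#[derive(Debug)]
enum SearchEvent {
    MatchesInvalidated,
}

struct Editor {
    edits_seen: usize,
}

impl Editor {
    // Analogous to the new `to_search_event(&mut self, event, cx)`: the item may
    // update its own state while deciding whether the event affects search.
    fn to_search_event(&mut self, event: &str) -> Option<SearchEvent> {
        if event == "edited" {
            self.edits_seen += 1;
            Some(SearchEvent::MatchesInvalidated)
        } else {
            None
        }
    }
}

fn main() {
    let editor = Rc::new(RefCell::new(Editor { edits_seen: 0 }));

    // Analogous to `cx.subscribe(self, move |handle, event, cx| ...)`.
    let handle = Rc::clone(&editor);
    let on_event = move |event: &str| {
        if let Some(search_event) = handle.borrow_mut().to_search_event(event) {
            println!("forwarding {search_event:?}");
        }
    };

    on_event("edited");
    on_event("scrolled");
    assert_eq!(editor.borrow().edits_seen, 1);
}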


@@ -1,8 +1,4 @@
pub mod dock; pub mod dock;
/// NOTE: Focus only 'takes' after an update has flushed_effects.
///
/// This may cause issues when you're trying to write tests that use workspace focus to add items at
/// specific locations.
pub mod item; pub mod item;
pub mod notifications; pub mod notifications;
pub mod pane; pub mod pane;
@@ -207,6 +203,7 @@ pub type WorkspaceId = i64;
pub fn init_settings(cx: &mut AppContext) { pub fn init_settings(cx: &mut AppContext) {
settings::register::<WorkspaceSettings>(cx); settings::register::<WorkspaceSettings>(cx);
settings::register::<item::ItemSettings>(cx);
} }
pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) { pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
@@ -508,6 +505,7 @@ pub struct Workspace {
subscriptions: Vec<Subscription>, subscriptions: Vec<Subscription>,
_apply_leader_updates: Task<Result<()>>, _apply_leader_updates: Task<Result<()>>,
_observe_current_user: Task<Result<()>>, _observe_current_user: Task<Result<()>>,
_schedule_serialize: Option<Task<()>>,
pane_history_timestamp: Arc<AtomicUsize>, pane_history_timestamp: Arc<AtomicUsize>,
} }
@@ -722,6 +720,7 @@ impl Workspace {
app_state, app_state,
_observe_current_user, _observe_current_user,
_apply_leader_updates, _apply_leader_updates,
_schedule_serialize: None,
leader_updates_tx, leader_updates_tx,
subscriptions, subscriptions,
pane_history_timestamp, pane_history_timestamp,
@@ -1823,6 +1822,13 @@ impl Workspace {
.update(cx, |pane, cx| pane.add_item(item, true, true, None, cx)); .update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
} }
pub fn split_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
let new_pane = self.split_pane(self.active_pane.clone(), SplitDirection::Right, cx);
new_pane.update(cx, move |new_pane, cx| {
new_pane.add_item(item, true, true, None, cx)
})
}
pub fn open_abs_path( pub fn open_abs_path(
&mut self, &mut self,
abs_path: PathBuf, abs_path: PathBuf,
@@ -1853,6 +1859,21 @@ impl Workspace {
}) })
} }
pub fn split_abs_path(
&mut self,
abs_path: PathBuf,
visible: bool,
cx: &mut ViewContext<Self>,
) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
let project_path_task =
Workspace::project_path_for_path(self.project.clone(), &abs_path, visible, cx);
cx.spawn(|this, mut cx| async move {
let (_, path) = project_path_task.await?;
this.update(&mut cx, |this, cx| this.split_path(path, cx))?
.await
})
}
pub fn open_path( pub fn open_path(
&mut self, &mut self,
path: impl Into<ProjectPath>, path: impl Into<ProjectPath>,
@@ -1878,6 +1899,38 @@ impl Workspace {
}) })
} }
pub fn split_path(
&mut self,
path: impl Into<ProjectPath>,
cx: &mut ViewContext<Self>,
) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
let pane = self.last_active_center_pane.clone().unwrap_or_else(|| {
self.panes
.first()
.expect("There must be an active pane")
.downgrade()
});
if let Member::Pane(center_pane) = &self.center.root {
if center_pane.read(cx).items_len() == 0 {
return self.open_path(path, Some(pane), true, cx);
}
}
let task = self.load_path(path.into(), cx);
cx.spawn(|this, mut cx| async move {
let (project_entry_id, build_item) = task.await?;
this.update(&mut cx, move |this, cx| -> Option<_> {
let pane = pane.upgrade(cx)?;
let new_pane = this.split_pane(pane, SplitDirection::Right, cx);
new_pane.update(cx, |new_pane, cx| {
Some(new_pane.open_item(project_entry_id, true, cx, build_item))
})
})
.map(|option| option.ok_or_else(|| anyhow!("pane was dropped")))?
})
}
pub(crate) fn load_path( pub(crate) fn load_path(
&mut self, &mut self,
path: ProjectPath, path: ProjectPath,
@@ -1928,6 +1981,30 @@ impl Workspace {
item item
} }
pub fn split_project_item<T>(
&mut self,
project_item: ModelHandle<T::Item>,
cx: &mut ViewContext<Self>,
) -> ViewHandle<T>
where
T: ProjectItem,
{
use project::Item as _;
let entry_id = project_item.read(cx).entry_id(cx);
if let Some(item) = entry_id
.and_then(|entry_id| self.active_pane().read(cx).item_for_entry(entry_id, cx))
.and_then(|item| item.downcast())
{
self.activate_item(&item, cx);
return item;
}
let item = cx.add_view(|cx| T::for_project_item(self.project().clone(), project_item, cx));
self.split_item(Box::new(item.clone()), cx);
item
}
pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) { pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) {
if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) { if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) {
self.active_pane.update(cx, |pane, cx| { self.active_pane.update(cx, |pane, cx| {
@@ -1955,7 +2032,7 @@ impl Workspace {
if let Some(pane) = panes.get(action.0).map(|p| (*p).clone()) { if let Some(pane) = panes.get(action.0).map(|p| (*p).clone()) {
cx.focus(&pane); cx.focus(&pane);
} else { } else {
self.split_pane(self.active_pane.clone(), SplitDirection::Right, cx); self.split_and_clone(self.active_pane.clone(), SplitDirection::Right, cx);
} }
} }
@@ -2008,7 +2085,7 @@ impl Workspace {
match event { match event {
pane::Event::AddItem { item } => item.added_to_pane(self, pane, cx), pane::Event::AddItem { item } => item.added_to_pane(self, pane, cx),
pane::Event::Split(direction) => { pane::Event::Split(direction) => {
self.split_pane(pane, *direction, cx); self.split_and_clone(pane, *direction, cx);
} }
pane::Event::Remove => self.remove_pane(pane, cx), pane::Event::Remove => self.remove_pane(pane, cx),
pane::Event::ActivateItem { local } => { pane::Event::ActivateItem { local } => {
@@ -2059,6 +2136,20 @@ impl Workspace {
} }
pub fn split_pane( pub fn split_pane(
&mut self,
pane_to_split: ViewHandle<Pane>,
split_direction: SplitDirection,
cx: &mut ViewContext<Self>,
) -> ViewHandle<Pane> {
let new_pane = self.add_pane(cx);
self.center
.split(&pane_to_split, &new_pane, split_direction)
.unwrap();
cx.notify();
new_pane
}
pub fn split_and_clone(
&mut self, &mut self,
pane: ViewHandle<Pane>, pane: ViewHandle<Pane>,
direction: SplitDirection, direction: SplitDirection,
@@ -2897,6 +2988,14 @@ impl Workspace {
cx.notify(); cx.notify();
} }
fn schedule_serialize(&mut self, cx: &mut ViewContext<Self>) {
self._schedule_serialize = Some(cx.spawn(|this, cx| async move {
cx.background().timer(Duration::from_millis(100)).await;
this.read_with(&cx, |this, cx| this.serialize_workspace(cx))
.ok();
}));
}
fn serialize_workspace(&self, cx: &ViewContext<Self>) { fn serialize_workspace(&self, cx: &ViewContext<Self>) {
fn serialize_pane_handle( fn serialize_pane_handle(
pane_handle: &ViewHandle<Pane>, pane_handle: &ViewHandle<Pane>,
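
`schedule_serialize` above debounces workspace serialization: each call replaces `_schedule_serialize` with a fresh task that waits 100 ms before serializing, and dropping the previous gpui task cancels it. A rough std-only analogue (no gpui; all names invented) that keeps only the most recent request by checking a generation counter after the delay:

// Hedged sketch: gpui tasks are cancelled on drop, so Zed only overwrites the
// field. Plain threads cannot be cancelled that way, so this stand-in lets every
// scheduled timer run but only the newest generation actually "serializes".
use std::{
    sync::{
        atomic::{AtomicU64, Ordering},
        Arc,
    },
    thread,
    time::Duration,
};

struct Workspace {
    generation: Arc<AtomicU64>,
}

impl Workspace {
    fn schedule_serialize(&self) {
        let my_generation = self.generation.fetch_add(1, Ordering::SeqCst) + 1;
        let generation = Arc::clone(&self.generation);
        thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));
            // A newer call superseded this one within the window; do nothing.
            if generation.load(Ordering::SeqCst) == my_generation {
                println!("serialize workspace (debounced)");
            }
        });
    }
}

fn main() {
    let workspace = Workspace {
        generation: Arc::new(AtomicU64::new(0)),
    };
    for _ in 0..5 {
        workspace.schedule_serialize(); // only the last of these ends up serializing
    }
    thread::sleep(Duration::from_millis(200));
}
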
@@ -2927,12 +3026,17 @@ impl Workspace {
cx: &AppContext, cx: &AppContext,
) -> SerializedPaneGroup { ) -> SerializedPaneGroup {
match pane_group { match pane_group {
Member::Axis(PaneAxis { axis, members }) => SerializedPaneGroup::Group { Member::Axis(PaneAxis {
axis,
members,
flexes,
}) => SerializedPaneGroup::Group {
axis: *axis, axis: *axis,
children: members children: members
.iter() .iter()
.map(|member| build_serialized_pane_group(member, cx)) .map(|member| build_serialized_pane_group(member, cx))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
flexes: Some(flexes.borrow().clone()),
}, },
Member::Pane(pane_handle) => { Member::Pane(pane_handle) => {
SerializedPaneGroup::Pane(serialize_pane_handle(&pane_handle, cx)) SerializedPaneGroup::Pane(serialize_pane_handle(&pane_handle, cx))
@@ -3399,27 +3503,11 @@ fn notify_if_database_failed(workspace: &WeakViewHandle<Workspace>, cx: &mut Asy
if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) { if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
workspace.show_notification_once(0, cx, |cx| { workspace.show_notification_once(0, cx, |cx| {
cx.add_view(|_| { cx.add_view(|_| {
MessageNotification::new("Failed to load any database file.") MessageNotification::new("Failed to load the database file.")
.with_click_message("Click to let us know about this error") .with_click_message("Click to let us know about this error")
.on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL)) .on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL))
}) })
}); });
} else {
let backup_path = (*db::BACKUP_DB_PATH).read();
if let Some(backup_path) = backup_path.clone() {
workspace.show_notification_once(1, cx, move |cx| {
cx.add_view(move |_| {
MessageNotification::new(format!(
"Database file was corrupted. Old database backed up to {}",
backup_path.display()
))
.with_click_message("Click to show old database in finder")
.on_click(move |cx| {
cx.platform().open_url(&backup_path.to_string_lossy())
})
})
});
}
} }
}) })
.log_err(); .log_err();
@@ -4235,7 +4323,7 @@ mod tests {
}); });
workspace workspace
.split_pane(left_pane.clone(), SplitDirection::Right, cx) .split_and_clone(left_pane.clone(), SplitDirection::Right, cx)
.unwrap(); .unwrap();
left_pane left_pane
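
Taken together, the workspace hunks above split the old behaviour in two: `split_pane` now just creates an empty pane on the requested side, while the previous clone-the-active-item behaviour lives on as `split_and_clone`, and `split_item`/`split_path` open an item in a fresh right-hand split unless the center pane is the only pane and is empty, in which case they fall back to opening in place. A toy sketch of that fallback decision, with invented `Pane`/`Member` stand-ins, just to make the guard explicit:

// Hedged sketch: simplified stand-ins for Zed's pane tree. The point is the
// guard in `split_or_open`: a lone, empty center pane gets the item directly,
// anything else gets a new pane added alongside the existing layout.
#[derive(Debug, Default)]
struct Pane {
    items: Vec<String>,
}

#[derive(Debug)]
enum Member {
    Pane(Pane),
    Axis(Vec<Member>),
}

fn split_or_open(root: &mut Member, path: &str) {
    if let Member::Pane(center) = root {
        if center.items.is_empty() {
            // Same idea as `split_path` short-circuiting to `open_path`.
            center.items.push(path.to_string());
            return;
        }
    }
    // Otherwise a new pane joins the layout, as `split_item`/`split_path`
    // do via `split_pane(..., SplitDirection::Right, ...)`.
    let mut new_pane = Pane::default();
    new_pane.items.push(path.to_string());
    let old_root = std::mem::replace(root, Member::Axis(Vec::new()));
    *root = Member::Axis(vec![old_root, Member::Pane(new_pane)]);
}

fn main() {
    let mut root = Member::Pane(Pane::default());
    split_or_open(&mut root, "a.rs"); // opens in place: single empty pane
    split_or_open(&mut root, "b.rs"); // splits: a second pane joins an axis
    println!("{root:?}");
}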


@@ -104,25 +104,28 @@ thiserror.workspace = true
tiny_http = "0.8" tiny_http = "0.8"
toml.workspace = true toml.workspace = true
tree-sitter.workspace = true tree-sitter.workspace = true
tree-sitter-c = "0.20.1" tree-sitter-c.workspace = true
tree-sitter-cpp = "0.20.0" tree-sitter-cpp.workspace = true
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" } tree-sitter-css.workspace = true
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" } tree-sitter-elixir.workspace = true
tree-sitter-embedded-template = "0.20.0" tree-sitter-embedded-template.workspace = true
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" } tree-sitter-go.workspace = true
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" } tree-sitter-heex.workspace = true
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" } tree-sitter-json.workspace = true
tree-sitter-rust = "0.20.3" tree-sitter-rust.workspace = true
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" } tree-sitter-markdown.workspace = true
tree-sitter-python = "0.20.2" tree-sitter-python.workspace = true
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" } tree-sitter-toml.workspace = true
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" } tree-sitter-typescript.workspace = true
tree-sitter-ruby = "0.20.0" tree-sitter-ruby.workspace = true
tree-sitter-html = "0.19.0" tree-sitter-html.workspace = true
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"} tree-sitter-php.workspace = true
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"} tree-sitter-scheme.workspace = true
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"} tree-sitter-svelte.workspace = true
tree-sitter-lua = "0.0.14" tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true
url = "2.2" url = "2.2"
urlencoding = "2.1.2" urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4"] } uuid = { version = "1.1.2", features = ["v4"] }


@@ -13,9 +13,11 @@ mod json;
#[cfg(feature = "plugin_runtime")] #[cfg(feature = "plugin_runtime")]
mod language_plugin; mod language_plugin;
mod lua; mod lua;
mod php;
mod python; mod python;
mod ruby; mod ruby;
mod rust; mod rust;
mod svelte;
mod typescript; mod typescript;
mod yaml; mod yaml;
@@ -135,7 +137,19 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
language( language(
"yaml", "yaml",
tree_sitter_yaml::language(), tree_sitter_yaml::language(),
vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime))], vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))],
);
language(
"svelte",
tree_sitter_svelte::language(),
vec![Arc::new(svelte::SvelteLspAdapter::new(
node_runtime.clone(),
))],
);
language(
"php",
tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
); );
} }


@@ -4,4 +4,4 @@ autoclose_before = ">})"
brackets = [ brackets = [
{ start = "<", end = ">", close = true, newline = true }, { start = "<", end = ">", close = true, newline = true },
] ]
block_comment = ["<%#", "%>"] block_comment = ["<%!-- ", " --%>"]


@@ -1,10 +1,7 @@
; HEEx delimiters ; HEEx delimiters
[ [
"--%>"
"-->"
"/>" "/>"
"<!" "<!"
"<!--"
"<" "<"
"</" "</"
"</:" "</:"
@@ -21,6 +18,9 @@
"<%%=" "<%%="
"<%=" "<%="
"%>" "%>"
"--%>"
"-->"
"<!--"
] @keyword ] @keyword
; HEEx operators are highlighted as such ; HEEx operators are highlighted as such


@@ -0,0 +1,133 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use collections::HashMap;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use smol::{fs, stream::StreamExt};
use std::{
any::Any,
ffi::OsString,
path::{Path, PathBuf},
sync::Arc,
};
use util::ResultExt;
fn intelephense_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}
pub struct IntelephenseVersion(String);
pub struct IntelephenseLspAdapter {
node: Arc<NodeRuntime>,
}
impl IntelephenseLspAdapter {
const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js";
#[allow(unused)]
pub fn new(node: Arc<NodeRuntime>) -> Self {
Self { node }
}
}
#[async_trait]
impl LspAdapter for IntelephenseLspAdapter {
async fn name(&self) -> LanguageServerName {
LanguageServerName("intelephense".into())
}
async fn fetch_latest_server_version(
&self,
_delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(IntelephenseVersion(
self.node.npm_package_latest_version("intelephense").await?,
)) as Box<_>)
}
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
container_dir: PathBuf,
_delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<IntelephenseVersion>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH);
if fs::metadata(&server_path).await.is_err() {
self.node
.npm_install_packages(&container_dir, [("intelephense", version.0.as_str())])
.await?;
}
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
arguments: intelephense_server_binary_arguments(&server_path),
})
}
async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}
async fn installation_test_binary(
&self,
container_dir: PathBuf,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}
async fn label_for_completion(
&self,
_item: &lsp::CompletionItem,
_language: &Arc<language::Language>,
) -> Option<language::CodeLabel> {
None
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
None
}
async fn language_ids(&self) -> HashMap<String, String> {
HashMap::from_iter([("PHP".into(), "php".into())])
}
}
async fn get_cached_server_binary(
container_dir: PathBuf,
node: &NodeRuntime,
) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_version_dir = Some(entry.path());
}
}
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let server_path = last_version_dir.join(IntelephenseLspAdapter::SERVER_PATH);
if server_path.exists() {
Ok(LanguageServerBinary {
path: node.binary_path().await?,
arguments: intelephense_server_binary_arguments(&server_path),
})
} else {
Err(anyhow!(
"missing executable in directory {:?}",
last_version_dir
))
}
})()
.await
.log_err()
}


@@ -0,0 +1,11 @@
name = "PHP"
path_suffixes = ["php"]
first_line_pattern = '^#!.*php'
line_comment = "// "
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
]


@@ -0,0 +1,123 @@
(php_tag) @tag
"?>" @tag
; Types
(primitive_type) @type.builtin
(cast_type) @type.builtin
(named_type (name) @type) @type
(named_type (qualified_name) @type) @type
; Functions
(array_creation_expression "array" @function.builtin)
(list_literal "list" @function.builtin)
(method_declaration
name: (name) @function.method)
(function_call_expression
function: [(qualified_name (name)) (name)] @function)
(scoped_call_expression
name: (name) @function)
(member_call_expression
name: (name) @function.method)
(function_definition
name: (name) @function)
; Member
(property_element
(variable_name) @property)
(member_access_expression
name: (variable_name (name)) @property)
(member_access_expression
name: (name) @property)
; Variables
(relative_scope) @variable.builtin
((name) @constant
(#match? @constant "^_?[A-Z][A-Z\\d_]+$"))
((name) @constant.builtin
(#match? @constant.builtin "^__[A-Z][A-Z\d_]+__$"))
((name) @constructor
(#match? @constructor "^[A-Z]"))
((name) @variable.builtin
(#eq? @variable.builtin "this"))
(variable_name) @variable
; Basic tokens
[
(string)
(string_value)
(encapsed_string)
(heredoc)
(heredoc_body)
(nowdoc_body)
] @string
(boolean) @constant.builtin
(null) @constant.builtin
(integer) @number
(float) @number
(comment) @comment
"$" @operator
; Keywords
"abstract" @keyword
"as" @keyword
"break" @keyword
"case" @keyword
"catch" @keyword
"class" @keyword
"const" @keyword
"continue" @keyword
"declare" @keyword
"default" @keyword
"do" @keyword
"echo" @keyword
"else" @keyword
"elseif" @keyword
"enum" @keyword
"enddeclare" @keyword
"endforeach" @keyword
"endif" @keyword
"endswitch" @keyword
"endwhile" @keyword
"extends" @keyword
"final" @keyword
"finally" @keyword
"foreach" @keyword
"function" @keyword
"global" @keyword
"if" @keyword
"implements" @keyword
"include_once" @keyword
"include" @keyword
"insteadof" @keyword
"interface" @keyword
"namespace" @keyword
"new" @keyword
"private" @keyword
"protected" @keyword
"public" @keyword
"require_once" @keyword
"require" @keyword
"return" @keyword
"static" @keyword
"switch" @keyword
"throw" @keyword
"trait" @keyword
"try" @keyword
"use" @keyword
"while" @keyword


@@ -0,0 +1,3 @@
((text) @content
(#set! "language" "html")
(#set! "combined"))


@@ -0,0 +1,26 @@
(class_declaration
"class" @context
name: (name) @name
) @item
(function_definition
"function" @context
name: (_) @name
) @item
(method_declaration
"function" @context
name: (_) @name
) @item
(interface_declaration
"interface" @context
name: (_) @name
) @item
(enum_declaration
"enum" @context
name: (_) @name
) @item


@@ -0,0 +1,40 @@
(namespace_definition
name: (namespace_name) @name) @module
(interface_declaration
name: (name) @name) @definition.interface
(trait_declaration
name: (name) @name) @definition.interface
(class_declaration
name: (name) @name) @definition.class
(class_interface_clause [(name) (qualified_name)] @name) @impl
(property_declaration
(property_element (variable_name (name) @name))) @definition.field
(function_definition
name: (name) @name) @definition.function
(method_declaration
name: (name) @name) @definition.function
(object_creation_expression
[
(qualified_name (name) @name)
(variable_name (name) @name)
]) @reference.class
(function_call_expression
function: [
(qualified_name (name) @name)
(variable_name (name)) @name
]) @reference.call
(scoped_call_expression
name: (name) @name) @reference.call
(member_call_expression
name: (name) @name) @reference.call


@@ -0,0 +1,125 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
use smol::fs;
use std::{
any::Any,
ffi::OsString,
path::{Path, PathBuf},
sync::Arc,
};
use util::ResultExt;
const SERVER_PATH: &'static str = "node_modules/svelte-language-server/bin/server.js";
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}
pub struct SvelteLspAdapter {
node: Arc<NodeRuntime>,
}
impl SvelteLspAdapter {
pub fn new(node: Arc<NodeRuntime>) -> Self {
SvelteLspAdapter { node }
}
}
#[async_trait]
impl LspAdapter for SvelteLspAdapter {
async fn name(&self) -> LanguageServerName {
LanguageServerName("svelte-language-server".into())
}
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
.npm_package_latest_version("svelte-language-server")
.await?,
) as Box<_>)
}
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(SERVER_PATH);
if fs::metadata(&server_path).await.is_err() {
self.node
.npm_install_packages(
&container_dir,
[("svelte-language-server", version.as_str())],
)
.await?;
}
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
arguments: server_binary_arguments(&server_path),
})
}
async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}
async fn installation_test_binary(
&self,
container_dir: PathBuf,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))
}
}
async fn get_cached_server_binary(
container_dir: PathBuf,
node: &NodeRuntime,
) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_version_dir = Some(entry.path());
}
}
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let server_path = last_version_dir.join(SERVER_PATH);
if server_path.exists() {
Ok(LanguageServerBinary {
path: node.binary_path().await?,
arguments: server_binary_arguments(&server_path),
})
} else {
Err(anyhow!(
"missing executable in directory {:?}",
last_version_dir
))
}
})()
.await
.log_err()
}


@@ -0,0 +1,18 @@
name = "Svelte"
path_suffixes = ["svelte"]
line_comment = "// "
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
{ start = "`", end = "`", close = true, newline = false, not_in = ["string"] },
{ start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] },
]
[overrides.element]
line_comment = { remove = true }
block_comment = ["{/* ", " */}"]


@@ -0,0 +1,9 @@
[
(style_element)
(script_element)
(element)
(if_statement)
(else_statement)
(each_statement)
(await_statement)
] @fold


@@ -0,0 +1,42 @@
; Special identifiers
;--------------------
; TODO:
(tag_name) @tag
(attribute_name) @property
(erroneous_end_tag_name) @keyword
(comment) @comment
[
(attribute_value)
(quoted_attribute_value)
] @string
[
(text)
(raw_text_expr)
] @none
[
(special_block_keyword)
(then)
(as)
] @keyword
[
"{"
"}"
] @punctuation.bracket
"=" @operator
[
"<"
">"
"</"
"/>"
"#"
":"
"/"
"@"
] @tag.delimiter


@@ -0,0 +1,8 @@
[
(element)
(if_statement)
(each_statement)
(await_statement)
(script_element)
(style_element)
] @indent


@@ -0,0 +1,28 @@
; injections.scm
; --------------
(script_element
(raw_text) @content
(#set! "language" "javascript"))
((script_element
(start_tag
(attribute
(quoted_attribute_value (attribute_value) @_language)))
(raw_text) @content)
(#eq? @_language "ts")
(#set! "language" "typescript"))
((script_element
(start_tag
(attribute
(quoted_attribute_value (attribute_value) @_language)))
(raw_text) @content)
(#eq? @_language "typescript")
(#set! "language" "typescript"))
(style_element
(raw_text) @content
(#set! "language" "css"))
((raw_text_expr) @content
(#set! "language" "javascript"))


@@ -36,7 +36,7 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
str, str,
sync::{ sync::{
atomic::{AtomicBool, Ordering}, atomic::{AtomicBool, AtomicU32, Ordering},
Arc, Weak, Arc, Weak,
}, },
thread, thread,
@@ -405,11 +405,18 @@ struct PanicRequest {
token: String, token: String,
} }
static PANIC_COUNT: AtomicU32 = AtomicU32::new(0);
fn init_panic_hook(app: &App, installation_id: Option<String>) { fn init_panic_hook(app: &App, installation_id: Option<String>) {
let is_pty = stdout_is_a_pty(); let is_pty = stdout_is_a_pty();
let platform = app.platform(); let platform = app.platform();
panic::set_hook(Box::new(move |info| { panic::set_hook(Box::new(move |info| {
let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst);
if prior_panic_count > 0 {
std::panic::resume_unwind(Box::new(()));
}
let app_version = ZED_APP_VERSION let app_version = ZED_APP_VERSION
.or_else(|| platform.app_version().ok()) .or_else(|| platform.app_version().ok())
.map_or("dev".to_string(), |v| v.to_string()); .map_or("dev".to_string(), |v| v.to_string());
@@ -464,7 +471,6 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
if is_pty { if is_pty {
if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() { if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
eprintln!("{}", panic_data_json); eprintln!("{}", panic_data_json);
return;
} }
} else { } else {
if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() { if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
@@ -481,6 +487,8 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
} }
} }
} }
std::process::abort();
})); }));
} }
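
The panic-hook hunk above guards against recursive panics with an atomic counter and ends the hook with `std::process::abort()` so the process never keeps running after reporting. A cut-down, runnable version of the same pattern using only the standard library (the real hook also builds and persists a panic report, which is omitted here):

// Hedged sketch of the re-entrancy guard: count panics, bail out of the hook if
// it is entered a second time, and abort once reporting is done.
use std::panic;
use std::sync::atomic::{AtomicU32, Ordering};

static PANIC_COUNT: AtomicU32 = AtomicU32::new(0);

fn install_hook() {
    panic::set_hook(Box::new(|info| {
        let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst);
        if prior_panic_count > 0 {
            // The hook itself panicked; don't recurse into reporting again.
            panic::resume_unwind(Box::new(()));
        }

        eprintln!("panic: {info}");
        // Reporting would happen here; afterwards the process is torn down
        // unconditionally, mirroring the `std::process::abort()` in the diff.
        std::process::abort();
    }));
}

fn main() {
    install_hook();
    panic!("boom"); // prints the message via the hook, then aborts
}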


@@ -517,11 +517,7 @@ pub fn handle_keymap_file_changes(
let mut settings_subscription = None; let mut settings_subscription = None;
while let Some(user_keymap_content) = user_keymap_file_rx.next().await { while let Some(user_keymap_content) = user_keymap_file_rx.next().await {
if let Ok(keymap_content) = KeymapFile::parse(&user_keymap_content) { if let Ok(keymap_content) = KeymapFile::parse(&user_keymap_content) {
cx.update(|cx| { cx.update(|cx| reload_keymaps(cx, &keymap_content));
cx.clear_bindings();
load_default_keymap(cx);
keymap_content.clone().add_to_cx(cx).log_err();
});
let mut old_base_keymap = cx.read(|cx| *settings::get::<BaseKeymap>(cx)); let mut old_base_keymap = cx.read(|cx| *settings::get::<BaseKeymap>(cx));
drop(settings_subscription); drop(settings_subscription);
@@ -530,10 +526,7 @@ pub fn handle_keymap_file_changes(
let new_base_keymap = *settings::get::<BaseKeymap>(cx); let new_base_keymap = *settings::get::<BaseKeymap>(cx);
if new_base_keymap != old_base_keymap { if new_base_keymap != old_base_keymap {
old_base_keymap = new_base_keymap.clone(); old_base_keymap = new_base_keymap.clone();
reload_keymaps(cx, &keymap_content);
cx.clear_bindings();
load_default_keymap(cx);
keymap_content.clone().add_to_cx(cx).log_err();
} }
}) })
.detach(); .detach();
@@ -544,6 +537,13 @@ pub fn handle_keymap_file_changes(
.detach(); .detach();
} }
fn reload_keymaps(cx: &mut AppContext, keymap_content: &KeymapFile) {
cx.clear_bindings();
load_default_keymap(cx);
keymap_content.clone().add_to_cx(cx).log_err();
cx.set_menus(menus::menus());
}
fn open_local_settings_file( fn open_local_settings_file(
workspace: &mut Workspace, workspace: &mut Workspace,
_: &OpenLocalSettings, _: &OpenLocalSettings,
@@ -1021,7 +1021,7 @@ mod tests {
// Split the pane with the first entry, then open the second entry again. // Split the pane with the first entry, then open the second entry again.
workspace workspace
.update(cx, |w, cx| { .update(cx, |w, cx| {
w.split_pane(w.active_pane().clone(), SplitDirection::Right, cx); w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, cx);
w.open_path(file2.clone(), None, true, cx) w.open_path(file2.clone(), None, true, cx)
}) })
.await .await
@@ -1344,7 +1344,11 @@ mod tests {
cx.dispatch_action(window_id, NewFile); cx.dispatch_action(window_id, NewFile);
workspace workspace
.update(cx, |workspace, cx| { .update(cx, |workspace, cx| {
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); workspace.split_and_clone(
workspace.active_pane().clone(),
SplitDirection::Right,
cx,
);
workspace.open_path((worktree.read(cx).id(), "the-new-name.rs"), None, true, cx) workspace.open_path((worktree.read(cx).id(), "the-new-name.rs"), None, true, cx)
}) })
.await .await


@@ -114,7 +114,7 @@ export default function editor(): any {
color: foreground(layer, "default"), color: foreground(layer, "default"),
}, },
hovered: { hovered: {
color: foreground(layer, "variant"), color: foreground(layer, "on"),
}, },
}, },
}, },


@@ -33,6 +33,11 @@ export default function feedback(): any {
background: background(theme.highest, "on", "hovered"), background: background(theme.highest, "on", "hovered"),
border: border(theme.highest, "on", "hovered"), border: border(theme.highest, "on", "hovered"),
}, },
disabled: {
...text(theme.highest, "mono", "on", "disabled"),
background: background(theme.highest, "on", "disabled"),
border: border(theme.highest, "on", "disabled"),
}
}, },
}), }),
button_margin: 8, button_margin: 8,


@@ -83,6 +83,35 @@ export default function search(): any {
}, },
}, },
}), }),
action_button: interactive({
base: {
...text(theme.highest, "mono", "on"),
background: background(theme.highest, "on"),
corner_radius: 6,
border: border(theme.highest, "on"),
margin: {
right: 4,
},
padding: {
bottom: 2,
left: 10,
right: 10,
top: 2,
},
},
state: {
hovered: {
...text(theme.highest, "mono", "on", "hovered"),
background: background(theme.highest, "on", "hovered"),
border: border(theme.highest, "on", "hovered"),
},
clicked: {
...text(theme.highest, "mono", "on", "pressed"),
background: background(theme.highest, "on", "pressed"),
border: border(theme.highest, "on", "pressed"),
},
},
}),
editor, editor,
invalid_editor: { invalid_editor: {
...editor, ...editor,


@@ -6,6 +6,8 @@ import { useTheme } from "../common"
export default function tab_bar(): any { export default function tab_bar(): any {
const theme = useTheme() const theme = useTheme()
const { is_light } = theme
const height = 32 const height = 32
const active_layer = theme.highest const active_layer = theme.highest
@@ -38,6 +40,18 @@ export default function tab_bar(): any {
icon_conflict: foreground(layer, "warning"), icon_conflict: foreground(layer, "warning"),
icon_dirty: foreground(layer, "accent"), icon_dirty: foreground(layer, "accent"),
git: {
modified: is_light
? theme.ramps.yellow(0.6).hex()
: theme.ramps.yellow(0.5).hex(),
inserted: is_light
? theme.ramps.green(0.45).hex()
: theme.ramps.green(0.5).hex(),
conflict: is_light
? theme.ramps.red(0.6).hex()
: theme.ramps.red(0.5).hex(),
},
// When two tabs of the same name are open, a label appears next to them // When two tabs of the same name are open, a label appears next to them
description: { description: {
margin: { left: 8 }, margin: { left: 8 },