diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..8d16a59bc1 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,5 @@ +[[PR Description]] + +Release Notes: + +* [[Added foo / Fixed bar / No notes]] diff --git a/.github/workflows/build_dmg.yml b/.github/workflows/build_dmg.yml new file mode 100644 index 0000000000..989914e5e8 --- /dev/null +++ b/.github/workflows/build_dmg.yml @@ -0,0 +1,54 @@ +name: Build Zed.dmg + +on: + push: + branches: + - main + - "v[0-9]+.[0-9]+.x" + pull_request: + +defaults: + run: + shell: bash -euxo pipefail {0} + +concurrency: + # Allow only one workflow run at a time per non-`main` branch. + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true + +env: + RUST_BACKTRACE: 1 + COPT: '-Werror' + +jobs: + build-dmg: + if: github.ref_name == 'main' || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') + runs-on: + - self-hosted + - test + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + clean: false + submodules: 'recursive' + + - name: Install Rust + run: | + rustup set profile minimal + rustup update stable + + - name: Install node + uses: actions/setup-node@v3 + with: + node-version: 18 + + - name: Build dmg bundle + run: ./script/bundle + + - name: Upload the build artifact + uses: actions/upload-artifact@v3 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg + path: ./target/release/Zed.dmg diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2b7cb97efa..27af9e1164 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -62,6 +62,9 @@ jobs: clean: false submodules: 'recursive' + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 70 + - name: Run check run: cargo check --workspace @@ -110,6 +113,9 @@ jobs: clean: false submodules: 'recursive' + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 70 + - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | diff --git a/.github/workflows/release_actions.yml b/.github/workflows/release_actions.yml index 4a9d777769..5feb29e469 100644 --- a/.github/workflows/release_actions.yml +++ b/.github/workflows/release_actions.yml @@ -14,7 +14,7 @@ jobs: content: | 📣 Zed ${{ github.event.release.tag_name }} was just released! - Restart your Zed or head to https://zed.dev/releases/latest to grab it. + Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it. 
```md # Changelog diff --git a/Cargo.lock b/Cargo.lock index 216103959c..c2e0790cf4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1135,6 +1135,7 @@ dependencies = [ "plist", "serde", "serde_derive", + "util", ] [[package]] @@ -1228,7 +1229,7 @@ dependencies = [ [[package]] name = "collab" -version = "0.12.0" +version = "0.12.1" dependencies = [ "anyhow", "async-tungstenite", @@ -2253,6 +2254,7 @@ dependencies = [ "project", "serde_json", "settings", + "text", "theme", "util", "workspace", @@ -2418,6 +2420,7 @@ dependencies = [ "serde_derive", "serde_json", "smol", + "sum_tree", "tempfile", "util", ] @@ -2747,6 +2750,7 @@ dependencies = [ "postage", "settings", "text", + "util", "workspace", ] @@ -4838,6 +4842,7 @@ dependencies = [ "futures 0.3.28", "fuzzy", "git", + "git2", "glob", "gpui", "ignore", @@ -5930,6 +5935,7 @@ name = "search" version = "0.1.0" dependencies = [ "anyhow", + "client", "collections", "editor", "futures 0.3.28", @@ -6702,6 +6708,12 @@ dependencies = [ "winx", ] +[[package]] +name = "take-until" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bdb6fa0dfa67b38c1e66b7041ba9dcf23b99d8121907cd31c807a332f7a0bbb" + [[package]] name = "target-lexicon" version = "0.12.7" @@ -6847,6 +6859,7 @@ name = "theme" version = "0.1.0" dependencies = [ "anyhow", + "fs", "gpui", "indexmap", "parking_lot 0.11.2", @@ -7756,6 +7769,7 @@ dependencies = [ "serde", "serde_json", "smol", + "take-until", "tempdir", "url", ] @@ -8736,7 +8750,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "zed" -version = "0.86.0" +version = "0.87.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/assets/keymaps/default.json b/assets/keymaps/default.json index d2fd4107e4..01a09e0cba 100644 --- a/assets/keymaps/default.json +++ b/assets/keymaps/default.json @@ -191,7 +191,7 @@ } }, { - "context": "BufferSearchBar > Editor", + "context": "BufferSearchBar", "bindings": { "escape": "buffer_search::Dismiss", "tab": "buffer_search::FocusEditor", @@ -200,13 +200,13 @@ } }, { - "context": "ProjectSearchBar > Editor", + "context": "ProjectSearchBar", "bindings": { "escape": "project_search::ToggleFocus" } }, { - "context": "ProjectSearchView > Editor", + "context": "ProjectSearchView", "bindings": { "escape": "project_search::ToggleFocus" } diff --git a/assets/keymaps/jetbrains.json b/assets/keymaps/jetbrains.json index 59e069e7f7..383de07904 100644 --- a/assets/keymaps/jetbrains.json +++ b/assets/keymaps/jetbrains.json @@ -11,6 +11,7 @@ "ctrl->": "zed::IncreaseBufferFontSize", "ctrl-<": "zed::DecreaseBufferFontSize", "cmd-d": "editor::DuplicateLine", + "cmd-backspace": "editor::DeleteLine", "cmd-pagedown": "editor::MovePageDown", "cmd-pageup": "editor::MovePageUp", "ctrl-alt-shift-b": "editor::SelectToPreviousWordStart", @@ -33,6 +34,7 @@ ], "shift-alt-up": "editor::MoveLineUp", "shift-alt-down": "editor::MoveLineDown", + "cmd-alt-l": "editor::Format", "cmd-[": "pane::GoBack", "cmd-]": "pane::GoForward", "alt-f7": "editor::FindAllReferences", @@ -63,6 +65,7 @@ { "context": "Workspace", "bindings": { + "cmd-shift-o": "file_finder::Toggle", "cmd-shift-a": "command_palette::Toggle", "cmd-alt-o": "project_symbols::Toggle", "cmd-1": "workspace::ToggleLeftSidebar", diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index ebfa50aaf8..300340a225 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -121,7 +121,7 @@ fn 
view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) { { format!("{server_url}/releases/preview/latest") } else { - format!("{server_url}/releases/latest") + format!("{server_url}/releases/stable/latest") }; cx.platform().open_url(&latest_release_url); } diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 9b8009dd69..2b4a375a5b 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -19,6 +19,7 @@ dirs = "3.0" ipc-channel = "0.16" serde.workspace = true serde_derive.workspace = true +util = { path = "../util" } [target.'cfg(target_os = "macos")'.dependencies] core-foundation = "0.9" diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 7cad42b534..3a0abbaec7 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -1,6 +1,5 @@ pub use ipc_channel::ipc; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; #[derive(Serialize, Deserialize)] pub struct IpcHandshake { @@ -10,7 +9,12 @@ pub struct IpcHandshake { #[derive(Debug, Serialize, Deserialize)] pub enum CliRequest { - Open { paths: Vec<PathBuf>, wait: bool }, + // The field is named `paths` for compatibility, but the CLI can now request + // opening a path at a certain row and/or column: `some/path:123` and `some/path:123:456`. + // + // Since the Zed CLI has to be installed separately, an old CLI may end up querying a newer + // Zed editor; support both formats by using `String` here and parsing it on the Zed side later. + Open { paths: Vec<String>, wait: bool }, } #[derive(Debug, Serialize, Deserialize)] @@ -20,3 +24,7 @@ pub enum CliResponse { Stderr { message: String }, Exit { status: i32 }, } + +/// When Zed is started not as an *.app but as a plain binary (e.g. during local development), +/// this environment variable can be set to tell it to behave "regularly" anyway. +pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE"; diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index a31e59587f..feebbff61b 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -1,6 +1,6 @@ -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context, Result}; use clap::Parser; -use cli::{CliRequest, CliResponse, IpcHandshake}; +use cli::{CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME}; use core_foundation::{ array::{CFArray, CFIndex}, string::kCFStringEncodingUTF8, @@ -16,16 +16,20 @@ use std::{ path::{Path, PathBuf}, ptr, }; +use util::paths::PathLikeWithPosition; #[derive(Parser)] #[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))] struct Args { - /// Wait for all of the given paths to be closed before exiting. + /// Wait for all of the given paths to be opened/closed before exiting. #[clap(short, long)] wait: bool, /// A sequence of space-separated paths that you want to open. - #[clap()] - paths: Vec<PathBuf>, + /// + /// Use `path:line:column` syntax to open a file at a specific location. + /// Non-existent paths and directories will ignore the `:line:column` suffix. + #[clap(value_parser = parse_path_with_position)] + paths_with_position: Vec<PathLikeWithPosition<PathBuf>>, /// Print Zed's version and the app path. 
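As an aside, the `path:row:column` convention described in the comments above can be sketched in a few lines. This is a hypothetical, simplified illustration — `parse_with_position` and its plain colon-splitting are assumptions, not the actual `util::paths::PathLikeWithPosition` implementation, which handles more edge cases:

```rust
// Hypothetical sketch of the `path:row:column` parsing described above;
// the real logic lives in util::paths::PathLikeWithPosition and differs in detail.
fn parse_with_position(arg: &str) -> (&str, Option<u32>, Option<u32>) {
    let mut parts = arg.splitn(3, ':');
    let path = parts.next().unwrap_or(arg);
    let row = parts.next().and_then(|row| row.parse().ok());
    let column = parts.next().and_then(|col| col.parse().ok());
    (path, row, column)
}

fn main() {
    assert_eq!(parse_with_position("some/path:123:456"), ("some/path", Some(123), Some(456)));
    assert_eq!(parse_with_position("some/path:123"), ("some/path", Some(123), None));
    assert_eq!(parse_with_position("some/path"), ("some/path", None, None));
}
```
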
#[clap(short, long)] version: bool, @@ -34,6 +38,14 @@ struct Args { bundle_path: Option<PathBuf>, } +fn parse_path_with_position( + argument_str: &str, +) -> Result<PathLikeWithPosition<PathBuf>, std::convert::Infallible> { + PathLikeWithPosition::parse_str(argument_str, |path_str| { + Ok(Path::new(path_str).to_path_buf()) + }) +} + #[derive(Debug, Deserialize)] struct InfoPlist { #[serde(rename = "CFBundleShortVersionString")] @@ -43,37 +55,37 @@ struct InfoPlist { fn main() -> Result<()> { let args = Args::parse(); - let bundle_path = if let Some(bundle_path) = args.bundle_path { - bundle_path.canonicalize()? - } else { - locate_bundle()? - }; + let bundle = Bundle::detect(args.bundle_path.as_deref()).context("Bundle detection")?; if args.version { - let plist_path = bundle_path.join("Contents/Info.plist"); - let plist = plist::from_file::<_, InfoPlist>(plist_path)?; - println!( - "Zed {} – {}", - plist.bundle_short_version_string, - bundle_path.to_string_lossy() - ); + println!("{}", bundle.zed_version_string()); return Ok(()); } - for path in args.paths.iter() { + for path in args + .paths_with_position + .iter() + .map(|path_with_position| &path_with_position.path_like) + { if !path.exists() { touch(path.as_path())?; } } - let (tx, rx) = launch_app(bundle_path)?; + let (tx, rx) = bundle.launch()?; tx.send(CliRequest::Open { paths: args - .paths + .paths_with_position .into_iter() - .map(|path| fs::canonicalize(path).map_err(|error| anyhow!(error))) - .collect::<Result<Vec<_>>>()?, + .map(|path_with_position| { + let path_with_position = path_with_position.map_path_like(|path| { + fs::canonicalize(&path) + .with_context(|| format!("path {path:?} canonicalization")) + })?; + Ok(path_with_position.to_string(|path| path.display().to_string())) + }) + .collect::<Result<Vec<_>>>()?, wait: args.wait, })?; @@ -89,6 +101,148 @@ fn main() -> Result<()> { Ok(()) } +enum Bundle { + App { + app_bundle: PathBuf, + plist: InfoPlist, + }, + LocalPath { + executable: PathBuf, + plist: InfoPlist, + }, +} + +impl Bundle { + fn detect(args_bundle_path: Option<&Path>) -> anyhow::Result<Self> { + let bundle_path = if let Some(bundle_path) = args_bundle_path { + bundle_path + .canonicalize() + .with_context(|| format!("Args bundle path {bundle_path:?} canonicalization"))? + } else { + locate_bundle().context("bundle autodiscovery")? + }; + + match bundle_path.extension().and_then(|ext| ext.to_str()) { + Some("app") => { + let plist_path = bundle_path.join("Contents/Info.plist"); + let plist = plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| { + format!("Reading *.app bundle plist file at {plist_path:?}") + })?; + Ok(Self::App { + app_bundle: bundle_path, + plist, + }) + } + _ => { + println!("Bundle path {bundle_path:?} has no *.app extension, attempting to locate a dev build"); + let plist_path = bundle_path + .parent() + .with_context(|| format!("Bundle path {bundle_path:?} has no parent"))? + .join("WebRTC.framework/Resources/Info.plist"); + let plist = plist::from_file::<_, InfoPlist>(&plist_path) + .with_context(|| format!("Reading dev bundle plist file at {plist_path:?}"))?; + Ok(Self::LocalPath { + executable: bundle_path, + plist, + }) + } + } + } + + fn plist(&self) -> &InfoPlist { + match self { + Self::App { plist, .. } => plist, + Self::LocalPath { plist, .. } => plist, + } + } + + fn path(&self) -> &Path { + match self { + Self::App { app_bundle, .. } => app_bundle, + Self::LocalPath { + executable, + .. + } => executable, + } + } + + fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> { + let (server, server_name) = + IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?; + let url = format!("zed-cli://{server_name}"); + + match self { + Self::App { app_bundle, .. } => { + let app_path = app_bundle; + + let status = unsafe { + let app_url = CFURL::from_path(app_path, true) + .with_context(|| format!("invalid app path {app_path:?}"))?; + let url_to_open = CFURL::wrap_under_create_rule(CFURLCreateWithBytes( + ptr::null(), + url.as_ptr(), + url.len() as CFIndex, + kCFStringEncodingUTF8, + ptr::null(), + )); + let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]); + LSOpenFromURLSpec( + &LSLaunchURLSpec { + appURL: app_url.as_concrete_TypeRef(), + itemURLs: urls_to_open.as_concrete_TypeRef(), + passThruParams: ptr::null(), + launchFlags: kLSLaunchDefaults, + asyncRefCon: ptr::null_mut(), + }, + ptr::null_mut(), + ) + }; + + anyhow::ensure!( + status == 0, + "cannot start app bundle {}", + self.zed_version_string() + ); + } + Self::LocalPath { executable, .. } => { + let executable_parent = executable + .parent() + .with_context(|| format!("Executable {executable:?} path has no parent"))?; + let subprocess_stdout_file = + fs::File::create(executable_parent.join("zed_dev.log")) + .with_context(|| format!("Log file creation in {executable_parent:?}"))?; + let subprocess_stdin_file = + subprocess_stdout_file.try_clone().with_context(|| { + format!("Cloning descriptor for file {subprocess_stdout_file:?}") + })?; + let mut command = std::process::Command::new(executable); + let command = command + .env(FORCE_CLI_MODE_ENV_VAR_NAME, "") + .stderr(subprocess_stdout_file) + .stdout(subprocess_stdin_file) + .arg(url); + + command + .spawn() + .with_context(|| format!("Spawning {command:?}"))?; + } + } + + let (_, handshake) = server.accept().context("Handshake after Zed spawn")?; + Ok((handshake.requests, handshake.responses)) + } + + fn zed_version_string(&self) -> String { + let is_dev = matches!(self, Self::LocalPath { .. }); + format!( + "Zed {}{} – {}", + self.plist().bundle_short_version_string, + if is_dev { " (dev)" } else { "" }, + self.path().display(), + ) + } +} + fn touch(path: &Path) -> io::Result<()> { match OpenOptions::new().create(true).write(true).open(path) { Ok(_) => Ok(()), @@ -106,38 +260,3 @@ fn locate_bundle() -> Result<PathBuf> { } Ok(app_path) } - -fn launch_app(app_path: PathBuf) -> Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> { - let (server, server_name) = IpcOneShotServer::<IpcHandshake>::new()?; - let url = format!("zed-cli://{server_name}"); - - let status = unsafe { - let app_url = - CFURL::from_path(&app_path, true).ok_or_else(|| anyhow!("invalid app path"))?; - let url_to_open = CFURL::wrap_under_create_rule(CFURLCreateWithBytes( - ptr::null(), - url.as_ptr(), - url.len() as CFIndex, - kCFStringEncodingUTF8, - ptr::null(), - )); - let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]); - LSOpenFromURLSpec( - &LSLaunchURLSpec { - appURL: app_url.as_concrete_TypeRef(), - itemURLs: urls_to_open.as_concrete_TypeRef(), - passThruParams: ptr::null(), - launchFlags: kLSLaunchDefaults, - asyncRefCon: ptr::null_mut(), - }, - ptr::null_mut(), - ) - }; - - if status == 0 { - let (_, handshake) = server.accept()?; - Ok((handshake.requests, handshake.responses)) - } else { - Err(anyhow!("cannot start {:?}", app_path)) - } -} diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 5c8f208137..075c2956ec 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -85,6 +85,11 @@ pub enum ClickhouseEvent { copilot_enabled: bool, copilot_enabled_for_language: bool, }, + Copilot { + suggestion_id: Option<String>, + suggestion_accepted: bool, + file_extension: Option<String>, + }, } #[derive(Serialize, Debug)] diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 84e58744b1..ba49373641 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"] default-run = "collab" edition = "2021" name = "collab" -version = "0.12.0" +version = "0.12.1" publish = false [[bin]] diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 684b6bffe0..7c6a49f179 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -86,8 +86,8 @@ CREATE TABLE "worktree_repositories" ( "project_id" INTEGER NOT NULL, "worktree_id" INTEGER NOT NULL, "work_directory_id" INTEGER NOT NULL, - "scan_id" INTEGER NOT NULL, "branch" VARCHAR, + "scan_id" INTEGER NOT NULL, "is_deleted" BOOL NOT NULL, PRIMARY KEY(project_id, worktree_id, work_directory_id), FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, @@ -96,6 +96,23 @@ CREATE TABLE "worktree_repositories" ( CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id"); CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id"); +CREATE TABLE "worktree_repository_statuses" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INTEGER NOT NULL, + "work_directory_id" INTEGER NOT NULL, + "repo_path" VARCHAR NOT NULL, + "status" INTEGER NOT NULL, + "scan_id" INTEGER NOT NULL, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + 
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_repository_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id"); +CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id" ON "worktree_repository_statuses" ("project_id", "worktree_id"); +CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id_and_work_directory_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id"); + + CREATE TABLE "worktree_diagnostic_summaries" ( "project_id" INTEGER NOT NULL, "worktree_id" INTEGER NOT NULL, diff --git a/crates/collab/migrations/20230511004019_add_repository_statuses.sql b/crates/collab/migrations/20230511004019_add_repository_statuses.sql new file mode 100644 index 0000000000..862561c686 --- /dev/null +++ b/crates/collab/migrations/20230511004019_add_repository_statuses.sql @@ -0,0 +1,15 @@ +CREATE TABLE "worktree_repository_statuses" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "work_directory_id" INT8 NOT NULL, + "repo_path" VARCHAR NOT NULL, + "status" INT8 NOT NULL, + "scan_id" INT8 NOT NULL, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id"); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index bc5b816abf..453aa82b53 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -15,6 +15,7 @@ mod worktree; mod worktree_diagnostic_summary; mod worktree_entry; mod worktree_repository; +mod worktree_repository_statuses; use crate::executor::Executor; use crate::{Error, Result}; @@ -1568,6 +1569,50 @@ impl Database { worktree.updated_repositories.push(proto::RepositoryEntry { work_directory_id: db_repository.work_directory_id as u64, branch: db_repository.branch, + removed_repo_paths: Default::default(), + updated_statuses: Default::default(), + }); + } + } + } + + // Repository Status Entries + for repository in worktree.updated_repositories.iter_mut() { + let repository_status_entry_filter = + if let Some(rejoined_worktree) = rejoined_worktree { + worktree_repository_statuses::Column::ScanId + .gt(rejoined_worktree.scan_id) + } else { + worktree_repository_statuses::Column::IsDeleted.eq(false) + }; + + let mut db_repository_statuses = + worktree_repository_statuses::Entity::find() + .filter( + Condition::all() + .add( + worktree_repository_statuses::Column::WorktreeId + .eq(worktree.id), + ) + .add( + worktree_repository_statuses::Column::WorkDirectoryId + .eq(repository.work_directory_id), + ) + .add(repository_status_entry_filter), + ) + .stream(&*tx) + .await?; + + while let Some(db_status_entry) = db_repository_statuses.next().await { + let db_status_entry = db_status_entry?; + if db_status_entry.is_deleted { + repository + .removed_repo_paths + 
.push(db_status_entry.repo_path); + } else { + repository.updated_statuses.push(proto::StatusEntry { + repo_path: db_status_entry.repo_path, + status: db_status_entry.status as i32, }); } } @@ -2395,6 +2440,68 @@ impl Database { ) .exec(&*tx) .await?; + + for repository in update.updated_repositories.iter() { + if !repository.updated_statuses.is_empty() { + worktree_repository_statuses::Entity::insert_many( + repository.updated_statuses.iter().map(|status_entry| { + worktree_repository_statuses::ActiveModel { + project_id: ActiveValue::set(project_id), + worktree_id: ActiveValue::set(worktree_id), + work_directory_id: ActiveValue::set( + repository.work_directory_id as i64, + ), + repo_path: ActiveValue::set(status_entry.repo_path.clone()), + status: ActiveValue::set(status_entry.status as i64), + scan_id: ActiveValue::set(update.scan_id as i64), + is_deleted: ActiveValue::set(false), + } + }), + ) + .on_conflict( + OnConflict::columns([ + worktree_repository_statuses::Column::ProjectId, + worktree_repository_statuses::Column::WorktreeId, + worktree_repository_statuses::Column::WorkDirectoryId, + worktree_repository_statuses::Column::RepoPath, + ]) + .update_columns([ + worktree_repository_statuses::Column::ScanId, + worktree_repository_statuses::Column::Status, + worktree_repository_statuses::Column::IsDeleted, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + } + + if !repository.removed_repo_paths.is_empty() { + worktree_repository_statuses::Entity::update_many() + .filter( + worktree_repository_statuses::Column::ProjectId + .eq(project_id) + .and( + worktree_repository_statuses::Column::WorktreeId + .eq(worktree_id), + ) + .and( + worktree_repository_statuses::Column::WorkDirectoryId + .eq(repository.work_directory_id as i64), + ) + .and(worktree_repository_statuses::Column::RepoPath.is_in( + repository.removed_repo_paths.iter().map(String::as_str), + )), + ) + .set(worktree_repository_statuses::ActiveModel { + is_deleted: ActiveValue::Set(true), + scan_id: ActiveValue::Set(update.scan_id as i64), + ..Default::default() + }) + .exec(&*tx) + .await?; + } + } } if !update.removed_repositories.is_empty() { @@ -2645,10 +2752,42 @@ impl Database { if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64)) { - worktree.repository_entries.push(proto::RepositoryEntry { - work_directory_id: db_repository_entry.work_directory_id as u64, - branch: db_repository_entry.branch, - }); + worktree.repository_entries.insert( + db_repository_entry.work_directory_id as u64, + proto::RepositoryEntry { + work_directory_id: db_repository_entry.work_directory_id as u64, + branch: db_repository_entry.branch, + removed_repo_paths: Default::default(), + updated_statuses: Default::default(), + }, + ); + } + } + } + + { + let mut db_status_entries = worktree_repository_statuses::Entity::find() + .filter( + Condition::all() + .add(worktree_repository_statuses::Column::ProjectId.eq(project_id)) + .add(worktree_repository_statuses::Column::IsDeleted.eq(false)), + ) + .stream(&*tx) + .await?; + + while let Some(db_status_entry) = db_status_entries.next().await { + let db_status_entry = db_status_entry?; + if let Some(worktree) = worktrees.get_mut(&(db_status_entry.worktree_id as u64)) + { + if let Some(repository_entry) = worktree + .repository_entries + .get_mut(&(db_status_entry.work_directory_id as u64)) + { + repository_entry.updated_statuses.push(proto::StatusEntry { + repo_path: db_status_entry.repo_path, + status: db_status_entry.status as i32, + }); + } } } } @@ -3390,7 +3529,7 @@ 
pub struct Worktree { pub root_name: String, pub visible: bool, pub entries: Vec<proto::Entry>, - pub repository_entries: Vec<proto::RepositoryEntry>, + pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>, pub diagnostic_summaries: Vec<proto::DiagnosticSummary>, pub scan_id: u64, pub completed_scan_id: u64, } diff --git a/crates/collab/src/db/worktree_repository_statuses.rs b/crates/collab/src/db/worktree_repository_statuses.rs new file mode 100644 index 0000000000..fc15efc816 --- /dev/null +++ b/crates/collab/src/db/worktree_repository_statuses.rs @@ -0,0 +1,23 @@ +use super::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_repository_statuses")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub work_directory_id: i64, + #[sea_orm(primary_key)] + pub repo_path: String, + pub status: i64, + pub scan_id: i64, + pub is_deleted: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 23935904d3..001f3462d0 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1385,7 +1385,7 @@ async fn join_project( removed_entries: Default::default(), scan_id: worktree.scan_id, is_last_update: worktree.scan_id == worktree.completed_scan_id, - updated_repositories: worktree.repository_entries, + updated_repositories: worktree.repository_entries.into_values().collect(), removed_repositories: Default::default(), }; for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) { diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 5d1a915887..d0625066d5 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -10,7 +10,7 @@ use editor::{ ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToOffset, ToggleCodeActions, Undo, }; -use fs::{FakeFs, Fs as _, LineEnding, RemoveOptions}; +use fs::{repository::GitFileStatus, FakeFs, Fs as _, LineEnding, RemoveOptions}; use futures::StreamExt as _; use gpui::{ executor::Deterministic, geometry::vector::vec2f, test::EmptyView, AppContext, ModelHandle, @@ -2693,6 +2693,154 @@ async fn test_git_branch_name( }); } +#[gpui::test] +async fn test_git_status_sync( + deterministic: Arc<Deterministic>, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + deterministic.forbid_parking(); + let mut server = TestServer::start(&deterministic).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs + .insert_tree( + "/dir", + json!({ + ".git": {}, + "a.txt": "a", + "b.txt": "b", + }), + ) + .await; + + const A_TXT: &'static str = "a.txt"; + const B_TXT: &'static str = "b.txt"; + + client_a + .fs + .as_fake() + .set_status_for_repo( + Path::new("/dir/.git"), + &[ + (&Path::new(A_TXT), GitFileStatus::Added), + (&Path::new(B_TXT), GitFileStatus::Added), + ], + ) + .await; + + let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| { 
call.share_project(project_local.clone(), cx) + }) + .await + .unwrap(); + + let project_remote = client_b.build_remote_project(project_id, cx_b).await; + + // Wait for it to catch up to the new status + deterministic.run_until_parked(); + + #[track_caller] + fn assert_status( + file: &impl AsRef, + status: Option, + project: &Project, + cx: &AppContext, + ) { + let file = file.as_ref(); + let worktrees = project.visible_worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + let worktree = worktrees[0].clone(); + let snapshot = worktree.read(cx).snapshot(); + let root_entry = snapshot.root_git_entry().unwrap(); + assert_eq!(root_entry.status_for_file(&snapshot, file), status); + } + + // Smoke test status reading + project_local.read_with(cx_a, |project, cx| { + assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx); + assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx); + }); + project_remote.read_with(cx_b, |project, cx| { + assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx); + assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx); + }); + + client_a + .fs + .as_fake() + .set_status_for_repo( + Path::new("/dir/.git"), + &[ + (&Path::new(A_TXT), GitFileStatus::Modified), + (&Path::new(B_TXT), GitFileStatus::Modified), + ], + ) + .await; + + // Wait for buffer_local_a to receive it + deterministic.run_until_parked(); + + // Smoke test status reading + project_local.read_with(cx_a, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); + project_remote.read_with(cx_b, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); + + // And synchronization while joining + let project_remote_c = client_c.build_remote_project(project_id, cx_c).await; + deterministic.run_until_parked(); + + project_remote_c.read_with(cx_c, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_fs_operations( deterministic: Arc, diff --git a/crates/collab/src/tests/randomized_integration_tests.rs b/crates/collab/src/tests/randomized_integration_tests.rs index 3a87ced2e1..fb0645b147 100644 --- a/crates/collab/src/tests/randomized_integration_tests.rs +++ b/crates/collab/src/tests/randomized_integration_tests.rs @@ -8,12 +8,13 @@ use call::ActiveCall; use client::RECEIVE_TIMEOUT; use collections::BTreeMap; use editor::Bias; -use fs::{FakeFs, Fs as _}; +use fs::{repository::GitFileStatus, FakeFs, Fs as _}; use futures::StreamExt as _; use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext}; use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16}; use lsp::FakeLanguageServer; use parking_lot::Mutex; +use pretty_assertions::assert_eq; use project::{search::SearchQuery, Project, ProjectPath}; use rand::{ distributions::{Alphanumeric, DistString}, @@ -766,53 +767,85 @@ async fn apply_client_operation( } } - ClientOperation::WriteGitIndex { - repo_path, - contents, - } => { - if !client.fs.directories().contains(&repo_path) { - return Err(TestError::Inapplicable); - } - - log::info!( - "{}: 
writing git index for repo {:?}: {:?}", - client.username, + ClientOperation::GitOperation { operation } => match operation { + GitOperation::WriteGitIndex { repo_path, - contents - ); + contents, + } => { + if !client.fs.directories().contains(&repo_path) { + return Err(TestError::Inapplicable); + } - let dot_git_dir = repo_path.join(".git"); - let contents = contents - .iter() - .map(|(path, contents)| (path.as_path(), contents.clone())) - .collect::>(); - if client.fs.metadata(&dot_git_dir).await?.is_none() { - client.fs.create_dir(&dot_git_dir).await?; + log::info!( + "{}: writing git index for repo {:?}: {:?}", + client.username, + repo_path, + contents + ); + + let dot_git_dir = repo_path.join(".git"); + let contents = contents + .iter() + .map(|(path, contents)| (path.as_path(), contents.clone())) + .collect::>(); + if client.fs.metadata(&dot_git_dir).await?.is_none() { + client.fs.create_dir(&dot_git_dir).await?; + } + client.fs.set_index_for_repo(&dot_git_dir, &contents).await; } - client.fs.set_index_for_repo(&dot_git_dir, &contents).await; - } - - ClientOperation::WriteGitBranch { - repo_path, - new_branch, - } => { - if !client.fs.directories().contains(&repo_path) { - return Err(TestError::Inapplicable); - } - - log::info!( - "{}: writing git branch for repo {:?}: {:?}", - client.username, + GitOperation::WriteGitBranch { repo_path, - new_branch - ); + new_branch, + } => { + if !client.fs.directories().contains(&repo_path) { + return Err(TestError::Inapplicable); + } - let dot_git_dir = repo_path.join(".git"); - if client.fs.metadata(&dot_git_dir).await?.is_none() { - client.fs.create_dir(&dot_git_dir).await?; + log::info!( + "{}: writing git branch for repo {:?}: {:?}", + client.username, + repo_path, + new_branch + ); + + let dot_git_dir = repo_path.join(".git"); + if client.fs.metadata(&dot_git_dir).await?.is_none() { + client.fs.create_dir(&dot_git_dir).await?; + } + client.fs.set_branch_name(&dot_git_dir, new_branch).await; } - client.fs.set_branch_name(&dot_git_dir, new_branch).await; - } + GitOperation::WriteGitStatuses { + repo_path, + statuses, + } => { + if !client.fs.directories().contains(&repo_path) { + return Err(TestError::Inapplicable); + } + + log::info!( + "{}: writing git statuses for repo {:?}: {:?}", + client.username, + repo_path, + statuses + ); + + let dot_git_dir = repo_path.join(".git"); + + let statuses = statuses + .iter() + .map(|(path, val)| (path.as_path(), val.clone())) + .collect::>(); + + if client.fs.metadata(&dot_git_dir).await?.is_none() { + client.fs.create_dir(&dot_git_dir).await?; + } + + client + .fs + .set_status_for_repo(&dot_git_dir, statuses.as_slice()) + .await; + } + }, } Ok(()) } @@ -1181,6 +1214,13 @@ enum ClientOperation { is_dir: bool, content: String, }, + GitOperation { + operation: GitOperation, + }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum GitOperation { WriteGitIndex { repo_path: PathBuf, contents: Vec<(PathBuf, String)>, @@ -1189,6 +1229,10 @@ enum ClientOperation { repo_path: PathBuf, new_branch: Option, }, + WriteGitStatuses { + repo_path: PathBuf, + statuses: Vec<(PathBuf, GitFileStatus)>, + }, } #[derive(Clone, Debug, Serialize, Deserialize)] @@ -1701,57 +1745,10 @@ impl TestPlan { } } - // Update a git index - 91..=93 => { - let repo_path = client - .fs - .directories() - .into_iter() - .choose(&mut self.rng) - .unwrap() - .clone(); - - let mut file_paths = client - .fs - .files() - .into_iter() - .filter(|path| path.starts_with(&repo_path)) - .collect::>(); - let count = 
self.rng.gen_range(0..=file_paths.len()); - file_paths.shuffle(&mut self.rng); - file_paths.truncate(count); - - let mut contents = Vec::new(); - for abs_child_file_path in &file_paths { - let child_file_path = abs_child_file_path - .strip_prefix(&repo_path) - .unwrap() - .to_path_buf(); - let new_base = Alphanumeric.sample_string(&mut self.rng, 16); - contents.push((child_file_path, new_base)); - } - - break ClientOperation::WriteGitIndex { - repo_path, - contents, - }; - } - - // Update a git branch - 94..=95 => { - let repo_path = client - .fs - .directories() - .choose(&mut self.rng) - .unwrap() - .clone(); - - let new_branch = (self.rng.gen_range(0..10) > 3) - .then(|| Alphanumeric.sample_string(&mut self.rng, 8)); - - break ClientOperation::WriteGitBranch { - repo_path, - new_branch, + // Update a git related action + 91..=95 => { + break ClientOperation::GitOperation { + operation: self.generate_git_operation(client), }; } @@ -1789,6 +1786,86 @@ impl TestPlan { }) } + fn generate_git_operation(&mut self, client: &TestClient) -> GitOperation { + fn generate_file_paths( + repo_path: &Path, + rng: &mut StdRng, + client: &TestClient, + ) -> Vec { + let mut paths = client + .fs + .files() + .into_iter() + .filter(|path| path.starts_with(repo_path)) + .collect::>(); + + let count = rng.gen_range(0..=paths.len()); + paths.shuffle(rng); + paths.truncate(count); + + paths + .iter() + .map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf()) + .collect::>() + } + + let repo_path = client + .fs + .directories() + .choose(&mut self.rng) + .unwrap() + .clone(); + + match self.rng.gen_range(0..100_u32) { + 0..=25 => { + let file_paths = generate_file_paths(&repo_path, &mut self.rng, client); + + let contents = file_paths + .into_iter() + .map(|path| (path, Alphanumeric.sample_string(&mut self.rng, 16))) + .collect(); + + GitOperation::WriteGitIndex { + repo_path, + contents, + } + } + 26..=63 => { + let new_branch = (self.rng.gen_range(0..10) > 3) + .then(|| Alphanumeric.sample_string(&mut self.rng, 8)); + + GitOperation::WriteGitBranch { + repo_path, + new_branch, + } + } + 64..=100 => { + let file_paths = generate_file_paths(&repo_path, &mut self.rng, client); + + let statuses = file_paths + .into_iter() + .map(|paths| { + ( + paths, + match self.rng.gen_range(0..3_u32) { + 0 => GitFileStatus::Added, + 1 => GitFileStatus::Modified, + 2 => GitFileStatus::Conflict, + _ => unreachable!(), + }, + ) + }) + .collect::>(); + + GitOperation::WriteGitStatuses { + repo_path, + statuses, + } + } + _ => unreachable!(), + } + } + fn next_root_dir_name(&mut self, user_id: UserId) -> String { let user_ix = self .users diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index b966348cd6..bb3727585b 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -259,7 +259,7 @@ impl RegisteredBuffer { #[derive(Debug)] pub struct Completion { - uuid: String, + pub uuid: String, pub range: Range, pub text: String, } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index f7746496aa..4ce1303ba4 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1499,6 +1499,7 @@ mod tests { cx.set_global(Settings::test(cx)); cx.set_global(SettingsStore::test(cx)); language::init(cx); + client::init_settings(cx); workspace::init_settings(cx); }); } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 791f67be29..915e041100 100644 --- a/crates/editor/src/editor.rs +++ 
b/crates/editor/src/editor.rs @@ -1256,6 +1256,16 @@ impl Editor { let soft_wrap_mode_override = (mode == EditorMode::SingleLine).then(|| language_settings::SoftWrap::None); + + let mut project_subscription = None; + if mode == EditorMode::Full && buffer.read(cx).is_singleton() { + if let Some(project) = project.as_ref() { + project_subscription = Some(cx.observe(project, |_, _, cx| { + cx.emit(Event::TitleChanged); + })) + } + } + let mut this = Self { handle: cx.weak_handle(), buffer: buffer.clone(), @@ -1312,6 +1322,11 @@ impl Editor { cx.observe_global::(Self::settings_changed), ], }; + + if let Some(project_subscription) = project_subscription { + this._subscriptions.push(project_subscription); + } + this.end_selection(cx); this.scroll_manager.show_scrollbar(cx); @@ -1323,7 +1338,7 @@ impl Editor { cx.set_global(ScrollbarAutoHide(should_auto_hide_scrollbars)); } - this.report_editor_event("open", cx); + this.report_editor_event("open", None, cx); this } @@ -3090,6 +3105,8 @@ impl Editor { copilot .update(cx, |copilot, cx| copilot.accept_completion(completion, cx)) .detach_and_log_err(cx); + + self.report_copilot_event(Some(completion.uuid.clone()), true, cx) } self.insert_with_autoindent_mode(&suggestion.text.to_string(), None, cx); cx.notify(); @@ -3107,6 +3124,8 @@ impl Editor { copilot.discard_completions(&self.copilot_state.completions, cx) }) .detach_and_log_err(cx); + + self.report_copilot_event(None, false, cx) } self.display_map @@ -6853,48 +6872,88 @@ impl Editor { .collect() } - fn report_editor_event(&self, name: &'static str, cx: &AppContext) { - if let Some((project, file)) = self.project.as_ref().zip( - self.buffer - .read(cx) - .as_singleton() - .and_then(|b| b.read(cx).file()), - ) { - let vim_mode = cx - .global::() - .untyped_user_settings() - .get("vim_mode") - == Some(&serde_json::Value::Bool(true)); - let telemetry_settings = *settings::get_setting::(None, cx); - let copilot_enabled = all_language_settings(None, cx).copilot_enabled(None, None); - let copilot_enabled_for_language = self - .buffer - .read(cx) - .settings_at(0, cx) - .show_copilot_suggestions; + fn report_copilot_event( + &self, + suggestion_id: Option, + suggestion_accepted: bool, + cx: &AppContext, + ) { + let Some(project) = &self.project else { + return + }; - let extension = Path::new(file.file_name(cx)) - .extension() - .and_then(|e| e.to_str()); - let telemetry = project.read(cx).client().telemetry().clone(); - telemetry.report_mixpanel_event( - match name { - "open" => "open editor", - "save" => "save editor", - _ => name, - }, - json!({ "File Extension": extension, "Vim Mode": vim_mode, "In Clickhouse": true }), - telemetry_settings, - ); - let event = ClickhouseEvent::Editor { - file_extension: extension.map(ToString::to_string), - vim_mode, - operation: name, - copilot_enabled, - copilot_enabled_for_language, - }; - telemetry.report_clickhouse_event(event, telemetry_settings) - } + // If None, we are either getting suggestions in a new, unsaved file, or in a file without an extension + let file_extension = self + .buffer + .read(cx) + .as_singleton() + .and_then(|b| b.read(cx).file()) + .and_then(|file| Path::new(file.file_name(cx)).extension()) + .and_then(|e| e.to_str()) + .map(|a| a.to_string()); + + let telemetry = project.read(cx).client().telemetry().clone(); + let telemetry_settings = *settings::get_setting::(None, cx); + + let event = ClickhouseEvent::Copilot { + suggestion_id, + suggestion_accepted, + file_extension, + }; + telemetry.report_clickhouse_event(event, 
telemetry_settings); + } + + fn report_editor_event( + &self, + name: &'static str, + file_extension: Option, + cx: &AppContext, + ) { + let Some(project) = &self.project else { + return + }; + + // If None, we are in a file without an extension + let file_extension = file_extension.or(self + .buffer + .read(cx) + .as_singleton() + .and_then(|b| b.read(cx).file()) + .and_then(|file| Path::new(file.file_name(cx)).extension()) + .and_then(|e| e.to_str()) + .map(|a| a.to_string())); + + let vim_mode = cx + .global::() + .untyped_user_settings() + .get("vim_mode") + == Some(&serde_json::Value::Bool(true)); + let telemetry_settings = *settings::get_setting::(None, cx); + let copilot_enabled = all_language_settings(None, cx).copilot_enabled(None, None); + let copilot_enabled_for_language = self + .buffer + .read(cx) + .settings_at(0, cx) + .show_copilot_suggestions; + + let telemetry = project.read(cx).client().telemetry().clone(); + telemetry.report_mixpanel_event( + match name { + "open" => "open editor", + "save" => "save editor", + _ => name, + }, + json!({ "File Extension": file_extension, "Vim Mode": vim_mode, "In Clickhouse": true }), + telemetry_settings, + ); + let event = ClickhouseEvent::Editor { + file_extension, + vim_mode, + operation: name, + copilot_enabled, + copilot_enabled_for_language, + }; + telemetry.report_clickhouse_event(event, telemetry_settings) } /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index cce10eb05e..cf72f0286c 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6682,10 +6682,11 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC cx.update(|cx| { cx.set_global(SettingsStore::test(cx)); cx.set_global(Settings::test(cx)); + client::init_settings(cx); language::init(cx); - crate::init(cx); Project::init_settings(cx); workspace::init_settings(cx); + crate::init(cx); }); update_test_settings(cx, f); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 89626c3ea4..29d32e4a43 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1013,8 +1013,7 @@ mod tests { .zip(expected_styles.iter().cloned()) .collect::>(); assert_eq!( - rendered.text, - dbg!(expected_text), + rendered.text, expected_text, "wrong text for input {blocks:?}" ); assert_eq!( diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index e971af943a..9e122cc63d 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -27,7 +27,7 @@ use std::{ path::{Path, PathBuf}, }; use text::Selection; -use util::{ResultExt, TryFutureExt}; +use util::{paths::FILE_ROW_COLUMN_DELIMITER, ResultExt, TryFutureExt}; use workspace::item::{BreadcrumbText, FollowableItemHandle}; use workspace::{ item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem}, @@ -566,7 +566,7 @@ impl Item for Editor { cx: &AppContext, ) -> AnyElement { Flex::row() - .with_child(Label::new(self.title(cx).to_string(), style.label.clone()).aligned()) + .with_child(Label::new(self.title(cx).to_string(), style.label.clone()).into_any()) .with_children(detail.and_then(|detail| { let path = path_for_buffer(&self.buffer, detail, false, cx)?; let description = path.to_string_lossy(); @@ -580,6 +580,7 @@ impl Item for Editor { .aligned(), ) })) + .align_children_center() .into_any() } @@ -636,7 +637,7 @@ impl Item for Editor { project: ModelHandle, 
cx: &mut ViewContext<Self>, ) -> Task<Result<()>> { - self.report_editor_event("save", cx); + self.report_editor_event("save", None, cx); let format = self.perform_format(project.clone(), FormatTrigger::Save, cx); let buffers = self.buffer().clone().read(cx).all_buffers(); cx.spawn(|_, mut cx| async move { @@ -685,6 +686,11 @@ impl Item for Editor { .as_singleton() .expect("cannot call save_as on an excerpt list"); + let file_extension = abs_path + .extension() + .map(|a| a.to_string_lossy().to_string()); + self.report_editor_event("save", file_extension, cx); + project.update(cx, |project, cx| { project.save_buffer_as(buffer, abs_path, cx) }) @@ -1111,7 +1117,11 @@ impl View for CursorPosition { fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> { if let Some(position) = self.position { let theme = &cx.global::<Settings>().theme.workspace.status_bar; - let mut text = format!("{},{}", position.row + 1, position.column + 1); + let mut text = format!( + "{}{FILE_ROW_COLUMN_DELIMITER}{}", + position.row + 1, + position.column + 1 + ); if self.selected_count > 0 { write!(text, " ({} selected)", self.selected_count).unwrap(); } diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 024054c005..0349d26408 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -16,6 +16,7 @@ menu = { path = "../menu" } picker = { path = "../picker" } project = { path = "../project" } settings = { path = "../settings" } +text = { path = "../text" } util = { path = "../util" } theme = { path = "../theme" } workspace = { path = "../workspace" } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index ad865ec2f7..37ab4218a8 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1,3 +1,4 @@ +use editor::{scroll::autoscroll::Autoscroll, Bias, Editor}; use fuzzy::PathMatch; use gpui::{ actions, elements::*, AppContext, ModelHandle, MouseState, Task, ViewContext, WeakViewHandle, @@ -12,7 +13,8 @@ use std::{ Arc, }, }; -use util::{post_inc, ResultExt}; +use text::Point; +use util::{paths::PathLikeWithPosition, post_inc, ResultExt}; use workspace::Workspace; pub type FileFinder = Picker<FileFinderDelegate>; @@ -23,7 +25,7 @@ pub struct FileFinderDelegate { search_count: usize, latest_search_id: usize, latest_search_did_cancel: bool, - latest_search_query: String, + latest_search_query: Option<PathLikeWithPosition<FileSearchQuery>>, relative_to: Option<Arc<Path>>, matches: Vec<PathMatch>, selected: Option<(usize, Arc<Path>)>, @@ -60,6 +62,21 @@ pub enum Event { Dismissed, } +#[derive(Debug, Clone)] +struct FileSearchQuery { + raw_query: String, + file_query_end: Option<usize>, +} + +impl FileSearchQuery { + fn path_query(&self) -> &str { + match self.file_query_end { + Some(file_path_end) => &self.raw_query[..file_path_end], + None => &self.raw_query, + } + } +} + impl FileFinderDelegate { fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) { let path = &path_match.path; @@ -103,7 +120,7 @@ impl FileFinderDelegate { search_count: 0, latest_search_id: 0, latest_search_did_cancel: false, - latest_search_query: String::new(), + latest_search_query: None, relative_to, matches: Vec::new(), selected: None, @@ -111,7 +128,11 @@ } } - fn spawn_search(&mut self, query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> { + fn spawn_search( + &mut self, + query: PathLikeWithPosition<FileSearchQuery>, + cx: &mut ViewContext<FileFinder>, + ) -> Task<()> { let relative_to = self.relative_to.clone(); let worktrees = self .project @@ -140,7 +161,7 @@ impl FileFinderDelegate { cx.spawn(|picker, mut 
cx| async move { let matches = fuzzy::match_path_sets( candidate_sets.as_slice(), - &query, + query.path_like.path_query(), relative_to, false, 100, @@ -163,18 +184,24 @@ impl FileFinderDelegate { &mut self, search_id: usize, did_cancel: bool, - query: String, + query: PathLikeWithPosition, matches: Vec, cx: &mut ViewContext, ) { if search_id >= self.latest_search_id { self.latest_search_id = search_id; - if self.latest_search_did_cancel && query == self.latest_search_query { + if self.latest_search_did_cancel + && Some(query.path_like.path_query()) + == self + .latest_search_query + .as_ref() + .map(|query| query.path_like.path_query()) + { util::extend_sorted(&mut self.matches, matches.into_iter(), 100, |a, b| b.cmp(a)); } else { self.matches = matches; } - self.latest_search_query = query; + self.latest_search_query = Some(query); self.latest_search_did_cancel = did_cancel; cx.notify(); } @@ -209,13 +236,25 @@ impl PickerDelegate for FileFinderDelegate { cx.notify(); } - fn update_matches(&mut self, query: String, cx: &mut ViewContext) -> Task<()> { - if query.is_empty() { + fn update_matches(&mut self, raw_query: String, cx: &mut ViewContext) -> Task<()> { + if raw_query.is_empty() { self.latest_search_id = post_inc(&mut self.search_count); self.matches.clear(); cx.notify(); Task::ready(()) } else { + let raw_query = &raw_query; + let query = PathLikeWithPosition::parse_str(raw_query, |path_like_str| { + Ok::<_, std::convert::Infallible>(FileSearchQuery { + raw_query: raw_query.to_owned(), + file_query_end: if path_like_str == raw_query { + None + } else { + Some(path_like_str.len()) + }, + }) + }) + .expect("infallible"); self.spawn_search(query, cx) } } @@ -228,12 +267,49 @@ impl PickerDelegate for FileFinderDelegate { path: m.path.clone(), }; - workspace.update(cx, |workspace, cx| { + let open_task = workspace.update(cx, |workspace, cx| { + workspace.open_path(project_path.clone(), None, true, cx) + }); + + let workspace = workspace.downgrade(); + + let row = self + .latest_search_query + .as_ref() + .and_then(|query| query.row) + .map(|row| row.saturating_sub(1)); + let col = self + .latest_search_query + .as_ref() + .and_then(|query| query.column) + .unwrap_or(0) + .saturating_sub(1); + cx.spawn(|_, mut cx| async move { + let item = open_task.await.log_err()?; + if let Some(row) = row { + if let Some(active_editor) = item.downcast::() { + active_editor + .downgrade() + .update(&mut cx, |editor, cx| { + let snapshot = editor.snapshot(cx).display_snapshot; + let point = snapshot + .buffer_snapshot + .clip_point(Point::new(row, col), Bias::Left); + editor.change_selections(Some(Autoscroll::center()), cx, |s| { + s.select_ranges([point..point]) + }); + }) + .log_err(); + } + } + workspace - .open_path(project_path.clone(), None, true, cx) - .detach_and_log_err(cx); - workspace.dismiss_modal(cx); + .update(&mut cx, |workspace, cx| workspace.dismiss_modal(cx)) + .log_err(); + + Some(()) }) + .detach(); } } } @@ -268,6 +344,8 @@ impl PickerDelegate for FileFinderDelegate { #[cfg(test)] mod tests { + use std::time::Duration; + use super::*; use editor::Editor; use gpui::TestAppContext; @@ -283,7 +361,7 @@ mod tests { } #[gpui::test] - async fn test_matching_paths(cx: &mut gpui::TestAppContext) { + async fn test_matching_paths(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -334,7 +412,173 @@ mod tests { } #[gpui::test] - async fn test_matching_cancellation(cx: &mut gpui::TestAppContext) { + async fn test_row_column_numbers_query_inside_file(cx: &mut 
TestAppContext) { + let app_state = init_test(cx); + + let first_file_name = "first.rs"; + let first_file_contents = "// First Rust file"; + app_state + .fs + .as_fake() + .insert_tree( + "/src", + json!({ + "test": { + first_file_name: first_file_contents, + "second.rs": "// Second Rust file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx)); + cx.dispatch_action(window_id, Toggle); + let finder = cx.read(|cx| workspace.read(cx).modal::().unwrap()); + + let file_query = &first_file_name[..3]; + let file_row = 1; + let file_column = 3; + assert!(file_column <= first_file_contents.len()); + let query_inside_file = format!("{file_query}:{file_row}:{file_column}"); + finder + .update(cx, |finder, cx| { + finder + .delegate_mut() + .update_matches(query_inside_file.to_string(), cx) + }) + .await; + finder.read_with(cx, |finder, _| { + let finder = finder.delegate(); + assert_eq!(finder.matches.len(), 1); + let latest_search_query = finder + .latest_search_query + .as_ref() + .expect("Finder should have a query after the update_matches call"); + assert_eq!(latest_search_query.path_like.raw_query, query_inside_file); + assert_eq!( + latest_search_query.path_like.file_query_end, + Some(file_query.len()) + ); + assert_eq!(latest_search_query.row, Some(file_row)); + assert_eq!(latest_search_query.column, Some(file_column as u32)); + }); + + let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone()); + cx.dispatch_action(window_id, SelectNext); + cx.dispatch_action(window_id, Confirm); + active_pane + .condition(cx, |pane, _| pane.active_item().is_some()) + .await; + let editor = cx.update(|cx| { + let active_item = active_pane.read(cx).active_item().unwrap(); + active_item.downcast::().unwrap() + }); + cx.foreground().advance_clock(Duration::from_secs(2)); + cx.foreground().start_waiting(); + cx.foreground().finish_waiting(); + editor.update(cx, |editor, cx| { + let all_selections = editor.selections.all_adjusted(cx); + assert_eq!( + all_selections.len(), + 1, + "Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}" + ); + let caret_selection = all_selections.into_iter().next().unwrap(); + assert_eq!(caret_selection.start, caret_selection.end, + "Caret selection should have its start and end at the same position"); + assert_eq!(file_row, caret_selection.start.row + 1, + "Query inside file should get caret with the same focus row"); + assert_eq!(file_column, caret_selection.start.column as usize + 1, + "Query inside file should get caret with the same focus column"); + }); + } + + #[gpui::test] + async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + let first_file_name = "first.rs"; + let first_file_contents = "// First Rust file"; + app_state + .fs + .as_fake() + .insert_tree( + "/src", + json!({ + "test": { + first_file_name: first_file_contents, + "second.rs": "// Second Rust file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx)); + cx.dispatch_action(window_id, Toggle); + let finder = cx.read(|cx| workspace.read(cx).modal::().unwrap()); + + let file_query = &first_file_name[..3]; + let file_row = 200; + let file_column = 300; + assert!(file_column > first_file_contents.len()); + let query_outside_file 
= format!("{file_query}:{file_row}:{file_column}"); + finder + .update(cx, |finder, cx| { + finder + .delegate_mut() + .update_matches(query_outside_file.to_string(), cx) + }) + .await; + finder.read_with(cx, |finder, _| { + let finder = finder.delegate(); + assert_eq!(finder.matches.len(), 1); + let latest_search_query = finder + .latest_search_query + .as_ref() + .expect("Finder should have a query after the update_matches call"); + assert_eq!(latest_search_query.path_like.raw_query, query_outside_file); + assert_eq!( + latest_search_query.path_like.file_query_end, + Some(file_query.len()) + ); + assert_eq!(latest_search_query.row, Some(file_row)); + assert_eq!(latest_search_query.column, Some(file_column as u32)); + }); + + let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone()); + cx.dispatch_action(window_id, SelectNext); + cx.dispatch_action(window_id, Confirm); + active_pane + .condition(cx, |pane, _| pane.active_item().is_some()) + .await; + let editor = cx.update(|cx| { + let active_item = active_pane.read(cx).active_item().unwrap(); + active_item.downcast::().unwrap() + }); + cx.foreground().advance_clock(Duration::from_secs(2)); + cx.foreground().start_waiting(); + cx.foreground().finish_waiting(); + editor.update(cx, |editor, cx| { + let all_selections = editor.selections.all_adjusted(cx); + assert_eq!( + all_selections.len(), + 1, + "Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}" + ); + let caret_selection = all_selections.into_iter().next().unwrap(); + assert_eq!(caret_selection.start, caret_selection.end, + "Caret selection should have its start and end at the same position"); + assert_eq!(0, caret_selection.start.row, + "Excessive rows (as in query outside file borders) should get trimmed to last file row"); + assert_eq!(first_file_contents.len(), caret_selection.start.column as usize, + "Excessive columns (as in query outside file borders) should get trimmed to selected row's last column"); + }); + } + + #[gpui::test] + async fn test_matching_cancellation(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -367,7 +611,7 @@ mod tests { ) }); - let query = "hi".to_string(); + let query = test_path_like("hi"); finder .update(cx, |f, cx| f.delegate_mut().spawn_search(query.clone(), cx)) .await; @@ -403,7 +647,7 @@ mod tests { } #[gpui::test] - async fn test_ignored_files(cx: &mut gpui::TestAppContext) { + async fn test_ignored_files(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -451,13 +695,15 @@ mod tests { ) }); finder - .update(cx, |f, cx| f.delegate_mut().spawn_search("hi".into(), cx)) + .update(cx, |f, cx| { + f.delegate_mut().spawn_search(test_path_like("hi"), cx) + }) .await; finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 7)); } #[gpui::test] - async fn test_single_file_worktrees(cx: &mut gpui::TestAppContext) { + async fn test_single_file_worktrees(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -487,7 +733,9 @@ mod tests { // Even though there is only one worktree, that worktree's filename // is included in the matching, because the worktree is a single file. 
finder - .update(cx, |f, cx| f.delegate_mut().spawn_search("thf".into(), cx)) + .update(cx, |f, cx| { + f.delegate_mut().spawn_search(test_path_like("thf"), cx) + }) .await; cx.read(|cx| { let finder = finder.read(cx); @@ -505,13 +753,15 @@ mod tests { // Since the worktree root is a file, searching for its name followed by a slash does // not match anything. finder - .update(cx, |f, cx| f.delegate_mut().spawn_search("thf/".into(), cx)) + .update(cx, |f, cx| { + f.delegate_mut().spawn_search(test_path_like("thf/"), cx) + }) .await; finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 0)); } #[gpui::test] - async fn test_multiple_matches_with_same_relative_path(cx: &mut gpui::TestAppContext) { + async fn test_multiple_matches_with_same_relative_path(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -547,7 +797,9 @@ mod tests { // Run a search that matches two files with the same relative path. finder - .update(cx, |f, cx| f.delegate_mut().spawn_search("a.t".into(), cx)) + .update(cx, |f, cx| { + f.delegate_mut().spawn_search(test_path_like("a.t"), cx) + }) .await; // Can switch between different matches with the same relative path. @@ -563,7 +815,7 @@ mod tests { } #[gpui::test] - async fn test_path_distance_ordering(cx: &mut gpui::TestAppContext) { + async fn test_path_distance_ordering(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -601,7 +853,7 @@ mod tests { finder .update(cx, |f, cx| { - f.delegate_mut().spawn_search("a.txt".into(), cx) + f.delegate_mut().spawn_search(test_path_like("a.txt"), cx) }) .await; @@ -613,7 +865,7 @@ mod tests { } #[gpui::test] - async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) { + async fn test_search_worktree_without_files(cx: &mut TestAppContext) { let app_state = init_test(cx); app_state .fs @@ -643,7 +895,9 @@ mod tests { ) }); finder - .update(cx, |f, cx| f.delegate_mut().spawn_search("dir".into(), cx)) + .update(cx, |f, cx| { + f.delegate_mut().spawn_search(test_path_like("dir"), cx) + }) .await; cx.read(|cx| { let finder = finder.read(cx); @@ -662,4 +916,18 @@ mod tests { state }) } + + fn test_path_like(test_str: &str) -> PathLikeWithPosition { + PathLikeWithPosition::parse_str(test_str, |path_like_str| { + Ok::<_, std::convert::Infallible>(FileSearchQuery { + raw_query: test_str.to_owned(), + file_query_end: if path_like_str == test_str { + None + } else { + Some(path_like_str.len()) + }, + }) + }) + .unwrap() + } } diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index d080fe3cd1..54c6ce362a 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -13,6 +13,7 @@ gpui = { path = "../gpui" } lsp = { path = "../lsp" } rope = { path = "../rope" } util = { path = "../util" } +sum_tree = { path = "../sum_tree" } anyhow.workspace = true async-trait.workspace = true futures.workspace = true diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 945ffaea16..99562405b5 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -27,7 +27,7 @@ use util::ResultExt; #[cfg(any(test, feature = "test-support"))] use collections::{btree_map, BTreeMap}; #[cfg(any(test, feature = "test-support"))] -use repository::FakeGitRepositoryState; +use repository::{FakeGitRepositoryState, GitFileStatus}; #[cfg(any(test, feature = "test-support"))] use std::sync::Weak; @@ -572,15 +572,15 @@ impl FakeFs { Ok(()) } - pub async fn pause_events(&self) { + pub fn pause_events(&self) { self.state.lock().events_paused = true; } - pub async fn buffered_event_count(&self) 
-> usize { + pub fn buffered_event_count(&self) -> usize { self.state.lock().buffered_events.len() } - pub async fn flush_events(&self, count: usize) { + pub fn flush_events(&self, count: usize) { self.state.lock().flush_events(count); } @@ -654,6 +654,17 @@ impl FakeFs { }); } + pub async fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, GitFileStatus)]) { + self.with_git_state(dot_git, |state| { + state.worktree_statuses.clear(); + state.worktree_statuses.extend( + statuses + .iter() + .map(|(path, content)| ((**path).into(), content.clone())), + ); + }); + } + pub fn paths(&self) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); @@ -821,14 +832,16 @@ impl Fs for FakeFs { let old_path = normalize_path(old_path); let new_path = normalize_path(new_path); + let mut state = self.state.lock(); let moved_entry = state.write_path(&old_path, |e| { if let btree_map::Entry::Occupied(e) = e { - Ok(e.remove()) + Ok(e.get().clone()) } else { Err(anyhow!("path does not exist: {}", &old_path.display())) } })?; + state.write_path(&new_path, |e| { match e { btree_map::Entry::Occupied(mut e) => { @@ -844,6 +857,17 @@ impl Fs for FakeFs { } Ok(()) })?; + + state + .write_path(&old_path, |e| { + if let btree_map::Entry::Occupied(e) = e { + Ok(e.remove()) + } else { + unreachable!() + } + }) + .unwrap(); + state.emit_event(&[old_path, new_path]); Ok(()) } diff --git a/crates/fs/src/repository.rs b/crates/fs/src/repository.rs index 5624ce42f1..2c309351fc 100644 --- a/crates/fs/src/repository.rs +++ b/crates/fs/src/repository.rs @@ -1,10 +1,15 @@ use anyhow::Result; use collections::HashMap; use parking_lot::Mutex; +use serde_derive::{Deserialize, Serialize}; use std::{ + cmp::Ordering, + ffi::OsStr, + os::unix::prelude::OsStrExt, path::{Component, Path, PathBuf}, sync::Arc, }; +use sum_tree::{MapSeekTarget, TreeMap}; use util::ResultExt; pub use git2::Repository as LibGitRepository; @@ -16,6 +21,10 @@ pub trait GitRepository: Send { fn load_index_text(&self, relative_file_path: &Path) -> Option; fn branch_name(&self) -> Option; + + fn statuses(&self) -> Option>; + + fn status(&self, path: &RepoPath) -> Option; } impl std::fmt::Debug for dyn GitRepository { @@ -61,6 +70,48 @@ impl GitRepository for LibGitRepository { let branch = String::from_utf8_lossy(head.shorthand_bytes()); Some(branch.to_string()) } + + fn statuses(&self) -> Option> { + let statuses = self.statuses(None).log_err()?; + + let mut map = TreeMap::default(); + + for status in statuses + .iter() + .filter(|status| !status.status().contains(git2::Status::IGNORED)) + { + let path = RepoPath(PathBuf::from(OsStr::from_bytes(status.path_bytes()))); + let Some(status) = read_status(status.status()) else { + continue + }; + + map.insert(path, status) + } + + Some(map) + } + + fn status(&self, path: &RepoPath) -> Option { + let status = self.status_file(path).log_err()?; + read_status(status) + } +} + +fn read_status(status: git2::Status) -> Option { + if status.contains(git2::Status::CONFLICTED) { + Some(GitFileStatus::Conflict) + } else if status.intersects( + git2::Status::WT_MODIFIED + | git2::Status::WT_RENAMED + | git2::Status::INDEX_MODIFIED + | git2::Status::INDEX_RENAMED, + ) { + Some(GitFileStatus::Modified) + } else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) { + Some(GitFileStatus::Added) + } else { + None + } } #[derive(Debug, Clone, Default)] @@ -71,6 +122,7 @@ pub struct FakeGitRepository { #[derive(Debug, Clone, Default)] pub struct FakeGitRepositoryState { pub 
index_contents: HashMap, + pub worktree_statuses: HashMap, pub branch_name: Option, } @@ -93,6 +145,20 @@ impl GitRepository for FakeGitRepository { let state = self.state.lock(); state.branch_name.clone() } + + fn statuses(&self) -> Option> { + let state = self.state.lock(); + let mut map = TreeMap::default(); + for (repo_path, status) in state.worktree_statuses.iter() { + map.insert(repo_path.to_owned(), status.to_owned()); + } + Some(map) + } + + fn status(&self, path: &RepoPath) -> Option { + let state = self.state.lock(); + state.worktree_statuses.get(path).cloned() + } } fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> { @@ -123,3 +189,66 @@ fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> { _ => Ok(()), } } + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum GitFileStatus { + Added, + Modified, + Conflict, +} + +#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)] +pub struct RepoPath(PathBuf); + +impl RepoPath { + pub fn new(path: PathBuf) -> Self { + debug_assert!(path.is_relative(), "Repo paths must be relative"); + + RepoPath(path) + } +} + +impl From<&Path> for RepoPath { + fn from(value: &Path) -> Self { + RepoPath::new(value.to_path_buf()) + } +} + +impl From for RepoPath { + fn from(value: PathBuf) -> Self { + RepoPath::new(value) + } +} + +impl Default for RepoPath { + fn default() -> Self { + RepoPath(PathBuf::new()) + } +} + +impl AsRef for RepoPath { + fn as_ref(&self) -> &Path { + self.0.as_ref() + } +} + +impl std::ops::Deref for RepoPath { + type Target = PathBuf; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[derive(Debug)] +pub struct RepoPathDescendants<'a>(pub &'a Path); + +impl<'a> MapSeekTarget for RepoPathDescendants<'a> { + fn cmp_cursor(&self, key: &RepoPath) -> Ordering { + if key.starts_with(&self.0) { + Ordering::Greater + } else { + self.0.cmp(key) + } + } +} diff --git a/crates/go_to_line/Cargo.toml b/crates/go_to_line/Cargo.toml index f279aca569..8f99aa366c 100644 --- a/crates/go_to_line/Cargo.toml +++ b/crates/go_to_line/Cargo.toml @@ -16,3 +16,4 @@ settings = { path = "../settings" } text = { path = "../text" } workspace = { path = "../workspace" } postage.workspace = true +util = { path = "../util" } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 90287e9270..967f17b794 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor}; +use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor}; use gpui::{ actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, AppContext, Axis, Entity, View, ViewContext, ViewHandle, @@ -8,6 +8,7 @@ use gpui::{ use menu::{Cancel, Confirm}; use settings::Settings; use text::{Bias, Point}; +use util::paths::FILE_ROW_COLUMN_DELIMITER; use workspace::{Modal, Workspace}; actions!(go_to_line, [Toggle]); @@ -75,15 +76,16 @@ impl GoToLine { fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext) { self.prev_scroll_position.take(); - self.active_editor.update(cx, |active_editor, cx| { - if let Some(rows) = active_editor.highlighted_rows() { + if let Some(point) = self.point_from_query(cx) { + self.active_editor.update(cx, |active_editor, cx| { let snapshot = active_editor.snapshot(cx).display_snapshot; - let position = DisplayPoint::new(rows.start, 0).to_point(&snapshot); + let point = 
snapshot.buffer_snapshot.clip_point(point, Bias::Left); active_editor.change_selections(Some(Autoscroll::center()), cx, |s| { - s.select_ranges([position..position]) + s.select_ranges([point..point]) }); - } - }); + }); + } + cx.emit(Event::Dismissed); } @@ -96,16 +98,7 @@ impl GoToLine { match event { editor::Event::Blurred => cx.emit(Event::Dismissed), editor::Event::BufferEdited { .. } => { - let line_editor = self.line_editor.read(cx).text(cx); - let mut components = line_editor.trim().split(&[',', ':'][..]); - let row = components.next().and_then(|row| row.parse::().ok()); - let column = components.next().and_then(|row| row.parse::().ok()); - if let Some(point) = row.map(|row| { - Point::new( - row.saturating_sub(1), - column.map(|column| column.saturating_sub(1)).unwrap_or(0), - ) - }) { + if let Some(point) = self.point_from_query(cx) { self.active_editor.update(cx, |active_editor, cx| { let snapshot = active_editor.snapshot(cx).display_snapshot; let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left); @@ -120,6 +113,20 @@ impl GoToLine { _ => {} } } + + fn point_from_query(&self, cx: &ViewContext) -> Option { + let line_editor = self.line_editor.read(cx).text(cx); + let mut components = line_editor + .splitn(2, FILE_ROW_COLUMN_DELIMITER) + .map(str::trim) + .fuse(); + let row = components.next().and_then(|row| row.parse::().ok())?; + let column = components.next().and_then(|col| col.parse::().ok()); + Some(Point::new( + row.saturating_sub(1), + column.unwrap_or(0).saturating_sub(1), + )) + } } impl Entity for GoToLine { @@ -147,7 +154,7 @@ impl View for GoToLine { let theme = &cx.global::().theme.picker; let label = format!( - "{},{} of {} lines", + "{}{FILE_ROW_COLUMN_DELIMITER}{} of {} lines", self.cursor_point.row + 1, self.cursor_point.column + 1, self.max_point.row + 1 diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 35c5010cdd..04862d1814 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -48,7 +48,7 @@ smallvec.workspace = true smol.workspace = true time.workspace = true tiny-skia = "0.5" -usvg = "0.14" +usvg = { version = "0.14", features = [] } uuid = { version = "1.1.2", features = ["v4"] } waker-fn = "1.1.0" diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index cc725776b9..b6c1e3aff9 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -42,7 +42,7 @@ impl Color { } pub fn yellow() -> Self { - Self(ColorU::from_u32(0x00ffffff)) + Self(ColorU::from_u32(0xffff00ff)) } pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self { diff --git a/crates/gpui/src/elements.rs b/crates/gpui/src/elements.rs index e2c4af143c..27b01a8db2 100644 --- a/crates/gpui/src/elements.rs +++ b/crates/gpui/src/elements.rs @@ -578,6 +578,15 @@ pub struct ComponentHost> { view_type: PhantomData, } +impl> ComponentHost { + pub fn new(c: C) -> Self { + Self { + component: c, + view_type: PhantomData, + } + } +} + impl> Deref for ComponentHost { type Target = C; diff --git a/crates/gpui/src/keymap_matcher/binding.rs b/crates/gpui/src/keymap_matcher/binding.rs index aa40e8c6af..4d8334128b 100644 --- a/crates/gpui/src/keymap_matcher/binding.rs +++ b/crates/gpui/src/keymap_matcher/binding.rs @@ -11,6 +11,19 @@ pub struct Binding { context_predicate: Option, } +impl std::fmt::Debug for Binding { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Binding {{ keystrokes: {:?}, action: {}::{}, context_predicate: {:?} }}", + self.keystrokes, + self.action.namespace(), + self.action.name(), + 
self.context_predicate + ) + } +} + impl Clone for Binding { fn clone(&self) -> Self { Self { diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index bcff08d005..50fcec52ec 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -755,7 +755,7 @@ impl platform::Window for Window { let _ = postage::sink::Sink::try_send(&mut done_tx, answer.try_into().unwrap()); } }); - + let block = block.copy(); let native_window = self.0.borrow().native_window; self.0 .borrow() diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 83af0afeb3..190f1d96a8 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -75,5 +75,6 @@ lsp = { path = "../lsp", features = ["test-support"] } settings = { path = "../settings", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } +git2 = { version = "0.15", default-features = false } tempdir.workspace = true unindent.workspace = true diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b8f55c1aa4..14359fa288 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -123,6 +123,8 @@ pub struct Project { loading_local_worktrees: HashMap, Shared, Arc>>>>, opened_buffers: HashMap, + local_buffer_ids_by_path: HashMap, + local_buffer_ids_by_entry_id: HashMap, /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it. /// Used for re-issuing buffer requests when peers temporarily disconnect incomplete_remote_buffers: HashMap>>, @@ -456,6 +458,8 @@ impl Project { incomplete_remote_buffers: Default::default(), loading_buffers_by_path: Default::default(), loading_local_worktrees: Default::default(), + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), buffer_snapshots: Default::default(), join_project_response_message_id: 0, client_state: None, @@ -526,6 +530,8 @@ impl Project { shared_buffers: Default::default(), incomplete_remote_buffers: Default::default(), loading_local_worktrees: Default::default(), + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), active_entry: None, collaborators: Default::default(), join_project_response_message_id: response.message_id, @@ -1643,6 +1649,21 @@ impl Project { }) .detach(); + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + if file.is_local { + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + remote_id, + ); + + self.local_buffer_ids_by_entry_id + .insert(file.entry_id, remote_id); + } + } + self.detect_language_for_buffer(buffer, cx); self.register_buffer_with_language_servers(buffer, cx); self.register_buffer_with_copilot(buffer, cx); @@ -4544,7 +4565,7 @@ impl Project { if worktree.read(cx).is_local() { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries(changes) => { - this.update_local_worktree_buffers(&worktree, cx); + this.update_local_worktree_buffers(&worktree, &changes, cx); this.update_local_worktree_language_servers(&worktree, changes, cx); } worktree::Event::UpdatedGitRepositories(updated_repos) => { @@ -4578,80 +4599,106 @@ impl Project { fn update_local_worktree_buffers( &mut self, worktree_handle: &ModelHandle, + changes: &HashMap<(Arc, ProjectEntryId), PathChange>, cx: &mut 
ModelContext, ) { let snapshot = worktree_handle.read(cx).snapshot(); - let mut buffers_to_delete = Vec::new(); let mut renamed_buffers = Vec::new(); + for (path, entry_id) in changes.keys() { + let worktree_id = worktree_handle.read(cx).id(); + let project_path = ProjectPath { + worktree_id, + path: path.clone(), + }; - for (buffer_id, buffer) in &self.opened_buffers { - if let Some(buffer) = buffer.upgrade(cx) { - buffer.update(cx, |buffer, cx| { - if let Some(old_file) = File::from_dyn(buffer.file()) { - if old_file.worktree != *worktree_handle { - return; - } + let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) { + Some(&buffer_id) => buffer_id, + None => match self.local_buffer_ids_by_path.get(&project_path) { + Some(&buffer_id) => buffer_id, + None => continue, + }, + }; - let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) - { - File { - is_local: true, - entry_id: entry.id, - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree_handle.clone(), - is_deleted: false, - } - } else if let Some(entry) = - snapshot.entry_for_path(old_file.path().as_ref()) - { - File { - is_local: true, - entry_id: entry.id, - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree_handle.clone(), - is_deleted: false, - } - } else { - File { - is_local: true, - entry_id: old_file.entry_id, - path: old_file.path().clone(), - mtime: old_file.mtime(), - worktree: worktree_handle.clone(), - is_deleted: true, - } - }; - - let old_path = old_file.abs_path(cx); - if new_file.abs_path(cx) != old_path { - renamed_buffers.push((cx.handle(), old_file.clone())); - } - - if new_file != *old_file { - if let Some(project_id) = self.remote_id() { - self.client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: *buffer_id as u64, - file: Some(new_file.to_proto()), - }) - .log_err(); - } - - buffer.file_updated(Arc::new(new_file), cx).detach(); - } - } - }); + let open_buffer = self.opened_buffers.get(&buffer_id); + let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) { + buffer } else { - buffers_to_delete.push(*buffer_id); - } - } + self.opened_buffers.remove(&buffer_id); + self.local_buffer_ids_by_path.remove(&project_path); + self.local_buffer_ids_by_entry_id.remove(entry_id); + continue; + }; - for buffer_id in buffers_to_delete { - self.opened_buffers.remove(&buffer_id); + buffer.update(cx, |buffer, cx| { + if let Some(old_file) = File::from_dyn(buffer.file()) { + if old_file.worktree != *worktree_handle { + return; + } + + let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) { + File { + is_local: true, + entry_id: entry.id, + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), + is_deleted: false, + } + } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) { + File { + is_local: true, + entry_id: entry.id, + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), + is_deleted: false, + } + } else { + File { + is_local: true, + entry_id: old_file.entry_id, + path: old_file.path().clone(), + mtime: old_file.mtime(), + worktree: worktree_handle.clone(), + is_deleted: true, + } + }; + + let old_path = old_file.abs_path(cx); + if new_file.abs_path(cx) != old_path { + renamed_buffers.push((cx.handle(), old_file.clone())); + self.local_buffer_ids_by_path.remove(&project_path); + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id, + path: path.clone(), + }, + buffer_id, + ); + } + + if 
new_file.entry_id != *entry_id { + self.local_buffer_ids_by_entry_id.remove(entry_id); + self.local_buffer_ids_by_entry_id + .insert(new_file.entry_id, buffer_id); + } + + if new_file != *old_file { + if let Some(project_id) = self.remote_id() { + self.client + .send(proto::UpdateBufferFile { + project_id, + buffer_id: buffer_id as u64, + file: Some(new_file.to_proto()), + }) + .log_err(); + } + + buffer.file_updated(Arc::new(new_file), cx).detach(); + } + } + }); } for (buffer, old_file) in renamed_buffers { @@ -4664,7 +4711,7 @@ impl Project { fn update_local_worktree_language_servers( &mut self, worktree_handle: &ModelHandle, - changes: &HashMap, PathChange>, + changes: &HashMap<(Arc, ProjectEntryId), PathChange>, cx: &mut ModelContext, ) { let worktree_id = worktree_handle.read(cx).id(); @@ -4681,7 +4728,7 @@ impl Project { let params = lsp::DidChangeWatchedFilesParams { changes: changes .iter() - .filter_map(|(path, change)| { + .filter_map(|((path, _), change)| { let path = abs_path.join(path); if watched_paths.matches(&path) { Some(lsp::FileEvent { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 554304f3d3..403d893425 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -6,7 +6,10 @@ use anyhow::{anyhow, Context, Result}; use client::{proto, Client}; use clock::ReplicaId; use collections::{HashMap, VecDeque}; -use fs::{repository::GitRepository, Fs, LineEnding}; +use fs::{ + repository::{GitFileStatus, GitRepository, RepoPath, RepoPathDescendants}, + Fs, LineEnding, +}; use futures::{ channel::{ mpsc::{self, UnboundedSender}, @@ -52,7 +55,7 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt, TryFutureExt}; +use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -117,10 +120,38 @@ pub struct Snapshot { completed_scan_id: usize, } -#[derive(Clone, Debug, Eq, PartialEq)] +impl Snapshot { + pub fn repo_for(&self, path: &Path) -> Option { + let mut max_len = 0; + let mut current_candidate = None; + for (work_directory, repo) in (&self.repository_entries).iter() { + if repo.contains(self, path) { + if work_directory.0.as_os_str().len() >= max_len { + current_candidate = Some(repo); + max_len = work_directory.0.as_os_str().len(); + } else { + break; + } + } + } + + current_candidate.map(|entry| entry.to_owned()) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] pub struct RepositoryEntry { pub(crate) work_directory: WorkDirectoryEntry, pub(crate) branch: Option>, + pub(crate) statuses: TreeMap, +} + +fn read_git_status(git_status: i32) -> Option { + proto::GitStatus::from_i32(git_status).map(|status| match status { + proto::GitStatus::Added => GitFileStatus::Added, + proto::GitStatus::Modified => GitFileStatus::Modified, + proto::GitStatus::Conflict => GitFileStatus::Conflict, + }) } impl RepositoryEntry { @@ -141,6 +172,102 @@ impl RepositoryEntry { pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool { self.work_directory.contains(snapshot, path) } + + pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option { + self.work_directory + .relativize(snapshot, path) + .and_then(|repo_path| self.statuses.get(&repo_path)) + .cloned() + } + + pub fn status_for_path(&self, snapshot: &Snapshot, path: &Path) -> Option { + self.work_directory + .relativize(snapshot, path) + .and_then(|repo_path| 
{ + self.statuses + .iter_from(&repo_path) + .take_while(|(key, _)| key.starts_with(&repo_path)) + // Short circuit once we've found the highest-priority status (a conflict) + .take_until(|(_, status)| status == &&GitFileStatus::Conflict) + .map(|(_, status)| status) + .reduce( + |status_first, status_second| match (status_first, status_second) { + (GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => { + &GitFileStatus::Conflict + } + (GitFileStatus::Modified, _) | (_, GitFileStatus::Modified) => { + &GitFileStatus::Modified + } + _ => &GitFileStatus::Added, + }, + ) + .copied() + }) + } + + pub fn build_update(&self, other: &Self) -> proto::RepositoryEntry { + let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new(); + let mut removed_statuses: Vec<String> = Vec::new(); + + let mut self_statuses = self.statuses.iter().peekable(); + let mut other_statuses = other.statuses.iter().peekable(); + loop { + match (self_statuses.peek(), other_statuses.peek()) { + (Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => { + match Ord::cmp(self_repo_path, other_repo_path) { + Ordering::Less => { + updated_statuses.push(make_status_entry(self_repo_path, self_status)); + self_statuses.next(); + } + Ordering::Equal => { + if self_status != other_status { + updated_statuses + .push(make_status_entry(self_repo_path, self_status)); + } + + self_statuses.next(); + other_statuses.next(); + } + Ordering::Greater => { + removed_statuses.push(make_repo_path(other_repo_path)); + other_statuses.next(); + } + } + } + (Some((self_repo_path, self_status)), None) => { + updated_statuses.push(make_status_entry(self_repo_path, self_status)); + self_statuses.next(); + } + (None, Some((other_repo_path, _))) => { + removed_statuses.push(make_repo_path(other_repo_path)); + other_statuses.next(); + } + (None, None) => break, + } + } + + proto::RepositoryEntry { + work_directory_id: self.work_directory_id().to_proto(), + branch: self.branch.as_ref().map(|str| str.to_string()), + removed_repo_paths: removed_statuses, + updated_statuses, + } + } +} + +fn make_repo_path(path: &RepoPath) -> String { + path.as_os_str().to_string_lossy().to_string() +} + +fn make_status_entry(path: &RepoPath, status: &GitFileStatus) -> proto::StatusEntry { + proto::StatusEntry { + repo_path: make_repo_path(path), + status: match status { + GitFileStatus::Added => proto::GitStatus::Added.into(), + GitFileStatus::Modified => proto::GitStatus::Modified.into(), + GitFileStatus::Conflict => proto::GitStatus::Conflict.into(), + }, + } } impl From<&RepositoryEntry> for proto::RepositoryEntry { @@ -148,6 +275,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry { proto::RepositoryEntry { work_directory_id: value.work_directory.to_proto(), branch: value.branch.as_ref().map(|str| str.to_string()), + updated_statuses: value + .statuses + .iter() + .map(|(repo_path, status)| make_status_entry(repo_path, status)) + .collect(), + removed_repo_paths: Default::default(), } } } @@ -162,6 +295,12 @@ impl Default for RepositoryWorkDirectory { } } +impl AsRef<Path> for RepositoryWorkDirectory { + fn as_ref(&self) -> &Path { + self.0.as_ref() + } +} + #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] pub struct WorkDirectoryEntry(ProjectEntryId); @@ -178,7 +317,7 @@ impl WorkDirectoryEntry { worktree.entry_for_id(self.0).and_then(|entry| { path.strip_prefix(&entry.path) .ok() - .map(move |path| RepoPath(path.to_owned())) + .map(move |path| path.into()) }) } } @@ -197,32 +336,9 @@ impl<'a> From for WorkDirectoryEntry { } } -#[derive(Clone, Debug, Ord,
PartialOrd, Eq, PartialEq)] -pub struct RepoPath(PathBuf); - -impl AsRef for RepoPath { - fn as_ref(&self) -> &Path { - self.0.as_ref() - } -} - -impl Deref for RepoPath { - type Target = PathBuf; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl AsRef for RepositoryWorkDirectory { - fn as_ref(&self) -> &Path { - self.0.as_ref() - } -} - #[derive(Debug, Clone)] pub struct LocalSnapshot { - ignores_by_parent_abs_path: HashMap, (Arc, usize)>, + ignores_by_parent_abs_path: HashMap, (Arc, bool)>, // (gitignore, needs_update) // The ProjectEntryId corresponds to the entry for the .git dir // work_directory_id git_repositories: TreeMap, @@ -234,6 +350,7 @@ pub struct LocalSnapshot { #[derive(Debug, Clone)] pub struct LocalRepositoryEntry { pub(crate) scan_id: usize, + pub(crate) full_scan_id: usize, pub(crate) repo_ptr: Arc>, /// Path to the actual .git folder. /// Note: if .git is a file, this points to the folder indicated by the .git file @@ -265,7 +382,7 @@ enum ScanState { Started, Updated { snapshot: LocalSnapshot, - changes: HashMap, PathChange>, + changes: HashMap<(Arc, ProjectEntryId), PathChange>, barrier: Option, scanning: bool, }, @@ -279,7 +396,7 @@ struct ShareState { } pub enum Event { - UpdatedEntries(HashMap, PathChange>), + UpdatedEntries(HashMap<(Arc, ProjectEntryId), PathChange>), UpdatedGitRepositories(HashMap, LocalRepositoryEntry>), } @@ -1424,13 +1541,41 @@ impl Snapshot { }); for repository in update.updated_repositories { - let repository = RepositoryEntry { - work_directory: ProjectEntryId::from_proto(repository.work_directory_id).into(), - branch: repository.branch.map(Into::into), - }; - if let Some(entry) = self.entry_for_id(repository.work_directory_id()) { - self.repository_entries - .insert(RepositoryWorkDirectory(entry.path.clone()), repository) + let work_directory_entry: WorkDirectoryEntry = + ProjectEntryId::from_proto(repository.work_directory_id).into(); + + if let Some(entry) = self.entry_for_id(*work_directory_entry) { + let mut statuses = TreeMap::default(); + for status_entry in repository.updated_statuses { + let Some(git_file_status) = read_git_status(status_entry.status) else { + continue; + }; + + let repo_path = RepoPath::new(status_entry.repo_path.into()); + statuses.insert(repo_path, git_file_status); + } + + let work_directory = RepositoryWorkDirectory(entry.path.clone()); + if self.repository_entries.get(&work_directory).is_some() { + self.repository_entries.update(&work_directory, |repo| { + repo.branch = repository.branch.map(Into::into); + repo.statuses.insert_tree(statuses); + + for repo_path in repository.removed_repo_paths { + let repo_path = RepoPath::new(repo_path.into()); + repo.statuses.remove(&repo_path); + } + }); + } else { + self.repository_entries.insert( + work_directory, + RepositoryEntry { + work_directory: work_directory_entry, + branch: repository.branch.map(Into::into), + statuses, + }, + ) + } } else { log::error!("no work directory entry for repository {:?}", repository) } @@ -1524,6 +1669,30 @@ impl Snapshot { } } + fn descendent_entries<'a>( + &'a self, + include_dirs: bool, + include_ignored: bool, + parent_path: &'a Path, + ) -> DescendentEntriesIter<'a> { + let mut cursor = self.entries_by_path.cursor(); + cursor.seek(&TraversalTarget::Path(parent_path), Bias::Left, &()); + let mut traversal = Traversal { + cursor, + include_dirs, + include_ignored, + }; + + if traversal.end_offset() == traversal.start_offset() { + traversal.advance(); + } + + DescendentEntriesIter { + traversal, + parent_path, + } + } 
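The `descendent_entries` method above is a seek followed by a bounded scan: position a cursor at `parent_path`, then yield entries while their paths still start with that parent. A minimal sketch of the same pattern over std's `BTreeMap` rather than Zed's `SumTree` cursor, ignoring the `include_dirs`/`include_ignored` filtering (the map's inode payload is purely illustrative):

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

// Because `Path` ordering is component-wise, every descendant of `parent`
// sorts contiguously right after it, so one O(log n) seek plus a linear
// scan visits exactly the descendants and nothing else.
fn descendent_entries<'a>(
    entries: &'a BTreeMap<PathBuf, u64>, // path -> inode, for illustration only
    parent: &'a Path,
) -> impl Iterator<Item = &'a Path> + 'a {
    entries
        .range::<Path, _>(parent..) // seek to the parent path
        .take_while(move |(path, _)| path.starts_with(parent)) // stop at the first non-descendant
        .map(|(path, _)| path.as_path())
}

fn main() {
    let mut entries = BTreeMap::new();
    for path in ["a", "b", "b/c", "b/c/d", "b/e", "f"] {
        entries.insert(PathBuf::from(path), 0);
    }
    let found: Vec<_> = descendent_entries(&entries, Path::new("b")).collect();
    assert_eq!(found, ["b", "b/c", "b/c/d", "b/e"].map(Path::new));
}
```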
+ pub fn root_entry(&self) -> Option<&Entry> { self.entry_for_path("") } @@ -1570,32 +1739,17 @@ impl Snapshot { } impl LocalSnapshot { - pub(crate) fn repo_for(&self, path: &Path) -> Option { - let mut max_len = 0; - let mut current_candidate = None; - for (work_directory, repo) in (&self.repository_entries).iter() { - if repo.contains(self, path) { - if work_directory.0.as_os_str().len() >= max_len { - current_candidate = Some(repo); - max_len = work_directory.0.as_os_str().len(); - } else { - break; - } - } - } - - current_candidate.map(|entry| entry.to_owned()) + pub(crate) fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> { + self.git_repositories.get(&repo.work_directory.0) } pub(crate) fn repo_for_metadata( &self, path: &Path, - ) -> Option<(ProjectEntryId, Arc>)> { - let (entry_id, local_repo) = self - .git_repositories + ) -> Option<(&ProjectEntryId, &LocalRepositoryEntry)> { + self.git_repositories .iter() - .find(|(_, repo)| repo.in_dot_git(path))?; - Some((*entry_id, local_repo.repo_ptr.to_owned())) + .find(|(_, repo)| repo.in_dot_git(path)) } #[cfg(test)] @@ -1685,7 +1839,7 @@ impl LocalSnapshot { } Ordering::Equal => { if self_repo != other_repo { - updated_repositories.push((*self_repo).into()); + updated_repositories.push(self_repo.build_update(other_repo)); } self_repos.next(); @@ -1728,10 +1882,8 @@ impl LocalSnapshot { let abs_path = self.abs_path.join(&entry.path); match smol::block_on(build_gitignore(&abs_path, fs)) { Ok(ignore) => { - self.ignores_by_parent_abs_path.insert( - abs_path.parent().unwrap().into(), - (Arc::new(ignore), self.scan_id), - ); + self.ignores_by_parent_abs_path + .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true)); } Err(error) => { log::error!( @@ -1801,10 +1953,8 @@ impl LocalSnapshot { } if let Some(ignore) = ignore { - self.ignores_by_parent_abs_path.insert( - self.abs_path.join(&parent_path).into(), - (ignore, self.scan_id), - ); + self.ignores_by_parent_abs_path + .insert(self.abs_path.join(&parent_path).into(), (ignore, false)); } if parent_path.file_name() == Some(&DOT_GIT) { @@ -1852,11 +2002,13 @@ impl LocalSnapshot { let scan_id = self.scan_id; let repo_lock = repo.lock(); + self.repository_entries.insert( work_directory, RepositoryEntry { work_directory: work_dir_id.into(), branch: repo_lock.branch_name().map(Into::into), + statuses: repo_lock.statuses().unwrap_or_default(), }, ); drop(repo_lock); @@ -1865,6 +2017,7 @@ impl LocalSnapshot { work_dir_id, LocalRepositoryEntry { scan_id, + full_scan_id: scan_id, repo_ptr: repo, git_dir_path: parent_path.clone(), }, @@ -1905,11 +2058,11 @@ impl LocalSnapshot { if path.file_name() == Some(&GITIGNORE) { let abs_parent_path = self.abs_path.join(path.parent().unwrap()); - if let Some((_, scan_id)) = self + if let Some((_, needs_update)) = self .ignores_by_parent_abs_path .get_mut(abs_parent_path.as_path()) { - *scan_id = self.snapshot.scan_id; + *needs_update = true; } } } @@ -2399,10 +2552,15 @@ struct BackgroundScanner { status_updates_tx: UnboundedSender, executor: Arc, refresh_requests_rx: channel::Receiver<(Vec, barrier::Sender)>, - prev_state: Mutex<(Snapshot, Vec>)>, + prev_state: Mutex, finished_initial_scan: bool, } +struct BackgroundScannerState { + snapshot: Snapshot, + event_paths: Vec>, +} + impl BackgroundScanner { fn new( snapshot: LocalSnapshot, @@ -2416,7 +2574,10 @@ impl BackgroundScanner { status_updates_tx, executor, refresh_requests_rx, - prev_state: Mutex::new((snapshot.snapshot.clone(), Vec::new())), + prev_state: 
Mutex::new(BackgroundScannerState { + snapshot: snapshot.snapshot.clone(), + event_paths: Default::default(), + }), snapshot: Mutex::new(snapshot), finished_initial_scan: false, } @@ -2444,7 +2605,7 @@ impl BackgroundScanner { self.snapshot .lock() .ignores_by_parent_abs_path - .insert(ancestor.into(), (ignore.into(), 0)); + .insert(ancestor.into(), (ignore.into(), false)); } } { @@ -2497,7 +2658,7 @@ impl BackgroundScanner { // these before handling changes reported by the filesystem. request = self.refresh_requests_rx.recv().fuse() => { let Ok((paths, barrier)) = request else { break }; - if !self.process_refresh_request(paths, barrier).await { + if !self.process_refresh_request(paths.clone(), barrier).await { return; } } @@ -2508,25 +2669,37 @@ impl BackgroundScanner { while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) { paths.extend(more_events.into_iter().map(|e| e.path)); } - self.process_events(paths).await; + self.process_events(paths.clone()).await; } } } } async fn process_refresh_request(&self, paths: Vec, barrier: barrier::Sender) -> bool { - self.reload_entries_for_paths(paths, None).await; + if let Some(mut paths) = self.reload_entries_for_paths(paths, None).await { + paths.sort_unstable(); + util::extend_sorted( + &mut self.prev_state.lock().event_paths, + paths, + usize::MAX, + Ord::cmp, + ); + } self.send_status_update(false, Some(barrier)) } async fn process_events(&mut self, paths: Vec) { let (scan_job_tx, scan_job_rx) = channel::unbounded(); - if let Some(mut paths) = self + let paths = self .reload_entries_for_paths(paths, Some(scan_job_tx.clone())) - .await - { - paths.sort_unstable(); - util::extend_sorted(&mut self.prev_state.lock().1, paths, usize::MAX, Ord::cmp); + .await; + if let Some(paths) = &paths { + util::extend_sorted( + &mut self.prev_state.lock().event_paths, + paths.iter().cloned(), + usize::MAX, + Ord::cmp, + ); } drop(scan_job_tx); self.scan_dirs(false, scan_job_rx).await; @@ -2535,6 +2708,12 @@ impl BackgroundScanner { let mut snapshot = self.snapshot.lock(); + if let Some(paths) = paths { + for path in paths { + self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref()); + } + } + let mut git_repositories = mem::take(&mut snapshot.git_repositories); git_repositories.retain(|work_directory_id, _| { snapshot @@ -2560,6 +2739,7 @@ impl BackgroundScanner { drop(snapshot); self.send_status_update(false, None); + self.prev_state.lock().event_paths.clear(); } async fn scan_dirs( @@ -2637,14 +2817,18 @@ impl BackgroundScanner { fn send_status_update(&self, scanning: bool, barrier: Option) -> bool { let mut prev_state = self.prev_state.lock(); - let snapshot = self.snapshot.lock().clone(); - let mut old_snapshot = snapshot.snapshot.clone(); - mem::swap(&mut old_snapshot, &mut prev_state.0); - let changed_paths = mem::take(&mut prev_state.1); - let changes = self.build_change_set(&old_snapshot, &snapshot.snapshot, changed_paths); + let new_snapshot = self.snapshot.lock().clone(); + let old_snapshot = mem::replace(&mut prev_state.snapshot, new_snapshot.snapshot.clone()); + + let changes = self.build_change_set( + &old_snapshot, + &new_snapshot.snapshot, + &prev_state.event_paths, + ); + self.status_updates_tx .unbounded_send(ScanState::Updated { - snapshot, + snapshot: new_snapshot, changes, scanning, barrier, @@ -2840,27 +3024,6 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_all(); snapshot.insert_entry(fs_entry, self.fs.as_ref()); - let scan_id = snapshot.scan_id; - - let repo_with_path_in_dotgit = 
snapshot.repo_for_metadata(&path); - if let Some((entry_id, repo)) = repo_with_path_in_dotgit { - let work_dir = snapshot - .entry_for_id(entry_id) - .map(|entry| RepositoryWorkDirectory(entry.path.clone()))?; - - let repo = repo.lock(); - repo.reload_index(); - let branch = repo.branch_name(); - - snapshot.git_repositories.update(&entry_id, |entry| { - entry.scan_id = scan_id; - }); - - snapshot - .repository_entries - .update(&work_dir, |entry| entry.branch = branch.map(Into::into)); - } - if let Some(scan_queue_tx) = &scan_queue_tx { let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) { @@ -2876,7 +3039,9 @@ impl BackgroundScanner { } } } - Ok(None) => {} + Ok(None) => { + self.remove_repo_path(&path, &mut snapshot); + } Err(err) => { // TODO - create a special 'error' entry in the entries tree to mark this log::error!("error reading file on event {:?}", err); @@ -2887,22 +3052,143 @@ impl BackgroundScanner { Some(event_paths) } + fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> { + if !path + .components() + .any(|component| component.as_os_str() == *DOT_GIT) + { + let scan_id = snapshot.scan_id; + let repo = snapshot.repo_for(&path)?; + + let repo_path = repo.work_directory.relativize(&snapshot, &path)?; + + let work_dir = repo.work_directory(snapshot)?; + let work_dir_id = repo.work_directory; + + snapshot + .git_repositories + .update(&work_dir_id, |entry| entry.scan_id = scan_id); + + snapshot.repository_entries.update(&work_dir, |entry| { + entry + .statuses + .remove_range(&repo_path, &RepoPathDescendants(&repo_path)) + }); + } + + Some(()) + } + + fn reload_repo_for_file_path( + &self, + path: &Path, + snapshot: &mut LocalSnapshot, + fs: &dyn Fs, + ) -> Option<()> { + let scan_id = snapshot.scan_id; + + if path + .components() + .any(|component| component.as_os_str() == *DOT_GIT) + { + let (entry_id, repo_ptr) = { + let Some((entry_id, repo)) = snapshot.repo_for_metadata(&path) else { + let dot_git_dir = path.ancestors() + .skip_while(|ancestor| ancestor.file_name() != Some(&*DOT_GIT)) + .next()?; + + snapshot.build_repo(dot_git_dir.into(), fs); + return None; + }; + if repo.full_scan_id == scan_id { + return None; + } + (*entry_id, repo.repo_ptr.to_owned()) + }; + + let work_dir = snapshot + .entry_for_id(entry_id) + .map(|entry| RepositoryWorkDirectory(entry.path.clone()))?; + + let repo = repo_ptr.lock(); + repo.reload_index(); + let branch = repo.branch_name(); + let statuses = repo.statuses().unwrap_or_default(); + + snapshot.git_repositories.update(&entry_id, |entry| { + entry.scan_id = scan_id; + entry.full_scan_id = scan_id; + }); + + snapshot.repository_entries.update(&work_dir, |entry| { + entry.branch = branch.map(Into::into); + entry.statuses = statuses; + }); + } else { + if snapshot + .entry_for_path(&path) + .map(|entry| entry.is_ignored) + .unwrap_or(false) + { + self.remove_repo_path(&path, snapshot); + return None; + } + + let repo = snapshot.repo_for(&path)?; + + let work_dir = repo.work_directory(snapshot)?; + let work_dir_id = repo.work_directory.clone(); + + snapshot + .git_repositories + .update(&work_dir_id, |entry| entry.scan_id = scan_id); + + let local_repo = snapshot.get_local_repo(&repo)?.to_owned(); + + // Short circuit if we've already scanned everything + if local_repo.full_scan_id == scan_id { + return None; + } + + let mut repository = snapshot.repository_entries.remove(&work_dir)?; + + for entry in 
snapshot.descendent_entries(false, false, path) { + let Some(repo_path) = repo.work_directory.relativize(snapshot, &entry.path) else { + continue; + }; + + let status = local_repo.repo_ptr.lock().status(&repo_path); + if let Some(status) = status { + repository.statuses.insert(repo_path.clone(), status); + } else { + repository.statuses.remove(&repo_path); + } + } + + snapshot.repository_entries.insert(work_dir, repository) + } + + Some(()) + } + async fn update_ignore_statuses(&self) { use futures::FutureExt as _; let mut snapshot = self.snapshot.lock().clone(); let mut ignores_to_update = Vec::new(); let mut ignores_to_delete = Vec::new(); - for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path { - if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) { - if *scan_id > snapshot.completed_scan_id - && snapshot.entry_for_path(parent_path).is_some() - { - ignores_to_update.push(parent_abs_path.clone()); + let abs_path = snapshot.abs_path.clone(); + for (parent_abs_path, (_, needs_update)) in &mut snapshot.ignores_by_parent_abs_path { + if let Ok(parent_path) = parent_abs_path.strip_prefix(&abs_path) { + if *needs_update { + *needs_update = false; + if snapshot.snapshot.entry_for_path(parent_path).is_some() { + ignores_to_update.push(parent_abs_path.clone()); + } } let ignore_path = parent_path.join(&*GITIGNORE); - if snapshot.entry_for_path(ignore_path).is_none() { + if snapshot.snapshot.entry_for_path(ignore_path).is_none() { ignores_to_delete.push(parent_abs_path.clone()); } } @@ -3012,8 +3298,8 @@ impl BackgroundScanner { &self, old_snapshot: &Snapshot, new_snapshot: &Snapshot, - event_paths: Vec>, - ) -> HashMap, PathChange> { + event_paths: &[Arc], + ) -> HashMap<(Arc, ProjectEntryId), PathChange> { use PathChange::{Added, AddedOrUpdated, Removed, Updated}; let mut changes = HashMap::default(); @@ -3022,7 +3308,7 @@ impl BackgroundScanner { let received_before_initialized = !self.finished_initial_scan; for path in event_paths { - let path = PathKey(path); + let path = PathKey(path.clone()); old_paths.seek(&path, Bias::Left, &()); new_paths.seek(&path, Bias::Left, &()); @@ -3039,7 +3325,7 @@ impl BackgroundScanner { match Ord::cmp(&old_entry.path, &new_entry.path) { Ordering::Less => { - changes.insert(old_entry.path.clone(), Removed); + changes.insert((old_entry.path.clone(), old_entry.id), Removed); old_paths.next(&()); } Ordering::Equal => { @@ -3047,31 +3333,35 @@ impl BackgroundScanner { // If the worktree was not fully initialized when this event was generated, // we can't know whether this entry was added during the scan or whether // it was merely updated. 
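// Either way, reporting the conservative AddedOrUpdated variant covers both cases.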
- changes.insert(new_entry.path.clone(), AddedOrUpdated); + changes.insert( + (new_entry.path.clone(), new_entry.id), + AddedOrUpdated, + ); } else if old_entry.mtime != new_entry.mtime { - changes.insert(new_entry.path.clone(), Updated); + changes.insert((new_entry.path.clone(), new_entry.id), Updated); } old_paths.next(&()); new_paths.next(&()); } Ordering::Greater => { - changes.insert(new_entry.path.clone(), Added); + changes.insert((new_entry.path.clone(), new_entry.id), Added); new_paths.next(&()); } } } (Some(old_entry), None) => { - changes.insert(old_entry.path.clone(), Removed); + changes.insert((old_entry.path.clone(), old_entry.id), Removed); old_paths.next(&()); } (None, Some(new_entry)) => { - changes.insert(new_entry.path.clone(), Added); + changes.insert((new_entry.path.clone(), new_entry.id), Added); new_paths.next(&()); } (None, None) => break, } } } + changes } @@ -3212,17 +3502,13 @@ pub struct Traversal<'a> { impl<'a> Traversal<'a> { pub fn advance(&mut self) -> bool { - self.advance_to_offset(self.offset() + 1) - } - - pub fn advance_to_offset(&mut self, offset: usize) -> bool { self.cursor.seek_forward( &TraversalTarget::Count { - count: offset, + count: self.end_offset() + 1, include_dirs: self.include_dirs, include_ignored: self.include_ignored, }, - Bias::Right, + Bias::Left, &(), ) } @@ -3249,11 +3535,17 @@ impl<'a> Traversal<'a> { self.cursor.item() } - pub fn offset(&self) -> usize { + pub fn start_offset(&self) -> usize { self.cursor .start() .count(self.include_dirs, self.include_ignored) } + + pub fn end_offset(&self) -> usize { + self.cursor + .end(&()) + .count(self.include_dirs, self.include_ignored) + } } impl<'a> Iterator for Traversal<'a> { @@ -3322,6 +3614,25 @@ impl<'a> Iterator for ChildEntriesIter<'a> { } } +struct DescendentEntriesIter<'a> { + parent_path: &'a Path, + traversal: Traversal<'a>, +} + +impl<'a> Iterator for DescendentEntriesIter<'a> { + type Item = &'a Entry; + + fn next(&mut self) -> Option<Self::Item> { + if let Some(item) = self.traversal.entry() { + if item.path.starts_with(&self.parent_path) { + self.traversal.advance(); + return Some(item); + } + } + None + } +} + impl<'a> From<&'a Entry> for proto::Entry { fn from(entry: &'a Entry) -> Self { Self { @@ -3436,6 +3747,105 @@ mod tests { }) } + #[gpui::test] + async fn test_descendent_entries(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "a": "", + "b": { + "c": { + "d": "" + }, + "e": {} + }, + "f": "", + "g": { + "h": {} + }, + "i": { + "j": { + "k": "" + }, + "l": { + + } + }, + ".gitignore": "i/j\n", + }), + ) + .await; + + let http_client = FakeHttpClient::with_404_response(); + let client = cx.read(|cx| Client::new(http_client, cx)); + + let tree = Worktree::local( + client, + Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + vec![Path::new("b/c/d"),] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + vec![ + Path::new("b"), + Path::new("b/c"), + Path::new("b/c/d"), + Path::new("b/e"), + ] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + Vec::<PathBuf>::new() + ); + assert_eq!(
tree.descendent_entries(true, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + vec![Path::new("g"), Path::new("g/h"),] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + Vec::<PathBuf>::new() + ); + assert_eq!( + tree.descendent_entries(false, true, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + vec![Path::new("i/j/k")] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::<Vec<_>>(), + vec![Path::new("i"), Path::new("i/l"),] + ); + }) + } + #[gpui::test(iterations = 10)] async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background()); @@ -3686,6 +4096,280 @@ mod tests { }); } + #[gpui::test] + async fn test_git_status(cx: &mut TestAppContext) { + #[track_caller] + fn git_init(path: &Path) -> git2::Repository { + git2::Repository::init(path).expect("Failed to initialize git repository") + } + + #[track_caller] + fn git_add(path: &Path, repo: &git2::Repository) { + let mut index = repo.index().expect("Failed to get index"); + index.add_path(path).expect("Failed to add path"); + index.write().expect("Failed to write index"); + } + + #[track_caller] + fn git_remove_index(path: &Path, repo: &git2::Repository) { + let mut index = repo.index().expect("Failed to get index"); + index.remove_path(path).expect("Failed to remove path"); + index.write().expect("Failed to write index"); + } + + #[track_caller] + fn git_commit(msg: &'static str, repo: &git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + let oid = repo.index().unwrap().write_tree().unwrap(); + let tree = repo.find_tree(oid).unwrap(); + if let Ok(head) = repo.head() { + let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); + + let parent_commit = parent_obj.as_commit().unwrap(); + + repo.commit( + Some("HEAD"), + &signature, + &signature, + msg, + &tree, + &[parent_commit], + ) + .expect("Failed to commit with parent"); + } else { + repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) + .expect("Failed to commit"); + } + } + + #[track_caller] + fn git_stash(repo: &mut git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + repo.stash_save(&signature, "N/A", None) + .expect("Failed to stash"); + } + + #[track_caller] + fn git_reset(offset: usize, repo: &git2::Repository) { + let head = repo.head().expect("Couldn't get repo head"); + let object = head.peel(git2::ObjectType::Commit).unwrap(); + let commit = object.as_commit().unwrap(); + let new_head = commit + .parents() + .inspect(|parent| { + parent.message(); + }) + .skip(offset) + .next() + .expect("Not enough history"); + repo.reset(&new_head.as_object(), git2::ResetType::Soft, None) + .expect("Could not reset"); + } + + #[allow(dead_code)] + #[track_caller] + fn git_status(repo: &git2::Repository) -> HashMap<String, git2::Status> { + repo.statuses(None) + .unwrap() + .iter() + .map(|status| (status.path().unwrap().to_string(), status.status())) + .collect() + } + + const IGNORE_RULE: &'static str = "**/target"; + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", + "b.txt": "bb", + "c": { + "d": { + "e.txt": "eee" + } + }, + "f.txt": "ffff", + "target": { + "build_file": "???"
+ }, + ".gitignore": IGNORE_RULE + }, + + })); + + let http_client = FakeHttpClient::with_404_response(); + let client = cx.read(|cx| Client::new(http_client, cx)); + let tree = Worktree::local( + client, + root.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + const A_TXT: &'static str = "a.txt"; + const B_TXT: &'static str = "b.txt"; + const E_TXT: &'static str = "c/d/e.txt"; + const F_TXT: &'static str = "f.txt"; + const DOTGITIGNORE: &'static str = ".gitignore"; + const BUILD_FILE: &'static str = "target/build_file"; + + let work_dir = root.path().join("project"); + let mut repo = git_init(work_dir.as_path()); + repo.add_ignore_rule(IGNORE_RULE).unwrap(); + git_add(Path::new(A_TXT), &repo); + git_add(Path::new(E_TXT), &repo); + git_add(Path::new(DOTGITIGNORE), &repo); + git_commit("Initial commit", &repo); + + std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); + + tree.flush_fs_events(cx).await; + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!(snapshot.repository_entries.iter().count(), 1); + let (dir, repo) = snapshot.repository_entries.iter().next().unwrap(); + assert_eq!(dir.0.as_ref(), Path::new("project")); + + assert_eq!(repo.statuses.iter().count(), 3); + assert_eq!( + repo.statuses.get(&Path::new(A_TXT).into()), + Some(&GitFileStatus::Modified) + ); + assert_eq!( + repo.statuses.get(&Path::new(B_TXT).into()), + Some(&GitFileStatus::Added) + ); + assert_eq!( + repo.statuses.get(&Path::new(F_TXT).into()), + Some(&GitFileStatus::Added) + ); + }); + + git_add(Path::new(A_TXT), &repo); + git_add(Path::new(B_TXT), &repo); + git_commit("Committing modified and added", &repo); + tree.flush_fs_events(cx).await; + + // Check that repo only changes are tracked + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!( + repo.statuses.get(&Path::new(F_TXT).into()), + Some(&GitFileStatus::Added) + ); + }); + + git_reset(0, &repo); + git_remove_index(Path::new(B_TXT), &repo); + git_stash(&mut repo); + std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); + std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); + tree.flush_fs_events(cx).await; + + // Check that more complex repo changes are tracked + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 3); + assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None); + assert_eq!( + repo.statuses.get(&Path::new(B_TXT).into()), + Some(&GitFileStatus::Added) + ); + assert_eq!( + repo.statuses.get(&Path::new(E_TXT).into()), + Some(&GitFileStatus::Modified) + ); + assert_eq!( + repo.statuses.get(&Path::new(F_TXT).into()), + Some(&GitFileStatus::Added) + ); + }); + + std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); + std::fs::remove_dir_all(work_dir.join("c")).unwrap(); + std::fs::write( + work_dir.join(DOTGITIGNORE), + [IGNORE_RULE, "f.txt"].join("\n"), + ) + .unwrap(); + + git_add(Path::new(DOTGITIGNORE), &repo); + git_commit("Committing modified git ignore", &repo); + + tree.flush_fs_events(cx).await; + + // Check that non-repo behavior is tracked + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + 
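+ // Deletions aren't mapped to a GitFileStatus and f.txt is now gitignored, so no statuses should remain.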
let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 0); + }); + + let mut renamed_dir_name = "first_directory/second_directory"; + const RENAMED_FILE: &'static str = "rf.txt"; + + std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); + std::fs::write( + work_dir.join(renamed_dir_name).join(RENAMED_FILE), + "new-contents", + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!( + repo.statuses + .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()), + Some(&GitFileStatus::Added) + ); + }); + + renamed_dir_name = "new_first_directory/second_directory"; + + std::fs::rename( + work_dir.join("first_directory"), + work_dir.join("new_first_directory"), + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let (_, repo) = snapshot.repository_entries.iter().next().unwrap(); + + assert_eq!(repo.statuses.iter().count(), 1); + assert_eq!( + repo.statuses + .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()), + Some(&GitFileStatus::Added) + ); + }); + } + #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { let dir = temp_tree(json!({ @@ -3911,7 +4595,7 @@ mod tests { cx.subscribe(&worktree, move |tree, _, event, _| { if let Event::UpdatedEntries(changes) = event { - for (path, change_type) in changes.iter() { + for ((path, _), change_type) in changes.iter() { let path = path.clone(); let ix = match paths.binary_search(&path) { Ok(ix) | Err(ix) => ix, @@ -3921,13 +4605,16 @@ mod tests { assert_ne!(paths.get(ix), Some(&path)); paths.insert(ix, path); } + PathChange::Removed => { assert_eq!(paths.get(ix), Some(&path)); paths.remove(ix); } + PathChange::Updated => { assert_eq!(paths.get(ix), Some(&path)); } + PathChange::AddedOrUpdated => { if paths[ix] != path { paths.insert(ix, path); @@ -3935,6 +4622,7 @@ mod tests { } } } + let new_paths = tree.paths().cloned().collect::>(); assert_eq!(paths, new_paths, "incorrect changes: {:?}", changes); } @@ -3942,15 +4630,26 @@ mod tests { .detach(); }); + fs.as_fake().pause_events(); let mut snapshots = Vec::new(); let mut mutations_len = operations; while mutations_len > 1 { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; - let buffered_event_count = fs.as_fake().buffered_event_count().await; + if rng.gen_bool(0.2) { + worktree + .update(cx, |worktree, cx| { + randomly_mutate_worktree(worktree, &mut rng, cx) + }) + .await + .log_err(); + } else { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + + let buffered_event_count = fs.as_fake().buffered_event_count(); if buffered_event_count > 0 && rng.gen_bool(0.3) { let len = rng.gen_range(0..=buffered_event_count); log::info!("flushing {} events", len); - fs.as_fake().flush_events(len).await; + fs.as_fake().flush_events(len); } else { randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; mutations_len -= 1; @@ -3966,7 +4665,7 @@ mod tests { } log::info!("quiescing"); - fs.as_fake().flush_events(usize::MAX).await; + fs.as_fake().flush_events(usize::MAX); cx.foreground().run_until_parked(); let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); snapshot.check_invariants(); @@ -4026,6 +4725,7 @@ mod tests { rng: &mut impl Rng, cx: &mut ModelContext, ) -> Task> { + log::info!("mutating 
worktree"); let worktree = worktree.as_local_mut().unwrap(); let snapshot = worktree.snapshot(); let entry = snapshot.entries(false).choose(rng).unwrap(); @@ -4087,6 +4787,7 @@ mod tests { insertion_probability: f64, rng: &mut impl Rng, ) { + log::info!("mutating fs"); let mut files = Vec::new(); let mut dirs = Vec::new(); for path in fs.as_fake().paths() { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index fb82fbfdc4..7325e69f3f 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -6,7 +6,7 @@ use gpui::{ actions, anyhow::{anyhow, Result}, elements::{ - AnchorCorner, ChildView, ContainerStyle, Empty, Flex, Label, MouseEventHandler, + AnchorCorner, ChildView, ComponentHost, ContainerStyle, Empty, Flex, MouseEventHandler, ParentElement, ScrollTarget, Stack, Svg, UniformList, UniformListState, }, geometry::vector::Vector2F, @@ -16,7 +16,10 @@ use gpui::{ ViewHandle, WeakViewHandle, }; use menu::{Confirm, SelectNext, SelectPrev}; -use project::{Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId}; +use project::{ + repository::GitFileStatus, Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree, + WorktreeId, +}; use settings::Settings; use std::{ cmp::Ordering, @@ -26,7 +29,7 @@ use std::{ path::Path, sync::Arc, }; -use theme::ProjectPanelEntry; +use theme::{ui::FileName, ProjectPanelEntry}; use unicase::UniCase; use workspace::Workspace; @@ -86,6 +89,7 @@ pub struct EntryDetails { is_editing: bool, is_processing: bool, is_cut: bool, + git_status: Option, } actions!( @@ -1008,6 +1012,15 @@ impl ProjectPanel { let entry_range = range.start.saturating_sub(ix)..end_ix - ix; for entry in &visible_worktree_entries[entry_range] { + let path = &entry.path; + let status = (entry.path.parent().is_some() && !entry.is_ignored) + .then(|| { + snapshot + .repo_for(path) + .and_then(|entry| entry.status_for_path(&snapshot, path)) + }) + .flatten(); + let mut details = EntryDetails { filename: entry .path @@ -1028,6 +1041,7 @@ impl ProjectPanel { is_cut: self .clipboard_entry .map_or(false, |e| e.is_cut() && e.entry_id() == entry.id), + git_status: status, }; if let Some(edit_state) = &self.edit_state { @@ -1096,12 +1110,16 @@ impl ProjectPanel { .flex(1.0, true) .into_any() } else { - Label::new(details.filename.clone(), style.text.clone()) - .contained() - .with_margin_left(style.icon_spacing) - .aligned() - .left() - .into_any() + ComponentHost::new(FileName::new( + details.filename.clone(), + details.git_status, + FileName::style(style.text.clone(), &cx.global::().theme), + )) + .contained() + .with_margin_left(style.icon_spacing) + .aligned() + .left() + .into_any() }) .constrained() .with_height(style.height) diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 220ef22fb7..eca5fda306 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -986,8 +986,22 @@ message Entry { message RepositoryEntry { uint64 work_directory_id = 1; optional string branch = 2; + repeated string removed_repo_paths = 3; + repeated StatusEntry updated_statuses = 4; } +message StatusEntry { + string repo_path = 1; + GitStatus status = 2; +} + +enum GitStatus { + Added = 0; + Modified = 1; + Conflict = 2; +} + + message BufferState { uint64 id = 1; optional File file = 2; diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 20a457cc4b..cef4e6867c 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -1,6 +1,7 @@ 
use super::{entity_messages, messages, request_messages, ConnectionId, TypedEnvelope}; use anyhow::{anyhow, Result}; use async_tungstenite::tungstenite::Message as WebSocketMessage; +use collections::HashMap; use futures::{SinkExt as _, StreamExt as _}; use prost::Message as _; use serde::Serialize; @@ -484,14 +485,21 @@ pub fn split_worktree_update( mut message: UpdateWorktree, max_chunk_size: usize, ) -> impl Iterator { - let mut done = false; + let mut done_files = false; + + let mut repository_map = message + .updated_repositories + .into_iter() + .map(|repo| (repo.work_directory_id, repo)) + .collect::>(); + iter::from_fn(move || { - if done { + if done_files { return None; } let updated_entries_chunk_size = cmp::min(message.updated_entries.len(), max_chunk_size); - let updated_entries = message + let updated_entries: Vec<_> = message .updated_entries .drain(..updated_entries_chunk_size) .collect(); @@ -502,22 +510,28 @@ pub fn split_worktree_update( .drain(..removed_entries_chunk_size) .collect(); - done = message.updated_entries.is_empty() && message.removed_entries.is_empty(); + done_files = message.updated_entries.is_empty() && message.removed_entries.is_empty(); - // Wait to send repositories until after we've guaranteed that their associated entries - // will be read - let updated_repositories = if done { - mem::take(&mut message.updated_repositories) - } else { - Default::default() - }; + let mut updated_repositories = Vec::new(); - let removed_repositories = if done { + if !repository_map.is_empty() { + for entry in &updated_entries { + if let Some(repo) = repository_map.remove(&entry.id) { + updated_repositories.push(repo) + } + } + } + + let removed_repositories = if done_files { mem::take(&mut message.removed_repositories) } else { Default::default() }; + if done_files { + updated_repositories.extend(mem::take(&mut repository_map).into_values()); + } + Some(UpdateWorktree { project_id: message.project_id, worktree_id: message.worktree_id, @@ -526,7 +540,7 @@ pub fn split_worktree_update( updated_entries, removed_entries, scan_id: message.scan_id, - is_last_update: done && message.is_last_update, + is_last_update: done_files && message.is_last_update, updated_repositories, removed_repositories, }) diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index e51ded5969..64fbf19462 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -6,4 +6,4 @@ pub use conn::Connection; pub use peer::*; mod macros; -pub const PROTOCOL_VERSION: u32 = 54; +pub const PROTOCOL_VERSION: u32 = 55; diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index ab3c35c1fe..14e658e8f8 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -30,6 +30,7 @@ smol.workspace = true glob.workspace = true [dev-dependencies] +client = { path = "../client", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } serde_json.workspace = true diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 7364431407..0d020da570 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1287,6 +1287,7 @@ pub mod tests { cx.set_global(settings); language::init(cx); + client::init_settings(cx); editor::init_settings(cx); workspace::init_settings(cx); }); diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 3e916ccd1b..36f0f926cd 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ 
b/crates/sum_tree/src/sum_tree.rs @@ -5,7 +5,7 @@ use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; use std::marker::PhantomData; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; -pub use tree_map::{TreeMap, TreeSet}; +pub use tree_map::{MapSeekTarget, TreeMap, TreeSet}; #[cfg(test)] const TREE_BASE: usize = 2; diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 1b97cbec9f..ea69fb0dca 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -1,14 +1,14 @@ use std::{cmp::Ordering, fmt::Debug}; -use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary}; +use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary}; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct TreeMap(SumTree>) where K: Clone + Debug + Default + Ord, V: Clone + Debug; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct MapEntry { key: K, value: V, @@ -73,6 +73,17 @@ impl TreeMap { removed } + pub fn remove_range(&mut self, start: &impl MapSeekTarget, end: &impl MapSeekTarget) { + let start = MapSeekTargetAdaptor(start); + let end = MapSeekTargetAdaptor(end); + let mut cursor = self.0.cursor::>(); + let mut new_tree = cursor.slice(&start, Bias::Left, &()); + cursor.seek(&end, Bias::Left, &()); + new_tree.push_tree(cursor.suffix(&()), &()); + drop(cursor); + self.0 = new_tree; + } + /// Returns the key-value pair with the greatest key less than or equal to the given key. pub fn closest(&self, key: &K) -> Option<(&K, &V)> { let mut cursor = self.0.cursor::>(); @@ -82,6 +93,16 @@ impl TreeMap { cursor.item().map(|item| (&item.key, &item.value)) } + pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + '_ { + let mut cursor = self.0.cursor::>(); + let from_key = MapKeyRef(Some(from)); + cursor.seek(&from_key, Bias::Left, &()); + + cursor + .into_iter() + .map(|map_entry| (&map_entry.key, &map_entry.value)) + } + pub fn update(&mut self, key: &K, f: F) -> Option where F: FnOnce(&mut V) -> T, @@ -125,6 +146,45 @@ impl TreeMap { pub fn values(&self) -> impl Iterator + '_ { self.0.iter().map(|entry| &entry.value) } + + pub fn insert_tree(&mut self, other: TreeMap) { + let edits = other + .iter() + .map(|(key, value)| { + Edit::Insert(MapEntry { + key: key.to_owned(), + value: value.to_owned(), + }) + }) + .collect(); + + self.0.edit(edits, &()); + } +} + +#[derive(Debug)] +struct MapSeekTargetAdaptor<'a, T>(&'a T); + +impl<'a, K: Debug + Clone + Default + Ord, T: MapSeekTarget> + SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapSeekTargetAdaptor<'_, T> +{ + fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { + if let Some(key) = &cursor_location.0 { + MapSeekTarget::cmp_cursor(self.0, key) + } else { + Ordering::Greater + } + } +} + +pub trait MapSeekTarget: Debug { + fn cmp_cursor(&self, cursor_location: &K) -> Ordering; +} + +impl MapSeekTarget for K { + fn cmp_cursor(&self, cursor_location: &K) -> Ordering { + self.cmp(cursor_location) + } } impl Default for TreeMap @@ -186,7 +246,7 @@ where K: Clone + Debug + Default + Ord, { fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { - self.0.cmp(&cursor_location.0) + Ord::cmp(&self.0, &cursor_location.0) } } @@ -272,4 +332,112 @@ mod tests { map.retain(|key, _| *key % 2 == 0); assert_eq!(map.iter().collect::>(), vec![(&4, &"d"), (&6, &"f")]); } + + #[test] + fn test_iter_from() { + let mut map = TreeMap::default(); + + map.insert("a", 1); + map.insert("b", 2); + 
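// Keys sort lexicographically, so the "ba"-prefixed entries below form one contiguous run that `iter_from` can walk.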
map.insert("baa", 3); + map.insert("baaab", 4); + map.insert("c", 5); + + let result = map + .iter_from(&"ba") + .take_while(|(key, _)| key.starts_with(&"ba")) + .collect::>(); + + assert_eq!(result.len(), 2); + assert!(result.iter().find(|(k, _)| k == &&"baa").is_some()); + assert!(result.iter().find(|(k, _)| k == &&"baaab").is_some()); + + let result = map + .iter_from(&"c") + .take_while(|(key, _)| key.starts_with(&"c")) + .collect::>(); + + assert_eq!(result.len(), 1); + assert!(result.iter().find(|(k, _)| k == &&"c").is_some()); + } + + #[test] + fn test_insert_tree() { + let mut map = TreeMap::default(); + map.insert("a", 1); + map.insert("b", 2); + map.insert("c", 3); + + let mut other = TreeMap::default(); + other.insert("a", 2); + other.insert("b", 2); + other.insert("d", 4); + + map.insert_tree(other); + + assert_eq!(map.iter().count(), 4); + assert_eq!(map.get(&"a"), Some(&2)); + assert_eq!(map.get(&"b"), Some(&2)); + assert_eq!(map.get(&"c"), Some(&3)); + assert_eq!(map.get(&"d"), Some(&4)); + } + + #[test] + fn test_remove_between_and_path_successor() { + use std::path::{Path, PathBuf}; + + #[derive(Debug)] + pub struct PathDescendants<'a>(&'a Path); + + impl MapSeekTarget for PathDescendants<'_> { + fn cmp_cursor(&self, key: &PathBuf) -> Ordering { + if key.starts_with(&self.0) { + Ordering::Greater + } else { + self.0.cmp(key) + } + } + } + + let mut map = TreeMap::default(); + + map.insert(PathBuf::from("a"), 1); + map.insert(PathBuf::from("a/a"), 1); + map.insert(PathBuf::from("b"), 2); + map.insert(PathBuf::from("b/a/a"), 3); + map.insert(PathBuf::from("b/a/a/a/b"), 4); + map.insert(PathBuf::from("c"), 5); + map.insert(PathBuf::from("c/a"), 6); + + map.remove_range( + &PathBuf::from("b/a"), + &PathDescendants(&PathBuf::from("b/a")), + ); + + assert_eq!(map.get(&PathBuf::from("a")), Some(&1)); + assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1)); + assert_eq!(map.get(&PathBuf::from("b")), Some(&2)); + assert_eq!(map.get(&PathBuf::from("b/a/a")), None); + assert_eq!(map.get(&PathBuf::from("b/a/a/a/b")), None); + assert_eq!(map.get(&PathBuf::from("c")), Some(&5)); + assert_eq!(map.get(&PathBuf::from("c/a")), Some(&6)); + + map.remove_range(&PathBuf::from("c"), &PathDescendants(&PathBuf::from("c"))); + + assert_eq!(map.get(&PathBuf::from("a")), Some(&1)); + assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1)); + assert_eq!(map.get(&PathBuf::from("b")), Some(&2)); + assert_eq!(map.get(&PathBuf::from("c")), None); + assert_eq!(map.get(&PathBuf::from("c/a")), None); + + map.remove_range(&PathBuf::from("a"), &PathDescendants(&PathBuf::from("a"))); + + assert_eq!(map.get(&PathBuf::from("a")), None); + assert_eq!(map.get(&PathBuf::from("a/a")), None); + assert_eq!(map.get(&PathBuf::from("b")), Some(&2)); + + map.remove_range(&PathBuf::from("b"), &PathDescendants(&PathBuf::from("b"))); + + assert_eq!(map.get(&PathBuf::from("b")), None); + } } diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index dedbf2dd6f..0cba8d8da2 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -13,6 +13,7 @@ doctest = false [dependencies] gpui = { path = "../gpui" } +fs = { path = "../fs" } anyhow.workspace = true indexmap = "1.6.2" parking_lot.workspace = true diff --git a/crates/theme/src/ui.rs b/crates/theme/src/ui.rs index b86bfca8c4..e4df24c89f 100644 --- a/crates/theme/src/ui.rs +++ b/crates/theme/src/ui.rs @@ -1,9 +1,10 @@ use std::borrow::Cow; +use fs::repository::GitFileStatus; use gpui::{ color::Color, elements::{ - ConstrainedBox, Container, ContainerStyle, 
Empty, Flex, KeystrokeLabel, Label, + ConstrainedBox, Container, ContainerStyle, Empty, Flex, KeystrokeLabel, Label, LabelStyle, MouseEventHandler, ParentElement, Stack, Svg, }, fonts::TextStyle, @@ -11,11 +12,11 @@ use gpui::{ platform, platform::MouseButton, scene::MouseClick, - Action, Element, EventContext, MouseState, View, ViewContext, + Action, AnyElement, Element, EventContext, MouseState, View, ViewContext, }; use serde::Deserialize; -use crate::{ContainedText, Interactive}; +use crate::{ContainedText, Interactive, Theme}; #[derive(Clone, Deserialize, Default)] pub struct CheckboxStyle { @@ -252,3 +253,53 @@ where .constrained() .with_height(style.dimensions().y()) } + +pub struct FileName { + filename: String, + git_status: Option, + style: FileNameStyle, +} + +pub struct FileNameStyle { + template_style: LabelStyle, + git_inserted: Color, + git_modified: Color, + git_deleted: Color, +} + +impl FileName { + pub fn new(filename: String, git_status: Option, style: FileNameStyle) -> Self { + FileName { + filename, + git_status, + style, + } + } + + pub fn style>(style: I, theme: &Theme) -> FileNameStyle { + FileNameStyle { + template_style: style.into(), + git_inserted: theme.editor.diff.inserted, + git_modified: theme.editor.diff.modified, + git_deleted: theme.editor.diff.deleted, + } + } +} + +impl gpui::elements::Component for FileName { + fn render(&self, _: &mut V, _: &mut ViewContext) -> AnyElement { + // Prepare colors for git statuses + let mut filename_text_style = self.style.template_style.text.clone(); + filename_text_style.color = self + .git_status + .as_ref() + .map(|status| match status { + GitFileStatus::Added => self.style.git_inserted, + GitFileStatus::Modified => self.style.git_modified, + GitFileStatus::Conflict => self.style.git_deleted, + }) + .unwrap_or(self.style.template_style.text.color); + + Label::new(self.filename.clone(), filename_text_style).into_any() + } +} diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 319d815d17..4ec8f7553c 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -26,6 +26,7 @@ serde.workspace = true serde_json.workspace = true git2 = { version = "0.15", default-features = false, optional = true } dirs = "3.0" +take-until = "0.2.0" [dev-dependencies] tempdir.workspace = true diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index a324b21a31..f998fc319f 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -1,5 +1,7 @@ use std::path::{Path, PathBuf}; +use serde::{Deserialize, Serialize}; + lazy_static::lazy_static! { pub static ref HOME: PathBuf = dirs::home_dir().expect("failed to determine home directory"); pub static ref CONFIG_DIR: PathBuf = HOME.join(".config").join("zed"); @@ -70,3 +72,208 @@ pub fn compact(path: &Path) -> PathBuf { path.to_path_buf() } } + +/// A delimiter to use in `path_query:row_number:column_number` strings parsing. +pub const FILE_ROW_COLUMN_DELIMITER: char = ':'; + +/// A representation of a path-like string with optional row and column numbers. +/// Matching values example: `te`, `test.rs:22`, `te:22:5`, etc. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PathLikeWithPosition
<P> {
+    pub path_like: P,
+    pub row: Option<u32>,
+    // Absent if row is absent.
+    pub column: Option<u32>,
+}
+
+impl<P> PathLikeWithPosition<P>
{
+    /// Parses a string that possibly has a `:row:column` suffix.
+    /// Ignores trailing `:`s, so `test.rs:22:` is parsed as `test.rs:22`.
+    /// If any of the row/column component parsing fails, the whole string is then parsed as a path-like value.
+    pub fn parse_str<E>(
+        s: &str,
+        parse_path_like_str: impl Fn(&str) -> Result<P, E>,
+    ) -> Result<Self, E> {
+        let fallback = |fallback_str| {
+            Ok(Self {
+                path_like: parse_path_like_str(fallback_str)?,
+                row: None,
+                column: None,
+            })
+        };
+
+        match s.trim().split_once(FILE_ROW_COLUMN_DELIMITER) {
+            Some((path_like_str, maybe_row_and_col_str)) => {
+                let path_like_str = path_like_str.trim();
+                let maybe_row_and_col_str = maybe_row_and_col_str.trim();
+                if path_like_str.is_empty() {
+                    fallback(s)
+                } else if maybe_row_and_col_str.is_empty() {
+                    fallback(path_like_str)
+                } else {
+                    let (row_parse_result, maybe_col_str) =
+                        match maybe_row_and_col_str.split_once(FILE_ROW_COLUMN_DELIMITER) {
+                            Some((maybe_row_str, maybe_col_str)) => {
+                                (maybe_row_str.parse::<u32>(), maybe_col_str.trim())
+                            }
+                            None => (maybe_row_and_col_str.parse::<u32>(), ""),
+                        };
+
+                    match row_parse_result {
+                        Ok(row) => {
+                            if maybe_col_str.is_empty() {
+                                Ok(Self {
+                                    path_like: parse_path_like_str(path_like_str)?,
+                                    row: Some(row),
+                                    column: None,
+                                })
+                            } else {
+                                match maybe_col_str.parse::<u32>() {
+                                    Ok(col) => Ok(Self {
+                                        path_like: parse_path_like_str(path_like_str)?,
+                                        row: Some(row),
+                                        column: Some(col),
+                                    }),
+                                    Err(_) => fallback(s),
+                                }
+                            }
+                        }
+                        Err(_) => fallback(s),
+                    }
+                }
+            }
+            None => fallback(s),
+        }
+    }
+
+    pub fn map_path_like<P2, E>(
+        self,
+        mapping: impl FnOnce(P) -> Result<P2, E>,
+    ) -> Result<PathLikeWithPosition<P2>, E> {
+        Ok(PathLikeWithPosition {
+            path_like: mapping(self.path_like)?,
+            row: self.row,
+            column: self.column,
+        })
+    }
+
+    pub fn to_string(&self, path_like_to_string: impl Fn(&P) -> String) -> String {
+        let path_like_string = path_like_to_string(&self.path_like);
+        if let Some(row) = self.row {
+            if let Some(column) = self.column {
+                format!("{path_like_string}:{row}:{column}")
+            } else {
+                format!("{path_like_string}:{row}")
+            }
+        } else {
+            path_like_string
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    type TestPath = PathLikeWithPosition<String>;
+
+    fn parse_str(s: &str) -> TestPath {
+        TestPath::parse_str(s, |s| Ok::<_, std::convert::Infallible>(s.to_string()))
+            .expect("infallible")
+    }
+
+    #[test]
+    fn path_with_position_parsing_positive() {
+        let input_and_expected = [
+            (
+                "test_file.rs",
+                PathLikeWithPosition {
+                    path_like: "test_file.rs".to_string(),
+                    row: None,
+                    column: None,
+                },
+            ),
+            (
+                "test_file.rs:1",
+                PathLikeWithPosition {
+                    path_like: "test_file.rs".to_string(),
+                    row: Some(1),
+                    column: None,
+                },
+            ),
+            (
+                "test_file.rs:1:2",
+                PathLikeWithPosition {
+                    path_like: "test_file.rs".to_string(),
+                    row: Some(1),
+                    column: Some(2),
+                },
+            ),
+        ];
+
+        for (input, expected) in input_and_expected {
+            let actual = parse_str(input);
+            assert_eq!(
+                actual, expected,
+                "For positive case input str '{input}', got a parse mismatch"
+            );
+        }
+    }
+
+    #[test]
+    fn path_with_position_parsing_negative() {
+        for input in [
+            "test_file.rs:a",
+            "test_file.rs:a:b",
+            "test_file.rs::",
+            "test_file.rs::1",
+            "test_file.rs:1::",
+            "test_file.rs::1:2",
+            "test_file.rs:1::2",
+            "test_file.rs:1:2:",
+            "test_file.rs:1:2:3",
+        ] {
+            let actual = parse_str(input);
+            assert_eq!(
+                actual,
+                PathLikeWithPosition {
+                    path_like: input.to_string(),
+                    row: None,
+                    column: None,
+                },
+                "For negative case input str '{input}', got a parse mismatch"
+            );
+        }
+    }
+
+    // Trim off trailing `:`s for otherwise
valid input. + #[test] + fn path_with_position_parsing_special() { + let input_and_expected = [ + ( + "test_file.rs:", + PathLikeWithPosition { + path_like: "test_file.rs".to_string(), + row: None, + column: None, + }, + ), + ( + "test_file.rs:1:", + PathLikeWithPosition { + path_like: "test_file.rs".to_string(), + row: Some(1), + column: None, + }, + ), + ]; + + for (input, expected) in input_and_expected { + let actual = parse_str(input); + assert_eq!( + actual, expected, + "For special case input str '{input}', got a parse mismatch" + ); + } + } +} diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index fafd9d5d3b..9d787e1389 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -17,6 +17,8 @@ pub use backtrace::Backtrace; use futures::Future; use rand::{seq::SliceRandom, Rng}; +pub use take_until::*; + #[macro_export] macro_rules! debug_panic { ( $($fmt_arg:tt)* ) => { diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 5116391976..9879aba5c6 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -464,7 +464,6 @@ mod tests { let (_, _workspace) = cx.add_window(|cx| { Workspace::new( - Some(serialized_workspace), 0, project.clone(), Arc::new(AppState { @@ -482,6 +481,11 @@ mod tests { ) }); + cx.update(|cx| { + Workspace::load_workspace(_workspace.downgrade(), serialized_workspace, Vec::new(), cx) + }) + .await; + cx.foreground().run_until_parked(); //Should terminate } @@ -607,7 +611,6 @@ mod tests { let project = Project::test(fs, [], cx).await; let (window_id, workspace) = cx.add_window(|cx| { Workspace::new( - None, 0, project.clone(), Arc::new(AppState { diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index ac1bcf6eed..b73dfa495d 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -1,6 +1,6 @@ use crate::{ - dock::DockPosition, DockAnchor, ItemDeserializers, Member, Pane, PaneAxis, Workspace, - WorkspaceId, + dock::DockPosition, item::ItemHandle, DockAnchor, ItemDeserializers, Member, Pane, PaneAxis, + Workspace, WorkspaceId, }; use anyhow::{anyhow, Context, Result}; use async_recursion::async_recursion; @@ -97,17 +97,23 @@ impl SerializedPaneGroup { workspace_id: WorkspaceId, workspace: &WeakViewHandle, cx: &mut AsyncAppContext, - ) -> Option<(Member, Option>)> { + ) -> Option<( + Member, + Option>, + Vec>>, + )> { match self { SerializedPaneGroup::Group { axis, children } => { let mut current_active_pane = None; let mut members = Vec::new(); + let mut items = Vec::new(); for child in children { - if let Some((new_member, active_pane)) = child + if let Some((new_member, active_pane, new_items)) = child .deserialize(project, workspace_id, workspace, cx) .await { members.push(new_member); + items.extend(new_items); current_active_pane = current_active_pane.or(active_pane); } } @@ -117,7 +123,7 @@ impl SerializedPaneGroup { } if members.len() == 1 { - return Some((members.remove(0), current_active_pane)); + return Some((members.remove(0), current_active_pane, items)); } Some(( @@ -126,6 +132,7 @@ impl SerializedPaneGroup { members, }), current_active_pane, + items, )) } SerializedPaneGroup::Pane(serialized_pane) => { @@ -133,7 +140,7 @@ impl SerializedPaneGroup { .update(cx, |workspace, cx| workspace.add_pane(cx).downgrade()) .log_err()?; let active = serialized_pane.active; - serialized_pane + let new_items = serialized_pane .deserialize_to(project, &pane, workspace_id, workspace, cx) .await 
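// `deserialize_to` now returns the restored item handles, so callers can reconcile them with the paths that were requested to open.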
.log_err()?; @@ -143,7 +150,7 @@ impl SerializedPaneGroup { .log_err()? { let pane = pane.upgrade(cx)?; - Some((Member::Pane(pane.clone()), active.then(|| pane))) + Some((Member::Pane(pane.clone()), active.then(|| pane), new_items)) } else { let pane = pane.upgrade(cx)?; workspace @@ -174,7 +181,8 @@ impl SerializedPane { workspace_id: WorkspaceId, workspace: &WeakViewHandle, cx: &mut AsyncAppContext, - ) -> Result<()> { + ) -> Result>>> { + let mut items = Vec::new(); let mut active_item_index = None; for (index, item) in self.children.iter().enumerate() { let project = project.clone(); @@ -192,6 +200,8 @@ impl SerializedPane { .await .log_err(); + items.push(item_handle.clone()); + if let Some(item_handle) = item_handle { workspace.update(cx, |workspace, cx| { let pane_handle = pane_handle @@ -213,7 +223,7 @@ impl SerializedPane { })?; } - anyhow::Ok(()) + anyhow::Ok(items) } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index dcc4d017dd..d956db10b6 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -83,7 +83,7 @@ use status_bar::StatusBar; pub use status_bar::StatusItemView; use theme::{Theme, ThemeRegistry}; pub use toolbar::{ToolbarItemLocation, ToolbarItemView}; -use util::{paths, ResultExt}; +use util::{async_iife, paths, ResultExt}; pub use workspace_settings::{AutosaveSetting, DockAnchor, GitGutterSetting, WorkspaceSettings}; lazy_static! { @@ -241,7 +241,6 @@ pub fn init(app_state: Arc, cx: &mut AppContext) { }, ); cx.add_action(Workspace::toggle_sidebar_item); - cx.add_action(Workspace::focus_center); cx.add_action(|workspace: &mut Workspace, _: &ActivatePreviousPane, cx| { workspace.activate_previous_pane(cx) }); @@ -509,7 +508,6 @@ struct FollowerState { impl Workspace { pub fn new( - serialized_workspace: Option, workspace_id: WorkspaceId, project: ModelHandle, app_state: Arc, @@ -675,18 +673,6 @@ impl Workspace { this.project_remote_id_changed(project.read(cx).remote_id(), cx); cx.defer(|this, cx| this.update_window_title(cx)); - if let Some(serialized_workspace) = serialized_workspace { - cx.defer(move |_, cx| { - Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx) - }); - } else if project.read(cx).is_local() { - if settings::get_setting::(None, cx).default_dock_anchor - != DockAnchor::Expanded - { - Dock::show(&mut this, false, cx); - } - } - this } @@ -708,18 +694,15 @@ impl Workspace { ); cx.spawn(|mut cx| async move { - let mut serialized_workspace = - persistence::DB.workspace_for_roots(&abs_paths.as_slice()); + let serialized_workspace = persistence::DB.workspace_for_roots(&abs_paths.as_slice()); - let paths_to_open = serialized_workspace - .as_ref() - .map(|workspace| workspace.location.paths()) - .unwrap_or(Arc::new(abs_paths)); + let paths_to_open = Arc::new(abs_paths); // Get project paths for all of the abs_paths let mut worktree_roots: HashSet> = Default::default(); - let mut project_paths = Vec::new(); - for path in paths_to_open.iter() { + let mut project_paths: Vec<(PathBuf, Option)> = + Vec::with_capacity(paths_to_open.len()); + for path in paths_to_open.iter().cloned() { if let Some((worktree, project_entry)) = cx .update(|cx| { Workspace::project_path_for_path(project_handle.clone(), &path, true, cx) @@ -728,9 +711,9 @@ impl Workspace { .log_err() { worktree_roots.insert(worktree.read_with(&mut cx, |tree, _| tree.abs_path())); - project_paths.push(Some(project_entry)); + project_paths.push((path, Some(project_entry))); } else { - project_paths.push(None); 
+ project_paths.push((path, None)); } } @@ -750,27 +733,17 @@ impl Workspace { )) }); - let build_workspace = - |cx: &mut ViewContext, - serialized_workspace: Option| { - let mut workspace = Workspace::new( - serialized_workspace, - workspace_id, - project_handle.clone(), - app_state.clone(), - cx, - ); - (app_state.initialize_workspace)(&mut workspace, &app_state, cx); - workspace - }; + let build_workspace = |cx: &mut ViewContext| { + let mut workspace = + Workspace::new(workspace_id, project_handle.clone(), app_state.clone(), cx); + (app_state.initialize_workspace)(&mut workspace, &app_state, cx); + + workspace + }; let workspace = requesting_window_id .and_then(|window_id| { - cx.update(|cx| { - cx.replace_root_view(window_id, |cx| { - build_workspace(cx, serialized_workspace.take()) - }) - }) + cx.update(|cx| cx.replace_root_view(window_id, |cx| build_workspace(cx))) }) .unwrap_or_else(|| { let (bounds, display) = if let Some(bounds) = window_bounds_override { @@ -808,44 +781,21 @@ impl Workspace { // Use the serialized workspace to construct the new window cx.add_window( (app_state.build_window_options)(bounds, display, cx.platform().as_ref()), - |cx| build_workspace(cx, serialized_workspace), + |cx| build_workspace(cx), ) .1 }); let workspace = workspace.downgrade(); notify_if_database_failed(&workspace, &mut cx); - - // Call open path for each of the project paths - // (this will bring them to the front if they were in the serialized workspace) - debug_assert!(paths_to_open.len() == project_paths.len()); - let tasks = paths_to_open - .iter() - .cloned() - .zip(project_paths.into_iter()) - .map(|(abs_path, project_path)| { - let workspace = workspace.clone(); - cx.spawn(|mut cx| { - let fs = app_state.fs.clone(); - async move { - let project_path = project_path?; - if fs.is_file(&abs_path).await { - Some( - workspace - .update(&mut cx, |workspace, cx| { - workspace.open_path(project_path, None, true, cx) - }) - .log_err()? - .await, - ) - } else { - None - } - } - }) - }); - - let opened_items = futures::future::join_all(tasks.into_iter()).await; + let opened_items = open_items( + serialized_workspace, + &workspace, + project_paths, + app_state, + cx, + ) + .await; (workspace, opened_items) }) @@ -1136,6 +1086,8 @@ impl Workspace { visible: bool, cx: &mut ViewContext, ) -> Task, anyhow::Error>>>> { + log::info!("open paths {:?}", abs_paths); + let fs = self.app_state.fs.clone(); // Sort the paths to ensure we add worktrees for parents before their children. @@ -1432,11 +1384,6 @@ impl Workspace { cx.notify(); } - pub fn focus_center(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.focus_self(); - cx.notify(); - } - fn add_pane(&mut self, cx: &mut ViewContext) -> ViewHandle { let pane = cx.add_view(|cx| { Pane::new( @@ -2559,13 +2506,15 @@ impl Workspace { } } - fn load_from_serialized_workspace( + pub(crate) fn load_workspace( workspace: WeakViewHandle, serialized_workspace: SerializedWorkspace, + paths_to_open: Vec>, cx: &mut AppContext, - ) { + ) -> Task, anyhow::Error>>>> { cx.spawn(|mut cx| async move { - let (project, dock_pane_handle, old_center_pane) = + let result = async_iife! 
{{ + let (project, dock_pane_handle, old_center_pane) = workspace.read_with(&cx, |workspace, _| { ( workspace.project().clone(), @@ -2574,74 +2523,107 @@ impl Workspace { ) })?; - serialized_workspace - .dock_pane - .deserialize_to( - &project, - &dock_pane_handle, - serialized_workspace.id, - &workspace, - &mut cx, - ) - .await?; + let dock_items = serialized_workspace + .dock_pane + .deserialize_to( + &project, + &dock_pane_handle, + serialized_workspace.id, + &workspace, + &mut cx, + ) + .await?; - // Traverse the splits tree and add to things - let center_group = serialized_workspace - .center_group - .deserialize(&project, serialized_workspace.id, &workspace, &mut cx) - .await; - - // Remove old panes from workspace panes list - workspace.update(&mut cx, |workspace, cx| { - if let Some((center_group, active_pane)) = center_group { - workspace.remove_panes(workspace.center.root.clone(), cx); - - // Swap workspace center group - workspace.center = PaneGroup::with_root(center_group); - - // Change the focus to the workspace first so that we retrigger focus in on the pane. - cx.focus_self(); - - if let Some(active_pane) = active_pane { - cx.focus(&active_pane); - } else { - cx.focus(workspace.panes.last().unwrap()); - } - } else { - let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx)); - if let Some(old_center_handle) = old_center_handle { - cx.focus(&old_center_handle) - } else { - cx.focus_self() - } + let mut center_items = None; + let mut center_group = None; + // Traverse the splits tree and add to things + if let Some((group, active_pane, items)) = serialized_workspace + .center_group + .deserialize(&project, serialized_workspace.id, &workspace, &mut cx) + .await { + center_items = Some(items); + center_group = Some((group, active_pane)) } - if workspace.left_sidebar().read(cx).is_open() - != serialized_workspace.left_sidebar_open - { - workspace.toggle_sidebar(SidebarSide::Left, cx); - } + let resulting_list = cx.read(|cx| { + let mut opened_items = center_items + .unwrap_or_default() + .into_iter() + .chain(dock_items.into_iter()) + .filter_map(|item| { + let item = item?; + let project_path = item.project_path(cx)?; + Some((project_path, item)) + }) + .collect::>(); - // Note that without after_window, the focus_self() and - // the focus the dock generates start generating alternating - // focus due to the deferred execution each triggering each other - cx.after_window_update(move |workspace, cx| { - Dock::set_dock_position( - workspace, - serialized_workspace.dock_position, - false, - cx, - ); + paths_to_open + .into_iter() + .map(|path_to_open| { + path_to_open.map(|path_to_open| { + Ok(opened_items.remove(&path_to_open)) + }) + .transpose() + .map(|item| item.flatten()) + .transpose() + }) + .collect::>() }); - cx.notify(); - })?; + // Remove old panes from workspace panes list + workspace.update(&mut cx, |workspace, cx| { + if let Some((center_group, active_pane)) = center_group { + workspace.remove_panes(workspace.center.root.clone(), cx); - // Serialize ourself to make sure our timestamps and any pane / item changes are replicated - workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?; - anyhow::Ok(()) + // Swap workspace center group + workspace.center = PaneGroup::with_root(center_group); + + // Change the focus to the workspace first so that we retrigger focus in on the pane. 
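+ // If no center group was deserialized, the fallback branch below restores focus to the old center pane, or to the workspace itself.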
+ cx.focus_self(); + + if let Some(active_pane) = active_pane { + cx.focus(&active_pane); + } else { + cx.focus(workspace.panes.last().unwrap()); + } + } else { + let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx)); + if let Some(old_center_handle) = old_center_handle { + cx.focus(&old_center_handle) + } else { + cx.focus_self() + } + } + + if workspace.left_sidebar().read(cx).is_open() + != serialized_workspace.left_sidebar_open + { + workspace.toggle_sidebar(SidebarSide::Left, cx); + } + + // Note that without after_window, the focus_self() and + // the focus the dock generates start generating alternating + // focus due to the deferred execution each triggering each other + cx.after_window_update(move |workspace, cx| { + Dock::set_dock_position( + workspace, + serialized_workspace.dock_position, + false, + cx, + ); + }); + + cx.notify(); + })?; + + // Serialize ourself to make sure our timestamps and any pane / item changes are replicated + workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?; + + Ok::<_, anyhow::Error>(resulting_list) + }}; + + result.await.unwrap_or_default() }) - .detach_and_log_err(cx); } #[cfg(any(test, feature = "test-support"))] @@ -2657,10 +2639,99 @@ impl Workspace { dock_default_item_factory: |_, _| None, background_actions: || &[], }); - Self::new(None, 0, project, app_state, cx) + Self::new(0, project, app_state, cx) } } +async fn open_items( + serialized_workspace: Option, + workspace: &WeakViewHandle, + mut project_paths_to_open: Vec<(PathBuf, Option)>, + app_state: Arc, + mut cx: AsyncAppContext, +) -> Vec>>> { + let mut opened_items = Vec::with_capacity(project_paths_to_open.len()); + + if let Some(serialized_workspace) = serialized_workspace { + let workspace = workspace.clone(); + let restored_items = cx + .update(|cx| { + Workspace::load_workspace( + workspace, + serialized_workspace, + project_paths_to_open + .iter() + .map(|(_, project_path)| project_path) + .cloned() + .collect(), + cx, + ) + }) + .await; + + let restored_project_paths = cx.read(|cx| { + restored_items + .iter() + .filter_map(|item| item.as_ref()?.as_ref().ok()?.project_path(cx)) + .collect::>() + }); + + opened_items = restored_items; + project_paths_to_open + .iter_mut() + .for_each(|(_, project_path)| { + if let Some(project_path_to_open) = project_path { + if restored_project_paths.contains(project_path_to_open) { + *project_path = None; + } + } + }); + } else { + for _ in 0..project_paths_to_open.len() { + opened_items.push(None); + } + } + assert!(opened_items.len() == project_paths_to_open.len()); + + let tasks = + project_paths_to_open + .into_iter() + .enumerate() + .map(|(i, (abs_path, project_path))| { + let workspace = workspace.clone(); + cx.spawn(|mut cx| { + let fs = app_state.fs.clone(); + async move { + let file_project_path = project_path?; + if fs.is_file(&abs_path).await { + Some(( + i, + workspace + .update(&mut cx, |workspace, cx| { + workspace.open_path(file_project_path, None, true, cx) + }) + .log_err()? 
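// `open_path` returns a task; the enumerate index captured above keeps each result aligned with its slot in `opened_items`.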
+ .await, + )) + } else { + None + } + } + }) + }); + + for maybe_opened_path in futures::future::join_all(tasks.into_iter()) + .await + .into_iter() + { + if let Some((i, path_open_result)) = maybe_opened_path { + opened_items[i] = Some(path_open_result); + } + } + + opened_items +} + fn notify_if_database_failed(workspace: &WeakViewHandle, cx: &mut AsyncAppContext) { const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml"; @@ -2900,8 +2971,6 @@ pub fn open_paths( Vec, anyhow::Error>>>, )>, > { - log::info!("open paths {:?}", abs_paths); - let app_state = app_state.clone(); let abs_paths = abs_paths.to_vec(); cx.spawn(|mut cx| async move { @@ -3031,8 +3100,7 @@ pub fn join_remote_project( let (_, workspace) = cx.add_window( (app_state.build_window_options)(None, None, cx.platform().as_ref()), |cx| { - let mut workspace = - Workspace::new(Default::default(), 0, project, app_state.clone(), cx); + let mut workspace = Workspace::new(0, project, app_state.clone(), cx); (app_state.initialize_workspace)(&mut workspace, &app_state, cx); workspace }, diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d937ce7ffe..95f8241dae 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.86.0" +version = "0.87.0" publish = false [lib] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3466daebe9..b290460cca 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -6,18 +6,18 @@ use assets::Assets; use backtrace::Backtrace; use cli::{ ipc::{self, IpcSender}, - CliRequest, CliResponse, IpcHandshake, + CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME, }; use client::{self, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN}; use db::kvp::KEY_VALUE_STORE; -use editor::Editor; +use editor::{scroll::autoscroll::Autoscroll, Editor}; use futures::{ channel::{mpsc, oneshot}, FutureExt, SinkExt, StreamExt, }; use gpui::{Action, App, AppContext, AssetSource, AsyncAppContext, Task, ViewContext}; use isahc::{config::Configurable, Request}; -use language::LanguageRegistry; +use language::{LanguageRegistry, Point}; use log::LevelFilter; use node_runtime::NodeRuntime; use parking_lot::Mutex; @@ -30,20 +30,28 @@ use settings::{ use simplelog::ConfigBuilder; use smol::process::Command; use std::{ + collections::HashMap, env, ffi::OsStr, fs::OpenOptions, io::Write as _, os::unix::prelude::OsStrExt, panic, - path::PathBuf, + path::{Path, PathBuf}, str, - sync::{Arc, Weak}, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, Weak, + }, thread, time::Duration, }; +use sum_tree::Bias; use terminal_view::{get_working_directory, TerminalSettings, TerminalView}; -use util::http::{self, HttpClient}; +use util::{ + http::{self, HttpClient}, + paths::PathLikeWithPosition, +}; use welcome::{show_welcome_experience, FIRST_OPEN}; use fs::RealFs; @@ -90,29 +98,17 @@ fn main() { }; let (cli_connections_tx, mut cli_connections_rx) = mpsc::unbounded(); + let cli_connections_tx = Arc::new(cli_connections_tx); let (open_paths_tx, mut open_paths_rx) = mpsc::unbounded(); + let open_paths_tx = Arc::new(open_paths_tx); + let urls_callback_triggered = Arc::new(AtomicBool::new(false)); + + let callback_cli_connections_tx = Arc::clone(&cli_connections_tx); + let callback_open_paths_tx = Arc::clone(&open_paths_tx); + let 
callback_urls_callback_triggered = Arc::clone(&urls_callback_triggered); app.on_open_urls(move |urls, _| { - if let Some(server_name) = urls.first().and_then(|url| url.strip_prefix("zed-cli://")) { - if let Some(cli_connection) = connect_to_cli(server_name).log_err() { - cli_connections_tx - .unbounded_send(cli_connection) - .map_err(|_| anyhow!("no listener for cli connections")) - .log_err(); - }; - } else { - let paths: Vec<_> = urls - .iter() - .flat_map(|url| url.strip_prefix("file://")) - .map(|url| { - let decoded = urlencoding::decode_binary(url.as_bytes()); - PathBuf::from(OsStr::from_bytes(decoded.as_ref())) - }) - .collect(); - open_paths_tx - .unbounded_send(paths) - .map_err(|_| anyhow!("no listener for open urls requests")) - .log_err(); - } + callback_urls_callback_triggered.store(true, Ordering::Release); + open_urls(urls, &callback_cli_connections_tx, &callback_open_paths_tx); }) .on_reopen(move |cx| { if cx.has_global::>() { @@ -228,6 +224,14 @@ fn main() { workspace::open_paths(&paths, &app_state, None, cx).detach_and_log_err(cx); } } else { + // TODO Development mode that forces the CLI mode usually runs Zed binary as is instead + // of an *app, hence gets no specific callbacks run. Emulate them here, if needed. + if std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_some() + && !urls_callback_triggered.load(Ordering::Acquire) + { + open_urls(collect_url_args(), &cli_connections_tx, &open_paths_tx) + } + if let Ok(Some(connection)) = cli_connections_rx.try_next() { cx.spawn(|cx| handle_cli_connection(connection, app_state.clone(), cx)) .detach(); @@ -278,6 +282,37 @@ fn main() { }); } +fn open_urls( + urls: Vec, + cli_connections_tx: &mpsc::UnboundedSender<( + mpsc::Receiver, + IpcSender, + )>, + open_paths_tx: &mpsc::UnboundedSender>, +) { + if let Some(server_name) = urls.first().and_then(|url| url.strip_prefix("zed-cli://")) { + if let Some(cli_connection) = connect_to_cli(server_name).log_err() { + cli_connections_tx + .unbounded_send(cli_connection) + .map_err(|_| anyhow!("no listener for cli connections")) + .log_err(); + }; + } else { + let paths: Vec<_> = urls + .iter() + .flat_map(|url| url.strip_prefix("file://")) + .map(|url| { + let decoded = urlencoding::decode_binary(url.as_bytes()); + PathBuf::from(OsStr::from_bytes(decoded.as_ref())) + }) + .collect(); + open_paths_tx + .unbounded_send(paths) + .map_err(|_| anyhow!("no listener for open urls requests")) + .log_err(); + } +} + async fn restore_or_create_workspace(app_state: &Arc, mut cx: AsyncAppContext) { if let Some(location) = workspace::last_opened_workspace_paths().await { cx.update(|cx| workspace::open_paths(location.paths().as_ref(), app_state, None, cx)) @@ -508,7 +543,8 @@ async fn load_login_shell_environment() -> Result<()> { } fn stdout_is_a_pty() -> bool { - unsafe { libc::isatty(libc::STDOUT_FILENO as i32) != 0 } + std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() + && unsafe { libc::isatty(libc::STDOUT_FILENO as i32) != 0 } } fn collect_path_args() -> Vec { @@ -521,7 +557,11 @@ fn collect_path_args() -> Vec { None } }) - .collect::>() + .collect() +} + +fn collect_url_args() -> Vec { + env::args().skip(1).collect() } fn load_embedded_fonts(app: &App) { @@ -616,13 +656,38 @@ async fn handle_cli_connection( if let Some(request) = requests.next().await { match request { CliRequest::Open { paths, wait } => { + let mut caret_positions = HashMap::new(); + let paths = if paths.is_empty() { workspace::last_opened_workspace_paths() .await .map(|location| location.paths().to_vec()) - 
.unwrap_or(paths) + .unwrap_or_default() } else { paths + .into_iter() + .filter_map(|path_with_position_string| { + let path_with_position = PathLikeWithPosition::parse_str( + &path_with_position_string, + |path_str| { + Ok::<_, std::convert::Infallible>( + Path::new(path_str).to_path_buf(), + ) + }, + ) + .expect("Infallible"); + let path = path_with_position.path_like; + if let Some(row) = path_with_position.row { + if path.is_file() { + let row = row.saturating_sub(1); + let col = + path_with_position.column.unwrap_or(0).saturating_sub(1); + caret_positions.insert(path.clone(), Point::new(row, col)); + } + } + Some(path) + }) + .collect() }; let mut errored = false; @@ -632,11 +697,32 @@ async fn handle_cli_connection( { Ok((workspace, items)) => { let mut item_release_futures = Vec::new(); - cx.update(|cx| { - for (item, path) in items.into_iter().zip(&paths) { - match item { - Some(Ok(item)) => { - let released = oneshot::channel(); + + for (item, path) in items.into_iter().zip(&paths) { + match item { + Some(Ok(item)) => { + if let Some(point) = caret_positions.remove(path) { + if let Some(active_editor) = item.downcast::() { + active_editor + .downgrade() + .update(&mut cx, |editor, cx| { + let snapshot = + editor.snapshot(cx).display_snapshot; + let point = snapshot + .buffer_snapshot + .clip_point(point, Bias::Left); + editor.change_selections( + Some(Autoscroll::center()), + cx, + |s| s.select_ranges([point..point]), + ); + }) + .log_err(); + } + } + + let released = oneshot::channel(); + cx.update(|cx| { item.on_release( cx, Box::new(move |_| { @@ -644,23 +730,20 @@ async fn handle_cli_connection( }), ) .detach(); - item_release_futures.push(released.1); - } - Some(Err(err)) => { - responses - .send(CliResponse::Stderr { - message: format!( - "error opening {:?}: {}", - path, err - ), - }) - .log_err(); - errored = true; - } - None => {} + }); + item_release_futures.push(released.1); } + Some(Err(err)) => { + responses + .send(CliResponse::Stderr { + message: format!("error opening {:?}: {}", path, err), + }) + .log_err(); + errored = true; + } + None => {} } - }); + } if wait { let background = cx.background(); diff --git a/script/clear-target-dir-if-larger-than b/script/clear-target-dir-if-larger-than new file mode 100755 index 0000000000..59c07f77f7 --- /dev/null +++ b/script/clear-target-dir-if-larger-than @@ -0,0 +1,20 @@ +#!/bin/bash + +set -eu + +if [[ $# < 1 ]]; then + echo "usage: $0 " + exit 1 +fi + +max_size_gb=$1 + +current_size=$(du -s target | cut -f1) +current_size_gb=$(expr ${current_size} / 1024 / 1024) + +echo "target directory size: ${current_size_gb}gb. max size: ${max_size_gb}gb" + +if [[ ${current_size_gb} -gt ${max_size_gb} ]]; then + echo "clearing target directory" + rm -rf target +fi
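
To close the loop on the CLI flow above: `PathLikeWithPosition` (added in `crates/util/src/paths.rs` in this diff) is what turns an argument like `zed src/main.rs:12:7` into a path plus caret target. A small usage sketch, assuming the `util` crate is available as a dependency:

```rust
use std::path::PathBuf;
use util::paths::PathLikeWithPosition;

fn main() {
    // "src/main.rs:12:7" splits into a path plus 1-based row and column.
    let parsed = PathLikeWithPosition::parse_str("src/main.rs:12:7", |s| {
        Ok::<_, std::convert::Infallible>(PathBuf::from(s))
    })
    .expect("infallible");

    assert_eq!(parsed.path_like, PathBuf::from("src/main.rs"));
    assert_eq!(parsed.row, Some(12));
    assert_eq!(parsed.column, Some(7));

    // And it round-trips back to the `path:row:column` form.
    assert_eq!(
        parsed.to_string(|p| p.display().to_string()),
        "src/main.rs:12:7"
    );
}
```

Note that the CLI handler above converts these 1-based positions to editor coordinates with `saturating_sub(1)` before clipping them to a buffer `Point`, since editor rows and columns are zero-based.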