From d85ecc83023e36ffe76accfada8945b627579410 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 13 Sep 2022 18:24:40 -0700 Subject: [PATCH 001/140] Add collab APIs for new signup flow Co-authored-by: Nathan Sobo --- .../20220913211150_create_signups.sql | 25 ++ crates/collab/src/api.rs | 42 ++- crates/collab/src/db.rs | 272 ++++++++++++++++++ 3 files changed, 338 insertions(+), 1 deletion(-) create mode 100644 crates/collab/migrations/20220913211150_create_signups.sql diff --git a/crates/collab/migrations/20220913211150_create_signups.sql b/crates/collab/migrations/20220913211150_create_signups.sql new file mode 100644 index 0000000000..783cdf8c0a --- /dev/null +++ b/crates/collab/migrations/20220913211150_create_signups.sql @@ -0,0 +1,25 @@ +CREATE SEQUENCE metrics_id_seq; + +CREATE TABLE IF NOT EXISTS "signups" ( + "id" SERIAL PRIMARY KEY NOT NULL, + "email_address" VARCHAR NOT NULL, + "email_confirmation_code" VARCHAR(64) NOT NULL, + "email_confirmation_sent" BOOLEAN NOT NULL, + "metrics_id" INTEGER NOT NULL DEFAULT nextval('metrics_id_seq'), + "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "user_id" INTEGER REFERENCES users (id), + + "platform_mac" BOOLEAN NOT NULL, + "platform_linux" BOOLEAN NOT NULL, + "platform_windows" BOOLEAN NOT NULL, + "platform_unknown" BOOLEAN NOT NULL, + + "editor_features" VARCHAR[] NOT NULL, + "programming_languages" VARCHAR[] NOT NULL +); + +CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address"); +CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); + +ALTER TABLE "users" + ADD "metrics_id" INTEGER DEFAULT nextval('metrics_id_seq'); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index eafeae0864..42825d25ff 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -1,6 +1,6 @@ use crate::{ auth, - db::{ProjectId, User, UserId}, + db::{ProjectId, Signup, SignupInvite, SignupRedemption, User, UserId}, rpc::{self, ResultExt}, AppState, Error, Result, }; @@ -45,6 +45,10 @@ pub fn routes(rpc_server: &Arc, state: Arc) -> Router Result> { Ok(Json(app.db.get_user_for_invite_code(&code).await?)) } + +async fn create_signup( + Json(params): Json, + Extension(app): Extension>, +) -> Result<()> { + app.db.create_signup(params).await?; + Ok(()) +} + +async fn redeem_signup( + Json(redemption): Json, + Extension(app): Extension>, +) -> Result<()> { + app.db.redeem_signup(redemption).await?; + Ok(()) +} + +async fn record_signup_invites_sent( + Json(params): Json>, + Extension(app): Extension>, +) -> Result<()> { + app.db.record_signup_invites_sent(¶ms).await?; + Ok(()) +} + +#[derive(Deserialize)] +pub struct GetSignupInvitesParams { + pub count: usize, +} + +async fn get_signup_invites( + Query(params): Query, + Extension(app): Extension>, +) -> Result>> { + Ok(Json(app.db.get_signup_invites(params.count).await?)) +} diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index eeb598413e..86dca6de98 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -30,6 +30,11 @@ pub trait Db: Send + Sync { async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()>; async fn destroy_user(&self, id: UserId) -> Result<()>; + async fn create_signup(&self, signup: Signup) -> Result<()>; + async fn get_signup_invites(&self, count: usize) -> Result>; + async fn record_signup_invites_sent(&self, signups: &[SignupInvite]) -> Result<()>; + async fn redeem_signup(&self, redemption: SignupRedemption) -> Result; + 
async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()>; async fn get_invite_code_for_user(&self, id: UserId) -> Result>; async fn get_user_for_invite_code(&self, code: &str) -> Result; @@ -333,6 +338,125 @@ impl Db for PostgresDb { .map(drop)?) } + // signups + + async fn create_signup(&self, signup: Signup) -> Result<()> { + sqlx::query( + " + INSERT INTO signups + ( + email_address, + email_confirmation_code, + email_confirmation_sent, + platform_linux, + platform_mac, + platform_windows, + platform_unknown, + editor_features, + programming_languages + ) + VALUES + ($1, $2, 'f', $3, $4, $5, 'f', $6, $7) + ", + ) + .bind(&signup.email_address) + .bind(&random_email_confirmation_code()) + .bind(&signup.platform_linux) + .bind(&signup.platform_mac) + .bind(&signup.platform_windows) + .bind(&signup.editor_features) + .bind(&signup.programming_languages) + .execute(&self.pool) + .await?; + Ok(()) + } + + async fn get_signup_invites(&self, count: usize) -> Result> { + Ok(sqlx::query_as( + " + SELECT + email_address, email_confirmation_code + FROM signups + WHERE + NOT email_confirmation_sent AND + platform_mac + LIMIT $1 + ", + ) + .bind(count as i32) + .fetch_all(&self.pool) + .await?) + } + + async fn record_signup_invites_sent(&self, signups: &[SignupInvite]) -> Result<()> { + sqlx::query( + " + UPDATE signups + SET email_confirmation_sent = 't' + WHERE email_address = ANY ($1) + ", + ) + .bind( + &signups + .iter() + .map(|s| s.email_address.as_str()) + .collect::>(), + ) + .execute(&self.pool) + .await?; + Ok(()) + } + + async fn redeem_signup(&self, redemption: SignupRedemption) -> Result { + let mut tx = self.pool.begin().await?; + let signup_id: i32 = sqlx::query_scalar( + " + SELECT id + FROM signups + WHERE + email_address = $1 AND + email_confirmation_code = $2 AND + email_confirmation_sent AND + user_id is NULL + ", + ) + .bind(&redemption.email_address) + .bind(&redemption.email_confirmation_code) + .fetch_one(&mut tx) + .await?; + + let user_id: i32 = sqlx::query_scalar( + " + INSERT INTO users + (email_address, github_login, admin, invite_count, invite_code) + VALUES + ($1, $2, 'f', $3, $4) + RETURNING id + ", + ) + .bind(&redemption.email_address) + .bind(&redemption.github_login) + .bind(&redemption.invite_count) + .bind(random_invite_code()) + .fetch_one(&mut tx) + .await?; + + sqlx::query( + " + UPDATE signups + SET user_id = $1 + WHERE id = $2 + ", + ) + .bind(&user_id) + .bind(&signup_id) + .execute(&mut tx) + .await?; + + tx.commit().await?; + Ok(UserId(user_id)) + } + // invite codes async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()> { @@ -1445,6 +1569,30 @@ pub struct IncomingContactRequest { pub should_notify: bool, } +#[derive(Clone, Deserialize)] +pub struct Signup { + pub email_address: String, + pub platform_mac: bool, + pub platform_windows: bool, + pub platform_linux: bool, + pub editor_features: Vec, + pub programming_languages: Vec, +} + +#[derive(FromRow, PartialEq, Debug, Serialize, Deserialize)] +pub struct SignupInvite { + pub email_address: String, + pub email_confirmation_code: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct SignupRedemption { + pub email_address: String, + pub email_confirmation_code: String, + pub github_login: String, + pub invite_count: i32, +} + fn fuzzy_like_string(string: &str) -> String { let mut result = String::with_capacity(string.len() * 2 + 1); for c in string.chars() { @@ -1461,6 +1609,10 @@ fn random_invite_code() -> String { nanoid::nanoid!(16) } +fn 
random_email_confirmation_code() -> String { + nanoid::nanoid!(64) +} + #[cfg(test)] pub mod tests { use super::*; @@ -2400,6 +2552,105 @@ pub mod tests { ); } + #[tokio::test(flavor = "multi_thread")] + async fn test_signups() { + let postgres = TestDb::postgres().await; + let db = postgres.db(); + + // people sign up on the waitlist + for i in 0..8 { + db.create_signup(Signup { + email_address: format!("person-{i}@example.com"), + platform_mac: true, + platform_linux: true, + platform_windows: false, + editor_features: vec!["speed".into()], + programming_languages: vec!["rust".into(), "c".into()], + }) + .await + .unwrap(); + } + + // retrieve the next batch of signup emails to send + let signups_batch1 = db.get_signup_invites(3).await.unwrap(); + let addresses = signups_batch1 + .iter() + .map(|s| &s.email_address) + .collect::>(); + assert_eq!( + addresses, + &[ + "person-0@example.com", + "person-1@example.com", + "person-2@example.com" + ] + ); + assert_ne!( + signups_batch1[0].email_confirmation_code, + signups_batch1[1].email_confirmation_code + ); + + // the waitlist isn't updated until we record that the emails + // were successfully sent. + let signups_batch = db.get_signup_invites(3).await.unwrap(); + assert_eq!(signups_batch, signups_batch1); + + // once the emails go out, we can retrieve the next batch + // of signups. + db.record_signup_invites_sent(&signups_batch1) + .await + .unwrap(); + let signups_batch2 = db.get_signup_invites(3).await.unwrap(); + let addresses = signups_batch2 + .iter() + .map(|s| &s.email_address) + .collect::>(); + assert_eq!( + addresses, + &[ + "person-3@example.com", + "person-4@example.com", + "person-5@example.com" + ] + ); + + // user completes the signup process by providing their + // github account. + let user_id = db + .redeem_signup(SignupRedemption { + email_address: signups_batch1[0].email_address.clone(), + email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), + github_login: "person-0".into(), + invite_count: 5, + }) + .await + .unwrap(); + let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); + assert_eq!(user.github_login, "person-0"); + assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); + assert_eq!(user.invite_count, 5); + + // cannot redeem the same signup again. + db.redeem_signup(SignupRedemption { + email_address: signups_batch1[0].email_address.clone(), + email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), + github_login: "some-other-github_account".into(), + invite_count: 5, + }) + .await + .unwrap_err(); + + // cannot redeem a signup with the wrong confirmation code. 
+ db.redeem_signup(SignupRedemption { + email_address: signups_batch1[1].email_address.clone(), + email_confirmation_code: "the-wrong-code".to_string(), + github_login: "person-1".into(), + invite_count: 5, + }) + .await + .unwrap_err(); + } + pub struct TestDb { pub db: Option>, pub url: String, @@ -2586,6 +2837,27 @@ pub mod tests { unimplemented!() } + // signups + + async fn create_signup(&self, _signup: Signup) -> Result<()> { + unimplemented!() + } + + async fn get_signup_invites(&self, _count: usize) -> Result> { + unimplemented!() + } + + async fn record_signup_invites_sent(&self, _signups: &[SignupInvite]) -> Result<()> { + unimplemented!() + } + + async fn redeem_signup( + &self, + _redemption: SignupRedemption, + ) -> Result { + unimplemented!() + } + // invite codes async fn set_invite_count(&self, _id: UserId, _count: u32) -> Result<()> { From f8c7c925af408b223def9b44b62487fe8ab371b1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 16 Sep 2022 12:25:42 -0700 Subject: [PATCH 002/140] Update APIs and DB interactions to reflect email confirmation step --- .../20220913211150_create_signups.down.sql | 6 + ...l => 20220913211150_create_signups.up.sql} | 6 +- crates/collab/src/api.rs | 106 +- crates/collab/src/db.rs | 1448 +++-------------- crates/collab/src/db_tests.rs | 1071 ++++++++++++ crates/collab/src/integration_tests.rs | 14 +- crates/collab/src/main.rs | 2 + crates/collab/src/rpc.rs | 41 +- 8 files changed, 1409 insertions(+), 1285 deletions(-) create mode 100644 crates/collab/migrations/20220913211150_create_signups.down.sql rename crates/collab/migrations/{20220913211150_create_signups.sql => 20220913211150_create_signups.up.sql} (81%) create mode 100644 crates/collab/src/db_tests.rs diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql new file mode 100644 index 0000000000..6ef51842c9 --- /dev/null +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -0,0 +1,6 @@ +DROP TABLE signups; + +ALTER TABLE users + DROP COLUMN metrics_id; + +DROP SEQUENCE metrics_id_seq; diff --git a/crates/collab/migrations/20220913211150_create_signups.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql similarity index 81% rename from crates/collab/migrations/20220913211150_create_signups.sql rename to crates/collab/migrations/20220913211150_create_signups.up.sql index 783cdf8c0a..9acb313fd6 100644 --- a/crates/collab/migrations/20220913211150_create_signups.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -8,16 +8,18 @@ CREATE TABLE IF NOT EXISTS "signups" ( "metrics_id" INTEGER NOT NULL DEFAULT nextval('metrics_id_seq'), "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "user_id" INTEGER REFERENCES users (id), + "inviting_user_id" INTEGER REFERENCES users (id), "platform_mac" BOOLEAN NOT NULL, "platform_linux" BOOLEAN NOT NULL, "platform_windows" BOOLEAN NOT NULL, "platform_unknown" BOOLEAN NOT NULL, - "editor_features" VARCHAR[] NOT NULL, - "programming_languages" VARCHAR[] NOT NULL + "editor_features" VARCHAR[], + "programming_languages" VARCHAR[] ); +CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address"); CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 42825d25ff..26521ceb27 100644 --- 
a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -1,6 +1,6 @@ use crate::{ auth, - db::{ProjectId, Signup, SignupInvite, SignupRedemption, User, UserId}, + db::{Invite, NewUserParams, ProjectId, Signup, User, UserId}, rpc::{self, ResultExt}, AppState, Error, Result, }; @@ -46,9 +46,9 @@ pub fn routes(rpc_server: &Arc, state: Arc) -> Router, - email_address: Option, - admin: bool, + email_address: String, + email_confirmation_code: Option, + invite_count: i32, } async fn create_user( @@ -123,29 +123,38 @@ async fn create_user( Extension(app): Extension>, Extension(rpc_server): Extension>, ) -> Result> { - let user_id = if let Some(invite_code) = params.invite_code { - let invitee_id = app - .db - .redeem_invite_code( - &invite_code, - ¶ms.github_login, - params.email_address.as_deref(), + let (user_id, inviter_id) = + // Creating a user via the normal signup process + if let Some(email_confirmation_code) = params.email_confirmation_code { + app.db + .create_user_from_invite( + &Invite { + email_address: params.email_address, + email_confirmation_code, + }, + NewUserParams { + github_login: params.github_login, + invite_count: params.invite_count, + }, + ) + .await? + } + // Creating a user as an admin + else { + ( + app.db + .create_user(¶ms.github_login, ¶ms.email_address, false) + .await?, + None, ) - .await?; + }; + + if let Some(inviter_id) = inviter_id { rpc_server - .invite_code_redeemed(&invite_code, invitee_id) + .invite_code_redeemed(inviter_id, user_id) .await .trace_err(); - invitee_id - } else { - app.db - .create_user( - ¶ms.github_login, - params.email_address.as_deref(), - params.admin, - ) - .await? - }; + } let user = app .db @@ -175,7 +184,9 @@ async fn update_user( } if let Some(invite_count) = params.invite_count { - app.db.set_invite_count(user_id, invite_count).await?; + app.db + .set_invite_count_for_user(user_id, invite_count) + .await?; rpc_server.invite_count_updated(user_id).await.trace_err(); } @@ -428,30 +439,39 @@ async fn create_signup( Ok(()) } -async fn redeem_signup( - Json(redemption): Json, - Extension(app): Extension>, -) -> Result<()> { - app.db.redeem_signup(redemption).await?; - Ok(()) +#[derive(Deserialize)] +pub struct CreateInviteFromCodeParams { + invite_code: String, + email_address: String, } -async fn record_signup_invites_sent( - Json(params): Json>, +async fn create_invite_from_code( + Json(params): Json, Extension(app): Extension>, -) -> Result<()> { - app.db.record_signup_invites_sent(¶ms).await?; - Ok(()) +) -> Result> { + Ok(Json( + app.db + .create_invite_from_code(¶ms.invite_code, ¶ms.email_address) + .await?, + )) } #[derive(Deserialize)] -pub struct GetSignupInvitesParams { +pub struct GetUnsentInvitesParams { pub count: usize, } -async fn get_signup_invites( - Query(params): Query, +async fn get_unsent_invites( + Query(params): Query, Extension(app): Extension>, -) -> Result>> { - Ok(Json(app.db.get_signup_invites(params.count).await?)) +) -> Result>> { + Ok(Json(app.db.get_unsent_invites(params.count).await?)) +} + +async fn record_sent_invites( + Json(params): Json>, + Extension(app): Extension>, +) -> Result<()> { + app.db.record_sent_invites(¶ms).await?; + Ok(()) } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 86dca6de98..9c1ab84570 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -1,5 +1,3 @@ -use std::{cmp, ops::Range, time::Duration}; - use crate::{Error, Result}; use anyhow::{anyhow, Context}; use async_trait::async_trait; @@ -9,6 +7,7 @@ use futures::StreamExt; use 
serde::{Deserialize, Serialize}; pub use sqlx::postgres::PgPoolOptions as DbOptions; use sqlx::{types::Uuid, FromRow, QueryBuilder, Row}; +use std::{cmp, ops::Range, time::Duration}; use time::{OffsetDateTime, PrimitiveDateTime}; #[async_trait] @@ -16,7 +15,7 @@ pub trait Db: Send + Sync { async fn create_user( &self, github_login: &str, - email_address: Option<&str>, + email_address: &str, admin: bool, ) -> Result; async fn get_all_users(&self, page: u32, limit: u32) -> Result>; @@ -30,20 +29,19 @@ pub trait Db: Send + Sync { async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()>; async fn destroy_user(&self, id: UserId) -> Result<()>; - async fn create_signup(&self, signup: Signup) -> Result<()>; - async fn get_signup_invites(&self, count: usize) -> Result>; - async fn record_signup_invites_sent(&self, signups: &[SignupInvite]) -> Result<()>; - async fn redeem_signup(&self, redemption: SignupRedemption) -> Result; - - async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()>; + async fn set_invite_count_for_user(&self, id: UserId, count: u32) -> Result<()>; async fn get_invite_code_for_user(&self, id: UserId) -> Result>; async fn get_user_for_invite_code(&self, code: &str) -> Result; - async fn redeem_invite_code( + async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result; + + async fn create_signup(&self, signup: Signup) -> Result<()>; + async fn get_unsent_invites(&self, count: usize) -> Result>; + async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()>; + async fn create_user_from_invite( &self, - code: &str, - login: &str, - email_address: Option<&str>, - ) -> Result; + invite: &Invite, + user: NewUserParams, + ) -> Result<(UserId, Option)>; /// Registers a new project for the given user. 
async fn register_project(&self, host_user_id: UserId) -> Result; @@ -120,8 +118,8 @@ pub trait Db: Send + Sync { max_access_token_count: usize, ) -> Result<()>; async fn get_access_token_hashes(&self, user_id: UserId) -> Result>; - #[cfg(any(test, feature = "seed-support"))] + #[cfg(any(test, feature = "seed-support"))] async fn find_org_by_slug(&self, slug: &str) -> Result>; #[cfg(any(test, feature = "seed-support"))] async fn create_org(&self, name: &str, slug: &str) -> Result; @@ -135,6 +133,7 @@ pub trait Db: Send + Sync { async fn get_accessible_channels(&self, user_id: UserId) -> Result>; async fn can_user_access_channel(&self, user_id: UserId, channel_id: ChannelId) -> Result; + #[cfg(any(test, feature = "seed-support"))] async fn add_channel_member( &self, @@ -156,10 +155,12 @@ pub trait Db: Send + Sync { count: usize, before_id: Option, ) -> Result>; + #[cfg(test)] async fn teardown(&self, url: &str); + #[cfg(test)] - fn as_fake(&self) -> Option<&tests::FakeDb>; + fn as_fake(&self) -> Option<&FakeDb>; } pub struct PostgresDb { @@ -175,6 +176,18 @@ impl PostgresDb { .context("failed to connect to postgres database")?; Ok(Self { pool }) } + + pub fn fuzzy_like_string(string: &str) -> String { + let mut result = String::with_capacity(string.len() * 2 + 1); + for c in string.chars() { + if c.is_alphanumeric() { + result.push('%'); + result.push(c); + } + } + result.push('%'); + result + } } #[async_trait] @@ -184,7 +197,7 @@ impl Db for PostgresDb { async fn create_user( &self, github_login: &str, - email_address: Option<&str>, + email_address: &str, admin: bool, ) -> Result { let query = " @@ -247,7 +260,7 @@ impl Db for PostgresDb { } async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result> { - let like_string = fuzzy_like_string(name_query); + let like_string = Self::fuzzy_like_string(name_query); let query = " SELECT users.* FROM users @@ -371,16 +384,16 @@ impl Db for PostgresDb { Ok(()) } - async fn get_signup_invites(&self, count: usize) -> Result> { + async fn get_unsent_invites(&self, count: usize) -> Result> { Ok(sqlx::query_as( " - SELECT - email_address, email_confirmation_code - FROM signups - WHERE - NOT email_confirmation_sent AND - platform_mac - LIMIT $1 + SELECT + email_address, email_confirmation_code + FROM signups + WHERE + NOT email_confirmation_sent AND + platform_mac + LIMIT $1 ", ) .bind(count as i32) @@ -388,16 +401,16 @@ impl Db for PostgresDb { .await?) 
} - async fn record_signup_invites_sent(&self, signups: &[SignupInvite]) -> Result<()> { + async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()> { sqlx::query( " - UPDATE signups - SET email_confirmation_sent = 't' - WHERE email_address = ANY ($1) + UPDATE signups + SET email_confirmation_sent = 't' + WHERE email_address = ANY ($1) ", ) .bind( - &signups + &invites .iter() .map(|s| s.email_address.as_str()) .collect::>(), @@ -407,36 +420,41 @@ impl Db for PostgresDb { Ok(()) } - async fn redeem_signup(&self, redemption: SignupRedemption) -> Result { + async fn create_user_from_invite( + &self, + invite: &Invite, + user: NewUserParams, + ) -> Result<(UserId, Option)> { let mut tx = self.pool.begin().await?; - let signup_id: i32 = sqlx::query_scalar( + + let (signup_id, inviting_user_id): (i32, Option) = sqlx::query_as( " - SELECT id + SELECT id, inviting_user_id FROM signups WHERE email_address = $1 AND email_confirmation_code = $2 AND - email_confirmation_sent AND user_id is NULL ", ) - .bind(&redemption.email_address) - .bind(&redemption.email_confirmation_code) - .fetch_one(&mut tx) - .await?; + .bind(&invite.email_address) + .bind(&invite.email_confirmation_code) + .fetch_optional(&mut tx) + .await? + .ok_or_else(|| anyhow!("no such invite"))?; - let user_id: i32 = sqlx::query_scalar( + let user_id: UserId = sqlx::query_scalar( " INSERT INTO users - (email_address, github_login, admin, invite_count, invite_code) + (email_address, github_login, admin, invite_count, invite_code) VALUES - ($1, $2, 'f', $3, $4) + ($1, $2, 'f', $3, $4) RETURNING id ", ) - .bind(&redemption.email_address) - .bind(&redemption.github_login) - .bind(&redemption.invite_count) + .bind(&invite.email_address) + .bind(&user.github_login) + .bind(&user.invite_count) .bind(random_invite_code()) .fetch_one(&mut tx) .await?; @@ -453,13 +471,47 @@ impl Db for PostgresDb { .execute(&mut tx) .await?; + if let Some(inviting_user_id) = inviting_user_id { + let id: Option = sqlx::query_scalar( + " + UPDATE users + SET invite_count = invite_count - 1 + WHERE id = $1 AND invite_count > 0 + RETURNING id + ", + ) + .bind(&inviting_user_id) + .fetch_optional(&mut tx) + .await?; + + if id.is_none() { + Err(Error::Http( + StatusCode::UNAUTHORIZED, + "no invites remaining".to_string(), + ))?; + } + + sqlx::query( + " + INSERT INTO contacts + (user_id_a, user_id_b, a_to_b, should_notify, accepted) + VALUES + ($1, $2, 't', 't', 't') + ", + ) + .bind(inviting_user_id) + .bind(user_id) + .execute(&mut tx) + .await?; + } + tx.commit().await?; - Ok(UserId(user_id)) + Ok((user_id, inviting_user_id)) } // invite codes - async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()> { + async fn set_invite_count_for_user(&self, id: UserId, count: u32) -> Result<()> { let mut tx = self.pool.begin().await?; if count > 0 { sqlx::query( @@ -527,83 +579,82 @@ impl Db for PostgresDb { }) } - async fn redeem_invite_code( - &self, - code: &str, - login: &str, - email_address: Option<&str>, - ) -> Result { + async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result { let mut tx = self.pool.begin().await?; - let inviter_id: Option = sqlx::query_scalar( + let existing_user: Option = sqlx::query_scalar( " - UPDATE users - SET invite_count = invite_count - 1 - WHERE - invite_code = $1 AND - invite_count > 0 - RETURNING id + SELECT id + FROM users + WHERE email_address = $1 + ", + ) + .bind(email_address) + .fetch_optional(&mut tx) + .await?; + if existing_user.is_some() { + Err(anyhow!("email address is 
already in use"))?; + } + + let row: Option<(UserId, i32)> = sqlx::query_as( + " + SELECT id, invite_count + FROM users + WHERE invite_code = $1 ", ) .bind(code) .fetch_optional(&mut tx) .await?; - let inviter_id = match inviter_id { - Some(inviter_id) => inviter_id, - None => { - if sqlx::query_scalar::<_, i32>("SELECT 1 FROM users WHERE invite_code = $1") - .bind(code) - .fetch_optional(&mut tx) - .await? - .is_some() - { - Err(Error::Http( - StatusCode::UNAUTHORIZED, - "no invites remaining".to_string(), - ))? - } else { - Err(Error::Http( - StatusCode::NOT_FOUND, - "invite code not found".to_string(), - ))? - } - } + let (inviter_id, invite_count) = match row { + Some(row) => row, + None => Err(Error::Http( + StatusCode::NOT_FOUND, + "invite code not found".to_string(), + ))?, }; - let invitee_id = sqlx::query_scalar( - " - INSERT INTO users - (github_login, email_address, admin, inviter_id, invite_code, invite_count) - VALUES - ($1, $2, 'f', $3, $4, $5) - RETURNING id - ", - ) - .bind(login) - .bind(email_address) - .bind(inviter_id) - .bind(random_invite_code()) - .bind(5) - .fetch_one(&mut tx) - .await - .map(UserId)?; + if invite_count == 0 { + Err(Error::Http( + StatusCode::UNAUTHORIZED, + "no invites remaining".to_string(), + ))?; + } - sqlx::query( + let email_confirmation_code: String = sqlx::query_scalar( " - INSERT INTO contacts - (user_id_a, user_id_b, a_to_b, should_notify, accepted) - VALUES - ($1, $2, 't', 't', 't') + INSERT INTO signups + ( + email_address, + email_confirmation_code, + email_confirmation_sent, + inviting_user_id, + platform_linux, + platform_mac, + platform_windows, + platform_unknown + ) + VALUES + ($1, $2, 'f', $3, 'f', 'f', 'f', 't') + ON CONFLICT (email_address) + DO UPDATE SET + inviting_user_id = excluded.inviting_user_id + RETURNING email_confirmation_code ", ) - .bind(inviter_id) - .bind(invitee_id) - .execute(&mut tx) + .bind(&email_address) + .bind(&random_email_confirmation_code()) + .bind(&inviter_id) + .fetch_one(&mut tx) .await?; tx.commit().await?; - Ok(invitee_id) + + Ok(Invite { + email_address: email_address.into(), + email_confirmation_code, + }) } // projects @@ -1418,7 +1469,7 @@ impl Db for PostgresDb { } #[cfg(test)] - fn as_fake(&self) -> Option<&tests::FakeDb> { + fn as_fake(&self) -> Option<&FakeDb> { None } } @@ -1495,19 +1546,19 @@ pub struct UserActivitySummary { #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ProjectActivitySummary { - id: ProjectId, - duration: Duration, - max_collaborators: usize, + pub id: ProjectId, + pub duration: Duration, + pub max_collaborators: usize, } #[derive(Clone, Debug, PartialEq, Serialize)] pub struct UserActivityPeriod { - project_id: ProjectId, + pub project_id: ProjectId, #[serde(with = "time::serde::iso8601")] - start: OffsetDateTime, + pub start: OffsetDateTime, #[serde(with = "time::serde::iso8601")] - end: OffsetDateTime, - extensions: HashMap, + pub end: OffsetDateTime, + pub extensions: HashMap, } id_type!(OrgId); @@ -1580,31 +1631,17 @@ pub struct Signup { } #[derive(FromRow, PartialEq, Debug, Serialize, Deserialize)] -pub struct SignupInvite { +pub struct Invite { pub email_address: String, pub email_confirmation_code: String, } #[derive(Debug, Serialize, Deserialize)] -pub struct SignupRedemption { - pub email_address: String, - pub email_confirmation_code: String, +pub struct NewUserParams { pub github_login: String, pub invite_count: i32, } -fn fuzzy_like_string(string: &str) -> String { - let mut result = String::with_capacity(string.len() * 2 + 1); - for c in 
string.chars() { - if c.is_alphanumeric() { - result.push('%'); - result.push(c); - } - } - result.push('%'); - result -} - fn random_invite_code() -> String { nanoid::nanoid!(16) } @@ -1614,11 +1651,14 @@ fn random_email_confirmation_code() -> String { } #[cfg(test)] -pub mod tests { +pub use test::*; + +#[cfg(test)] +mod test { use super::*; use anyhow::anyhow; use collections::BTreeMap; - use gpui::executor::{Background, Deterministic}; + use gpui::executor::Background; use lazy_static::lazy_static; use parking_lot::Mutex; use rand::prelude::*; @@ -1629,1077 +1669,6 @@ pub mod tests { use std::{path::Path, sync::Arc}; use util::post_inc; - #[tokio::test(flavor = "multi_thread")] - async fn test_get_users_by_ids() { - for test_db in [ - TestDb::postgres().await, - TestDb::fake(build_background_executor()), - ] { - let db = test_db.db(); - - let user = db.create_user("user", None, false).await.unwrap(); - let friend1 = db.create_user("friend-1", None, false).await.unwrap(); - let friend2 = db.create_user("friend-2", None, false).await.unwrap(); - let friend3 = db.create_user("friend-3", None, false).await.unwrap(); - - assert_eq!( - db.get_users_by_ids(vec![user, friend1, friend2, friend3]) - .await - .unwrap(), - vec![ - User { - id: user, - github_login: "user".to_string(), - admin: false, - ..Default::default() - }, - User { - id: friend1, - github_login: "friend-1".to_string(), - admin: false, - ..Default::default() - }, - User { - id: friend2, - github_login: "friend-2".to_string(), - admin: false, - ..Default::default() - }, - User { - id: friend3, - github_login: "friend-3".to_string(), - admin: false, - ..Default::default() - } - ] - ); - } - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_create_users() { - let db = TestDb::postgres().await; - let db = db.db(); - - // Create the first batch of users, ensuring invite counts are assigned - // correctly and the respective invite codes are unique. - let user_ids_batch_1 = db - .create_users(vec![ - ("user1".to_string(), "hi@user1.com".to_string(), 5), - ("user2".to_string(), "hi@user2.com".to_string(), 4), - ("user3".to_string(), "hi@user3.com".to_string(), 3), - ]) - .await - .unwrap(); - assert_eq!(user_ids_batch_1.len(), 3); - - let users = db.get_users_by_ids(user_ids_batch_1.clone()).await.unwrap(); - assert_eq!(users.len(), 3); - assert_eq!(users[0].github_login, "user1"); - assert_eq!(users[0].email_address.as_deref(), Some("hi@user1.com")); - assert_eq!(users[0].invite_count, 5); - assert_eq!(users[1].github_login, "user2"); - assert_eq!(users[1].email_address.as_deref(), Some("hi@user2.com")); - assert_eq!(users[1].invite_count, 4); - assert_eq!(users[2].github_login, "user3"); - assert_eq!(users[2].email_address.as_deref(), Some("hi@user3.com")); - assert_eq!(users[2].invite_count, 3); - - let invite_code_1 = users[0].invite_code.clone().unwrap(); - let invite_code_2 = users[1].invite_code.clone().unwrap(); - let invite_code_3 = users[2].invite_code.clone().unwrap(); - assert_ne!(invite_code_1, invite_code_2); - assert_ne!(invite_code_1, invite_code_3); - assert_ne!(invite_code_2, invite_code_3); - - // Create the second batch of users and include a user that is already in the database, ensuring - // the invite count for the existing user is updated without changing their invite code. 
- let user_ids_batch_2 = db - .create_users(vec![ - ("user2".to_string(), "hi@user2.com".to_string(), 10), - ("user4".to_string(), "hi@user4.com".to_string(), 2), - ]) - .await - .unwrap(); - assert_eq!(user_ids_batch_2.len(), 2); - assert_eq!(user_ids_batch_2[0], user_ids_batch_1[1]); - - let users = db.get_users_by_ids(user_ids_batch_2).await.unwrap(); - assert_eq!(users.len(), 2); - assert_eq!(users[0].github_login, "user2"); - assert_eq!(users[0].email_address.as_deref(), Some("hi@user2.com")); - assert_eq!(users[0].invite_count, 10); - assert_eq!(users[0].invite_code, Some(invite_code_2.clone())); - assert_eq!(users[1].github_login, "user4"); - assert_eq!(users[1].email_address.as_deref(), Some("hi@user4.com")); - assert_eq!(users[1].invite_count, 2); - - let invite_code_4 = users[1].invite_code.clone().unwrap(); - assert_ne!(invite_code_4, invite_code_1); - assert_ne!(invite_code_4, invite_code_2); - assert_ne!(invite_code_4, invite_code_3); - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_worktree_extensions() { - let test_db = TestDb::postgres().await; - let db = test_db.db(); - - let user = db.create_user("user_1", None, false).await.unwrap(); - let project = db.register_project(user).await.unwrap(); - - db.update_worktree_extensions(project, 100, Default::default()) - .await - .unwrap(); - db.update_worktree_extensions( - project, - 100, - [("rs".to_string(), 5), ("md".to_string(), 3)] - .into_iter() - .collect(), - ) - .await - .unwrap(); - db.update_worktree_extensions( - project, - 100, - [("rs".to_string(), 6), ("md".to_string(), 5)] - .into_iter() - .collect(), - ) - .await - .unwrap(); - db.update_worktree_extensions( - project, - 101, - [("ts".to_string(), 2), ("md".to_string(), 1)] - .into_iter() - .collect(), - ) - .await - .unwrap(); - - assert_eq!( - db.get_project_extensions(project).await.unwrap(), - [ - ( - 100, - [("rs".into(), 6), ("md".into(), 5),] - .into_iter() - .collect::>() - ), - ( - 101, - [("ts".into(), 2), ("md".into(), 1),] - .into_iter() - .collect::>() - ) - ] - .into_iter() - .collect() - ); - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_user_activity() { - let test_db = TestDb::postgres().await; - let db = test_db.db(); - - let user_1 = db.create_user("user_1", None, false).await.unwrap(); - let user_2 = db.create_user("user_2", None, false).await.unwrap(); - let user_3 = db.create_user("user_3", None, false).await.unwrap(); - let project_1 = db.register_project(user_1).await.unwrap(); - db.update_worktree_extensions( - project_1, - 1, - HashMap::from_iter([("rs".into(), 5), ("md".into(), 7)]), - ) - .await - .unwrap(); - let project_2 = db.register_project(user_2).await.unwrap(); - let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60); - - // User 2 opens a project - let t1 = t0 + Duration::from_secs(10); - db.record_user_activity(t0..t1, &[(user_2, project_2)]) - .await - .unwrap(); - - let t2 = t1 + Duration::from_secs(10); - db.record_user_activity(t1..t2, &[(user_2, project_2)]) - .await - .unwrap(); - - // User 1 joins the project - let t3 = t2 + Duration::from_secs(10); - db.record_user_activity(t2..t3, &[(user_2, project_2), (user_1, project_2)]) - .await - .unwrap(); - - // User 1 opens another project - let t4 = t3 + Duration::from_secs(10); - db.record_user_activity( - t3..t4, - &[ - (user_2, project_2), - (user_1, project_2), - (user_1, project_1), - ], - ) - .await - .unwrap(); - - // User 3 joins that project - let t5 = t4 + Duration::from_secs(10); - db.record_user_activity( - t4..t5, - &[ - 
(user_2, project_2), - (user_1, project_2), - (user_1, project_1), - (user_3, project_1), - ], - ) - .await - .unwrap(); - - // User 2 leaves - let t6 = t5 + Duration::from_secs(5); - db.record_user_activity(t5..t6, &[(user_1, project_1), (user_3, project_1)]) - .await - .unwrap(); - - let t7 = t6 + Duration::from_secs(60); - let t8 = t7 + Duration::from_secs(10); - db.record_user_activity(t7..t8, &[(user_1, project_1)]) - .await - .unwrap(); - - assert_eq!( - db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(), - &[ - UserActivitySummary { - id: user_1, - github_login: "user_1".to_string(), - project_activity: vec![ - ProjectActivitySummary { - id: project_1, - duration: Duration::from_secs(25), - max_collaborators: 2 - }, - ProjectActivitySummary { - id: project_2, - duration: Duration::from_secs(30), - max_collaborators: 2 - } - ] - }, - UserActivitySummary { - id: user_2, - github_login: "user_2".to_string(), - project_activity: vec![ProjectActivitySummary { - id: project_2, - duration: Duration::from_secs(50), - max_collaborators: 2 - }] - }, - UserActivitySummary { - id: user_3, - github_login: "user_3".to_string(), - project_activity: vec![ProjectActivitySummary { - id: project_1, - duration: Duration::from_secs(15), - max_collaborators: 2 - }] - }, - ] - ); - - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(56), false) - .await - .unwrap(), - 0 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(56), true) - .await - .unwrap(), - 0 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(54), false) - .await - .unwrap(), - 1 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(54), true) - .await - .unwrap(), - 1 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(30), false) - .await - .unwrap(), - 2 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(30), true) - .await - .unwrap(), - 2 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(10), false) - .await - .unwrap(), - 3 - ); - assert_eq!( - db.get_active_user_count(t0..t6, Duration::from_secs(10), true) - .await - .unwrap(), - 3 - ); - assert_eq!( - db.get_active_user_count(t0..t1, Duration::from_secs(5), false) - .await - .unwrap(), - 1 - ); - assert_eq!( - db.get_active_user_count(t0..t1, Duration::from_secs(5), true) - .await - .unwrap(), - 0 - ); - - assert_eq!( - db.get_user_activity_timeline(t3..t6, user_1).await.unwrap(), - &[ - UserActivityPeriod { - project_id: project_1, - start: t3, - end: t6, - extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), - }, - UserActivityPeriod { - project_id: project_2, - start: t3, - end: t5, - extensions: Default::default(), - }, - ] - ); - assert_eq!( - db.get_user_activity_timeline(t0..t8, user_1).await.unwrap(), - &[ - UserActivityPeriod { - project_id: project_2, - start: t2, - end: t5, - extensions: Default::default(), - }, - UserActivityPeriod { - project_id: project_1, - start: t3, - end: t6, - extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), - }, - UserActivityPeriod { - project_id: project_1, - start: t7, - end: t8, - extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), - }, - ] - ); - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_recent_channel_messages() { - for test_db in [ - TestDb::postgres().await, - TestDb::fake(build_background_executor()), - ] { - let db = test_db.db(); - let user = db.create_user("user", None, 
false).await.unwrap(); - let org = db.create_org("org", "org").await.unwrap(); - let channel = db.create_org_channel(org, "channel").await.unwrap(); - for i in 0..10 { - db.create_channel_message( - channel, - user, - &i.to_string(), - OffsetDateTime::now_utc(), - i, - ) - .await - .unwrap(); - } - - let messages = db.get_channel_messages(channel, 5, None).await.unwrap(); - assert_eq!( - messages.iter().map(|m| &m.body).collect::>(), - ["5", "6", "7", "8", "9"] - ); - - let prev_messages = db - .get_channel_messages(channel, 4, Some(messages[0].id)) - .await - .unwrap(); - assert_eq!( - prev_messages.iter().map(|m| &m.body).collect::>(), - ["1", "2", "3", "4"] - ); - } - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_channel_message_nonces() { - for test_db in [ - TestDb::postgres().await, - TestDb::fake(build_background_executor()), - ] { - let db = test_db.db(); - let user = db.create_user("user", None, false).await.unwrap(); - let org = db.create_org("org", "org").await.unwrap(); - let channel = db.create_org_channel(org, "channel").await.unwrap(); - - let msg1_id = db - .create_channel_message(channel, user, "1", OffsetDateTime::now_utc(), 1) - .await - .unwrap(); - let msg2_id = db - .create_channel_message(channel, user, "2", OffsetDateTime::now_utc(), 2) - .await - .unwrap(); - let msg3_id = db - .create_channel_message(channel, user, "3", OffsetDateTime::now_utc(), 1) - .await - .unwrap(); - let msg4_id = db - .create_channel_message(channel, user, "4", OffsetDateTime::now_utc(), 2) - .await - .unwrap(); - - assert_ne!(msg1_id, msg2_id); - assert_eq!(msg1_id, msg3_id); - assert_eq!(msg2_id, msg4_id); - } - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_create_access_tokens() { - let test_db = TestDb::postgres().await; - let db = test_db.db(); - let user = db.create_user("the-user", None, false).await.unwrap(); - - db.create_access_token_hash(user, "h1", 3).await.unwrap(); - db.create_access_token_hash(user, "h2", 3).await.unwrap(); - assert_eq!( - db.get_access_token_hashes(user).await.unwrap(), - &["h2".to_string(), "h1".to_string()] - ); - - db.create_access_token_hash(user, "h3", 3).await.unwrap(); - assert_eq!( - db.get_access_token_hashes(user).await.unwrap(), - &["h3".to_string(), "h2".to_string(), "h1".to_string(),] - ); - - db.create_access_token_hash(user, "h4", 3).await.unwrap(); - assert_eq!( - db.get_access_token_hashes(user).await.unwrap(), - &["h4".to_string(), "h3".to_string(), "h2".to_string(),] - ); - - db.create_access_token_hash(user, "h5", 3).await.unwrap(); - assert_eq!( - db.get_access_token_hashes(user).await.unwrap(), - &["h5".to_string(), "h4".to_string(), "h3".to_string()] - ); - } - - #[test] - fn test_fuzzy_like_string() { - assert_eq!(fuzzy_like_string("abcd"), "%a%b%c%d%"); - assert_eq!(fuzzy_like_string("x y"), "%x%y%"); - assert_eq!(fuzzy_like_string(" z "), "%z%"); - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_fuzzy_search_users() { - let test_db = TestDb::postgres().await; - let db = test_db.db(); - for github_login in [ - "California", - "colorado", - "oregon", - "washington", - "florida", - "delaware", - "rhode-island", - ] { - db.create_user(github_login, None, false).await.unwrap(); - } - - assert_eq!( - fuzzy_search_user_names(db, "clr").await, - &["colorado", "California"] - ); - assert_eq!( - fuzzy_search_user_names(db, "ro").await, - &["rhode-island", "colorado", "oregon"], - ); - - async fn fuzzy_search_user_names(db: &Arc, query: &str) -> Vec { - db.fuzzy_search_users(query, 10) - .await - 
.unwrap() - .into_iter() - .map(|user| user.github_login) - .collect::>() - } - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_add_contacts() { - for test_db in [ - TestDb::postgres().await, - TestDb::fake(build_background_executor()), - ] { - let db = test_db.db(); - - let user_1 = db.create_user("user1", None, false).await.unwrap(); - let user_2 = db.create_user("user2", None, false).await.unwrap(); - let user_3 = db.create_user("user3", None, false).await.unwrap(); - - // User starts with no contacts - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - vec![Contact::Accepted { - user_id: user_1, - should_notify: false - }], - ); - - // User requests a contact. Both users see the pending request. - db.send_contact_request(user_1, user_2).await.unwrap(); - assert!(!db.has_contact(user_1, user_2).await.unwrap()); - assert!(!db.has_contact(user_2, user_1).await.unwrap()); - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Outgoing { user_id: user_2 } - ], - ); - assert_eq!( - db.get_contacts(user_2).await.unwrap(), - &[ - Contact::Incoming { - user_id: user_1, - should_notify: true - }, - Contact::Accepted { - user_id: user_2, - should_notify: false - }, - ] - ); - - // User 2 dismisses the contact request notification without accepting or rejecting. - // We shouldn't notify them again. - db.dismiss_contact_notification(user_1, user_2) - .await - .unwrap_err(); - db.dismiss_contact_notification(user_2, user_1) - .await - .unwrap(); - assert_eq!( - db.get_contacts(user_2).await.unwrap(), - &[ - Contact::Incoming { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: false - }, - ] - ); - - // User can't accept their own contact request - db.respond_to_contact_request(user_1, user_2, true) - .await - .unwrap_err(); - - // User accepts a contact request. Both users see the contact. - db.respond_to_contact_request(user_2, user_1, true) - .await - .unwrap(); - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: true - } - ], - ); - assert!(db.has_contact(user_1, user_2).await.unwrap()); - assert!(db.has_contact(user_2, user_1).await.unwrap()); - assert_eq!( - db.get_contacts(user_2).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false, - }, - Contact::Accepted { - user_id: user_2, - should_notify: false, - }, - ] - ); - - // Users cannot re-request existing contacts. - db.send_contact_request(user_1, user_2).await.unwrap_err(); - db.send_contact_request(user_2, user_1).await.unwrap_err(); - - // Users can't dismiss notifications of them accepting other users' requests. - db.dismiss_contact_notification(user_2, user_1) - .await - .unwrap_err(); - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: true, - }, - ] - ); - - // Users can dismiss notifications of other users accepting their requests. 
- db.dismiss_contact_notification(user_1, user_2) - .await - .unwrap(); - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: false, - }, - ] - ); - - // Users send each other concurrent contact requests and - // see that they are immediately accepted. - db.send_contact_request(user_1, user_3).await.unwrap(); - db.send_contact_request(user_3, user_1).await.unwrap(); - assert_eq!( - db.get_contacts(user_1).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: false, - }, - Contact::Accepted { - user_id: user_3, - should_notify: false - }, - ] - ); - assert_eq!( - db.get_contacts(user_3).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_3, - should_notify: false - } - ], - ); - - // User declines a contact request. Both users see that it is gone. - db.send_contact_request(user_2, user_3).await.unwrap(); - db.respond_to_contact_request(user_3, user_2, false) - .await - .unwrap(); - assert!(!db.has_contact(user_2, user_3).await.unwrap()); - assert!(!db.has_contact(user_3, user_2).await.unwrap()); - assert_eq!( - db.get_contacts(user_2).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_2, - should_notify: false - } - ] - ); - assert_eq!( - db.get_contacts(user_3).await.unwrap(), - &[ - Contact::Accepted { - user_id: user_1, - should_notify: false - }, - Contact::Accepted { - user_id: user_3, - should_notify: false - } - ], - ); - } - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_invite_codes() { - let postgres = TestDb::postgres().await; - let db = postgres.db(); - let user1 = db.create_user("user-1", None, false).await.unwrap(); - - // Initially, user 1 has no invite code - assert_eq!(db.get_invite_code_for_user(user1).await.unwrap(), None); - - // Setting invite count to 0 when no code is assigned does not assign a new code - db.set_invite_count(user1, 0).await.unwrap(); - assert!(db.get_invite_code_for_user(user1).await.unwrap().is_none()); - - // User 1 creates an invite code that can be used twice. - db.set_invite_count(user1, 2).await.unwrap(); - let (invite_code, invite_count) = - db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(invite_count, 2); - - // User 2 redeems the invite code and becomes a contact of user 1. - let user2 = db - .redeem_invite_code(&invite_code, "user-2", None) - .await - .unwrap(); - let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(invite_count, 1); - assert_eq!( - db.get_contacts(user1).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user2, - should_notify: true - } - ] - ); - assert_eq!( - db.get_contacts(user2).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user2, - should_notify: false - } - ] - ); - - // User 3 redeems the invite code and becomes a contact of user 1. 
- let user3 = db - .redeem_invite_code(&invite_code, "user-3", None) - .await - .unwrap(); - let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(invite_count, 0); - assert_eq!( - db.get_contacts(user1).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user2, - should_notify: true - }, - Contact::Accepted { - user_id: user3, - should_notify: true - } - ] - ); - assert_eq!( - db.get_contacts(user3).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user3, - should_notify: false - }, - ] - ); - - // Trying to reedem the code for the third time results in an error. - db.redeem_invite_code(&invite_code, "user-4", None) - .await - .unwrap_err(); - - // Invite count can be updated after the code has been created. - db.set_invite_count(user1, 2).await.unwrap(); - let (latest_code, invite_count) = - db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(latest_code, invite_code); // Invite code doesn't change when we increment above 0 - assert_eq!(invite_count, 2); - - // User 4 can now redeem the invite code and becomes a contact of user 1. - let user4 = db - .redeem_invite_code(&invite_code, "user-4", None) - .await - .unwrap(); - let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(invite_count, 1); - assert_eq!( - db.get_contacts(user1).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user2, - should_notify: true - }, - Contact::Accepted { - user_id: user3, - should_notify: true - }, - Contact::Accepted { - user_id: user4, - should_notify: true - } - ] - ); - assert_eq!( - db.get_contacts(user4).await.unwrap(), - [ - Contact::Accepted { - user_id: user1, - should_notify: false - }, - Contact::Accepted { - user_id: user4, - should_notify: false - }, - ] - ); - - // An existing user cannot redeem invite codes. - db.redeem_invite_code(&invite_code, "user-2", None) - .await - .unwrap_err(); - let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); - assert_eq!(invite_count, 1); - - // Ensure invited users get invite codes too. 
- assert_eq!( - db.get_invite_code_for_user(user2).await.unwrap().unwrap().1, - 5 - ); - assert_eq!( - db.get_invite_code_for_user(user3).await.unwrap().unwrap().1, - 5 - ); - assert_eq!( - db.get_invite_code_for_user(user4).await.unwrap().unwrap().1, - 5 - ); - } - - #[tokio::test(flavor = "multi_thread")] - async fn test_signups() { - let postgres = TestDb::postgres().await; - let db = postgres.db(); - - // people sign up on the waitlist - for i in 0..8 { - db.create_signup(Signup { - email_address: format!("person-{i}@example.com"), - platform_mac: true, - platform_linux: true, - platform_windows: false, - editor_features: vec!["speed".into()], - programming_languages: vec!["rust".into(), "c".into()], - }) - .await - .unwrap(); - } - - // retrieve the next batch of signup emails to send - let signups_batch1 = db.get_signup_invites(3).await.unwrap(); - let addresses = signups_batch1 - .iter() - .map(|s| &s.email_address) - .collect::>(); - assert_eq!( - addresses, - &[ - "person-0@example.com", - "person-1@example.com", - "person-2@example.com" - ] - ); - assert_ne!( - signups_batch1[0].email_confirmation_code, - signups_batch1[1].email_confirmation_code - ); - - // the waitlist isn't updated until we record that the emails - // were successfully sent. - let signups_batch = db.get_signup_invites(3).await.unwrap(); - assert_eq!(signups_batch, signups_batch1); - - // once the emails go out, we can retrieve the next batch - // of signups. - db.record_signup_invites_sent(&signups_batch1) - .await - .unwrap(); - let signups_batch2 = db.get_signup_invites(3).await.unwrap(); - let addresses = signups_batch2 - .iter() - .map(|s| &s.email_address) - .collect::>(); - assert_eq!( - addresses, - &[ - "person-3@example.com", - "person-4@example.com", - "person-5@example.com" - ] - ); - - // user completes the signup process by providing their - // github account. - let user_id = db - .redeem_signup(SignupRedemption { - email_address: signups_batch1[0].email_address.clone(), - email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), - github_login: "person-0".into(), - invite_count: 5, - }) - .await - .unwrap(); - let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); - assert_eq!(user.github_login, "person-0"); - assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); - assert_eq!(user.invite_count, 5); - - // cannot redeem the same signup again. - db.redeem_signup(SignupRedemption { - email_address: signups_batch1[0].email_address.clone(), - email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), - github_login: "some-other-github_account".into(), - invite_count: 5, - }) - .await - .unwrap_err(); - - // cannot redeem a signup with the wrong confirmation code. - db.redeem_signup(SignupRedemption { - email_address: signups_batch1[1].email_address.clone(), - email_confirmation_code: "the-wrong-code".to_string(), - github_login: "person-1".into(), - invite_count: 5, - }) - .await - .unwrap_err(); - } - - pub struct TestDb { - pub db: Option>, - pub url: String, - } - - impl TestDb { - #[allow(clippy::await_holding_lock)] - pub async fn postgres() -> Self { - lazy_static! 
{ - static ref LOCK: Mutex<()> = Mutex::new(()); - } - - let _guard = LOCK.lock(); - let mut rng = StdRng::from_entropy(); - let name = format!("zed-test-{}", rng.gen::()); - let url = format!("postgres://postgres@localhost/{}", name); - let migrations_path = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations")); - Postgres::create_database(&url) - .await - .expect("failed to create test db"); - let db = PostgresDb::new(&url, 5).await.unwrap(); - let migrator = Migrator::new(migrations_path).await.unwrap(); - migrator.run(&db.pool).await.unwrap(); - Self { - db: Some(Arc::new(db)), - url, - } - } - - pub fn fake(background: Arc) -> Self { - Self { - db: Some(Arc::new(FakeDb::new(background))), - url: Default::default(), - } - } - - pub fn db(&self) -> &Arc { - self.db.as_ref().unwrap() - } - } - - impl Drop for TestDb { - fn drop(&mut self) { - if let Some(db) = self.db.take() { - futures::executor::block_on(db.teardown(&self.url)); - } - } - } - pub struct FakeDb { background: Arc, pub users: Mutex>, @@ -2753,7 +1722,7 @@ pub mod tests { async fn create_user( &self, github_login: &str, - email_address: Option<&str>, + email_address: &str, admin: bool, ) -> Result { self.background.simulate_random_delay().await; @@ -2771,7 +1740,7 @@ pub mod tests { User { id: user_id, github_login: github_login.to_string(), - email_address: email_address.map(str::to_string), + email_address: Some(email_address.to_string()), admin, invite_code: None, invite_count: 0, @@ -2843,24 +1812,25 @@ pub mod tests { unimplemented!() } - async fn get_signup_invites(&self, _count: usize) -> Result> { + async fn get_unsent_invites(&self, _count: usize) -> Result> { unimplemented!() } - async fn record_signup_invites_sent(&self, _signups: &[SignupInvite]) -> Result<()> { + async fn record_sent_invites(&self, _invites: &[Invite]) -> Result<()> { unimplemented!() } - async fn redeem_signup( + async fn create_user_from_invite( &self, - _redemption: SignupRedemption, - ) -> Result { + _invite: &Invite, + _user: NewUserParams, + ) -> Result<(UserId, Option)> { unimplemented!() } // invite codes - async fn set_invite_count(&self, _id: UserId, _count: u32) -> Result<()> { + async fn set_invite_count_for_user(&self, _id: UserId, _count: u32) -> Result<()> { unimplemented!() } @@ -2873,12 +1843,11 @@ pub mod tests { unimplemented!() } - async fn redeem_invite_code( + async fn create_invite_from_code( &self, _code: &str, - _login: &str, - _email_address: Option<&str>, - ) -> Result { + _email_address: &str, + ) -> Result { unimplemented!() } @@ -3316,7 +2285,52 @@ pub mod tests { } } - fn build_background_executor() -> Arc { - Deterministic::new(0).build_background() + pub struct TestDb { + pub db: Option>, + pub url: String, + } + + impl TestDb { + #[allow(clippy::await_holding_lock)] + pub async fn postgres() -> Self { + lazy_static! 
{ + static ref LOCK: Mutex<()> = Mutex::new(()); + } + + let _guard = LOCK.lock(); + let mut rng = StdRng::from_entropy(); + let name = format!("zed-test-{}", rng.gen::()); + let url = format!("postgres://postgres@localhost/{}", name); + let migrations_path = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations")); + Postgres::create_database(&url) + .await + .expect("failed to create test db"); + let db = PostgresDb::new(&url, 5).await.unwrap(); + let migrator = Migrator::new(migrations_path).await.unwrap(); + migrator.run(&db.pool).await.unwrap(); + Self { + db: Some(Arc::new(db)), + url, + } + } + + pub fn fake(background: Arc) -> Self { + Self { + db: Some(Arc::new(FakeDb::new(background))), + url: Default::default(), + } + } + + pub fn db(&self) -> &Arc { + self.db.as_ref().unwrap() + } + } + + impl Drop for TestDb { + fn drop(&mut self) { + if let Some(db) = self.db.take() { + futures::executor::block_on(db.teardown(&self.url)); + } + } } } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs new file mode 100644 index 0000000000..aa9a0b6995 --- /dev/null +++ b/crates/collab/src/db_tests.rs @@ -0,0 +1,1071 @@ +use super::db::*; +use collections::HashMap; +use gpui::executor::{Background, Deterministic}; +use std::{sync::Arc, time::Duration}; +use time::OffsetDateTime; + +#[tokio::test(flavor = "multi_thread")] +async fn test_get_users_by_ids() { + for test_db in [ + TestDb::postgres().await, + TestDb::fake(build_background_executor()), + ] { + let db = test_db.db(); + + let user1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); + let user3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); + let user4 = db.create_user("u4", "u4@example.com", false).await.unwrap(); + + assert_eq!( + db.get_users_by_ids(vec![user1, user2, user3, user4]) + .await + .unwrap(), + vec![ + User { + id: user1, + github_login: "u1".to_string(), + email_address: Some("u1@example.com".to_string()), + admin: false, + ..Default::default() + }, + User { + id: user2, + github_login: "u2".to_string(), + email_address: Some("u2@example.com".to_string()), + admin: false, + ..Default::default() + }, + User { + id: user3, + github_login: "u3".to_string(), + email_address: Some("u3@example.com".to_string()), + admin: false, + ..Default::default() + }, + User { + id: user4, + github_login: "u4".to_string(), + email_address: Some("u4@example.com".to_string()), + admin: false, + ..Default::default() + } + ] + ); + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_create_users() { + let db = TestDb::postgres().await; + let db = db.db(); + + // Create the first batch of users, ensuring invite counts are assigned + // correctly and the respective invite codes are unique. 
+ let user_ids_batch_1 = db + .create_users(vec![ + ("user1".to_string(), "hi@user1.com".to_string(), 5), + ("user2".to_string(), "hi@user2.com".to_string(), 4), + ("user3".to_string(), "hi@user3.com".to_string(), 3), + ]) + .await + .unwrap(); + assert_eq!(user_ids_batch_1.len(), 3); + + let users = db.get_users_by_ids(user_ids_batch_1.clone()).await.unwrap(); + assert_eq!(users.len(), 3); + assert_eq!(users[0].github_login, "user1"); + assert_eq!(users[0].email_address.as_deref(), Some("hi@user1.com")); + assert_eq!(users[0].invite_count, 5); + assert_eq!(users[1].github_login, "user2"); + assert_eq!(users[1].email_address.as_deref(), Some("hi@user2.com")); + assert_eq!(users[1].invite_count, 4); + assert_eq!(users[2].github_login, "user3"); + assert_eq!(users[2].email_address.as_deref(), Some("hi@user3.com")); + assert_eq!(users[2].invite_count, 3); + + let invite_code_1 = users[0].invite_code.clone().unwrap(); + let invite_code_2 = users[1].invite_code.clone().unwrap(); + let invite_code_3 = users[2].invite_code.clone().unwrap(); + assert_ne!(invite_code_1, invite_code_2); + assert_ne!(invite_code_1, invite_code_3); + assert_ne!(invite_code_2, invite_code_3); + + // Create the second batch of users and include a user that is already in the database, ensuring + // the invite count for the existing user is updated without changing their invite code. + let user_ids_batch_2 = db + .create_users(vec![ + ("user2".to_string(), "hi@user2.com".to_string(), 10), + ("user4".to_string(), "hi@user4.com".to_string(), 2), + ]) + .await + .unwrap(); + assert_eq!(user_ids_batch_2.len(), 2); + assert_eq!(user_ids_batch_2[0], user_ids_batch_1[1]); + + let users = db.get_users_by_ids(user_ids_batch_2).await.unwrap(); + assert_eq!(users.len(), 2); + assert_eq!(users[0].github_login, "user2"); + assert_eq!(users[0].email_address.as_deref(), Some("hi@user2.com")); + assert_eq!(users[0].invite_count, 10); + assert_eq!(users[0].invite_code, Some(invite_code_2.clone())); + assert_eq!(users[1].github_login, "user4"); + assert_eq!(users[1].email_address.as_deref(), Some("hi@user4.com")); + assert_eq!(users[1].invite_count, 2); + + let invite_code_4 = users[1].invite_code.clone().unwrap(); + assert_ne!(invite_code_4, invite_code_1); + assert_ne!(invite_code_4, invite_code_2); + assert_ne!(invite_code_4, invite_code_3); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_worktree_extensions() { + let test_db = TestDb::postgres().await; + let db = test_db.db(); + + let user = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let project = db.register_project(user).await.unwrap(); + + db.update_worktree_extensions(project, 100, Default::default()) + .await + .unwrap(); + db.update_worktree_extensions( + project, + 100, + [("rs".to_string(), 5), ("md".to_string(), 3)] + .into_iter() + .collect(), + ) + .await + .unwrap(); + db.update_worktree_extensions( + project, + 100, + [("rs".to_string(), 6), ("md".to_string(), 5)] + .into_iter() + .collect(), + ) + .await + .unwrap(); + db.update_worktree_extensions( + project, + 101, + [("ts".to_string(), 2), ("md".to_string(), 1)] + .into_iter() + .collect(), + ) + .await + .unwrap(); + + assert_eq!( + db.get_project_extensions(project).await.unwrap(), + [ + ( + 100, + [("rs".into(), 6), ("md".into(), 5),] + .into_iter() + .collect::>() + ), + ( + 101, + [("ts".into(), 2), ("md".into(), 1),] + .into_iter() + .collect::>() + ) + ] + .into_iter() + .collect() + ); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_user_activity() { + let test_db 
= TestDb::postgres().await; + let db = test_db.db(); + + let user_1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user_2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); + let user_3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); + let project_1 = db.register_project(user_1).await.unwrap(); + db.update_worktree_extensions( + project_1, + 1, + HashMap::from_iter([("rs".into(), 5), ("md".into(), 7)]), + ) + .await + .unwrap(); + let project_2 = db.register_project(user_2).await.unwrap(); + let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60); + + // User 2 opens a project + let t1 = t0 + Duration::from_secs(10); + db.record_user_activity(t0..t1, &[(user_2, project_2)]) + .await + .unwrap(); + + let t2 = t1 + Duration::from_secs(10); + db.record_user_activity(t1..t2, &[(user_2, project_2)]) + .await + .unwrap(); + + // User 1 joins the project + let t3 = t2 + Duration::from_secs(10); + db.record_user_activity(t2..t3, &[(user_2, project_2), (user_1, project_2)]) + .await + .unwrap(); + + // User 1 opens another project + let t4 = t3 + Duration::from_secs(10); + db.record_user_activity( + t3..t4, + &[ + (user_2, project_2), + (user_1, project_2), + (user_1, project_1), + ], + ) + .await + .unwrap(); + + // User 3 joins that project + let t5 = t4 + Duration::from_secs(10); + db.record_user_activity( + t4..t5, + &[ + (user_2, project_2), + (user_1, project_2), + (user_1, project_1), + (user_3, project_1), + ], + ) + .await + .unwrap(); + + // User 2 leaves + let t6 = t5 + Duration::from_secs(5); + db.record_user_activity(t5..t6, &[(user_1, project_1), (user_3, project_1)]) + .await + .unwrap(); + + let t7 = t6 + Duration::from_secs(60); + let t8 = t7 + Duration::from_secs(10); + db.record_user_activity(t7..t8, &[(user_1, project_1)]) + .await + .unwrap(); + + assert_eq!( + db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(), + &[ + UserActivitySummary { + id: user_1, + github_login: "u1".to_string(), + project_activity: vec![ + ProjectActivitySummary { + id: project_1, + duration: Duration::from_secs(25), + max_collaborators: 2 + }, + ProjectActivitySummary { + id: project_2, + duration: Duration::from_secs(30), + max_collaborators: 2 + } + ] + }, + UserActivitySummary { + id: user_2, + github_login: "u2".to_string(), + project_activity: vec![ProjectActivitySummary { + id: project_2, + duration: Duration::from_secs(50), + max_collaborators: 2 + }] + }, + UserActivitySummary { + id: user_3, + github_login: "u3".to_string(), + project_activity: vec![ProjectActivitySummary { + id: project_1, + duration: Duration::from_secs(15), + max_collaborators: 2 + }] + }, + ] + ); + + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(56), false) + .await + .unwrap(), + 0 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(56), true) + .await + .unwrap(), + 0 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(54), false) + .await + .unwrap(), + 1 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(54), true) + .await + .unwrap(), + 1 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(30), false) + .await + .unwrap(), + 2 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(30), true) + .await + .unwrap(), + 2 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(10), false) + .await + .unwrap(), + 3 + ); + assert_eq!( + db.get_active_user_count(t0..t6, Duration::from_secs(10), true) + 
.await + .unwrap(), + 3 + ); + assert_eq!( + db.get_active_user_count(t0..t1, Duration::from_secs(5), false) + .await + .unwrap(), + 1 + ); + assert_eq!( + db.get_active_user_count(t0..t1, Duration::from_secs(5), true) + .await + .unwrap(), + 0 + ); + + assert_eq!( + db.get_user_activity_timeline(t3..t6, user_1).await.unwrap(), + &[ + UserActivityPeriod { + project_id: project_1, + start: t3, + end: t6, + extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), + }, + UserActivityPeriod { + project_id: project_2, + start: t3, + end: t5, + extensions: Default::default(), + }, + ] + ); + assert_eq!( + db.get_user_activity_timeline(t0..t8, user_1).await.unwrap(), + &[ + UserActivityPeriod { + project_id: project_2, + start: t2, + end: t5, + extensions: Default::default(), + }, + UserActivityPeriod { + project_id: project_1, + start: t3, + end: t6, + extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), + }, + UserActivityPeriod { + project_id: project_1, + start: t7, + end: t8, + extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]), + }, + ] + ); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_recent_channel_messages() { + for test_db in [ + TestDb::postgres().await, + TestDb::fake(build_background_executor()), + ] { + let db = test_db.db(); + let user = db.create_user("u", "u@example.com", false).await.unwrap(); + let org = db.create_org("org", "org").await.unwrap(); + let channel = db.create_org_channel(org, "channel").await.unwrap(); + for i in 0..10 { + db.create_channel_message(channel, user, &i.to_string(), OffsetDateTime::now_utc(), i) + .await + .unwrap(); + } + + let messages = db.get_channel_messages(channel, 5, None).await.unwrap(); + assert_eq!( + messages.iter().map(|m| &m.body).collect::>(), + ["5", "6", "7", "8", "9"] + ); + + let prev_messages = db + .get_channel_messages(channel, 4, Some(messages[0].id)) + .await + .unwrap(); + assert_eq!( + prev_messages.iter().map(|m| &m.body).collect::>(), + ["1", "2", "3", "4"] + ); + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_channel_message_nonces() { + for test_db in [ + TestDb::postgres().await, + TestDb::fake(build_background_executor()), + ] { + let db = test_db.db(); + let user = db.create_user("u", "u@example.com", false).await.unwrap(); + let org = db.create_org("org", "org").await.unwrap(); + let channel = db.create_org_channel(org, "channel").await.unwrap(); + + let msg1_id = db + .create_channel_message(channel, user, "1", OffsetDateTime::now_utc(), 1) + .await + .unwrap(); + let msg2_id = db + .create_channel_message(channel, user, "2", OffsetDateTime::now_utc(), 2) + .await + .unwrap(); + let msg3_id = db + .create_channel_message(channel, user, "3", OffsetDateTime::now_utc(), 1) + .await + .unwrap(); + let msg4_id = db + .create_channel_message(channel, user, "4", OffsetDateTime::now_utc(), 2) + .await + .unwrap(); + + assert_ne!(msg1_id, msg2_id); + assert_eq!(msg1_id, msg3_id); + assert_eq!(msg2_id, msg4_id); + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_create_access_tokens() { + let test_db = TestDb::postgres().await; + let db = test_db.db(); + let user = db.create_user("u1", "u1@example.com", false).await.unwrap(); + + db.create_access_token_hash(user, "h1", 3).await.unwrap(); + db.create_access_token_hash(user, "h2", 3).await.unwrap(); + assert_eq!( + db.get_access_token_hashes(user).await.unwrap(), + &["h2".to_string(), "h1".to_string()] + ); + + db.create_access_token_hash(user, "h3", 
3).await.unwrap(); + assert_eq!( + db.get_access_token_hashes(user).await.unwrap(), + &["h3".to_string(), "h2".to_string(), "h1".to_string(),] + ); + + db.create_access_token_hash(user, "h4", 3).await.unwrap(); + assert_eq!( + db.get_access_token_hashes(user).await.unwrap(), + &["h4".to_string(), "h3".to_string(), "h2".to_string(),] + ); + + db.create_access_token_hash(user, "h5", 3).await.unwrap(); + assert_eq!( + db.get_access_token_hashes(user).await.unwrap(), + &["h5".to_string(), "h4".to_string(), "h3".to_string()] + ); +} + +#[test] +fn test_fuzzy_like_string() { + assert_eq!(PostgresDb::fuzzy_like_string("abcd"), "%a%b%c%d%"); + assert_eq!(PostgresDb::fuzzy_like_string("x y"), "%x%y%"); + assert_eq!(PostgresDb::fuzzy_like_string(" z "), "%z%"); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_fuzzy_search_users() { + let test_db = TestDb::postgres().await; + let db = test_db.db(); + for github_login in [ + "California", + "colorado", + "oregon", + "washington", + "florida", + "delaware", + "rhode-island", + ] { + db.create_user(github_login, &format!("{github_login}@example.com"), false) + .await + .unwrap(); + } + + assert_eq!( + fuzzy_search_user_names(db, "clr").await, + &["colorado", "California"] + ); + assert_eq!( + fuzzy_search_user_names(db, "ro").await, + &["rhode-island", "colorado", "oregon"], + ); + + async fn fuzzy_search_user_names(db: &Arc, query: &str) -> Vec { + db.fuzzy_search_users(query, 10) + .await + .unwrap() + .into_iter() + .map(|user| user.github_login) + .collect::>() + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_add_contacts() { + for test_db in [ + TestDb::postgres().await, + TestDb::fake(build_background_executor()), + ] { + let db = test_db.db(); + + let user_1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user_2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); + let user_3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); + + // User starts with no contacts + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + vec![Contact::Accepted { + user_id: user_1, + should_notify: false + }], + ); + + // User requests a contact. Both users see the pending request. + db.send_contact_request(user_1, user_2).await.unwrap(); + assert!(!db.has_contact(user_1, user_2).await.unwrap()); + assert!(!db.has_contact(user_2, user_1).await.unwrap()); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Outgoing { user_id: user_2 } + ], + ); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[ + Contact::Incoming { + user_id: user_1, + should_notify: true + }, + Contact::Accepted { + user_id: user_2, + should_notify: false + }, + ] + ); + + // User 2 dismisses the contact request notification without accepting or rejecting. + // We shouldn't notify them again. + db.dismiss_contact_notification(user_1, user_2) + .await + .unwrap_err(); + db.dismiss_contact_notification(user_2, user_1) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[ + Contact::Incoming { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: false + }, + ] + ); + + // User can't accept their own contact request + db.respond_to_contact_request(user_1, user_2, true) + .await + .unwrap_err(); + + // User accepts a contact request. Both users see the contact. 
+ db.respond_to_contact_request(user_2, user_1, true) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: true + } + ], + ); + assert!(db.has_contact(user_1, user_2).await.unwrap()); + assert!(db.has_contact(user_2, user_1).await.unwrap()); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false, + }, + Contact::Accepted { + user_id: user_2, + should_notify: false, + }, + ] + ); + + // Users cannot re-request existing contacts. + db.send_contact_request(user_1, user_2).await.unwrap_err(); + db.send_contact_request(user_2, user_1).await.unwrap_err(); + + // Users can't dismiss notifications of them accepting other users' requests. + db.dismiss_contact_notification(user_2, user_1) + .await + .unwrap_err(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: true, + }, + ] + ); + + // Users can dismiss notifications of other users accepting their requests. + db.dismiss_contact_notification(user_1, user_2) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: false, + }, + ] + ); + + // Users send each other concurrent contact requests and + // see that they are immediately accepted. + db.send_contact_request(user_1, user_3).await.unwrap(); + db.send_contact_request(user_3, user_1).await.unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: false, + }, + Contact::Accepted { + user_id: user_3, + should_notify: false + }, + ] + ); + assert_eq!( + db.get_contacts(user_3).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_3, + should_notify: false + } + ], + ); + + // User declines a contact request. Both users see that it is gone. 
+ db.send_contact_request(user_2, user_3).await.unwrap(); + db.respond_to_contact_request(user_3, user_2, false) + .await + .unwrap(); + assert!(!db.has_contact(user_2, user_3).await.unwrap()); + assert!(!db.has_contact(user_3, user_2).await.unwrap()); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_2, + should_notify: false + } + ] + ); + assert_eq!( + db.get_contacts(user_3).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_1, + should_notify: false + }, + Contact::Accepted { + user_id: user_3, + should_notify: false + } + ], + ); + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_invite_codes() { + let postgres = TestDb::postgres().await; + let db = postgres.db(); + let user1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); + + // Initially, user 1 has no invite code + assert_eq!(db.get_invite_code_for_user(user1).await.unwrap(), None); + + // Setting invite count to 0 when no code is assigned does not assign a new code + db.set_invite_count_for_user(user1, 0).await.unwrap(); + assert!(db.get_invite_code_for_user(user1).await.unwrap().is_none()); + + // User 1 creates an invite code that can be used twice. + db.set_invite_count_for_user(user1, 2).await.unwrap(); + let (invite_code, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(invite_count, 2); + + // User 2 redeems the invite code and becomes a contact of user 1. + let user2_invite = db + .create_invite_from_code(&invite_code, "u2@example.com") + .await + .unwrap(); + let (user2, inviter) = db + .create_user_from_invite( + &user2_invite, + NewUserParams { + github_login: "user2".into(), + invite_count: 7, + }, + ) + .await + .unwrap(); + let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(invite_count, 1); + assert_eq!(inviter, Some(user1)); + assert_eq!( + db.get_contacts(user1).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user2, + should_notify: true + } + ] + ); + assert_eq!( + db.get_contacts(user2).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user2, + should_notify: false + } + ] + ); + assert_eq!( + db.get_invite_code_for_user(user2).await.unwrap().unwrap().1, + 7 + ); + + // User 3 redeems the invite code and becomes a contact of user 1. 
+ let user3_invite = db + .create_invite_from_code(&invite_code, "u3@example.com") + .await + .unwrap(); + let (user3, inviter) = db + .create_user_from_invite( + &user3_invite, + NewUserParams { + github_login: "user-3".into(), + invite_count: 3, + }, + ) + .await + .unwrap(); + let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(invite_count, 0); + assert_eq!(inviter, Some(user1)); + assert_eq!( + db.get_contacts(user1).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user2, + should_notify: true + }, + Contact::Accepted { + user_id: user3, + should_notify: true + } + ] + ); + assert_eq!( + db.get_contacts(user3).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user3, + should_notify: false + }, + ] + ); + assert_eq!( + db.get_invite_code_for_user(user3).await.unwrap().unwrap().1, + 3 + ); + + // Trying to reedem the code for the third time results in an error. + db.create_invite_from_code(&invite_code, "u4@example.com") + .await + .unwrap_err(); + + // Invite count can be updated after the code has been created. + db.set_invite_count_for_user(user1, 2).await.unwrap(); + let (latest_code, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(latest_code, invite_code); // Invite code doesn't change when we increment above 0 + assert_eq!(invite_count, 2); + + // User 4 can now redeem the invite code and becomes a contact of user 1. + let user4_invite = db + .create_invite_from_code(&invite_code, "u4@example.com") + .await + .unwrap(); + let (user4, _) = db + .create_user_from_invite( + &user4_invite, + NewUserParams { + github_login: "user-4".into(), + invite_count: 5, + }, + ) + .await + .unwrap(); + + let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(invite_count, 1); + assert_eq!( + db.get_contacts(user1).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user2, + should_notify: true + }, + Contact::Accepted { + user_id: user3, + should_notify: true + }, + Contact::Accepted { + user_id: user4, + should_notify: true + } + ] + ); + assert_eq!( + db.get_contacts(user4).await.unwrap(), + [ + Contact::Accepted { + user_id: user1, + should_notify: false + }, + Contact::Accepted { + user_id: user4, + should_notify: false + }, + ] + ); + assert_eq!( + db.get_invite_code_for_user(user4).await.unwrap().unwrap().1, + 5 + ); + + // An existing user cannot redeem invite codes. 
+ db.create_invite_from_code(&invite_code, "u2@example.com") + .await + .unwrap_err(); + let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); + assert_eq!(invite_count, 1); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_signups() { + let postgres = TestDb::postgres().await; + let db = postgres.db(); + + // people sign up on the waitlist + for i in 0..8 { + db.create_signup(Signup { + email_address: format!("person-{i}@example.com"), + platform_mac: true, + platform_linux: true, + platform_windows: false, + editor_features: vec!["speed".into()], + programming_languages: vec!["rust".into(), "c".into()], + }) + .await + .unwrap(); + } + + // retrieve the next batch of signup emails to send + let signups_batch1 = db.get_unsent_invites(3).await.unwrap(); + let addresses = signups_batch1 + .iter() + .map(|s| &s.email_address) + .collect::>(); + assert_eq!( + addresses, + &[ + "person-0@example.com", + "person-1@example.com", + "person-2@example.com" + ] + ); + assert_ne!( + signups_batch1[0].email_confirmation_code, + signups_batch1[1].email_confirmation_code + ); + + // the waitlist isn't updated until we record that the emails + // were successfully sent. + let signups_batch = db.get_unsent_invites(3).await.unwrap(); + assert_eq!(signups_batch, signups_batch1); + + // once the emails go out, we can retrieve the next batch + // of signups. + db.record_sent_invites(&signups_batch1).await.unwrap(); + let signups_batch2 = db.get_unsent_invites(3).await.unwrap(); + let addresses = signups_batch2 + .iter() + .map(|s| &s.email_address) + .collect::>(); + assert_eq!( + addresses, + &[ + "person-3@example.com", + "person-4@example.com", + "person-5@example.com" + ] + ); + + // user completes the signup process by providing their + // github account. + let (user_id, inviter_id) = db + .create_user_from_invite( + &Invite { + email_address: signups_batch1[0].email_address.clone(), + email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), + }, + NewUserParams { + github_login: "person-0".into(), + invite_count: 5, + }, + ) + .await + .unwrap(); + let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); + assert!(inviter_id.is_none()); + assert_eq!(user.github_login, "person-0"); + assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); + assert_eq!(user.invite_count, 5); + + // cannot redeem the same signup again. + db.create_user_from_invite( + &Invite { + email_address: signups_batch1[0].email_address.clone(), + email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(), + }, + NewUserParams { + github_login: "some-other-github_account".into(), + invite_count: 5, + }, + ) + .await + .unwrap_err(); + + // cannot redeem a signup with the wrong confirmation code. 
+ db.create_user_from_invite( + &Invite { + email_address: signups_batch1[1].email_address.clone(), + email_confirmation_code: "the-wrong-code".to_string(), + }, + NewUserParams { + github_login: "person-1".into(), + invite_count: 5, + }, + ) + .await + .unwrap_err(); +} + +fn build_background_executor() -> Arc { + Deterministic::new(0).build_background() +} diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 6b512d950f..1a4e4381c1 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -1,5 +1,5 @@ use crate::{ - db::{tests::TestDb, ProjectId, UserId}, + db::{ProjectId, TestDb, UserId}, rpc::{Executor, Server, Store}, AppState, }; @@ -4640,7 +4640,10 @@ async fn test_random_collaboration( let mut server = TestServer::start(cx.foreground(), cx.background()).await; let db = server.app_state.db.clone(); - let host_user_id = db.create_user("host", None, false).await.unwrap(); + let host_user_id = db + .create_user("host", "host@example.com", false) + .await + .unwrap(); let mut available_guests = vec![ "guest-1".to_string(), "guest-2".to_string(), @@ -4649,7 +4652,10 @@ async fn test_random_collaboration( ]; for username in &available_guests { - let guest_user_id = db.create_user(username, None, false).await.unwrap(); + let guest_user_id = db + .create_user(username, &format!("{username}@example.com"), false) + .await + .unwrap(); assert_eq!(*username, format!("guest-{}", guest_user_id)); server .app_state @@ -5157,7 +5163,7 @@ impl TestServer { } else { self.app_state .db - .create_user(name, None, false) + .create_user(name, &format!("{name}@example.com"), false) .await .unwrap() }; diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 2c2c6a94f4..272d52cc95 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -4,6 +4,8 @@ mod db; mod env; mod rpc; +#[cfg(test)] +mod db_tests; #[cfg(test)] mod integration_tests; diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index dab7df3e67..4fc022995f 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -541,27 +541,30 @@ impl Server { pub async fn invite_code_redeemed( self: &Arc, - code: &str, + inviter_id: UserId, invitee_id: UserId, ) -> Result<()> { - let user = self.app_state.db.get_user_for_invite_code(code).await?; - let store = self.store().await; - let invitee_contact = store.contact_for_user(invitee_id, true); - for connection_id in store.connection_ids_for_user(user.id) { - self.peer.send( - connection_id, - proto::UpdateContacts { - contacts: vec![invitee_contact.clone()], - ..Default::default() - }, - )?; - self.peer.send( - connection_id, - proto::UpdateInviteInfo { - url: format!("{}{}", self.app_state.invite_link_prefix, code), - count: user.invite_count as u32, - }, - )?; + if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? 
{ + if let Some(code) = &user.invite_code { + let store = self.store().await; + let invitee_contact = store.contact_for_user(invitee_id, true); + for connection_id in store.connection_ids_for_user(inviter_id) { + self.peer.send( + connection_id, + proto::UpdateContacts { + contacts: vec![invitee_contact.clone()], + ..Default::default() + }, + )?; + self.peer.send( + connection_id, + proto::UpdateInviteInfo { + url: format!("{}{}", self.app_state.invite_link_prefix, &code), + count: user.invite_count as u32, + }, + )?; + } + } } Ok(()) } From 3dd8845bd88f1fafc7d79f712113069c0a98ef11 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 16 Sep 2022 15:37:19 -0700 Subject: [PATCH 003/140] Add waitlist summary API --- crates/collab/src/api.rs | 9 ++++++++- crates/collab/src/db.rs | 37 +++++++++++++++++++++++++++++++++++ crates/collab/src/db_tests.rs | 25 +++++++++++++++++++++-- 3 files changed, 68 insertions(+), 3 deletions(-) diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 26521ceb27..504880f0a3 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -1,6 +1,6 @@ use crate::{ auth, - db::{Invite, NewUserParams, ProjectId, Signup, User, UserId}, + db::{Invite, NewUserParams, ProjectId, Signup, User, UserId, WaitlistSummary}, rpc::{self, ResultExt}, AppState, Error, Result, }; @@ -46,6 +46,7 @@ pub fn routes(rpc_server: &Arc, state: Arc) -> Router>, +) -> Result> { + Ok(Json(app.db.get_waitlist_summary().await?)) +} + #[derive(Deserialize)] pub struct CreateInviteFromCodeParams { invite_code: String, diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 9c1ab84570..1509b15cb2 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -35,6 +35,7 @@ pub trait Db: Send + Sync { async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result; async fn create_signup(&self, signup: Signup) -> Result<()>; + async fn get_waitlist_summary(&self) -> Result; async fn get_unsent_invites(&self, count: usize) -> Result>; async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()>; async fn create_user_from_invite( @@ -384,6 +385,26 @@ impl Db for PostgresDb { Ok(()) } + async fn get_waitlist_summary(&self) -> Result { + Ok(sqlx::query_as( + " + SELECT + COUNT(*) as count, + COALESCE(SUM(CASE WHEN platform_linux THEN 1 ELSE 0 END), 0) as linux_count, + COALESCE(SUM(CASE WHEN platform_mac THEN 1 ELSE 0 END), 0) as mac_count, + COALESCE(SUM(CASE WHEN platform_windows THEN 1 ELSE 0 END), 0) as windows_count + FROM ( + SELECT * + FROM signups + WHERE + NOT email_confirmation_sent + ) AS unsent + ", + ) + .fetch_one(&self.pool) + .await?) 
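// Editorial note on the summary query just above (this backs the new
// GET /user/waitlist_summary admin route added earlier in this commit): the
// inner SELECT keeps only signups whose confirmation email has not gone out
// yet, so the result describes the outstanding waitlist rather than every
// signup ever recorded. Each platform_* column is a boolean flag, and the
// SUM(CASE WHEN ... THEN 1 ELSE 0 END) expressions turn those flags into
// per-platform totals returned alongside the overall COUNT(*); the COALESCE
// wrappers keep the counts at zero instead of NULL when the waitlist is empty.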
+ } + async fn get_unsent_invites(&self, count: usize) -> Result> { Ok(sqlx::query_as( " @@ -1630,6 +1651,18 @@ pub struct Signup { pub programming_languages: Vec, } +#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)] +pub struct WaitlistSummary { + #[sqlx(default)] + pub count: i64, + #[sqlx(default)] + pub linux_count: i64, + #[sqlx(default)] + pub mac_count: i64, + #[sqlx(default)] + pub windows_count: i64, +} + #[derive(FromRow, PartialEq, Debug, Serialize, Deserialize)] pub struct Invite { pub email_address: String, @@ -1812,6 +1845,10 @@ mod test { unimplemented!() } + async fn get_waitlist_summary(&self) -> Result { + unimplemented!() + } + async fn get_unsent_invites(&self, _count: usize) -> Result> { unimplemented!() } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index aa9a0b6995..4ff8bbd0f6 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -966,8 +966,8 @@ async fn test_signups() { db.create_signup(Signup { email_address: format!("person-{i}@example.com"), platform_mac: true, - platform_linux: true, - platform_windows: false, + platform_linux: i % 2 == 0, + platform_windows: i % 4 == 0, editor_features: vec!["speed".into()], programming_languages: vec!["rust".into(), "c".into()], }) @@ -975,6 +975,16 @@ async fn test_signups() { .unwrap(); } + assert_eq!( + db.get_waitlist_summary().await.unwrap(), + WaitlistSummary { + count: 8, + mac_count: 8, + linux_count: 4, + windows_count: 2, + } + ); + // retrieve the next batch of signup emails to send let signups_batch1 = db.get_unsent_invites(3).await.unwrap(); let addresses = signups_batch1 @@ -1016,6 +1026,17 @@ async fn test_signups() { ] ); + // the sent invites are excluded from the summary. + assert_eq!( + db.get_waitlist_summary().await.unwrap(), + WaitlistSummary { + count: 5, + mac_count: 5, + linux_count: 2, + windows_count: 1, + } + ); + // user completes the signup process by providing their // github account. 
let (user_id, inviter_id) = db From 963ced1dd8f46dfc8f50c1cccf7799652008a6f6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 16 Sep 2022 15:45:10 -0700 Subject: [PATCH 004/140] Preserve metrics_id from signup to user record --- .../migrations/20220913211150_create_signups.down.sql | 2 ++ .../migrations/20220913211150_create_signups.up.sql | 3 +++ crates/collab/src/db.rs | 9 +++++---- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql index 6ef51842c9..14b4e43cea 100644 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -4,3 +4,5 @@ ALTER TABLE users DROP COLUMN metrics_id; DROP SEQUENCE metrics_id_seq; + +DROP INDEX index_users_on_email_address; \ No newline at end of file diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql index 9acb313fd6..6c9380275d 100644 --- a/crates/collab/migrations/20220913211150_create_signups.up.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -25,3 +25,6 @@ CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_con ALTER TABLE "users" ADD "metrics_id" INTEGER DEFAULT nextval('metrics_id_seq'); + +UPDATE users +SET metrics_id = nextval('metrics_id_seq'); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 1509b15cb2..c89a8ba0fc 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -448,9 +448,9 @@ impl Db for PostgresDb { ) -> Result<(UserId, Option)> { let mut tx = self.pool.begin().await?; - let (signup_id, inviting_user_id): (i32, Option) = sqlx::query_as( + let (signup_id, metrics_id, inviting_user_id): (i32, i32, Option) = sqlx::query_as( " - SELECT id, inviting_user_id + SELECT id, metrics_id, inviting_user_id FROM signups WHERE email_address = $1 AND @@ -467,9 +467,9 @@ impl Db for PostgresDb { let user_id: UserId = sqlx::query_scalar( " INSERT INTO users - (email_address, github_login, admin, invite_count, invite_code) + (email_address, github_login, admin, invite_count, invite_code, metrics_id) VALUES - ($1, $2, 'f', $3, $4) + ($1, $2, 'f', $3, $4, $5) RETURNING id ", ) @@ -477,6 +477,7 @@ impl Db for PostgresDb { .bind(&user.github_login) .bind(&user.invite_count) .bind(random_invite_code()) + .bind(metrics_id) .fetch_one(&mut tx) .await?; From e77263a3c7d7db9429f8da235f3f951bf342fdd9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 19 Sep 2022 14:34:37 -0700 Subject: [PATCH 005/140] Remove bulk user creation admin API --- crates/collab/src/api.rs | 37 -------------------- crates/collab/src/db.rs | 42 +---------------------- crates/collab/src/db_tests.rs | 64 ----------------------------------- 3 files changed, 1 insertion(+), 142 deletions(-) diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 504880f0a3..a390b4392a 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -30,7 +30,6 @@ pub fn routes(rpc_server: &Arc, state: Arc) -> Router, -} - -#[derive(Deserialize)] -struct CreateUsersEntry { - github_login: String, - email_address: String, - invite_count: usize, -} - -async fn create_users( - Json(params): Json, - Extension(app): Extension>, -) -> Result>> { - let user_ids = app - .db - .create_users( - params - .users - .into_iter() - .map(|params| { - ( - params.github_login, - params.email_address, - 
params.invite_count, - ) - }) - .collect(), - ) - .await?; - let users = app.db.get_users_by_ids(user_ids).await?; - Ok(Json(users)) -} - #[derive(Debug, Deserialize)] struct GetUsersWithNoInvites { invited_by_another_user: bool, diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index c89a8ba0fc..85f13a6a11 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -6,7 +6,7 @@ use collections::HashMap; use futures::StreamExt; use serde::{Deserialize, Serialize}; pub use sqlx::postgres::PgPoolOptions as DbOptions; -use sqlx::{types::Uuid, FromRow, QueryBuilder, Row}; +use sqlx::{types::Uuid, FromRow, QueryBuilder}; use std::{cmp, ops::Range, time::Duration}; use time::{OffsetDateTime, PrimitiveDateTime}; @@ -19,7 +19,6 @@ pub trait Db: Send + Sync { admin: bool, ) -> Result; async fn get_all_users(&self, page: u32, limit: u32) -> Result>; - async fn create_users(&self, users: Vec<(String, String, usize)>) -> Result>; async fn fuzzy_search_users(&self, query: &str, limit: u32) -> Result>; async fn get_user_by_id(&self, id: UserId) -> Result>; async fn get_users_by_ids(&self, ids: Vec) -> Result>; @@ -225,41 +224,6 @@ impl Db for PostgresDb { .await?) } - async fn create_users(&self, users: Vec<(String, String, usize)>) -> Result> { - let mut query = QueryBuilder::new( - "INSERT INTO users (github_login, email_address, admin, invite_code, invite_count)", - ); - query.push_values( - users, - |mut query, (github_login, email_address, invite_count)| { - query - .push_bind(github_login) - .push_bind(email_address) - .push_bind(false) - .push_bind(random_invite_code()) - .push_bind(invite_count as i32); - }, - ); - query.push( - " - ON CONFLICT (github_login) DO UPDATE SET - github_login = excluded.github_login, - invite_count = excluded.invite_count, - invite_code = CASE WHEN users.invite_code IS NULL - THEN excluded.invite_code - ELSE users.invite_code - END - RETURNING id - ", - ); - - let rows = query.build().fetch_all(&self.pool).await?; - Ok(rows - .into_iter() - .filter_map(|row| row.try_get::(0).ok()) - .collect()) - } - async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result> { let like_string = Self::fuzzy_like_string(name_query); let query = " @@ -1789,10 +1753,6 @@ mod test { unimplemented!() } - async fn create_users(&self, _users: Vec<(String, String, usize)>) -> Result> { - unimplemented!() - } - async fn fuzzy_search_users(&self, _: &str, _: u32) -> Result> { unimplemented!() } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 4ff8bbd0f6..16eba2fb22 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -55,70 +55,6 @@ async fn test_get_users_by_ids() { } } -#[tokio::test(flavor = "multi_thread")] -async fn test_create_users() { - let db = TestDb::postgres().await; - let db = db.db(); - - // Create the first batch of users, ensuring invite counts are assigned - // correctly and the respective invite codes are unique. 
- let user_ids_batch_1 = db - .create_users(vec![ - ("user1".to_string(), "hi@user1.com".to_string(), 5), - ("user2".to_string(), "hi@user2.com".to_string(), 4), - ("user3".to_string(), "hi@user3.com".to_string(), 3), - ]) - .await - .unwrap(); - assert_eq!(user_ids_batch_1.len(), 3); - - let users = db.get_users_by_ids(user_ids_batch_1.clone()).await.unwrap(); - assert_eq!(users.len(), 3); - assert_eq!(users[0].github_login, "user1"); - assert_eq!(users[0].email_address.as_deref(), Some("hi@user1.com")); - assert_eq!(users[0].invite_count, 5); - assert_eq!(users[1].github_login, "user2"); - assert_eq!(users[1].email_address.as_deref(), Some("hi@user2.com")); - assert_eq!(users[1].invite_count, 4); - assert_eq!(users[2].github_login, "user3"); - assert_eq!(users[2].email_address.as_deref(), Some("hi@user3.com")); - assert_eq!(users[2].invite_count, 3); - - let invite_code_1 = users[0].invite_code.clone().unwrap(); - let invite_code_2 = users[1].invite_code.clone().unwrap(); - let invite_code_3 = users[2].invite_code.clone().unwrap(); - assert_ne!(invite_code_1, invite_code_2); - assert_ne!(invite_code_1, invite_code_3); - assert_ne!(invite_code_2, invite_code_3); - - // Create the second batch of users and include a user that is already in the database, ensuring - // the invite count for the existing user is updated without changing their invite code. - let user_ids_batch_2 = db - .create_users(vec![ - ("user2".to_string(), "hi@user2.com".to_string(), 10), - ("user4".to_string(), "hi@user4.com".to_string(), 2), - ]) - .await - .unwrap(); - assert_eq!(user_ids_batch_2.len(), 2); - assert_eq!(user_ids_batch_2[0], user_ids_batch_1[1]); - - let users = db.get_users_by_ids(user_ids_batch_2).await.unwrap(); - assert_eq!(users.len(), 2); - assert_eq!(users[0].github_login, "user2"); - assert_eq!(users[0].email_address.as_deref(), Some("hi@user2.com")); - assert_eq!(users[0].invite_count, 10); - assert_eq!(users[0].invite_code, Some(invite_code_2.clone())); - assert_eq!(users[1].github_login, "user4"); - assert_eq!(users[1].email_address.as_deref(), Some("hi@user4.com")); - assert_eq!(users[1].invite_count, 2); - - let invite_code_4 = users[1].invite_code.clone().unwrap(); - assert_ne!(invite_code_4, invite_code_1); - assert_ne!(invite_code_4, invite_code_2); - assert_ne!(invite_code_4, invite_code_3); -} - #[tokio::test(flavor = "multi_thread")] async fn test_worktree_extensions() { let test_db = TestDb::postgres().await; From 9886259b3aea123000307c5e11e089ed98a8924d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 20 Sep 2022 09:44:56 -0700 Subject: [PATCH 006/140] Start storing users' github user id --- .../20220913211150_create_signups.down.sql | 4 +- .../20220913211150_create_signups.up.sql | 5 +- crates/collab/src/api.rs | 14 +- crates/collab/src/db.rs | 25 +- crates/collab/src/db_tests.rs | 226 ++++++++++++++++-- crates/collab/src/integration_tests.rs | 30 ++- 6 files changed, 262 insertions(+), 42 deletions(-) diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql index 14b4e43cea..ec02ac3322 100644 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -1,8 +1,10 @@ DROP TABLE signups; ALTER TABLE users + DROP COLUMN github_user_id, DROP COLUMN metrics_id; DROP SEQUENCE metrics_id_seq; -DROP INDEX index_users_on_email_address; \ No newline at end of file +DROP INDEX index_users_on_email_address; +DROP INDEX 
index_users_on_github_user_id; \ No newline at end of file diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql index 6c9380275d..3de683c58e 100644 --- a/crates/collab/migrations/20220913211150_create_signups.up.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -19,12 +19,15 @@ CREATE TABLE IF NOT EXISTS "signups" ( "programming_languages" VARCHAR[] ); -CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address"); CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); ALTER TABLE "users" + ADD "github_user_id" INTEGER, ADD "metrics_id" INTEGER DEFAULT nextval('metrics_id_seq'); +CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); +CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); + UPDATE users SET metrics_id = nextval('metrics_id_seq'); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index a390b4392a..de8ec44c78 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -112,9 +112,11 @@ async fn get_users( #[derive(Deserialize, Debug)] struct CreateUserParams { + github_user_id: i32, github_login: String, email_address: String, email_confirmation_code: Option, + #[serde(default)] invite_count: i32, } @@ -123,6 +125,11 @@ async fn create_user( Extension(app): Extension>, Extension(rpc_server): Extension>, ) -> Result> { + let user = NewUserParams { + github_login: params.github_login, + github_user_id: params.github_user_id, + invite_count: params.invite_count, + }; let (user_id, inviter_id) = // Creating a user via the normal signup process if let Some(email_confirmation_code) = params.email_confirmation_code { @@ -132,10 +139,7 @@ async fn create_user( email_address: params.email_address, email_confirmation_code, }, - NewUserParams { - github_login: params.github_login, - invite_count: params.invite_count, - }, + user, ) .await? 
} @@ -143,7 +147,7 @@ async fn create_user( else { ( app.db - .create_user(¶ms.github_login, ¶ms.email_address, false) + .create_user(¶ms.email_address, false, user) .await?, None, ) diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 85f13a6a11..f31defa577 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -14,9 +14,9 @@ use time::{OffsetDateTime, PrimitiveDateTime}; pub trait Db: Send + Sync { async fn create_user( &self, - github_login: &str, email_address: &str, admin: bool, + params: NewUserParams, ) -> Result; async fn get_all_users(&self, page: u32, limit: u32) -> Result>; async fn fuzzy_search_users(&self, query: &str, limit: u32) -> Result>; @@ -196,19 +196,20 @@ impl Db for PostgresDb { async fn create_user( &self, - github_login: &str, email_address: &str, admin: bool, + params: NewUserParams, ) -> Result { let query = " - INSERT INTO users (github_login, email_address, admin) - VALUES ($1, $2, $3) + INSERT INTO users (email_address, github_login, github_user_id, admin) + VALUES ($1, $2, $3, $4) ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login RETURNING id "; Ok(sqlx::query_scalar(query) - .bind(github_login) .bind(email_address) + .bind(params.github_login) + .bind(params.github_user_id) .bind(admin) .fetch_one(&self.pool) .await @@ -431,14 +432,15 @@ impl Db for PostgresDb { let user_id: UserId = sqlx::query_scalar( " INSERT INTO users - (email_address, github_login, admin, invite_count, invite_code, metrics_id) + (email_address, github_login, github_user_id, admin, invite_count, invite_code, metrics_id) VALUES - ($1, $2, 'f', $3, $4, $5) + ($1, $2, $3, 'f', $4, $5, $6) RETURNING id ", ) .bind(&invite.email_address) .bind(&user.github_login) + .bind(&user.github_user_id) .bind(&user.invite_count) .bind(random_invite_code()) .bind(metrics_id) @@ -1508,6 +1510,7 @@ id_type!(UserId); pub struct User { pub id: UserId, pub github_login: String, + pub github_user_id: i32, pub email_address: Option, pub admin: bool, pub invite_code: Option, @@ -1637,6 +1640,7 @@ pub struct Invite { #[derive(Debug, Serialize, Deserialize)] pub struct NewUserParams { pub github_login: String, + pub github_user_id: i32, pub invite_count: i32, } @@ -1719,16 +1723,16 @@ mod test { impl Db for FakeDb { async fn create_user( &self, - github_login: &str, email_address: &str, admin: bool, + params: NewUserParams, ) -> Result { self.background.simulate_random_delay().await; let mut users = self.users.lock(); if let Some(user) = users .values() - .find(|user| user.github_login == github_login) + .find(|user| user.github_login == params.github_login) { Ok(user.id) } else { @@ -1737,7 +1741,8 @@ mod test { user_id, User { id: user_id, - github_login: github_login.to_string(), + github_login: params.github_login, + github_user_id: params.github_user_id, email_address: Some(email_address.to_string()), admin, invite_code: None, diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 16eba2fb22..87033fab38 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -12,10 +12,54 @@ async fn test_get_users_by_ids() { ] { let db = test_db.db(); - let user1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); - let user2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); - let user3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); - let user4 = db.create_user("u4", "u4@example.com", false).await.unwrap(); + let user1 = db + .create_user( + "u1@example.com", + false, + 
NewUserParams { + github_login: "u1".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user2 = db + .create_user( + "u2@example.com", + false, + NewUserParams { + github_login: "u2".into(), + github_user_id: 2, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user3 = db + .create_user( + "u3@example.com", + false, + NewUserParams { + github_login: "u3".into(), + github_user_id: 3, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user4 = db + .create_user( + "u4@example.com", + false, + NewUserParams { + github_login: "u4".into(), + github_user_id: 4, + invite_count: 0, + }, + ) + .await + .unwrap(); assert_eq!( db.get_users_by_ids(vec![user1, user2, user3, user4]) @@ -25,6 +69,7 @@ async fn test_get_users_by_ids() { User { id: user1, github_login: "u1".to_string(), + github_user_id: 1, email_address: Some("u1@example.com".to_string()), admin: false, ..Default::default() @@ -32,6 +77,7 @@ async fn test_get_users_by_ids() { User { id: user2, github_login: "u2".to_string(), + github_user_id: 2, email_address: Some("u2@example.com".to_string()), admin: false, ..Default::default() @@ -39,6 +85,7 @@ async fn test_get_users_by_ids() { User { id: user3, github_login: "u3".to_string(), + github_user_id: 3, email_address: Some("u3@example.com".to_string()), admin: false, ..Default::default() @@ -46,6 +93,7 @@ async fn test_get_users_by_ids() { User { id: user4, github_login: "u4".to_string(), + github_user_id: 4, email_address: Some("u4@example.com".to_string()), admin: false, ..Default::default() @@ -60,7 +108,18 @@ async fn test_worktree_extensions() { let test_db = TestDb::postgres().await; let db = test_db.db(); - let user = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); let project = db.register_project(user).await.unwrap(); db.update_worktree_extensions(project, 100, Default::default()) @@ -120,9 +179,42 @@ async fn test_user_activity() { let test_db = TestDb::postgres().await; let db = test_db.db(); - let user_1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); - let user_2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); - let user_3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); + let user_1 = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user_2 = db + .create_user( + "u2@example.com", + false, + NewUserParams { + github_login: "u2".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user_3 = db + .create_user( + "u3@example.com", + false, + NewUserParams { + github_login: "u3".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); let project_1 = db.register_project(user_1).await.unwrap(); db.update_worktree_extensions( project_1, @@ -340,7 +432,18 @@ async fn test_recent_channel_messages() { TestDb::fake(build_background_executor()), ] { let db = test_db.db(); - let user = db.create_user("u", "u@example.com", false).await.unwrap(); + let user = db + .create_user( + "u@example.com", + false, + NewUserParams { + github_login: "u".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap(); let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); 
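// The loop below seeds the channel with ten messages (bodies "0" through "9")
// so the pagination assertions that follow have something to page through:
// the first fetch asks for the five most recent bodies ("5" through "9"), and
// passing the id of the oldest returned message pages back to the four that
// precede it ("1" through "4").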
for i in 0..10 { @@ -373,7 +476,18 @@ async fn test_channel_message_nonces() { TestDb::fake(build_background_executor()), ] { let db = test_db.db(); - let user = db.create_user("u", "u@example.com", false).await.unwrap(); + let user = db + .create_user( + "user@example.com", + false, + NewUserParams { + github_login: "user".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap(); let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); @@ -404,7 +518,18 @@ async fn test_channel_message_nonces() { async fn test_create_access_tokens() { let test_db = TestDb::postgres().await; let db = test_db.db(); - let user = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap(); db.create_access_token_hash(user, "h1", 3).await.unwrap(); db.create_access_token_hash(user, "h2", 3).await.unwrap(); @@ -443,7 +568,7 @@ fn test_fuzzy_like_string() { async fn test_fuzzy_search_users() { let test_db = TestDb::postgres().await; let db = test_db.db(); - for github_login in [ + for (i, github_login) in [ "California", "colorado", "oregon", @@ -451,10 +576,21 @@ async fn test_fuzzy_search_users() { "florida", "delaware", "rhode-island", - ] { - db.create_user(github_login, &format!("{github_login}@example.com"), false) - .await - .unwrap(); + ] + .into_iter() + .enumerate() + { + db.create_user( + &format!("{github_login}@example.com"), + false, + NewUserParams { + github_login: github_login.into(), + github_user_id: i as i32, + invite_count: 0, + }, + ) + .await + .unwrap(); } assert_eq!( @@ -484,9 +620,42 @@ async fn test_add_contacts() { ] { let db = test_db.db(); - let user_1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); - let user_2 = db.create_user("u2", "u2@example.com", false).await.unwrap(); - let user_3 = db.create_user("u3", "u3@example.com", false).await.unwrap(); + let user_1 = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user_2 = db + .create_user( + "u2@example.com", + false, + NewUserParams { + github_login: "u2".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user_3 = db + .create_user( + "u3@example.com", + false, + NewUserParams { + github_login: "u3".into(), + github_user_id: 2, + invite_count: 0, + }, + ) + .await + .unwrap(); // User starts with no contacts assert_eq!( @@ -700,7 +869,18 @@ async fn test_add_contacts() { async fn test_invite_codes() { let postgres = TestDb::postgres().await; let db = postgres.db(); - let user1 = db.create_user("u1", "u1@example.com", false).await.unwrap(); + let user1 = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); // Initially, user 1 has no invite code assert_eq!(db.get_invite_code_for_user(user1).await.unwrap(), None); @@ -724,6 +904,7 @@ async fn test_invite_codes() { &user2_invite, NewUserParams { github_login: "user2".into(), + github_user_id: 2, invite_count: 7, }, ) @@ -773,6 +954,7 @@ async fn test_invite_codes() { &user3_invite, NewUserParams { github_login: "user-3".into(), + github_user_id: 3, invite_count: 3, }, ) @@ -837,6 +1019,7 @@ async fn test_invite_codes() { &user4_invite, 
NewUserParams { github_login: "user-4".into(), + github_user_id: 4, invite_count: 5, }, ) @@ -983,6 +1166,7 @@ async fn test_signups() { }, NewUserParams { github_login: "person-0".into(), + github_user_id: 0, invite_count: 5, }, ) @@ -1002,6 +1186,7 @@ async fn test_signups() { }, NewUserParams { github_login: "some-other-github_account".into(), + github_user_id: 1, invite_count: 5, }, ) @@ -1016,6 +1201,7 @@ async fn test_signups() { }, NewUserParams { github_login: "person-1".into(), + github_user_id: 2, invite_count: 5, }, ) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 1a4e4381c1..94811b0951 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -1,5 +1,5 @@ use crate::{ - db::{ProjectId, TestDb, UserId}, + db::{NewUserParams, ProjectId, TestDb, UserId}, rpc::{Executor, Server, Store}, AppState, }; @@ -4641,7 +4641,15 @@ async fn test_random_collaboration( let mut server = TestServer::start(cx.foreground(), cx.background()).await; let db = server.app_state.db.clone(); let host_user_id = db - .create_user("host", "host@example.com", false) + .create_user( + "host@example.com", + false, + NewUserParams { + github_login: "host".into(), + github_user_id: 0, + invite_count: 0, + }, + ) .await .unwrap(); let mut available_guests = vec![ @@ -4651,9 +4659,17 @@ async fn test_random_collaboration( "guest-4".to_string(), ]; - for username in &available_guests { + for (ix, username) in available_guests.iter().enumerate() { let guest_user_id = db - .create_user(username, &format!("{username}@example.com"), false) + .create_user( + &format!("{username}@example.com"), + false, + NewUserParams { + github_login: username.into(), + github_user_id: ix as i32, + invite_count: 0, + }, + ) .await .unwrap(); assert_eq!(*username, format!("guest-{}", guest_user_id)); @@ -5163,7 +5179,11 @@ impl TestServer { } else { self.app_state .db - .create_user(name, &format!("{name}@example.com"), false) + .create_user(&format!("{name}@example.com"), false, NewUserParams { + github_login: name.into(), + github_user_id: 0, + invite_count: 0, + }) .await .unwrap() }; From 1877fc234b2add3549d144f1c6ae101a11a36b5a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 20 Sep 2022 15:40:56 -0700 Subject: [PATCH 007/140] Update user retrieval API to take both github user id and github login --- crates/collab/src/api.rs | 39 ++++++------ crates/collab/src/db.rs | 85 ++++++++++++++++++++++---- crates/collab/src/db_tests.rs | 58 ++++++++++++++++++ crates/collab/src/integration_tests.rs | 20 ++++-- crates/collab/src/rpc.rs | 2 +- 5 files changed, 165 insertions(+), 39 deletions(-) diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index de8ec44c78..73293e0b2c 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -25,10 +25,7 @@ use tracing::instrument; pub fn routes(rpc_server: &Arc, state: Arc) -> Router { Router::new() .route("/users", get(get_users).post(create_user)) - .route( - "/users/:id", - put(update_user).delete(destroy_user).get(get_user), - ) + .route("/users/:id", put(update_user).delete(destroy_user)) .route("/users/:id/access_tokens", post(create_access_token)) .route("/users_with_no_invites", get(get_users_with_no_invites)) .route("/invite_codes/:code", get(get_user_for_invite_code)) @@ -90,6 +87,8 @@ pub async fn validate_api_token(req: Request, next: Next) -> impl IntoR #[derive(Debug, Deserialize)] struct GetUsersQueryParams { + github_user_id: Option, + github_login: Option, 
query: Option, page: Option, limit: Option, @@ -99,6 +98,14 @@ async fn get_users( Query(params): Query, Extension(app): Extension>, ) -> Result>> { + if let Some(github_login) = ¶ms.github_login { + let user = app + .db + .get_user_by_github_account(github_login, params.github_user_id) + .await?; + return Ok(Json(Vec::from_iter(user))); + } + let limit = params.limit.unwrap_or(100); let users = if let Some(query) = params.query { app.db.fuzzy_search_users(&query, limit).await? @@ -205,18 +212,6 @@ async fn destroy_user( Ok(()) } -async fn get_user( - Path(login): Path, - Extension(app): Extension>, -) -> Result> { - let user = app - .db - .get_user_by_github_login(&login) - .await? - .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "User not found".to_string()))?; - Ok(Json(user)) -} - #[derive(Debug, Deserialize)] struct GetUsersWithNoInvites { invited_by_another_user: bool, @@ -351,22 +346,24 @@ struct CreateAccessTokenResponse { } async fn create_access_token( - Path(login): Path, + Path(user_id): Path, Query(params): Query, Extension(app): Extension>, ) -> Result> { - // request.require_token().await?; - let user = app .db - .get_user_by_github_login(&login) + .get_user_by_id(user_id) .await? .ok_or_else(|| anyhow!("user not found"))?; let mut user_id = user.id; if let Some(impersonate) = params.impersonate { if user.admin { - if let Some(impersonated_user) = app.db.get_user_by_github_login(&impersonate).await? { + if let Some(impersonated_user) = app + .db + .get_user_by_github_account(&impersonate, None) + .await? + { user_id = impersonated_user.id; } else { return Err(Error::Http( diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index f31defa577..70dc0c4e5b 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -23,7 +23,11 @@ pub trait Db: Send + Sync { async fn get_user_by_id(&self, id: UserId) -> Result>; async fn get_users_by_ids(&self, ids: Vec) -> Result>; async fn get_users_with_no_invites(&self, invited_by_another_user: bool) -> Result>; - async fn get_user_by_github_login(&self, github_login: &str) -> Result>; + async fn get_user_by_github_account( + &self, + github_login: &str, + github_user_id: Option, + ) -> Result>; async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()>; async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()>; async fn destroy_user(&self, id: UserId) -> Result<()>; @@ -274,12 +278,53 @@ impl Db for PostgresDb { Ok(sqlx::query_as(&query).fetch_all(&self.pool).await?) } - async fn get_user_by_github_login(&self, github_login: &str) -> Result> { - let query = "SELECT * FROM users WHERE github_login = $1 LIMIT 1"; - Ok(sqlx::query_as(query) + async fn get_user_by_github_account( + &self, + github_login: &str, + github_user_id: Option, + ) -> Result> { + if let Some(github_user_id) = github_user_id { + let mut user = sqlx::query_as::<_, User>( + " + UPDATE users + SET github_login = $1 + WHERE github_user_id = $2 + RETURNING * + ", + ) + .bind(github_login) + .bind(github_user_id) + .fetch_optional(&self.pool) + .await?; + + if user.is_none() { + user = sqlx::query_as::<_, User>( + " + UPDATE users + SET github_user_id = $1 + WHERE github_login = $2 + RETURNING * + ", + ) + .bind(github_user_id) + .bind(github_login) + .fetch_optional(&self.pool) + .await?; + } + + Ok(user) + } else { + Ok(sqlx::query_as( + " + SELECT * FROM users + WHERE github_login = $1 + LIMIT 1 + ", + ) .bind(github_login) .fetch_optional(&self.pool) .await?) 
+ } } async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()> { @@ -1777,14 +1822,32 @@ mod test { unimplemented!() } - async fn get_user_by_github_login(&self, github_login: &str) -> Result> { + async fn get_user_by_github_account( + &self, + github_login: &str, + github_user_id: Option, + ) -> Result> { self.background.simulate_random_delay().await; - Ok(self - .users - .lock() - .values() - .find(|user| user.github_login == github_login) - .cloned()) + if let Some(github_user_id) = github_user_id { + for user in self.users.lock().values_mut() { + if user.github_user_id == github_user_id { + user.github_login = github_login.into(); + return Ok(Some(user.clone())); + } + if user.github_login == github_login { + user.github_user_id = github_user_id; + return Ok(Some(user.clone())); + } + } + Ok(None) + } else { + Ok(self + .users + .lock() + .values() + .find(|user| user.github_login == github_login) + .cloned()) + } } async fn set_user_is_admin(&self, _id: UserId, _is_admin: bool) -> Result<()> { diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 87033fab38..49ac053fd8 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -103,6 +103,64 @@ async fn test_get_users_by_ids() { } } +#[tokio::test(flavor = "multi_thread")] +async fn test_get_user_by_github_account() { + for test_db in [ + TestDb::postgres().await, + TestDb::fake(build_background_executor()), + ] { + let db = test_db.db(); + let user_id1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "login1".into(), + github_user_id: 101, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user_id2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "login2".into(), + github_user_id: 102, + invite_count: 0, + }, + ) + .await + .unwrap(); + + let user = db + .get_user_by_github_account("login1", None) + .await + .unwrap() + .unwrap(); + assert_eq!(user.id, user_id1); + assert_eq!(&user.github_login, "login1"); + assert_eq!(user.github_user_id, 101); + + assert!(db + .get_user_by_github_account("non-existent-login", None) + .await + .unwrap() + .is_none()); + + let user = db + .get_user_by_github_account("the-new-login2", Some(102)) + .await + .unwrap() + .unwrap(); + assert_eq!(user.id, user_id2); + assert_eq!(&user.github_login, "the-new-login2"); + assert_eq!(user.github_user_id, 102); + } +} + #[tokio::test(flavor = "multi_thread")] async fn test_worktree_extensions() { let test_db = TestDb::postgres().await; diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 94811b0951..f3d43f277e 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -5173,17 +5173,25 @@ impl TestServer { }); let http = FakeHttpClient::with_404_response(); - let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await + let user_id = if let Ok(Some(user)) = self + .app_state + .db + .get_user_by_github_account(name, None) + .await { user.id } else { self.app_state .db - .create_user(&format!("{name}@example.com"), false, NewUserParams { - github_login: name.into(), - github_user_id: 0, - invite_count: 0, - }) + .create_user( + &format!("{name}@example.com"), + false, + NewUserParams { + github_login: name.into(), + github_user_id: 0, + invite_count: 0, + }, + ) .await .unwrap() }; diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 4fc022995f..5f27352c5a 100644 --- 
a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1404,7 +1404,7 @@ impl Server { let users = match query.len() { 0 => vec![], 1 | 2 => db - .get_user_by_github_login(&query) + .get_user_by_github_account(&query, None) .await? .into_iter() .collect(), From 758875305b09c03ab47a712c28596feeaf91c0ad Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 20 Sep 2022 16:12:27 -0700 Subject: [PATCH 008/140] Add on delete cascade to signups user_id column --- .../collab/migrations/20220913211150_create_signups.down.sql | 1 - crates/collab/migrations/20220913211150_create_signups.up.sql | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql index ec02ac3322..59b20b1128 100644 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -7,4 +7,3 @@ ALTER TABLE users DROP SEQUENCE metrics_id_seq; DROP INDEX index_users_on_email_address; -DROP INDEX index_users_on_github_user_id; \ No newline at end of file diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql index 3de683c58e..5f02bd6887 100644 --- a/crates/collab/migrations/20220913211150_create_signups.up.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -7,8 +7,8 @@ CREATE TABLE IF NOT EXISTS "signups" ( "email_confirmation_sent" BOOLEAN NOT NULL, "metrics_id" INTEGER NOT NULL DEFAULT nextval('metrics_id_seq'), "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - "user_id" INTEGER REFERENCES users (id), - "inviting_user_id" INTEGER REFERENCES users (id), + "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE, + "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL, "platform_mac" BOOLEAN NOT NULL, "platform_linux" BOOLEAN NOT NULL, From 20ec933e23fb2b396e0e00e2589f361737298dbe Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 20 Sep 2022 16:23:02 -0700 Subject: [PATCH 009/140] Proceed gracefully when someone signs up repeatedly --- crates/collab/src/db.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 70dc0c4e5b..517810aa29 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -458,21 +458,29 @@ impl Db for PostgresDb { ) -> Result<(UserId, Option)> { let mut tx = self.pool.begin().await?; - let (signup_id, metrics_id, inviting_user_id): (i32, i32, Option) = sqlx::query_as( + let (signup_id, metrics_id, existing_user_id, inviting_user_id): ( + i32, + i32, + Option, + Option, + ) = sqlx::query_as( " - SELECT id, metrics_id, inviting_user_id + SELECT id, metrics_id, user_id, inviting_user_id FROM signups WHERE email_address = $1 AND - email_confirmation_code = $2 AND - user_id is NULL + email_confirmation_code = $2 ", ) .bind(&invite.email_address) .bind(&invite.email_confirmation_code) .fetch_optional(&mut tx) .await? 
- .ok_or_else(|| anyhow!("no such invite"))?; + .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "no such invite".to_string()))?; + + if existing_user_id.is_some() { + Err(Error::Http(StatusCode::UNPROCESSABLE_ENTITY, "invitation already redeemed".to_string()))?; + } let user_id: UserId = sqlx::query_scalar( " From 7a049f14046ba35253f3a98a61de9c97978e9618 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 21 Sep 2022 10:20:11 -0700 Subject: [PATCH 010/140] Fix error when loading users without github user ids from the db --- crates/collab/src/bin/seed.rs | 25 ++++++++++++++++++------- crates/collab/src/db.rs | 8 ++++---- crates/collab/src/db_tests.rs | 12 ++++++------ 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/crates/collab/src/bin/seed.rs b/crates/collab/src/bin/seed.rs index dba7d14939..b7b3a96710 100644 --- a/crates/collab/src/bin/seed.rs +++ b/crates/collab/src/bin/seed.rs @@ -11,7 +11,7 @@ mod db; #[derive(Debug, Deserialize)] struct GitHubUser { - id: usize, + id: i32, login: String, email: Option, } @@ -26,8 +26,11 @@ async fn main() { let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var"); let client = reqwest::Client::new(); - let current_user = + let mut current_user = fetch_github::(&client, &github_token, "https://api.github.com/user").await; + current_user + .email + .get_or_insert_with(|| "placeholder@example.com".to_string()); let staff_users = fetch_github::>( &client, &github_token, @@ -64,16 +67,24 @@ async fn main() { let mut zed_user_ids = Vec::::new(); for (github_user, admin) in zed_users { if let Some(user) = db - .get_user_by_github_login(&github_user.login) + .get_user_by_github_account(&github_user.login, Some(github_user.id)) .await .expect("failed to fetch user") { zed_user_ids.push(user.id); - } else { + } else if let Some(email) = &github_user.email { zed_user_ids.push( - db.create_user(&github_user.login, github_user.email.as_deref(), admin) - .await - .expect("failed to insert user"), + db.create_user( + email, + admin, + db::NewUserParams { + github_login: github_user.login, + github_user_id: github_user.id, + invite_count: 5, + }, + ) + .await + .expect("failed to insert user"), ); } } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 517810aa29..b4aec0f234 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -1563,7 +1563,7 @@ id_type!(UserId); pub struct User { pub id: UserId, pub github_login: String, - pub github_user_id: i32, + pub github_user_id: Option, pub email_address: Option, pub admin: bool, pub invite_code: Option, @@ -1795,7 +1795,7 @@ mod test { User { id: user_id, github_login: params.github_login, - github_user_id: params.github_user_id, + github_user_id: Some(params.github_user_id), email_address: Some(email_address.to_string()), admin, invite_code: None, @@ -1838,12 +1838,12 @@ mod test { self.background.simulate_random_delay().await; if let Some(github_user_id) = github_user_id { for user in self.users.lock().values_mut() { - if user.github_user_id == github_user_id { + if user.github_user_id == Some(github_user_id) { user.github_login = github_login.into(); return Ok(Some(user.clone())); } if user.github_login == github_login { - user.github_user_id = github_user_id; + user.github_user_id = Some(github_user_id); return Ok(Some(user.clone())); } } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 49ac053fd8..1d4417dd86 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -69,7 +69,7 
@@ async fn test_get_users_by_ids() { User { id: user1, github_login: "u1".to_string(), - github_user_id: 1, + github_user_id: Some(1), email_address: Some("u1@example.com".to_string()), admin: false, ..Default::default() @@ -77,7 +77,7 @@ async fn test_get_users_by_ids() { User { id: user2, github_login: "u2".to_string(), - github_user_id: 2, + github_user_id: Some(2), email_address: Some("u2@example.com".to_string()), admin: false, ..Default::default() @@ -85,7 +85,7 @@ async fn test_get_users_by_ids() { User { id: user3, github_login: "u3".to_string(), - github_user_id: 3, + github_user_id: Some(3), email_address: Some("u3@example.com".to_string()), admin: false, ..Default::default() @@ -93,7 +93,7 @@ async fn test_get_users_by_ids() { User { id: user4, github_login: "u4".to_string(), - github_user_id: 4, + github_user_id: Some(4), email_address: Some("u4@example.com".to_string()), admin: false, ..Default::default() @@ -142,7 +142,7 @@ async fn test_get_user_by_github_account() { .unwrap(); assert_eq!(user.id, user_id1); assert_eq!(&user.github_login, "login1"); - assert_eq!(user.github_user_id, 101); + assert_eq!(user.github_user_id, Some(101)); assert!(db .get_user_by_github_account("non-existent-login", None) @@ -157,7 +157,7 @@ async fn test_get_user_by_github_account() { .unwrap(); assert_eq!(user.id, user_id2); assert_eq!(&user.github_login, "the-new-login2"); - assert_eq!(user.github_user_id, 102); + assert_eq!(user.github_user_id, Some(102)); } } From dac0ce10e59f1f2f11bebbddc33d87d7ab113f01 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 22 Sep 2022 14:37:25 -0700 Subject: [PATCH 011/140] Return the metrics id from the signup-creation API Co-authored-by: Nathan Sobo --- crates/collab/src/api.rs | 11 ++++++++--- crates/collab/src/db.rs | 24 +++++++++++++++--------- crates/collab/src/db_tests.rs | 24 ++++++++++++++---------- 3 files changed, 37 insertions(+), 22 deletions(-) diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 73293e0b2c..51b43119dc 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -396,12 +396,17 @@ async fn get_user_for_invite_code( Ok(Json(app.db.get_user_for_invite_code(&code).await?)) } +#[derive(Serialize)] +struct CreateSignupResponse { + metrics_id: i32, +} + async fn create_signup( Json(params): Json, Extension(app): Extension>, -) -> Result<()> { - app.db.create_signup(params).await?; - Ok(()) +) -> Result> { + let metrics_id = app.db.create_signup(params).await?; + Ok(Json(CreateSignupResponse { metrics_id })) } async fn get_waitlist_summary( diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index b4aec0f234..81c87f213a 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -37,7 +37,7 @@ pub trait Db: Send + Sync { async fn get_user_for_invite_code(&self, code: &str) -> Result; async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result; - async fn create_signup(&self, signup: Signup) -> Result<()>; + async fn create_signup(&self, signup: Signup) -> Result; async fn get_waitlist_summary(&self) -> Result; async fn get_unsent_invites(&self, count: usize) -> Result>; async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()>; @@ -364,8 +364,8 @@ impl Db for PostgresDb { // signups - async fn create_signup(&self, signup: Signup) -> Result<()> { - sqlx::query( + async fn create_signup(&self, signup: Signup) -> Result { + Ok(sqlx::query_scalar( " INSERT INTO signups ( @@ -381,6 +381,7 @@ impl Db for PostgresDb { ) VALUES ($1, $2, 'f', $3, $4, $5, 
'f', $6, $7) + RETURNING id ", ) .bind(&signup.email_address) @@ -390,9 +391,8 @@ impl Db for PostgresDb { .bind(&signup.platform_windows) .bind(&signup.editor_features) .bind(&signup.programming_languages) - .execute(&self.pool) - .await?; - Ok(()) + .fetch_one(&self.pool) + .await?) } async fn get_waitlist_summary(&self) -> Result { @@ -479,7 +479,10 @@ impl Db for PostgresDb { .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "no such invite".to_string()))?; if existing_user_id.is_some() { - Err(Error::Http(StatusCode::UNPROCESSABLE_ENTITY, "invitation already redeemed".to_string()))?; + Err(Error::Http( + StatusCode::UNPROCESSABLE_ENTITY, + "invitation already redeemed".to_string(), + ))?; } let user_id: UserId = sqlx::query_scalar( @@ -1564,6 +1567,7 @@ pub struct User { pub id: UserId, pub github_login: String, pub github_user_id: Option, + pub metrics_id: i32, pub email_address: Option, pub admin: bool, pub invite_code: Option, @@ -1789,7 +1793,8 @@ mod test { { Ok(user.id) } else { - let user_id = UserId(post_inc(&mut *self.next_user_id.lock())); + let id = post_inc(&mut *self.next_user_id.lock()); + let user_id = UserId(id); users.insert( user_id, User { @@ -1797,6 +1802,7 @@ mod test { github_login: params.github_login, github_user_id: Some(params.github_user_id), email_address: Some(email_address.to_string()), + metrics_id: id + 100, admin, invite_code: None, invite_count: 0, @@ -1878,7 +1884,7 @@ mod test { // signups - async fn create_signup(&self, _signup: Signup) -> Result<()> { + async fn create_signup(&self, _signup: Signup) -> Result { unimplemented!() } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 1d4417dd86..64a3626a22 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -1139,17 +1139,20 @@ async fn test_signups() { let db = postgres.db(); // people sign up on the waitlist + let mut signup_metric_ids = Vec::new(); for i in 0..8 { - db.create_signup(Signup { - email_address: format!("person-{i}@example.com"), - platform_mac: true, - platform_linux: i % 2 == 0, - platform_windows: i % 4 == 0, - editor_features: vec!["speed".into()], - programming_languages: vec!["rust".into(), "c".into()], - }) - .await - .unwrap(); + signup_metric_ids.push( + db.create_signup(Signup { + email_address: format!("person-{i}@example.com"), + platform_mac: true, + platform_linux: i % 2 == 0, + platform_windows: i % 4 == 0, + editor_features: vec!["speed".into()], + programming_languages: vec!["rust".into(), "c".into()], + }) + .await + .unwrap(), + ); } assert_eq!( @@ -1235,6 +1238,7 @@ async fn test_signups() { assert_eq!(user.github_login, "person-0"); assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); assert_eq!(user.invite_count, 5); + assert_eq!(user.metrics_id, signup_metric_ids[0]); // cannot redeem the same signup again. 
db.create_user_from_invite( From 04baccbea6a002f62e361020215acd7d82d21a01 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 22 Sep 2022 17:52:39 -0700 Subject: [PATCH 012/140] Start work on a client-side telemetry system --- Cargo.lock | 1 + crates/client/Cargo.toml | 1 + crates/client/src/channel.rs | 2 +- crates/client/src/client.rs | 31 +++-- crates/client/src/telemetry.rs | 128 ++++++++++++++++++++ crates/collab/src/integration_tests.rs | 2 +- crates/contacts_panel/src/contacts_panel.rs | 2 +- crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/mac/platform.rs | 17 ++- crates/gpui/src/platform/test.rs | 8 ++ crates/project/src/project.rs | 2 +- crates/project/src/worktree.rs | 14 +-- crates/workspace/src/workspace.rs | 2 +- crates/zed/src/main.rs | 10 +- 14 files changed, 191 insertions(+), 30 deletions(-) create mode 100644 crates/client/src/telemetry.rs diff --git a/Cargo.lock b/Cargo.lock index 05701b7c56..d11d776732 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -955,6 +955,7 @@ dependencies = [ "postage", "rand 0.8.5", "rpc", + "serde", "smol", "sum_tree", "thiserror", diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index a7888b8965..5fcff565bb 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -32,6 +32,7 @@ thiserror = "1.0.29" time = { version = "0.3", features = ["serde", "serde-well-known"] } tiny_http = "0.8" url = "2.2" +serde = { version = "*", features = ["derive"] } [dev-dependencies] collections = { path = "../collections", features = ["test-support"] } diff --git a/crates/client/src/channel.rs b/crates/client/src/channel.rs index a88f872d11..3f99d7ccd2 100644 --- a/crates/client/src/channel.rs +++ b/crates/client/src/channel.rs @@ -601,7 +601,7 @@ mod tests { let user_id = 5; let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = cx.update(|cx| Client::new(http_client.clone(), cx)); let server = FakeServer::for_client(user_id, &client, cx).await; Channel::init(&client); diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index e328108a52..b5e4558155 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -3,6 +3,7 @@ pub mod test; pub mod channel; pub mod http; +pub mod telemetry; pub mod user; use anyhow::{anyhow, Context, Result}; @@ -13,8 +14,9 @@ use async_tungstenite::tungstenite::{ }; use futures::{future::LocalBoxFuture, FutureExt, SinkExt, StreamExt, TryStreamExt}; use gpui::{ - actions, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AsyncAppContext, - Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle, + actions, serde_json::Value, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, + AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, + MutableAppContext, Task, View, ViewContext, ViewHandle, }; use http::HttpClient; use lazy_static::lazy_static; @@ -31,6 +33,7 @@ use std::{ sync::{Arc, Weak}, time::{Duration, Instant}, }; +use telemetry::Telemetry; use thiserror::Error; use url::Url; use util::{ResultExt, TryFutureExt}; @@ -63,6 +66,7 @@ pub struct Client { id: usize, peer: Arc, http: Arc, + telemetry: Arc, state: RwLock, #[allow(clippy::type_complexity)] @@ -232,10 +236,11 @@ impl Drop for Subscription { } impl Client { - pub fn new(http: Arc) -> Arc { + pub fn new(http: Arc, cx: &AppContext) -> Arc { Arc::new(Self { id: 0, peer: Peer::new(), + telemetry: Telemetry::new(http.clone(), cx), http, state: 
Default::default(), @@ -595,6 +600,9 @@ impl Client { if credentials.is_none() && try_keychain { credentials = read_credentials_from_keychain(cx); read_from_keychain = credentials.is_some(); + if read_from_keychain { + self.log_event("read_credentials_from_keychain", Default::default()); + } } if credentials.is_none() { let mut status_rx = self.status(); @@ -878,6 +886,7 @@ impl Client { ) -> Task> { let platform = cx.platform(); let executor = cx.background(); + let telemetry = self.telemetry.clone(); executor.clone().spawn(async move { // Generate a pair of asymmetric encryption keys. The public key will be used by the // zed server to encrypt the user's access token, so that it can'be intercepted by @@ -956,6 +965,8 @@ impl Client { .context("failed to decrypt access token")?; platform.activate(true); + telemetry.log_event("authenticate_with_browser", Default::default()); + Ok(Credentials { user_id: user_id.parse()?, access_token, @@ -1020,6 +1031,10 @@ impl Client { log::debug!("rpc respond. client_id:{}. name:{}", self.id, T::NAME); self.peer.respond_with_error(receipt, error) } + + pub fn log_event(&self, kind: &str, properties: Value) { + self.telemetry.log_event(kind, properties) + } } impl AnyWeakEntityHandle { @@ -1085,7 +1100,7 @@ mod tests { cx.foreground().forbid_parking(); let user_id = 5; - let client = Client::new(FakeHttpClient::with_404_response()); + let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let server = FakeServer::for_client(user_id, &client, cx).await; let mut status = client.status(); assert!(matches!( @@ -1124,7 +1139,7 @@ mod tests { let auth_count = Arc::new(Mutex::new(0)); let dropped_auth_count = Arc::new(Mutex::new(0)); - let client = Client::new(FakeHttpClient::with_404_response()); + let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); client.override_authenticate({ let auth_count = auth_count.clone(); let dropped_auth_count = dropped_auth_count.clone(); @@ -1173,7 +1188,7 @@ mod tests { cx.foreground().forbid_parking(); let user_id = 5; - let client = Client::new(FakeHttpClient::with_404_response()); + let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let server = FakeServer::for_client(user_id, &client, cx).await; let (done_tx1, mut done_rx1) = smol::channel::unbounded(); @@ -1219,7 +1234,7 @@ mod tests { cx.foreground().forbid_parking(); let user_id = 5; - let client = Client::new(FakeHttpClient::with_404_response()); + let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let server = FakeServer::for_client(user_id, &client, cx).await; let model = cx.add_model(|_| Model::default()); @@ -1247,7 +1262,7 @@ mod tests { cx.foreground().forbid_parking(); let user_id = 5; - let client = Client::new(FakeHttpClient::with_404_response()); + let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let server = FakeServer::for_client(user_id, &client, cx).await; let model = cx.add_model(|_| Model::default()); diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs new file mode 100644 index 0000000000..a96dd26c20 --- /dev/null +++ b/crates/client/src/telemetry.rs @@ -0,0 +1,128 @@ +use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN}; +use gpui::{ + executor::Background, + serde_json::{self, value::Map, Value}, + AppContext, AppVersion, Task, +}; +use isahc::Request; +use parking_lot::Mutex; +use serde::Serialize; +use std::{ + mem, + sync::Arc, + time::{Duration, SystemTime, 
UNIX_EPOCH}, +}; +use util::ResultExt; + +pub struct Telemetry { + client: Arc, + executor: Arc, + state: Mutex, +} + +#[derive(Default)] +struct TelemetryState { + metrics_id: Option, + device_id: Option, + app_version: Option, + os_version: Option, + queue: Vec, + flush_task: Option>, +} + +#[derive(Serialize)] +struct RecordEventParams { + token: &'static str, + metrics_id: Option, + device_id: Option, + app_version: Option, + os_version: Option, + events: Vec, +} + +#[derive(Serialize)] +struct Event { + #[serde(rename = "type")] + kind: String, + time: u128, + properties: Option>, +} + +const MAX_QUEUE_LEN: usize = 30; +const EVENTS_URI: &'static str = "https://zed.dev/api/telemetry"; +const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); + +impl Telemetry { + pub fn new(client: Arc, cx: &AppContext) -> Arc { + let platform = cx.platform(); + Arc::new(Self { + client, + executor: cx.background().clone(), + state: Mutex::new(TelemetryState { + os_version: platform.os_version().log_err(), + app_version: platform.app_version().log_err(), + metrics_id: None, + device_id: None, + queue: Default::default(), + flush_task: Default::default(), + }), + }) + } + + pub fn set_metrics_id(&self, metrics_id: Option) { + self.state.lock().metrics_id = metrics_id; + } + + pub fn log_event(self: &Arc, kind: &str, properties: Value) { + let mut state = self.state.lock(); + state.queue.push(Event { + kind: kind.to_string(), + time: SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis(), + properties: if let Value::Object(properties) = properties { + Some(properties) + } else { + None + }, + }); + if state.queue.len() >= MAX_QUEUE_LEN { + self.flush(); + } else { + let this = self.clone(); + let executor = self.executor.clone(); + state.flush_task = Some(self.executor.spawn(async move { + executor.timer(DEBOUNCE_INTERVAL).await; + this.flush(); + })); + } + } + + fn flush(&self) { + let mut state = self.state.lock(); + let events = mem::take(&mut state.queue); + let client = self.client.clone(); + let app_version = state.app_version; + let os_version = state.os_version; + let metrics_id = state.metrics_id; + let device_id = state.device_id.clone(); + state.flush_task.take(); + self.executor + .spawn(async move { + let body = serde_json::to_vec(&RecordEventParams { + token: ZED_SECRET_CLIENT_TOKEN, + events, + app_version: app_version.map(|v| v.to_string()), + os_version: os_version.map(|v| v.to_string()), + metrics_id, + device_id, + }) + .log_err()?; + let request = Request::post(EVENTS_URI).body(body.into()).log_err()?; + client.send(request).await.log_err(); + Some(()) + }) + .detach(); + } +} diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index f3d43f277e..2b9dd25a90 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -5196,7 +5196,7 @@ impl TestServer { .unwrap() }; let client_name = name.to_string(); - let mut client = Client::new(http.clone()); + let mut client = cx.read(|cx| Client::new(http.clone(), cx)); let server = self.server.clone(); let db = self.app_state.db.clone(); let connection_killers = self.connection_killers.clone(); diff --git a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index b5460f4d06..7dcfb8cea4 100644 --- a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -1216,7 +1216,7 @@ mod tests { let languages = Arc::new(LanguageRegistry::test()); let http_client = 
FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = cx.read(|cx| Client::new(http_client.clone(), cx)); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake())); let server = FakeServer::for_client(current_user_id, &client, cx).await; diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index a50698070c..7467dad547 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -69,6 +69,7 @@ pub trait Platform: Send + Sync { fn path_for_auxiliary_executable(&self, name: &str) -> Result; fn app_path(&self) -> Result; fn app_version(&self) -> Result; + fn os_version(&self) -> Result; } pub(crate) trait ForegroundPlatform { diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 7732da2b3e..02fe73504e 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -4,7 +4,7 @@ use super::{ use crate::{ executor, keymap, platform::{self, CursorStyle}, - Action, ClipboardItem, Event, Menu, MenuItem, + Action, AppVersion, ClipboardItem, Event, Menu, MenuItem, }; use anyhow::{anyhow, Result}; use block::ConcreteBlock; @@ -16,7 +16,8 @@ use cocoa::{ }, base::{id, nil, selector, YES}, foundation::{ - NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSString, NSUInteger, NSURL, + NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSString, + NSUInteger, NSURL, }, }; use core_foundation::{ @@ -748,6 +749,18 @@ impl platform::Platform for MacPlatform { } } } + + fn os_version(&self) -> Result { + unsafe { + let process_info = NSProcessInfo::processInfo(nil); + let version = process_info.operatingSystemVersion(); + Ok(AppVersion { + major: version.majorVersion as usize, + minor: version.minorVersion as usize, + patch: version.patchVersion as usize, + }) + } + } } unsafe fn path_from_objc(path: id) -> PathBuf { diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index 9a458a1dd9..1bb53d49ab 100644 --- a/crates/gpui/src/platform/test.rs +++ b/crates/gpui/src/platform/test.rs @@ -196,6 +196,14 @@ impl super::Platform for Platform { patch: 0, }) } + + fn os_version(&self) -> Result { + Ok(AppVersion { + major: 1, + minor: 0, + patch: 0, + }) + } } impl Window { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 09c5a72315..abb55e49b0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -650,7 +650,7 @@ impl Project { let languages = Arc::new(LanguageRegistry::test()); let http_client = client::test::FakeHttpClient::with_404_response(); - let client = client::Client::new(http_client.clone()); + let client = cx.update(|cx| client::Client::new(http_client.clone(), cx)); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake())); let project = cx.update(|cx| { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 96ebb59de0..74c50e0c5f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2804,7 +2804,7 @@ mod tests { .await; let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client); + let client = cx.read(|cx| Client::new(http_client, cx)); let tree = Worktree::local( client, @@ -2866,8 +2866,7 @@ mod tests { 
fs.insert_symlink("/root/lib/a/lib", "..".into()).await; fs.insert_symlink("/root/lib/b/lib", "..".into()).await; - let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client); + let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let tree = Worktree::local( client, Arc::from(Path::new("/root")), @@ -2945,8 +2944,7 @@ mod tests { })); let dir = parent_dir.path().join("tree"); - let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let tree = Worktree::local( client, @@ -3016,8 +3014,7 @@ mod tests { "ignored-dir": {} })); - let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let tree = Worktree::local( client, @@ -3064,8 +3061,7 @@ mod tests { #[gpui::test(iterations = 30)] async fn test_create_directory(cx: &mut TestAppContext) { - let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let fs = FakeFs::new(cx.background()); fs.insert_tree( diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 017964d9a1..b9cface656 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -856,7 +856,7 @@ impl AppState { let fs = project::FakeFs::new(cx.background().clone()); let languages = Arc::new(LanguageRegistry::test()); let http_client = client::test::FakeHttpClient::with_404_response(); - let client = Client::new(http_client.clone()); + let client = Client::new(http_client.clone(), cx); let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake())); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let themes = ThemeRegistry::new((), cx.font_cache().clone()); diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3bfd5e6e1a..bb913ab610 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -20,7 +20,7 @@ use futures::{ FutureExt, SinkExt, StreamExt, }; use gpui::{executor::Background, App, AssetSource, AsyncAppContext, Task, ViewContext}; -use isahc::{config::Configurable, AsyncBody, Request}; +use isahc::{config::Configurable, Request}; use language::LanguageRegistry; use log::LevelFilter; use parking_lot::Mutex; @@ -88,7 +88,7 @@ fn main() { }); app.run(move |cx| { - let client = client::Client::new(http.clone()); + let client = client::Client::new(http.clone(), cx); let mut languages = LanguageRegistry::new(login_shell_env_loaded); languages.set_language_server_download_dir(zed::paths::LANGUAGES_DIR.clone()); let languages = Arc::new(languages); @@ -280,12 +280,10 @@ fn init_panic_hook(app_version: String, http: Arc, background: A "token": ZED_SECRET_CLIENT_TOKEN, })) .unwrap(); - let request = Request::builder() - .uri(&panic_report_url) - .method(http::Method::POST) + let request = Request::post(&panic_report_url) .redirect_policy(isahc::config::RedirectPolicy::Follow) .header("Content-Type", "application/json") - .body(AsyncBody::from(body))?; + .body(body.into())?; let response = http.send(request).await.context("error sending panic")?; if response.status().is_success() { fs::remove_file(child_path) From 4784dbe498236d3333453d87d3822c7cb965510b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld 
Date: Fri, 23 Sep 2022 17:06:27 -0700 Subject: [PATCH 013/140] Link signups to users in telemetry via a stored device_id Co-authored-by: Joseph Lyons --- crates/client/src/client.rs | 34 ++++++-- crates/client/src/telemetry.rs | 26 +++--- .../20220913211150_create_signups.down.sql | 6 +- .../20220913211150_create_signups.up.sql | 12 +-- crates/collab/src/api.rs | 80 ++++++++++--------- crates/collab/src/db.rs | 41 +++++----- crates/collab/src/db_tests.rs | 34 ++++---- 7 files changed, 124 insertions(+), 109 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index b5e4558155..3d85aea3c5 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -14,9 +14,11 @@ use async_tungstenite::tungstenite::{ }; use futures::{future::LocalBoxFuture, FutureExt, SinkExt, StreamExt, TryStreamExt}; use gpui::{ - actions, serde_json::Value, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, - AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, - MutableAppContext, Task, View, ViewContext, ViewHandle, + actions, + serde_json::{json, Value}, + AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext, + AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext, + ViewHandle, }; use http::HttpClient; use lazy_static::lazy_static; @@ -52,13 +54,29 @@ lazy_static! { pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894"; -actions!(client, [Authenticate]); +actions!(client, [Authenticate, TestTelemetry]); -pub fn init(rpc: Arc, cx: &mut MutableAppContext) { - cx.add_global_action(move |_: &Authenticate, cx| { - let rpc = rpc.clone(); - cx.spawn(|cx| async move { rpc.authenticate_and_connect(true, &cx).log_err().await }) +pub fn init(client: Arc, cx: &mut MutableAppContext) { + cx.add_global_action({ + let client = client.clone(); + move |_: &Authenticate, cx| { + let client = client.clone(); + cx.spawn( + |cx| async move { client.authenticate_and_connect(true, &cx).log_err().await }, + ) .detach(); + } + }); + cx.add_global_action({ + let client = client.clone(); + move |_: &TestTelemetry, _| { + client.log_event( + "test_telemetry", + json!({ + "test_property": "test_value" + }), + ) + } }); } diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index a96dd26c20..a78e691459 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -1,4 +1,4 @@ -use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN}; +use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL}; use gpui::{ executor::Background, serde_json::{self, value::Map, Value}, @@ -22,7 +22,6 @@ pub struct Telemetry { #[derive(Default)] struct TelemetryState { - metrics_id: Option, device_id: Option, app_version: Option, os_version: Option, @@ -33,7 +32,6 @@ struct TelemetryState { #[derive(Serialize)] struct RecordEventParams { token: &'static str, - metrics_id: Option, device_id: Option, app_version: Option, os_version: Option, @@ -48,8 +46,13 @@ struct Event { properties: Option>, } -const MAX_QUEUE_LEN: usize = 30; -const EVENTS_URI: &'static str = "https://zed.dev/api/telemetry"; +#[cfg(debug_assertions)] +const MAX_QUEUE_LEN: usize = 1; + +#[cfg(not(debug_assertions))] +const MAX_QUEUE_LEN: usize = 10; + +const EVENTS_URI: &'static str = "api/telemetry"; const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); impl Telemetry { @@ -61,7 +64,6 @@ impl Telemetry { state: Mutex::new(TelemetryState { os_version: platform.os_version().log_err(), 
app_version: platform.app_version().log_err(), - metrics_id: None, device_id: None, queue: Default::default(), flush_task: Default::default(), @@ -69,10 +71,6 @@ impl Telemetry { }) } - pub fn set_metrics_id(&self, metrics_id: Option) { - self.state.lock().metrics_id = metrics_id; - } - pub fn log_event(self: &Arc, kind: &str, properties: Value) { let mut state = self.state.lock(); state.queue.push(Event { @@ -88,6 +86,7 @@ impl Telemetry { }, }); if state.queue.len() >= MAX_QUEUE_LEN { + drop(state); self.flush(); } else { let this = self.clone(); @@ -105,7 +104,6 @@ impl Telemetry { let client = self.client.clone(); let app_version = state.app_version; let os_version = state.os_version; - let metrics_id = state.metrics_id; let device_id = state.device_id.clone(); state.flush_task.take(); self.executor @@ -115,11 +113,13 @@ impl Telemetry { events, app_version: app_version.map(|v| v.to_string()), os_version: os_version.map(|v| v.to_string()), - metrics_id, device_id, }) .log_err()?; - let request = Request::post(EVENTS_URI).body(body.into()).log_err()?; + let request = Request::post(format!("{}/{}", *ZED_SERVER_URL, EVENTS_URI)) + .header("Content-Type", "application/json") + .body(body.into()) + .log_err()?; client.send(request).await.log_err(); Some(()) }) diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql index 59b20b1128..f67c10dd01 100644 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -1,9 +1,7 @@ DROP TABLE signups; ALTER TABLE users - DROP COLUMN github_user_id, - DROP COLUMN metrics_id; - -DROP SEQUENCE metrics_id_seq; + DROP COLUMN github_user_id; DROP INDEX index_users_on_email_address; +DROP INDEX index_users_on_github_user_id; diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql index 5f02bd6887..35e334ea5f 100644 --- a/crates/collab/migrations/20220913211150_create_signups.up.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -1,12 +1,10 @@ -CREATE SEQUENCE metrics_id_seq; - CREATE TABLE IF NOT EXISTS "signups" ( - "id" SERIAL PRIMARY KEY NOT NULL, + "id" SERIAL PRIMARY KEY, "email_address" VARCHAR NOT NULL, "email_confirmation_code" VARCHAR(64) NOT NULL, "email_confirmation_sent" BOOLEAN NOT NULL, - "metrics_id" INTEGER NOT NULL DEFAULT nextval('metrics_id_seq'), "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "device_id" VARCHAR NOT NULL, "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE, "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL, @@ -23,11 +21,7 @@ CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_addres CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); ALTER TABLE "users" - ADD "github_user_id" INTEGER, - ADD "metrics_id" INTEGER DEFAULT nextval('metrics_id_seq'); + ADD "github_user_id" INTEGER; CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); - -UPDATE users -SET metrics_id = nextval('metrics_id_seq'); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 51b43119dc..a82363a56b 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -127,44 +127,52 @@ struct CreateUserParams { invite_count: i32, } +#[derive(Serialize, Debug)] 
+struct CreateUserResponse { + user: User, + signup_device_id: Option, +} + async fn create_user( Json(params): Json, Extension(app): Extension>, Extension(rpc_server): Extension>, -) -> Result> { +) -> Result> { let user = NewUserParams { github_login: params.github_login, github_user_id: params.github_user_id, invite_count: params.invite_count, }; - let (user_id, inviter_id) = - // Creating a user via the normal signup process - if let Some(email_confirmation_code) = params.email_confirmation_code { - app.db - .create_user_from_invite( - &Invite { - email_address: params.email_address, - email_confirmation_code, - }, - user, - ) - .await? - } - // Creating a user as an admin - else { - ( - app.db - .create_user(¶ms.email_address, false, user) - .await?, - None, + let user_id; + let signup_device_id; + // Creating a user via the normal signup process + if let Some(email_confirmation_code) = params.email_confirmation_code { + let result = app + .db + .create_user_from_invite( + &Invite { + email_address: params.email_address, + email_confirmation_code, + }, + user, ) - }; - - if let Some(inviter_id) = inviter_id { - rpc_server - .invite_code_redeemed(inviter_id, user_id) - .await - .trace_err(); + .await?; + user_id = result.0; + signup_device_id = Some(result.2); + if let Some(inviter_id) = result.1 { + rpc_server + .invite_code_redeemed(inviter_id, user_id) + .await + .trace_err(); + } + } + // Creating a user as an admin + else { + user_id = app + .db + .create_user(¶ms.email_address, false, user) + .await?; + signup_device_id = None; } let user = app @@ -173,7 +181,10 @@ async fn create_user( .await? .ok_or_else(|| anyhow!("couldn't find the user we just created"))?; - Ok(Json(user)) + Ok(Json(CreateUserResponse { + user, + signup_device_id, + })) } #[derive(Deserialize)] @@ -396,17 +407,12 @@ async fn get_user_for_invite_code( Ok(Json(app.db.get_user_for_invite_code(&code).await?)) } -#[derive(Serialize)] -struct CreateSignupResponse { - metrics_id: i32, -} - async fn create_signup( Json(params): Json, Extension(app): Extension>, -) -> Result> { - let metrics_id = app.db.create_signup(params).await?; - Ok(Json(CreateSignupResponse { metrics_id })) +) -> Result<()> { + app.db.create_signup(params).await?; + Ok(()) } async fn get_waitlist_summary( diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 81c87f213a..1518ec179f 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -37,7 +37,7 @@ pub trait Db: Send + Sync { async fn get_user_for_invite_code(&self, code: &str) -> Result; async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result; - async fn create_signup(&self, signup: Signup) -> Result; + async fn create_signup(&self, signup: Signup) -> Result<()>; async fn get_waitlist_summary(&self) -> Result; async fn get_unsent_invites(&self, count: usize) -> Result>; async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()>; @@ -45,7 +45,7 @@ pub trait Db: Send + Sync { &self, invite: &Invite, user: NewUserParams, - ) -> Result<(UserId, Option)>; + ) -> Result<(UserId, Option, String)>; /// Registers a new project for the given user. 
async fn register_project(&self, host_user_id: UserId) -> Result; @@ -364,8 +364,8 @@ impl Db for PostgresDb { // signups - async fn create_signup(&self, signup: Signup) -> Result { - Ok(sqlx::query_scalar( + async fn create_signup(&self, signup: Signup) -> Result<()> { + sqlx::query( " INSERT INTO signups ( @@ -377,10 +377,11 @@ impl Db for PostgresDb { platform_windows, platform_unknown, editor_features, - programming_languages + programming_languages, + device_id ) VALUES - ($1, $2, 'f', $3, $4, $5, 'f', $6, $7) + ($1, $2, 'f', $3, $4, $5, 'f', $6, $7, $8) RETURNING id ", ) @@ -391,8 +392,10 @@ impl Db for PostgresDb { .bind(&signup.platform_windows) .bind(&signup.editor_features) .bind(&signup.programming_languages) - .fetch_one(&self.pool) - .await?) + .bind(&signup.device_id) + .execute(&self.pool) + .await?; + Ok(()) } async fn get_waitlist_summary(&self) -> Result { @@ -455,17 +458,17 @@ impl Db for PostgresDb { &self, invite: &Invite, user: NewUserParams, - ) -> Result<(UserId, Option)> { + ) -> Result<(UserId, Option, String)> { let mut tx = self.pool.begin().await?; - let (signup_id, metrics_id, existing_user_id, inviting_user_id): ( - i32, + let (signup_id, existing_user_id, inviting_user_id, device_id): ( i32, Option, Option, + String, ) = sqlx::query_as( " - SELECT id, metrics_id, user_id, inviting_user_id + SELECT id, user_id, inviting_user_id, device_id FROM signups WHERE email_address = $1 AND @@ -488,9 +491,9 @@ impl Db for PostgresDb { let user_id: UserId = sqlx::query_scalar( " INSERT INTO users - (email_address, github_login, github_user_id, admin, invite_count, invite_code, metrics_id) + (email_address, github_login, github_user_id, admin, invite_count, invite_code) VALUES - ($1, $2, $3, 'f', $4, $5, $6) + ($1, $2, $3, 'f', $4, $5) RETURNING id ", ) @@ -499,7 +502,6 @@ impl Db for PostgresDb { .bind(&user.github_user_id) .bind(&user.invite_count) .bind(random_invite_code()) - .bind(metrics_id) .fetch_one(&mut tx) .await?; @@ -550,7 +552,7 @@ impl Db for PostgresDb { } tx.commit().await?; - Ok((user_id, inviting_user_id)) + Ok((user_id, inviting_user_id, device_id)) } // invite codes @@ -1567,7 +1569,6 @@ pub struct User { pub id: UserId, pub github_login: String, pub github_user_id: Option, - pub metrics_id: i32, pub email_address: Option, pub admin: bool, pub invite_code: Option, @@ -1674,6 +1675,7 @@ pub struct Signup { pub platform_linux: bool, pub editor_features: Vec, pub programming_languages: Vec, + pub device_id: String, } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)] @@ -1802,7 +1804,6 @@ mod test { github_login: params.github_login, github_user_id: Some(params.github_user_id), email_address: Some(email_address.to_string()), - metrics_id: id + 100, admin, invite_code: None, invite_count: 0, @@ -1884,7 +1885,7 @@ mod test { // signups - async fn create_signup(&self, _signup: Signup) -> Result { + async fn create_signup(&self, _signup: Signup) -> Result<()> { unimplemented!() } @@ -1904,7 +1905,7 @@ mod test { &self, _invite: &Invite, _user: NewUserParams, - ) -> Result<(UserId, Option)> { + ) -> Result<(UserId, Option, String)> { unimplemented!() } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 64a3626a22..44697a59bd 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -957,7 +957,7 @@ async fn test_invite_codes() { .create_invite_from_code(&invite_code, "u2@example.com") .await .unwrap(); - let (user2, inviter) = db + let (user2, inviter, _) = db .create_user_from_invite( 
&user2_invite, NewUserParams { @@ -1007,7 +1007,7 @@ async fn test_invite_codes() { .create_invite_from_code(&invite_code, "u3@example.com") .await .unwrap(); - let (user3, inviter) = db + let (user3, inviter, _) = db .create_user_from_invite( &user3_invite, NewUserParams { @@ -1072,7 +1072,7 @@ async fn test_invite_codes() { .create_invite_from_code(&invite_code, "u4@example.com") .await .unwrap(); - let (user4, _) = db + let (user4, _, _) = db .create_user_from_invite( &user4_invite, NewUserParams { @@ -1139,20 +1139,18 @@ async fn test_signups() { let db = postgres.db(); // people sign up on the waitlist - let mut signup_metric_ids = Vec::new(); for i in 0..8 { - signup_metric_ids.push( - db.create_signup(Signup { - email_address: format!("person-{i}@example.com"), - platform_mac: true, - platform_linux: i % 2 == 0, - platform_windows: i % 4 == 0, - editor_features: vec!["speed".into()], - programming_languages: vec!["rust".into(), "c".into()], - }) - .await - .unwrap(), - ); + db.create_signup(Signup { + email_address: format!("person-{i}@example.com"), + platform_mac: true, + platform_linux: i % 2 == 0, + platform_windows: i % 4 == 0, + editor_features: vec!["speed".into()], + programming_languages: vec!["rust".into(), "c".into()], + device_id: format!("device_id_{i}"), + }) + .await + .unwrap(); } assert_eq!( @@ -1219,7 +1217,7 @@ async fn test_signups() { // user completes the signup process by providing their // github account. - let (user_id, inviter_id) = db + let (user_id, inviter_id, signup_device_id) = db .create_user_from_invite( &Invite { email_address: signups_batch1[0].email_address.clone(), @@ -1238,7 +1236,7 @@ async fn test_signups() { assert_eq!(user.github_login, "person-0"); assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); assert_eq!(user.invite_count, 5); - assert_eq!(user.metrics_id, signup_metric_ids[0]); + assert_eq!(signup_device_id, "device_id_0"); // cannot redeem the same signup again. 
db.create_user_from_invite( From da36eb3b41b5737fbdb852059fdb14e6a84ebd57 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 26 Sep 2022 15:23:10 -0700 Subject: [PATCH 014/140] wip --- crates/client/src/telemetry.rs | 91 +++++++++++++++--------- crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/mac/platform.rs | 4 ++ crates/gpui/src/platform/test.rs | 4 ++ 4 files changed, 67 insertions(+), 33 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index a78e691459..7eea13a923 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -1,8 +1,8 @@ -use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL}; +use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN}; use gpui::{ executor::Background, serde_json::{self, value::Map, Value}, - AppContext, AppVersion, Task, + AppContext, Task, }; use isahc::Request; use parking_lot::Mutex; @@ -12,38 +12,48 @@ use std::{ sync::Arc, time::{Duration, SystemTime, UNIX_EPOCH}, }; -use util::ResultExt; +use util::{post_inc, ResultExt}; pub struct Telemetry { client: Arc, executor: Arc, + session_id: u128, state: Mutex, } #[derive(Default)] struct TelemetryState { - device_id: Option, - app_version: Option, - os_version: Option, - queue: Vec, + user_id: Option>, + device_id: Option>, + app_version: Option>, + os_version: Option>, + os_name: &'static str, + queue: Vec, + next_event_id: usize, flush_task: Option>, } +const AMPLITUDE_EVENTS_URL: &'static str = "https//api2.amplitude.com/batch"; + #[derive(Serialize)] -struct RecordEventParams { - token: &'static str, - device_id: Option, - app_version: Option, - os_version: Option, - events: Vec, +struct AmplitudeEventBatch { + api_key: &'static str, + events: Vec, } #[derive(Serialize)] -struct Event { - #[serde(rename = "type")] - kind: String, +struct AmplitudeEvent { + user_id: Option>, + device_id: Option>, + event_type: String, + event_properties: Option>, + user_properties: Option>, + os_name: &'static str, + os_version: Option>, + app_version: Option>, + event_id: usize, + session_id: u128, time: u128, - properties: Option>, } #[cfg(debug_assertions)] @@ -52,7 +62,6 @@ const MAX_QUEUE_LEN: usize = 1; #[cfg(not(debug_assertions))] const MAX_QUEUE_LEN: usize = 10; -const EVENTS_URI: &'static str = "api/telemetry"; const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); impl Telemetry { @@ -61,30 +70,52 @@ impl Telemetry { Arc::new(Self { client, executor: cx.background().clone(), + session_id: SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis(), state: Mutex::new(TelemetryState { - os_version: platform.os_version().log_err(), - app_version: platform.app_version().log_err(), + os_version: platform + .os_version() + .log_err() + .map(|v| v.to_string().into()), + os_name: platform.os_name().into(), + app_version: platform + .app_version() + .log_err() + .map(|v| v.to_string().into()), device_id: None, queue: Default::default(), flush_task: Default::default(), + next_event_id: 0, + user_id: None, }), }) } pub fn log_event(self: &Arc, kind: &str, properties: Value) { let mut state = self.state.lock(); - state.queue.push(Event { - kind: kind.to_string(), + let event = AmplitudeEvent { + event_type: kind.to_string(), time: SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_millis(), - properties: if let Value::Object(properties) = properties { + session_id: self.session_id, + event_properties: if let Value::Object(properties) = properties { Some(properties) } else { None }, - }); + 
user_properties: None, + user_id: state.user_id.clone(), + device_id: state.device_id.clone(), + os_name: state.os_name, + os_version: state.os_version.clone(), + app_version: state.app_version.clone(), + event_id: post_inc(&mut state.next_event_id), + }; + state.queue.push(event); if state.queue.len() >= MAX_QUEUE_LEN { drop(state); self.flush(); @@ -102,21 +133,15 @@ impl Telemetry { let mut state = self.state.lock(); let events = mem::take(&mut state.queue); let client = self.client.clone(); - let app_version = state.app_version; - let os_version = state.os_version; - let device_id = state.device_id.clone(); state.flush_task.take(); self.executor .spawn(async move { - let body = serde_json::to_vec(&RecordEventParams { - token: ZED_SECRET_CLIENT_TOKEN, + let body = serde_json::to_vec(&AmplitudeEventBatch { + api_key: ZED_SECRET_CLIENT_TOKEN, events, - app_version: app_version.map(|v| v.to_string()), - os_version: os_version.map(|v| v.to_string()), - device_id, }) .log_err()?; - let request = Request::post(format!("{}/{}", *ZED_SERVER_URL, EVENTS_URI)) + let request = Request::post(AMPLITUDE_EVENTS_URL) .header("Content-Type", "application/json") .body(body.into()) .log_err()?; diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 7467dad547..8997bde527 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -69,6 +69,7 @@ pub trait Platform: Send + Sync { fn path_for_auxiliary_executable(&self, name: &str) -> Result; fn app_path(&self) -> Result; fn app_version(&self) -> Result; + fn os_name(&self) -> &'static str; fn os_version(&self) -> Result; } diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 02fe73504e..628ddde13c 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -750,6 +750,10 @@ impl platform::Platform for MacPlatform { } } + fn os_name(&self) -> &'static str { + "macOS" + } + fn os_version(&self) -> Result { unsafe { let process_info = NSProcessInfo::processInfo(nil); diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index 1bb53d49ab..58ef1ffaf2 100644 --- a/crates/gpui/src/platform/test.rs +++ b/crates/gpui/src/platform/test.rs @@ -197,6 +197,10 @@ impl super::Platform for Platform { }) } + fn os_name(&self) -> &'static str { + "test" + } + fn os_version(&self) -> Result { Ok(AppVersion { major: 1, From f0c50c1e0aa081844e42f52efc5da9ba07ef06ca Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 26 Sep 2022 16:37:09 -0600 Subject: [PATCH 015/140] Extract db module from project to its own crate This will let us use it from the telemetry crate. 
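
The consumer this enables appears two patches later in this series: the telemetry code persists a per-installation device id through the extracted db crate's key-value API. A condensed sketch of that read-or-create pattern, assuming a Db handle that exposes the byte-oriented read/write calls used there (error handling and the surrounding background task are omitted):

    use db::Db;
    use uuid::Uuid;

    // Load a previously stored device id, or generate one and persist it so the
    // same id is reported across sessions. Mirrors the logic added to
    // Telemetry::start later in this series; the &Db receiver is an assumption.
    fn read_or_create_device_id(db: &Db) -> anyhow::Result<String> {
        if let Some(id) = db
            .read(["device_id"])?                      // stored bytes, if any
            .into_iter()
            .flatten()
            .next()
            .and_then(|bytes| String::from_utf8(bytes).ok())
        {
            Ok(id)
        } else {
            let id = Uuid::new_v4().to_string();
            db.write([("device_id", id.as_bytes())])?; // remember it for the next launch
            Ok(id)
        }
    }

Keeping this behind the small read/write surface is what lets the client crate depend on db without pulling in any project-specific code.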
Co-authored-by: Joseph Lyons --- Cargo.lock | 14 ++++++++++++++ crates/db/Cargo.toml | 22 ++++++++++++++++++++++ crates/{project => db}/src/db.rs | 0 crates/project/Cargo.toml | 3 +++ crates/project/src/project.rs | 1 - 5 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 crates/db/Cargo.toml rename crates/{project => db}/src/db.rs (100%) diff --git a/Cargo.lock b/Cargo.lock index d11d776732..bf145f0cee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1504,6 +1504,19 @@ dependencies = [ "matches", ] +[[package]] +name = "db" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "collections", + "gpui", + "parking_lot 0.11.2", + "rocksdb", + "tempdir", +] + [[package]] name = "deflate" version = "0.8.6" @@ -3950,6 +3963,7 @@ dependencies = [ "client", "clock", "collections", + "db", "fsevent", "futures", "fuzzy", diff --git a/crates/db/Cargo.toml b/crates/db/Cargo.toml new file mode 100644 index 0000000000..f4ed283b6e --- /dev/null +++ b/crates/db/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "db" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/db.rs" +doctest = false + +[features] +test-support = [] + +[dependencies] +collections = { path = "../collections" } +anyhow = "1.0.57" +async-trait = "0.1" +parking_lot = "0.11.1" +rocksdb = "0.18" + +[dev-dependencies] +gpui = { path = "../gpui", features = ["test-support"] } +tempdir = { version = "0.3.7" } diff --git a/crates/project/src/db.rs b/crates/db/src/db.rs similarity index 100% rename from crates/project/src/db.rs rename to crates/db/src/db.rs diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index eebfc08473..a4ea6f2286 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -10,6 +10,7 @@ doctest = false [features] test-support = [ "client/test-support", + "db/test-support", "language/test-support", "settings/test-support", "text/test-support", @@ -20,6 +21,7 @@ text = { path = "../text" } client = { path = "../client" } clock = { path = "../clock" } collections = { path = "../collections" } +db = { path = "../db" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } @@ -54,6 +56,7 @@ rocksdb = "0.18" [dev-dependencies] client = { path = "../client", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] } +db = { path = "../db", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index abb55e49b0..73b5e3595d 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1,4 +1,3 @@ -mod db; pub mod fs; mod ignore; mod lsp_command; From 824fdb54e6db1cf4f9cf06718666ba6434785e96 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 26 Sep 2022 18:18:34 -0600 Subject: [PATCH 016/140] Report editor open and save events to Amplitude Co-authored-by: Max Brunsfeld --- Cargo.lock | 5 ++ crates/client/Cargo.toml | 2 + crates/client/src/client.rs | 17 +++-- crates/client/src/telemetry.rs | 117 +++++++++++++++++++++++++-------- crates/editor/src/editor.rs | 21 ++++++ crates/editor/src/items.rs | 1 + crates/zed/build.rs | 4 ++ crates/zed/src/main.rs | 5 +- 8 files changed, 137 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bf145f0cee..cfe18755dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -945,6 +945,7 @@ dependencies = [ 
"async-recursion", "async-tungstenite", "collections", + "db", "futures", "gpui", "image", @@ -963,6 +964,7 @@ dependencies = [ "tiny_http", "url", "util", + "uuid 1.1.2", ] [[package]] @@ -6346,6 +6348,9 @@ name = "uuid" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f" +dependencies = [ + "getrandom 0.2.7", +] [[package]] name = "valuable" diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 5fcff565bb..f61fa1c787 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -12,6 +12,7 @@ test-support = ["collections/test-support", "gpui/test-support", "rpc/test-suppo [dependencies] collections = { path = "../collections" } +db = { path = "../db" } gpui = { path = "../gpui" } util = { path = "../util" } rpc = { path = "../rpc" } @@ -31,6 +32,7 @@ smol = "1.2.5" thiserror = "1.0.29" time = { version = "0.3", features = ["serde", "serde-well-known"] } tiny_http = "0.8" +uuid = { version = "1.1.2", features = ["v4"] } url = "2.2" serde = { version = "*", features = ["derive"] } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 3d85aea3c5..5d6bef5c23 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -12,6 +12,7 @@ use async_tungstenite::tungstenite::{ error::Error as WebsocketError, http::{Request, StatusCode}, }; +use db::Db; use futures::{future::LocalBoxFuture, FutureExt, SinkExt, StreamExt, TryStreamExt}; use gpui::{ actions, @@ -70,7 +71,7 @@ pub fn init(client: Arc, cx: &mut MutableAppContext) { cx.add_global_action({ let client = client.clone(); move |_: &TestTelemetry, _| { - client.log_event( + client.report_event( "test_telemetry", json!({ "test_property": "test_value" @@ -334,6 +335,7 @@ impl Client { match status { Status::Connected { .. 
} => { + self.telemetry.set_user_id(self.user_id()); state._reconnect_task = None; } Status::ConnectionLost => { @@ -362,6 +364,7 @@ impl Client { })); } Status::SignedOut | Status::UpgradeRequired => { + self.telemetry.set_user_id(self.user_id()); state._reconnect_task.take(); } _ => {} @@ -619,7 +622,7 @@ impl Client { credentials = read_credentials_from_keychain(cx); read_from_keychain = credentials.is_some(); if read_from_keychain { - self.log_event("read_credentials_from_keychain", Default::default()); + self.report_event("read credentials from keychain", Default::default()); } } if credentials.is_none() { @@ -983,7 +986,7 @@ impl Client { .context("failed to decrypt access token")?; platform.activate(true); - telemetry.log_event("authenticate_with_browser", Default::default()); + telemetry.report_event("authenticate with browser", Default::default()); Ok(Credentials { user_id: user_id.parse()?, @@ -1050,8 +1053,12 @@ impl Client { self.peer.respond_with_error(receipt, error) } - pub fn log_event(&self, kind: &str, properties: Value) { - self.telemetry.log_event(kind, properties) + pub fn start_telemetry(&self, db: Arc) { + self.telemetry.start(db); + } + + pub fn report_event(&self, kind: &str, properties: Value) { + self.telemetry.report_event(kind, properties) } } diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 7eea13a923..63da4eae5c 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -1,10 +1,12 @@ -use crate::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN}; +use crate::http::HttpClient; +use db::Db; use gpui::{ executor::Background, serde_json::{self, value::Map, Value}, AppContext, Task, }; use isahc::Request; +use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Serialize; use std::{ @@ -12,7 +14,8 @@ use std::{ sync::Arc, time::{Duration, SystemTime, UNIX_EPOCH}, }; -use util::{post_inc, ResultExt}; +use util::{post_inc, ResultExt, TryFutureExt}; +use uuid::Uuid; pub struct Telemetry { client: Arc, @@ -33,7 +36,13 @@ struct TelemetryState { flush_task: Option>, } -const AMPLITUDE_EVENTS_URL: &'static str = "https//api2.amplitude.com/batch"; +const AMPLITUDE_EVENTS_URL: &'static str = "https://api2.amplitude.com/batch"; + +lazy_static! { + static ref AMPLITUDE_API_KEY: Option = option_env!("AMPLITUDE_API_KEY") + .map(|key| key.to_string()) + .or(std::env::var("AMPLITUDE_API_KEY").ok()); +} #[derive(Serialize)] struct AmplitudeEventBatch { @@ -62,6 +71,10 @@ const MAX_QUEUE_LEN: usize = 1; #[cfg(not(debug_assertions))] const MAX_QUEUE_LEN: usize = 10; +#[cfg(debug_assertions)] +const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1); + +#[cfg(not(debug_assertions))] const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); impl Telemetry { @@ -93,7 +106,52 @@ impl Telemetry { }) } - pub fn log_event(self: &Arc, kind: &str, properties: Value) { + pub fn start(self: &Arc, db: Arc) { + let this = self.clone(); + self.executor + .spawn( + async move { + let device_id = if let Some(device_id) = db + .read(["device_id"])? 
+ .into_iter() + .flatten() + .next() + .and_then(|bytes| String::from_utf8(bytes).ok()) + { + device_id + } else { + let device_id = Uuid::new_v4().to_string(); + db.write([("device_id", device_id.as_bytes())])?; + device_id + }; + + let device_id = Some(Arc::from(device_id)); + let mut state = this.state.lock(); + state.device_id = device_id.clone(); + for event in &mut state.queue { + event.device_id = device_id.clone(); + } + if !state.queue.is_empty() { + drop(state); + this.flush(); + } + + anyhow::Ok(()) + } + .log_err(), + ) + .detach(); + } + + pub fn set_user_id(&self, user_id: Option) { + self.state.lock().user_id = user_id.map(|id| id.to_string().into()); + } + + pub fn report_event(self: &Arc, kind: &str, properties: Value) { + if AMPLITUDE_API_KEY.is_none() { + return; + } + let mut state = self.state.lock(); let event = AmplitudeEvent { event_type: kind.to_string(), @@ -116,38 +174,39 @@ impl Telemetry { event_id: post_inc(&mut state.next_event_id), }; state.queue.push(event); - if state.queue.len() >= MAX_QUEUE_LEN { - drop(state); - self.flush(); - } else { - let this = self.clone(); - let executor = self.executor.clone(); - state.flush_task = Some(self.executor.spawn(async move { - executor.timer(DEBOUNCE_INTERVAL).await; - this.flush(); - })); + if state.device_id.is_some() { + if state.queue.len() >= MAX_QUEUE_LEN { + drop(state); + self.flush(); + } else { + let this = self.clone(); + let executor = self.executor.clone(); + state.flush_task = Some(self.executor.spawn(async move { + executor.timer(DEBOUNCE_INTERVAL).await; + this.flush(); + })); + } } } fn flush(&self) { let mut state = self.state.lock(); let events = mem::take(&mut state.queue); - let client = self.client.clone(); state.flush_task.take(); - self.executor - .spawn(async move { - let body = serde_json::to_vec(&AmplitudeEventBatch { - api_key: ZED_SECRET_CLIENT_TOKEN, - events, + + if let Some(api_key) = AMPLITUDE_API_KEY.as_ref() { + let client = self.client.clone(); + self.executor + .spawn(async move { + let batch = AmplitudeEventBatch { api_key, events }; + let body = serde_json::to_vec(&batch).log_err()?; + let request = Request::post(AMPLITUDE_EVENTS_URL) + .body(body.into()) + .log_err()?; + client.send(request).await.log_err(); + Some(()) }) - .log_err()?; - let request = Request::post(AMPLITUDE_EVENTS_URL) - .header("Content-Type", "application/json") - .body(body.into()) - .log_err()?; - client.send(request).await.log_err(); - Some(()) - }) - .detach(); + .detach(); + } } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c60abc187a..07a9fc011f 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -29,6 +29,7 @@ use gpui::{ geometry::vector::{vec2f, Vector2F}, impl_actions, impl_internal_actions, platform::CursorStyle, + serde_json::json, text_layout, AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle, @@ -1053,6 +1054,7 @@ impl Editor { let editor_created_event = EditorCreated(cx.handle()); cx.emit_global(editor_created_event); + this.report_event("open editor", cx); this } @@ -5928,6 +5930,25 @@ impl Editor { }) .collect() } + + fn report_event(&self, name: &str, cx: &AppContext) { + if let Some((project, file)) = self.project.as_ref().zip( + self.buffer + .read(cx) + .as_singleton() + .and_then(|b| b.read(cx).file()), + ) { + project.read(cx).client().report_event( + name, + 
json!({ + "file_extension": file + .path() + .extension() + .and_then(|e| e.to_str()) + }), + ); + } + } } impl EditorSnapshot { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index fb6f12a16f..6c004f2007 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -410,6 +410,7 @@ impl Item for Editor { let buffers = buffer.read(cx).all_buffers(); let mut timeout = cx.background().timer(FORMAT_TIMEOUT).fuse(); let format = project.update(cx, |project, cx| project.format(buffers, true, cx)); + self.report_event("save editor", cx); cx.spawn(|_, mut cx| async move { let transaction = futures::select_biased! { _ = timeout => { diff --git a/crates/zed/build.rs b/crates/zed/build.rs index e39946876e..0ffa2397b0 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -3,6 +3,10 @@ use std::process::Command; fn main() { println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.14"); + if let Ok(api_key) = std::env::var("AMPLITUDE_API_KEY") { + println!("cargo:rustc-env=AMPLITUDE_API_KEY={api_key}"); + } + let output = Command::new("npm") .current_dir("../../styles") .args(["install", "--no-save"]) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index bb913ab610..2dd90eb762 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -121,7 +121,6 @@ fn main() { vim::init(cx); terminal::init(cx); - let db = cx.background().block(db); cx.spawn(|cx| watch_themes(fs.clone(), themes.clone(), cx)) .detach(); @@ -139,6 +138,10 @@ fn main() { }) .detach(); + let db = cx.background().block(db); + client.start_telemetry(db.clone()); + client.report_event("start app", Default::default()); + let project_store = cx.add_model(|_| ProjectStore::new(db.clone())); let app_state = Arc::new(AppState { languages, From f2db3abdb28b6700a4cbad51345080333c6c0701 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 12:42:27 -0700 Subject: [PATCH 017/140] Always allow overriding amplitude API key via a runtime env var --- crates/client/src/telemetry.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 63da4eae5c..f048dfdd49 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -39,9 +39,9 @@ struct TelemetryState { const AMPLITUDE_EVENTS_URL: &'static str = "https://api2.amplitude.com/batch"; lazy_static! 
{ - static ref AMPLITUDE_API_KEY: Option = option_env!("AMPLITUDE_API_KEY") - .map(|key| key.to_string()) - .or(std::env::var("AMPLITUDE_API_KEY").ok()); + static ref AMPLITUDE_API_KEY: Option = std::env::var("ZED_AMPLITUDE_API_KEY") + .ok() + .or_else(|| option_env!("ZED_AMPLITUDE_API_KEY").map(|key| key.to_string())); } #[derive(Serialize)] From 3bd68128d7ab67c1b785f4a631caaeccfb056277 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 14:20:13 -0700 Subject: [PATCH 018/140] Add command to view the telemetry log Co-authored-by: Joseph Lyons --- Cargo.lock | 1 + crates/client/Cargo.toml | 1 + crates/client/src/client.rs | 10 ++++- crates/client/src/telemetry.rs | 73 +++++++++++++++++++++++++++------- crates/zed/src/menus.rs | 5 +++ crates/zed/src/zed.rs | 54 +++++++++++++++++++++++++ 6 files changed, 127 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cfe18755dd..17ace3f47b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -959,6 +959,7 @@ dependencies = [ "serde", "smol", "sum_tree", + "tempfile", "thiserror", "time 0.3.11", "tiny_http", diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index f61fa1c787..c9c783c659 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -35,6 +35,7 @@ tiny_http = "0.8" uuid = { version = "1.1.2", features = ["v4"] } url = "2.2" serde = { version = "*", features = ["derive"] } +tempfile = "3" [dev-dependencies] collections = { path = "../collections", features = ["test-support"] } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 5d6bef5c23..0670add1af 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -33,6 +33,7 @@ use std::{ convert::TryFrom, fmt::Write as _, future::Future, + path::PathBuf, sync::{Arc, Weak}, time::{Duration, Instant}, }; @@ -332,10 +333,11 @@ impl Client { log::info!("set status on client {}: {:?}", self.id, status); let mut state = self.state.write(); *state.status.0.borrow_mut() = status; + let user_id = state.credentials.as_ref().map(|c| c.user_id); match status { Status::Connected { .. 
} => { - self.telemetry.set_user_id(self.user_id()); + self.telemetry.set_user_id(user_id); state._reconnect_task = None; } Status::ConnectionLost => { @@ -364,7 +366,7 @@ impl Client { })); } Status::SignedOut | Status::UpgradeRequired => { - self.telemetry.set_user_id(self.user_id()); + self.telemetry.set_user_id(user_id); state._reconnect_task.take(); } _ => {} @@ -1060,6 +1062,10 @@ impl Client { pub fn report_event(&self, kind: &str, properties: Value) { self.telemetry.report_event(kind, properties) } + + pub fn telemetry_log_file_path(&self) -> Option { + self.telemetry.log_file_path() + } } impl AnyWeakEntityHandle { diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index f048dfdd49..77aa308f30 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -10,15 +10,18 @@ use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Serialize; use std::{ + io::Write, mem, + path::PathBuf, sync::Arc, time::{Duration, SystemTime, UNIX_EPOCH}, }; +use tempfile::NamedTempFile; use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; pub struct Telemetry { - client: Arc, + http_client: Arc, executor: Arc, session_id: u128, state: Mutex, @@ -34,6 +37,7 @@ struct TelemetryState { queue: Vec, next_event_id: usize, flush_task: Option>, + log_file: Option, } const AMPLITUDE_EVENTS_URL: &'static str = "https://api2.amplitude.com/batch"; @@ -52,10 +56,13 @@ struct AmplitudeEventBatch { #[derive(Serialize)] struct AmplitudeEvent { + #[serde(skip_serializing_if = "Option::is_none")] user_id: Option>, device_id: Option>, event_type: String, + #[serde(skip_serializing_if = "Option::is_none")] event_properties: Option>, + #[serde(skip_serializing_if = "Option::is_none")] user_properties: Option>, os_name: &'static str, os_version: Option>, @@ -80,8 +87,8 @@ const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); impl Telemetry { pub fn new(client: Arc, cx: &AppContext) -> Arc { let platform = cx.platform(); - Arc::new(Self { - client, + let this = Arc::new(Self { + http_client: client, executor: cx.background().clone(), session_id: SystemTime::now() .duration_since(UNIX_EPOCH) @@ -101,9 +108,29 @@ impl Telemetry { queue: Default::default(), flush_task: Default::default(), next_event_id: 0, + log_file: None, user_id: None, }), - }) + }); + + if AMPLITUDE_API_KEY.is_some() { + this.executor + .spawn({ + let this = this.clone(); + async move { + if let Some(tempfile) = NamedTempFile::new().log_err() { + this.state.lock().log_file = Some(tempfile); + } + } + }) + .detach(); + } + + this + } + + pub fn log_file_path(&self) -> Option { + Some(self.state.lock().log_file.as_ref()?.path().to_path_buf()) } pub fn start(self: &Arc, db: Arc) { @@ -189,23 +216,39 @@ impl Telemetry { } } - fn flush(&self) { + fn flush(self: &Arc) { let mut state = self.state.lock(); let events = mem::take(&mut state.queue); state.flush_task.take(); + drop(state); if let Some(api_key) = AMPLITUDE_API_KEY.as_ref() { - let client = self.client.clone(); + let this = self.clone(); self.executor - .spawn(async move { - let batch = AmplitudeEventBatch { api_key, events }; - let body = serde_json::to_vec(&batch).log_err()?; - let request = Request::post(AMPLITUDE_EVENTS_URL) - .body(body.into()) - .log_err()?; - client.send(request).await.log_err(); - Some(()) - }) + .spawn( + async move { + let mut json_bytes = Vec::new(); + + if let Some(file) = &mut this.state.lock().log_file { + let file = file.as_file_mut(); + for event in &events { + json_bytes.clear(); + 
serde_json::to_writer(&mut json_bytes, event)?; + file.write_all(&json_bytes)?; + file.write(b"\n")?; + } + } + + let batch = AmplitudeEventBatch { api_key, events }; + json_bytes.clear(); + serde_json::to_writer(&mut json_bytes, &batch)?; + let request = + Request::post(AMPLITUDE_EVENTS_URL).body(json_bytes.into())?; + this.http_client.send(request).await?; + Ok(()) + } + .log_err(), + ) .detach(); } } diff --git a/crates/zed/src/menus.rs b/crates/zed/src/menus.rs index 3a34166ba6..f21845a589 100644 --- a/crates/zed/src/menus.rs +++ b/crates/zed/src/menus.rs @@ -332,6 +332,11 @@ pub fn menus() -> Vec> { action: Box::new(command_palette::Toggle), }, MenuItem::Separator, + MenuItem::Action { + name: "View Telemetry Log", + action: Box::new(crate::OpenTelemetryLog), + }, + MenuItem::Separator, MenuItem::Action { name: "Documentation", action: Box::new(crate::OpenBrowser { diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index cd906500ee..407f101421 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -56,6 +56,7 @@ actions!( DebugElements, OpenSettings, OpenLog, + OpenTelemetryLog, OpenKeymap, OpenDefaultSettings, OpenDefaultKeymap, @@ -146,6 +147,12 @@ pub fn init(app_state: &Arc, cx: &mut gpui::MutableAppContext) { open_log_file(workspace, app_state.clone(), cx); } }); + cx.add_action({ + let app_state = app_state.clone(); + move |workspace: &mut Workspace, _: &OpenTelemetryLog, cx: &mut ViewContext| { + open_telemetry_log_file(workspace, app_state.clone(), cx); + } + }); cx.add_action({ let app_state = app_state.clone(); move |_: &mut Workspace, _: &OpenKeymap, cx: &mut ViewContext| { @@ -504,6 +511,53 @@ fn open_log_file( }); } +fn open_telemetry_log_file( + workspace: &mut Workspace, + app_state: Arc, + cx: &mut ViewContext, +) { + workspace.with_local_workspace(cx, app_state.clone(), |_, cx| { + cx.spawn_weak(|workspace, mut cx| async move { + let workspace = workspace.upgrade(&cx)?; + let path = app_state.client.telemetry_log_file_path()?; + let log = app_state.fs.load(&path).await.log_err()?; + workspace.update(&mut cx, |workspace, cx| { + let project = workspace.project().clone(); + let buffer = project + .update(cx, |project, cx| project.create_buffer("", None, cx)) + .expect("creating buffers on a local workspace always succeeds"); + buffer.update(cx, |buffer, cx| { + buffer.set_language(app_state.languages.get_language("JSON"), cx); + buffer.edit( + [( + 0..0, + concat!( + "// Zed collects anonymous usage data to help us understand how people are using the app.\n", + "// After the beta release, we'll provide the ability to opt out of this telemetry.\n", + "\n" + ), + )], + None, + cx, + ); + buffer.edit([(buffer.len()..buffer.len(), log)], None, cx); + }); + + let buffer = cx.add_model(|cx| { + MultiBuffer::singleton(buffer, cx).with_title("Telemetry Log".into()) + }); + workspace.add_item( + Box::new(cx.add_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx))), + cx, + ); + }); + + Some(()) + }) + .detach(); + }); +} + fn open_bundled_config_file( workspace: &mut Workspace, app_state: Arc, From c1c5eaeaf998cc0667d1164d909c02eaf93278f7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 14:25:28 -0700 Subject: [PATCH 019/140] Use the amplitude API key secret on CI Co-authored-by: Joseph Lyons --- .github/workflows/ci.yml | 1 + crates/zed/build.rs | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cef9497074..866d0acc0e 100644 --- a/.github/workflows/ci.yml 
+++ b/.github/workflows/ci.yml @@ -56,6 +56,7 @@ jobs: MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + ZED_AMPLITUDE_API_KEY: ${{ secrets.ZED_AMPLITUDE_API_KEY }} steps: - name: Install Rust run: | diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 0ffa2397b0..d3167851a0 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -3,8 +3,8 @@ use std::process::Command; fn main() { println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.14"); - if let Ok(api_key) = std::env::var("AMPLITUDE_API_KEY") { - println!("cargo:rustc-env=AMPLITUDE_API_KEY={api_key}"); + if let Ok(api_key) = std::env::var("ZED_AMPLITUDE_API_KEY") { + println!("cargo:rustc-env=ZED_AMPLITUDE_API_KEY={api_key}"); } let output = Command::new("npm") From ac0bcf3809b7ad336ee558fa247885680ad0366e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 15:09:16 -0700 Subject: [PATCH 020/140] Limit the size of the buffer in the OpenTelemetryLog command Co-authored-by: Joseph Lyons --- crates/zed/src/zed.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 407f101421..76bc62e4cb 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -521,6 +521,14 @@ fn open_telemetry_log_file( let workspace = workspace.upgrade(&cx)?; let path = app_state.client.telemetry_log_file_path()?; let log = app_state.fs.load(&path).await.log_err()?; + + const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024; + let mut start_offset = log.len().saturating_sub(MAX_TELEMETRY_LOG_LEN); + if let Some(newline_offset) = log[start_offset..].find('\n') { + start_offset += newline_offset + 1; + } + let log_suffix = &log[start_offset..]; + workspace.update(&mut cx, |workspace, cx| { let project = workspace.project().clone(); let buffer = project @@ -534,13 +542,14 @@ fn open_telemetry_log_file( concat!( "// Zed collects anonymous usage data to help us understand how people are using the app.\n", "// After the beta release, we'll provide the ability to opt out of this telemetry.\n", + "// Here is the data that has been reported for the current session:\n", "\n" ), )], None, cx, ); - buffer.edit([(buffer.len()..buffer.len(), log)], None, cx); + buffer.edit([(buffer.len()..buffer.len(), log_suffix)], None, cx); }); let buffer = cx.add_model(|cx| { From 1db75ca2cfd8bffc9594a023c9d0c39dba816348 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 15:49:13 -0700 Subject: [PATCH 021/140] Make device_id optional on signups table This way, signup won't fail if for some reason, the user's client-side JS doesn't provide an amplitude device id. 
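
On the API side the Option is all that is required: with serde's default handling of Option fields, a signup request that omits device_id (or sends null) still deserializes, and the column is simply left NULL. A minimal sketch with a reduced payload struct (SignupParams is an illustrative name, not the real type):

    use serde::Deserialize;

    // Reduced form of the signup payload; only the fields relevant to this change
    // are kept. device_id: Option<String> means a missing key becomes None rather
    // than a deserialization error.
    #[derive(Debug, Deserialize)]
    struct SignupParams {
        email_address: String,
        device_id: Option<String>,
    }

    fn main() -> serde_json::Result<()> {
        let with_id: SignupParams = serde_json::from_str(
            r#"{"email_address":"person@example.com","device_id":"abc-123"}"#,
        )?;
        assert_eq!(with_id.device_id.as_deref(), Some("abc-123"));

        // Client-side JS did not provide an Amplitude device id: signup still parses.
        let without_id: SignupParams =
            serde_json::from_str(r#"{"email_address":"person@example.com"}"#)?;
        assert!(without_id.device_id.is_none());
        Ok(())
    }

The same Option then threads through create_invite_from_code and the new NewUserResult::signup_device_id in the hunks below.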
Co-authored-by: Joseph Lyons --- .../20220913211150_create_signups.up.sql | 2 +- crates/collab/src/api.rs | 13 ++++-- crates/collab/src/db.rs | 46 ++++++++++++++----- crates/collab/src/db_tests.rs | 45 ++++++++++++------ 4 files changed, 75 insertions(+), 31 deletions(-) diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.up.sql index 35e334ea5f..19559b747c 100644 --- a/crates/collab/migrations/20220913211150_create_signups.up.sql +++ b/crates/collab/migrations/20220913211150_create_signups.up.sql @@ -4,7 +4,7 @@ CREATE TABLE IF NOT EXISTS "signups" ( "email_confirmation_code" VARCHAR(64) NOT NULL, "email_confirmation_sent" BOOLEAN NOT NULL, "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - "device_id" VARCHAR NOT NULL, + "device_id" VARCHAR, "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE, "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL, diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index a82363a56b..0a9d8106ce 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -157,9 +157,9 @@ async fn create_user( user, ) .await?; - user_id = result.0; - signup_device_id = Some(result.2); - if let Some(inviter_id) = result.1 { + user_id = result.user_id; + signup_device_id = result.signup_device_id; + if let Some(inviter_id) = result.inviting_user_id { rpc_server .invite_code_redeemed(inviter_id, user_id) .await @@ -425,6 +425,7 @@ async fn get_waitlist_summary( pub struct CreateInviteFromCodeParams { invite_code: String, email_address: String, + device_id: Option, } async fn create_invite_from_code( @@ -433,7 +434,11 @@ async fn create_invite_from_code( ) -> Result> { Ok(Json( app.db - .create_invite_from_code(¶ms.invite_code, ¶ms.email_address) + .create_invite_from_code( + ¶ms.invite_code, + ¶ms.email_address, + params.device_id.as_deref(), + ) .await?, )) } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 1518ec179f..8b01cdf971 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -35,7 +35,12 @@ pub trait Db: Send + Sync { async fn set_invite_count_for_user(&self, id: UserId, count: u32) -> Result<()>; async fn get_invite_code_for_user(&self, id: UserId) -> Result>; async fn get_user_for_invite_code(&self, code: &str) -> Result; - async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result; + async fn create_invite_from_code( + &self, + code: &str, + email_address: &str, + device_id: Option<&str>, + ) -> Result; async fn create_signup(&self, signup: Signup) -> Result<()>; async fn get_waitlist_summary(&self) -> Result; @@ -45,7 +50,7 @@ pub trait Db: Send + Sync { &self, invite: &Invite, user: NewUserParams, - ) -> Result<(UserId, Option, String)>; + ) -> Result; /// Registers a new project for the given user. 
async fn register_project(&self, host_user_id: UserId) -> Result; @@ -458,14 +463,14 @@ impl Db for PostgresDb { &self, invite: &Invite, user: NewUserParams, - ) -> Result<(UserId, Option, String)> { + ) -> Result { let mut tx = self.pool.begin().await?; - let (signup_id, existing_user_id, inviting_user_id, device_id): ( + let (signup_id, existing_user_id, inviting_user_id, signup_device_id): ( i32, Option, Option, - String, + Option, ) = sqlx::query_as( " SELECT id, user_id, inviting_user_id, device_id @@ -552,7 +557,11 @@ impl Db for PostgresDb { } tx.commit().await?; - Ok((user_id, inviting_user_id, device_id)) + Ok(NewUserResult { + user_id, + inviting_user_id, + signup_device_id, + }) } // invite codes @@ -625,7 +634,12 @@ impl Db for PostgresDb { }) } - async fn create_invite_from_code(&self, code: &str, email_address: &str) -> Result { + async fn create_invite_from_code( + &self, + code: &str, + email_address: &str, + device_id: Option<&str>, + ) -> Result { let mut tx = self.pool.begin().await?; let existing_user: Option = sqlx::query_scalar( @@ -679,10 +693,11 @@ impl Db for PostgresDb { platform_linux, platform_mac, platform_windows, - platform_unknown + platform_unknown, + device_id ) VALUES - ($1, $2, 'f', $3, 'f', 'f', 'f', 't') + ($1, $2, 'f', $3, 'f', 'f', 'f', 't', $4) ON CONFLICT (email_address) DO UPDATE SET inviting_user_id = excluded.inviting_user_id @@ -692,6 +707,7 @@ impl Db for PostgresDb { .bind(&email_address) .bind(&random_email_confirmation_code()) .bind(&inviter_id) + .bind(&device_id) .fetch_one(&mut tx) .await?; @@ -1675,7 +1691,7 @@ pub struct Signup { pub platform_linux: bool, pub editor_features: Vec, pub programming_languages: Vec, - pub device_id: String, + pub device_id: Option, } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)] @@ -1703,6 +1719,13 @@ pub struct NewUserParams { pub invite_count: i32, } +#[derive(Debug)] +pub struct NewUserResult { + pub user_id: UserId, + pub inviting_user_id: Option, + pub signup_device_id: Option, +} + fn random_invite_code() -> String { nanoid::nanoid!(16) } @@ -1905,7 +1928,7 @@ mod test { &self, _invite: &Invite, _user: NewUserParams, - ) -> Result<(UserId, Option, String)> { + ) -> Result { unimplemented!() } @@ -1928,6 +1951,7 @@ mod test { &self, _code: &str, _email_address: &str, + _device_id: Option<&str>, ) -> Result { unimplemented!() } diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 44697a59bd..1e48b4b754 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -954,10 +954,14 @@ async fn test_invite_codes() { // User 2 redeems the invite code and becomes a contact of user 1. let user2_invite = db - .create_invite_from_code(&invite_code, "u2@example.com") + .create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) .await .unwrap(); - let (user2, inviter, _) = db + let NewUserResult { + user_id: user2, + inviting_user_id, + signup_device_id, + } = db .create_user_from_invite( &user2_invite, NewUserParams { @@ -970,7 +974,8 @@ async fn test_invite_codes() { .unwrap(); let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); assert_eq!(invite_count, 1); - assert_eq!(inviter, Some(user1)); + assert_eq!(inviting_user_id, Some(user1)); + assert_eq!(signup_device_id.unwrap(), "user-2-device-id"); assert_eq!( db.get_contacts(user1).await.unwrap(), [ @@ -1004,10 +1009,14 @@ async fn test_invite_codes() { // User 3 redeems the invite code and becomes a contact of user 1. 
let user3_invite = db - .create_invite_from_code(&invite_code, "u3@example.com") + .create_invite_from_code(&invite_code, "u3@example.com", None) .await .unwrap(); - let (user3, inviter, _) = db + let NewUserResult { + user_id: user3, + inviting_user_id, + signup_device_id, + } = db .create_user_from_invite( &user3_invite, NewUserParams { @@ -1020,7 +1029,8 @@ async fn test_invite_codes() { .unwrap(); let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); assert_eq!(invite_count, 0); - assert_eq!(inviter, Some(user1)); + assert_eq!(inviting_user_id, Some(user1)); + assert!(signup_device_id.is_none()); assert_eq!( db.get_contacts(user1).await.unwrap(), [ @@ -1057,7 +1067,7 @@ async fn test_invite_codes() { ); // Trying to reedem the code for the third time results in an error. - db.create_invite_from_code(&invite_code, "u4@example.com") + db.create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) .await .unwrap_err(); @@ -1069,10 +1079,10 @@ async fn test_invite_codes() { // User 4 can now redeem the invite code and becomes a contact of user 1. let user4_invite = db - .create_invite_from_code(&invite_code, "u4@example.com") + .create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) .await .unwrap(); - let (user4, _, _) = db + let user4 = db .create_user_from_invite( &user4_invite, NewUserParams { @@ -1082,7 +1092,8 @@ async fn test_invite_codes() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); assert_eq!(invite_count, 1); @@ -1126,7 +1137,7 @@ async fn test_invite_codes() { ); // An existing user cannot redeem invite codes. - db.create_invite_from_code(&invite_code, "u2@example.com") + db.create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) .await .unwrap_err(); let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); @@ -1147,7 +1158,7 @@ async fn test_signups() { platform_windows: i % 4 == 0, editor_features: vec!["speed".into()], programming_languages: vec!["rust".into(), "c".into()], - device_id: format!("device_id_{i}"), + device_id: Some(format!("device_id_{i}")), }) .await .unwrap(); @@ -1217,7 +1228,11 @@ async fn test_signups() { // user completes the signup process by providing their // github account. - let (user_id, inviter_id, signup_device_id) = db + let NewUserResult { + user_id, + inviting_user_id, + signup_device_id, + } = db .create_user_from_invite( &Invite { email_address: signups_batch1[0].email_address.clone(), @@ -1232,11 +1247,11 @@ async fn test_signups() { .await .unwrap(); let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); - assert!(inviter_id.is_none()); + assert!(inviting_user_id.is_none()); assert_eq!(user.github_login, "person-0"); assert_eq!(user.email_address.as_deref(), Some("person-0@example.com")); assert_eq!(user.invite_count, 5); - assert_eq!(signup_device_id, "device_id_0"); + assert_eq!(signup_device_id.unwrap(), "device_id_0"); // cannot redeem the same signup again. 
db.create_user_from_invite( From f2ebb094a2bd1541dadd27a1e2ce4b2be21a76ea Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 27 Sep 2022 16:58:03 -0700 Subject: [PATCH 022/140] Remove unnecessary index drop in down migration --- crates/collab/migrations/20220913211150_create_signups.down.sql | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql index f67c10dd01..5504bbb8dc 100644 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ b/crates/collab/migrations/20220913211150_create_signups.down.sql @@ -4,4 +4,3 @@ ALTER TABLE users DROP COLUMN github_user_id; DROP INDEX index_users_on_email_address; -DROP INDEX index_users_on_github_user_id; From a48995c782da8b5db09645240eea5821d1f776cb Mon Sep 17 00:00:00 2001 From: Isaac Clayton Date: Fri, 22 Jul 2022 12:25:07 +0200 Subject: [PATCH 023/140] Basic html highlighting + lsp support --- Cargo.lock | 11 ++ crates/zed/Cargo.toml | 1 + crates/zed/src/languages.rs | 6 ++ crates/zed/src/languages/html.rs | 101 +++++++++++++++++++ crates/zed/src/languages/html/brackets.scm | 2 + crates/zed/src/languages/html/config.toml | 10 ++ crates/zed/src/languages/html/highlights.scm | 11 ++ crates/zed/src/languages/html/indents.scm | 1 + crates/zed/src/languages/html/outline.scm | 0 9 files changed, 143 insertions(+) create mode 100644 crates/zed/src/languages/html.rs create mode 100644 crates/zed/src/languages/html/brackets.scm create mode 100644 crates/zed/src/languages/html/config.toml create mode 100644 crates/zed/src/languages/html/highlights.scm create mode 100644 crates/zed/src/languages/html/indents.scm create mode 100644 crates/zed/src/languages/html/outline.scm diff --git a/Cargo.lock b/Cargo.lock index 363ee93c14..5023de1f75 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6043,6 +6043,16 @@ dependencies = [ "tree-sitter", ] +[[package]] +name = "tree-sitter-html" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "184e6b77953a354303dc87bf5fe36558c83569ce92606e7b382a0dc1b7443443" +dependencies = [ + "cc", + "tree-sitter", +] + [[package]] name = "tree-sitter-json" version = "0.19.0" @@ -7229,6 +7239,7 @@ dependencies = [ "tree-sitter-cpp", "tree-sitter-elixir", "tree-sitter-go", + "tree-sitter-html", "tree-sitter-json 0.20.0", "tree-sitter-markdown", "tree-sitter-python", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index dc2b0abd03..f84c8836bd 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -100,6 +100,7 @@ tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", tree-sitter-python = "0.20.2" tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" } tree-sitter-typescript = "0.20.1" +tree-sitter-html = "0.19.0" url = "2.2" [dev-dependencies] diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 6e57106e87..ba1945f316 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -7,6 +7,7 @@ use std::{borrow::Cow, str, sync::Arc}; mod c; mod elixir; mod go; +mod html; mod installation; mod json; mod language_plugin; @@ -96,6 +97,11 @@ pub async fn init(languages: Arc, _executor: Arc) tree_sitter_typescript::language_tsx(), Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await), ), + ( + "html", + tree_sitter_html::language(), + Some(CachedLspAdapter::new(html::HtmlLspAdapter).await), 
+ ), ] { languages.add(Arc::new(language(name, grammar, lsp_adapter))); } diff --git a/crates/zed/src/languages/html.rs b/crates/zed/src/languages/html.rs new file mode 100644 index 0000000000..5497841d88 --- /dev/null +++ b/crates/zed/src/languages/html.rs @@ -0,0 +1,101 @@ +use super::installation::{npm_install_packages, npm_package_latest_version}; +use anyhow::{anyhow, Context, Result}; +use async_trait::async_trait; +use client::http::HttpClient; +use futures::StreamExt; +use language::{LanguageServerName, LspAdapter}; +use serde_json::json; +use smol::fs; +use std::{any::Any, path::PathBuf, sync::Arc}; +use util::ResultExt; + +pub struct HtmlLspAdapter; + +impl HtmlLspAdapter { + const BIN_PATH: &'static str = + "node_modules/vscode-langservers-extracted/bin/vscode-html-language-server"; +} + +#[async_trait] +impl LspAdapter for HtmlLspAdapter { + async fn name(&self) -> LanguageServerName { + LanguageServerName("vscode-html-language-server".into()) + } + + async fn server_args(&self) -> Vec { + vec!["--stdio".into()] + } + + async fn fetch_latest_server_version( + &self, + _: Arc, + ) -> Result> { + Ok(Box::new(npm_package_latest_version("vscode-langservers-extracted").await?) as Box<_>) + } + + async fn fetch_server_binary( + &self, + version: Box, + _: Arc, + container_dir: PathBuf, + ) -> Result { + let version = version.downcast::().unwrap(); + let version_dir = container_dir.join(version.as_str()); + fs::create_dir_all(&version_dir) + .await + .context("failed to create version directory")?; + let binary_path = version_dir.join(Self::BIN_PATH); + + if fs::metadata(&binary_path).await.is_err() { + npm_install_packages( + [("vscode-langservers-extracted", version.as_str())], + &version_dir, + ) + .await?; + + if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { + while let Some(entry) = entries.next().await { + if let Some(entry) = entry.log_err() { + let entry_path = entry.path(); + if entry_path.as_path() != version_dir { + fs::remove_dir_all(&entry_path).await.log_err(); + } + } + } + } + } + + Ok(binary_path) + } + + async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + (|| async move { + let mut last_version_dir = None; + let mut entries = fs::read_dir(&container_dir).await?; + while let Some(entry) = entries.next().await { + let entry = entry?; + if entry.file_type().await?.is_dir() { + last_version_dir = Some(entry.path()); + } + } + let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?; + let bin_path = last_version_dir.join(Self::BIN_PATH); + if bin_path.exists() { + Ok(bin_path) + } else { + Err(anyhow!( + "missing executable in directory {:?}", + last_version_dir + )) + } + })() + .await + .log_err() + } + + async fn initialization_options(&self) -> Option { + Some(json!({ + "provideFormatter": true + })) + } +} diff --git a/crates/zed/src/languages/html/brackets.scm b/crates/zed/src/languages/html/brackets.scm new file mode 100644 index 0000000000..2d12b17daa --- /dev/null +++ b/crates/zed/src/languages/html/brackets.scm @@ -0,0 +1,2 @@ +("<" @open ">" @close) +("\"" @open "\"" @close) diff --git a/crates/zed/src/languages/html/config.toml b/crates/zed/src/languages/html/config.toml new file mode 100644 index 0000000000..0680717b2c --- /dev/null +++ b/crates/zed/src/languages/html/config.toml @@ -0,0 +1,10 @@ +name = "HTML" +path_suffixes = ["html"] +autoclose_before = ">" +brackets = [ + { start = "<", end = ">", close = true, newline = true }, + { start = "{", end = "}", close = true, newline = 
true }, + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false }, + { start = "!--", end = " --", close = true, newline = false }, +] diff --git a/crates/zed/src/languages/html/highlights.scm b/crates/zed/src/languages/html/highlights.scm new file mode 100644 index 0000000000..d6d361ca49 --- /dev/null +++ b/crates/zed/src/languages/html/highlights.scm @@ -0,0 +1,11 @@ +(tag_name) @keyword +(doctype) @constant +(attribute_name) @property +(attribute_value) @string +(comment) @comment + +[ + "<" + ">" + " Date: Tue, 30 Aug 2022 16:52:41 -0700 Subject: [PATCH 024/140] Add JavaScript language injection in HTML --- crates/zed/src/languages/html/injections.scm | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 crates/zed/src/languages/html/injections.scm diff --git a/crates/zed/src/languages/html/injections.scm b/crates/zed/src/languages/html/injections.scm new file mode 100644 index 0000000000..6d2b8e9377 --- /dev/null +++ b/crates/zed/src/languages/html/injections.scm @@ -0,0 +1,4 @@ +(script_element + (raw_text) @content + (#set! "language" "javascript")) + From 21fb2b9bf1eea8ac3f6042478dfd31f78497c3f8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 30 Aug 2022 17:23:09 -0700 Subject: [PATCH 025/140] Tweak HTML indents and highlights --- crates/language/src/buffer.rs | 2 ++ crates/language/src/language.rs | 4 ++++ crates/zed/src/languages/html/highlights.scm | 4 ++++ crates/zed/src/languages/html/indents.scm | 7 ++++++- crates/zed/src/languages/html/injections.scm | 1 - 5 files changed, 16 insertions(+), 2 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 08843aacfe..c8730be452 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1631,6 +1631,8 @@ impl BufferSnapshot { if capture.index == config.indent_capture_ix { start.get_or_insert(Point::from_ts_point(capture.node.start_position())); end.get_or_insert(Point::from_ts_point(capture.node.end_position())); + } else if Some(capture.index) == config.start_capture_ix { + start = Some(Point::from_ts_point(capture.node.end_position())); } else if Some(capture.index) == config.end_capture_ix { end = Some(Point::from_ts_point(capture.node.start_position())); } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 780f6e75b5..0366bdf669 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -304,6 +304,7 @@ pub struct Grammar { struct IndentConfig { query: Query, indent_capture_ix: u32, + start_capture_ix: Option, end_capture_ix: Option, } @@ -661,11 +662,13 @@ impl Language { let grammar = self.grammar_mut(); let query = Query::new(grammar.ts_language, source)?; let mut indent_capture_ix = None; + let mut start_capture_ix = None; let mut end_capture_ix = None; get_capture_indices( &query, &mut [ ("indent", &mut indent_capture_ix), + ("start", &mut start_capture_ix), ("end", &mut end_capture_ix), ], ); @@ -673,6 +676,7 @@ impl Language { grammar.indents_config = Some(IndentConfig { query, indent_capture_ix, + start_capture_ix, end_capture_ix, }); } diff --git a/crates/zed/src/languages/html/highlights.scm b/crates/zed/src/languages/html/highlights.scm index d6d361ca49..0ce535fad4 100644 --- a/crates/zed/src/languages/html/highlights.scm +++ b/crates/zed/src/languages/html/highlights.scm @@ -1,11 +1,15 @@ (tag_name) @keyword +(erroneous_end_tag_name) @keyword (doctype) @constant (attribute_name) @property (attribute_value) @string 
(comment) @comment +"=" @operator + [ "<" ">" "" ] @punctuation.bracket \ No newline at end of file diff --git a/crates/zed/src/languages/html/indents.scm b/crates/zed/src/languages/html/indents.scm index a1560bfea7..436663dba3 100644 --- a/crates/zed/src/languages/html/indents.scm +++ b/crates/zed/src/languages/html/indents.scm @@ -1 +1,6 @@ -(tag_name) @indent +(start_tag ">" @end) @indent +(self_closing_tag "/>" @end) @indent + +(element + (start_tag) @start + (end_tag)? @end) @indent diff --git a/crates/zed/src/languages/html/injections.scm b/crates/zed/src/languages/html/injections.scm index 6d2b8e9377..60688f599f 100644 --- a/crates/zed/src/languages/html/injections.scm +++ b/crates/zed/src/languages/html/injections.scm @@ -1,4 +1,3 @@ (script_element (raw_text) @content (#set! "language" "javascript")) - From a2e57e8d71e00bcb9059e6c79854937eeb488047 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 31 Aug 2022 11:53:52 -0700 Subject: [PATCH 026/140] Add basic syntax highlighting for CSS --- Cargo.lock | 10 +++ crates/zed/Cargo.toml | 1 + crates/zed/src/languages.rs | 5 ++ crates/zed/src/languages/css/brackets.scm | 3 + crates/zed/src/languages/css/config.toml | 9 +++ crates/zed/src/languages/css/highlights.scm | 76 ++++++++++++++++++++ crates/zed/src/languages/css/indents.scm | 1 + crates/zed/src/languages/html/injections.scm | 8 ++- 8 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 crates/zed/src/languages/css/brackets.scm create mode 100644 crates/zed/src/languages/css/config.toml create mode 100644 crates/zed/src/languages/css/highlights.scm create mode 100644 crates/zed/src/languages/css/indents.scm diff --git a/Cargo.lock b/Cargo.lock index 5023de1f75..3d74213062 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6025,6 +6025,15 @@ dependencies = [ "tree-sitter", ] +[[package]] +name = "tree-sitter-css" +version = "0.19.0" +source = "git+https://github.com/tree-sitter/tree-sitter-css?rev=769203d0f9abe1a9a691ac2b9fe4bb4397a73c51#769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" +dependencies = [ + "cc", + "tree-sitter", +] + [[package]] name = "tree-sitter-elixir" version = "0.19.0" @@ -7237,6 +7246,7 @@ dependencies = [ "tree-sitter", "tree-sitter-c", "tree-sitter-cpp", + "tree-sitter-css", "tree-sitter-elixir", "tree-sitter-go", "tree-sitter-html", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index f84c8836bd..27c8477355 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -92,6 +92,7 @@ toml = "0.5" tree-sitter = "0.20" tree-sitter-c = "0.20.1" tree-sitter-cpp = "0.20.0" +tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" } tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "05e3631c6a0701c1fa518b0fee7be95a2ceef5e2" } tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" } tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" } diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index ba1945f316..71a85af0c8 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -47,6 +47,11 @@ pub async fn init(languages: Arc, _executor: Arc) tree_sitter_cpp::language(), Some(CachedLspAdapter::new(c::CLspAdapter).await), ), + ( + "css", + tree_sitter_css::language(), + None, // + ), ( "elixir", tree_sitter_elixir::language(), diff --git 
a/crates/zed/src/languages/css/brackets.scm b/crates/zed/src/languages/css/brackets.scm new file mode 100644 index 0000000000..191fd9c084 --- /dev/null +++ b/crates/zed/src/languages/css/brackets.scm @@ -0,0 +1,3 @@ +("(" @open ")" @close) +("[" @open "]" @close) +("{" @open "}" @close) diff --git a/crates/zed/src/languages/css/config.toml b/crates/zed/src/languages/css/config.toml new file mode 100644 index 0000000000..28def3abd5 --- /dev/null +++ b/crates/zed/src/languages/css/config.toml @@ -0,0 +1,9 @@ +name = "CSS" +path_suffixes = ["css"] +autoclose_before = ";:.,=}])>" +brackets = [ + { start = "{", end = "}", close = true, newline = true }, + { start = "[", end = "]", close = true, newline = true }, + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false } +] diff --git a/crates/zed/src/languages/css/highlights.scm b/crates/zed/src/languages/css/highlights.scm new file mode 100644 index 0000000000..3638837af7 --- /dev/null +++ b/crates/zed/src/languages/css/highlights.scm @@ -0,0 +1,76 @@ +(comment) @comment + +[ + (tag_name) + (nesting_selector) + (universal_selector) +] @tag + +[ + "~" + ">" + "+" + "-" + "*" + "/" + "=" + "^=" + "|=" + "~=" + "$=" + "*=" + "and" + "or" + "not" + "only" +] @operator + +(attribute_selector (plain_value) @string) + +(attribute_name) @attribute +(pseudo_element_selector (tag_name) @attribute) +(pseudo_class_selector (class_name) @attribute) + +[ + (class_name) + (id_name) + (namespace_name) + (property_name) + (feature_name) +] @property + +(function_name) @function + +((property_name) @variable + (#match? @variable "^--")) +((plain_value) @variable + (#match? @variable "^--")) + +[ + "@media" + "@import" + "@charset" + "@namespace" + "@supports" + "@keyframes" + (at_keyword) + (to) + (from) + (important) +] @keyword + +(string_value) @string +(color_value) @string.special + +[ + (integer_value) + (float_value) +] @number + +(unit) @type + +[ + "#" + "," + ":" +] @punctuation.delimiter diff --git a/crates/zed/src/languages/css/indents.scm b/crates/zed/src/languages/css/indents.scm new file mode 100644 index 0000000000..e975469092 --- /dev/null +++ b/crates/zed/src/languages/css/indents.scm @@ -0,0 +1 @@ +(_ "{" "}" @end) @indent diff --git a/crates/zed/src/languages/html/injections.scm b/crates/zed/src/languages/html/injections.scm index 60688f599f..9084e373f2 100644 --- a/crates/zed/src/languages/html/injections.scm +++ b/crates/zed/src/languages/html/injections.scm @@ -1,3 +1,7 @@ (script_element - (raw_text) @content - (#set! "language" "javascript")) + (raw_text) @content + (#set! "language" "javascript")) + +(style_element + (raw_text) @content + (#set! 
"language" "css")) From 67e188a015cc95dc4c84440bd9a63cb4903789be Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 31 Aug 2022 16:50:44 -0700 Subject: [PATCH 027/140] Add Buffer::language_at, update MultiBuffer to use it Co-authored-by: Julia Risley --- Cargo.lock | 12 ++++ crates/editor/Cargo.toml | 4 ++ crates/editor/src/editor.rs | 94 ++++++++++++++++++++++++-- crates/editor/src/multi_buffer.rs | 27 +++++++- crates/language/src/buffer.rs | 21 +++++- crates/language/src/language.rs | 29 ++++---- crates/language/src/syntax_map.rs | 91 ++++++++++++++++--------- crates/language/src/tests.rs | 2 +- crates/zed/src/languages.rs | 6 +- crates/zed/src/languages/c.rs | 7 +- crates/zed/src/languages/elixir.rs | 4 +- crates/zed/src/languages/go.rs | 4 +- crates/zed/src/languages/python.rs | 7 +- crates/zed/src/languages/rust.rs | 6 +- crates/zed/src/languages/typescript.rs | 8 +-- crates/zed/src/zed.rs | 4 +- 16 files changed, 245 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d74213062..79eae80258 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1719,6 +1719,8 @@ dependencies = [ "text", "theme", "tree-sitter", + "tree-sitter-html", + "tree-sitter-javascript", "tree-sitter-rust", "unindent", "util", @@ -6062,6 +6064,16 @@ dependencies = [ "tree-sitter", ] +[[package]] +name = "tree-sitter-javascript" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed" +dependencies = [ + "cc", + "tree-sitter", +] + [[package]] name = "tree-sitter-json" version = "0.19.0" diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index dfd4938742..cfe244d4fa 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -51,6 +51,8 @@ serde = { version = "1.0", features = ["derive", "rc"] } smallvec = { version = "1.6", features = ["union"] } smol = "1.2" tree-sitter-rust = { version = "*", optional = true } +tree-sitter-html = { version = "*", optional = true } +tree-sitter-javascript = { version = "*", optional = true } [dev-dependencies] text = { path = "../text", features = ["test-support"] } @@ -67,3 +69,5 @@ rand = "0.8" unindent = "0.1.7" tree-sitter = "0.20" tree-sitter-rust = "0.20" +tree-sitter-html = "0.19" +tree-sitter-javascript = "0.20" diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c6cfd887db..b313d2de55 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1116,7 +1116,7 @@ impl Editor { &self, point: T, cx: &'a AppContext, - ) -> Option<&'a Arc> { + ) -> Option> { self.buffer.read(cx).language_at(point, cx) } @@ -4501,9 +4501,9 @@ impl Editor { // as that portion won't be used for detecting if a line is a comment. 
let full_comment_prefix: Arc = if let Some(prefix) = buffer .language_at(selection.start, cx) - .and_then(|l| l.line_comment_prefix()) + .and_then(|l| l.line_comment_prefix().map(|p| p.into())) { - prefix.into() + prefix } else { return; }; @@ -6713,7 +6713,7 @@ mod tests { platform::{WindowBounds, WindowOptions}, }; use indoc::indoc; - use language::{FakeLspAdapter, LanguageConfig}; + use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry}; use project::FakeFs; use settings::EditorSettings; use std::{cell::RefCell, rc::Rc, time::Instant}; @@ -9792,6 +9792,92 @@ mod tests { }); } + #[gpui::test] + async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx).await; + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + brackets: vec![BracketPair { + start: "<".to_string(), + end: ">".to_string(), + close: true, + newline: true, + }], + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + brackets: vec![BracketPair { + start: "/*".to_string(), + end: "*/".to_string(), + close: true, + newline: true, + }], + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_javascript::language()), + )); + + let registry = Arc::new(LanguageRegistry::test()); + registry.add(html_language.clone()); + registry.add(javascript_language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(html_language), cx); + }); + + cx.set_state( + &r#" + ˇ + + + "# + .unindent(), + ); + + let cursors = cx.update_editor(|editor, cx| editor.selections.ranges::(cx)); + cx.update_buffer(|buffer, _| { + let snapshot = buffer.snapshot(); + assert_eq!( + snapshot + .language_at(cursors[0].start) + .unwrap() + .name() + .as_ref(), + "HTML" + ); + assert_eq!( + snapshot + .language_at(cursors[1].start) + .unwrap() + .name() + .as_ref(), + "JavaScript" + ); + }); + } + #[gpui::test] async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { cx.update(|cx| cx.set_global(Settings::test(cx))); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 4ee9526a67..3b43f99ca0 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1212,9 +1212,9 @@ impl MultiBuffer { &self, point: T, cx: &'a AppContext, - ) -> Option<&'a Arc> { + ) -> Option> { self.point_to_buffer_offset(point, cx) - .and_then(|(buffer, _)| buffer.read(cx).language()) + .and_then(|(buffer, offset)| buffer.read(cx).language_at(offset)) } pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a dyn File; 2]> { @@ -1940,6 +1940,24 @@ impl MultiBufferSnapshot { } } + pub fn point_to_buffer_offset( + &self, + point: T, + ) -> Option<(&BufferSnapshot, usize)> { + let offset = point.to_offset(&self); + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&offset, Bias::Right, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + + cursor.item().map(|excerpt| { + let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let buffer_point = excerpt_start + offset - *cursor.start(); + (&excerpt.buffer, buffer_point) + }) + } + pub fn suggested_indents( &self, rows: impl IntoIterator, @@ 
-2490,6 +2508,11 @@ impl MultiBufferSnapshot { .and_then(|excerpt| excerpt.buffer.language()) } + pub fn language_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc> { + self.point_to_buffer_offset(point) + .and_then(|(buffer, offset)| buffer.language_at(offset)) + } + pub fn is_dirty(&self) -> bool { self.is_dirty } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c8730be452..372f77cf20 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -641,6 +641,15 @@ impl Buffer { self.language.as_ref() } + pub fn language_at(&self, position: D) -> Option> { + let offset = position.to_offset(self); + self.syntax_map + .lock() + .layers_for_range(offset..offset, &self.text) + .last() + .map(|info| info.language.clone()) + } + pub fn parse_count(&self) -> usize { self.parse_count } @@ -1826,6 +1835,14 @@ impl BufferSnapshot { self.language.as_ref() } + pub fn language_at(&self, position: D) -> Option<&Arc> { + let offset = position.to_offset(self); + self.syntax + .layers_for_range(offset..offset, &self.text) + .last() + .map(|info| info.language) + } + pub fn surrounding_word(&self, start: T) -> (Range, Option) { let mut start = start.to_offset(self); let mut end = start; @@ -1858,8 +1875,8 @@ impl BufferSnapshot { pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut result: Option> = None; - 'outer: for (_, _, node) in self.syntax.layers_for_range(range.clone(), &self.text) { - let mut cursor = node.walk(); + 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) { + let mut cursor = layer.node.walk(); // Descend to the first leaf that touches the start of the range, // and if the range is non-empty, extends beyond the start. diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 0366bdf669..341f70bff9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -135,7 +135,7 @@ impl CachedLspAdapter { pub async fn label_for_completion( &self, completion_item: &lsp::CompletionItem, - language: &Language, + language: &Arc, ) -> Option { self.adapter .label_for_completion(completion_item, language) @@ -146,7 +146,7 @@ impl CachedLspAdapter { &self, name: &str, kind: lsp::SymbolKind, - language: &Language, + language: &Arc, ) -> Option { self.adapter.label_for_symbol(name, kind, language).await } @@ -175,7 +175,7 @@ pub trait LspAdapter: 'static + Send + Sync { async fn label_for_completion( &self, _: &lsp::CompletionItem, - _: &Language, + _: &Arc, ) -> Option { None } @@ -184,7 +184,7 @@ pub trait LspAdapter: 'static + Send + Sync { &self, _: &str, _: lsp::SymbolKind, - _: &Language, + _: &Arc, ) -> Option { None } @@ -793,7 +793,7 @@ impl Language { } pub async fn label_for_completion( - &self, + self: &Arc, completion: &lsp::CompletionItem, ) -> Option { self.adapter @@ -802,7 +802,11 @@ impl Language { .await } - pub async fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option { + pub async fn label_for_symbol( + self: &Arc, + name: &str, + kind: lsp::SymbolKind, + ) -> Option { self.adapter .as_ref()? 
.label_for_symbol(name, kind, self) @@ -810,20 +814,17 @@ impl Language { } pub fn highlight_text<'a>( - &'a self, + self: &'a Arc, text: &'a Rope, range: Range, ) -> Vec<(Range, HighlightId)> { let mut result = Vec::new(); if let Some(grammar) = &self.grammar { let tree = grammar.parse_text(text, None); - let captures = SyntaxSnapshot::single_tree_captures( - range.clone(), - text, - &tree, - grammar, - |grammar| grammar.highlights_query.as_ref(), - ); + let captures = + SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| { + grammar.highlights_query.as_ref() + }); let highlight_maps = vec![grammar.highlight_map()]; let mut offset = 0; for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) { diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index a8cac76ac7..a7d9101d7b 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -92,6 +92,12 @@ struct SyntaxLayer { language: Arc, } +pub struct SyntaxLayerInfo<'a> { + pub depth: usize, + pub node: Node<'a>, + pub language: &'a Arc, +} + #[derive(Debug, Clone)] struct SyntaxLayerSummary { min_depth: usize, @@ -473,13 +479,18 @@ impl SyntaxSnapshot { range: Range, text: &'a Rope, tree: &'a Tree, - grammar: &'a Grammar, + language: &'a Arc, query: fn(&Grammar) -> Option<&Query>, ) -> SyntaxMapCaptures<'a> { SyntaxMapCaptures::new( range.clone(), text, - [(grammar, 0, tree.root_node())].into_iter(), + [SyntaxLayerInfo { + language, + depth: 0, + node: tree.root_node(), + }] + .into_iter(), query, ) } @@ -513,7 +524,7 @@ impl SyntaxSnapshot { } #[cfg(test)] - pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> { + pub fn layers(&self, buffer: &BufferSnapshot) -> Vec { self.layers_for_range(0..buffer.len(), buffer) } @@ -521,7 +532,7 @@ impl SyntaxSnapshot { &self, range: Range, buffer: &BufferSnapshot, - ) -> Vec<(&Grammar, usize, Node)> { + ) -> Vec { let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); @@ -538,16 +549,14 @@ impl SyntaxSnapshot { let mut result = Vec::new(); cursor.next(buffer); while let Some(layer) = cursor.item() { - if let Some(grammar) = &layer.language.grammar { - result.push(( - grammar.as_ref(), - layer.depth, - layer.tree.root_node_with_offset( - layer.range.start.to_offset(buffer), - layer.range.start.to_point(buffer).to_ts_point(), - ), - )); - } + result.push(SyntaxLayerInfo { + language: &layer.language, + depth: layer.depth, + node: layer.tree.root_node_with_offset( + layer.range.start.to_offset(buffer), + layer.range.start.to_point(buffer).to_ts_point(), + ), + }); cursor.next(buffer) } @@ -559,7 +568,7 @@ impl<'a> SyntaxMapCaptures<'a> { fn new( range: Range, text: &'a Rope, - layers: impl Iterator)>, + layers: impl Iterator>, query: fn(&Grammar) -> Option<&Query>, ) -> Self { let mut result = Self { @@ -567,11 +576,19 @@ impl<'a> SyntaxMapCaptures<'a> { grammars: Vec::new(), active_layer_count: 0, }; - for (grammar, depth, node) in layers { - let query = if let Some(query) = query(grammar) { - query - } else { - continue; + for SyntaxLayerInfo { + language, + depth, + node, + } in layers + { + let grammar = match &language.grammar { + Some(grammer) => grammer, + None => continue, + }; + let query = match query(&grammar) { + Some(query) => query, + None => continue, }; let mut query_cursor = QueryCursorHandle::new(); @@ -678,15 +695,23 @@ impl<'a> SyntaxMapMatches<'a> { fn new( range: Range, 
text: &'a Rope, - layers: impl Iterator)>, + layers: impl Iterator>, query: fn(&Grammar) -> Option<&Query>, ) -> Self { let mut result = Self::default(); - for (grammar, depth, node) in layers { - let query = if let Some(query) = query(grammar) { - query - } else { - continue; + for SyntaxLayerInfo { + language, + depth, + node, + } in layers + { + let grammar = match &language.grammar { + Some(grammer) => grammer, + None => continue, + }; + let query = match query(&grammar) { + Some(query) => query, + None => continue, }; let mut query_cursor = QueryCursorHandle::new(); @@ -1624,8 +1649,8 @@ mod tests { let reference_layers = reference_syntax_map.layers(&buffer); for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) { - assert_eq!(edited_layer.2.to_sexp(), reference_layer.2.to_sexp()); - assert_eq!(edited_layer.2.range(), reference_layer.2.range()); + assert_eq!(edited_layer.node.to_sexp(), reference_layer.node.to_sexp()); + assert_eq!(edited_layer.node.range(), reference_layer.node.range()); } } @@ -1770,13 +1795,13 @@ mod tests { mutated_layers.into_iter().zip(reference_layers.into_iter()) { assert_eq!( - edited_layer.2.to_sexp(), - reference_layer.2.to_sexp(), + edited_layer.node.to_sexp(), + reference_layer.node.to_sexp(), "different layer at step {i}" ); assert_eq!( - edited_layer.2.range(), - reference_layer.2.range(), + edited_layer.node.range(), + reference_layer.node.range(), "different layer at step {i}" ); } @@ -1828,7 +1853,7 @@ mod tests { expected_layers.len(), "wrong number of layers" ); - for (i, ((_, _, node), expected_s_exp)) in + for (i, (SyntaxLayerInfo { node, .. }, expected_s_exp)) in layers.iter().zip(expected_layers.iter()).enumerate() { let actual_s_exp = node.to_sexp(); diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 821bbc9968..8f56f3287e 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -1449,7 +1449,7 @@ fn get_tree_sexp(buffer: &ModelHandle, cx: &gpui::TestAppContext) -> Str buffer.read_with(cx, |buffer, _| { let snapshot = buffer.snapshot(); let layers = snapshot.syntax.layers(buffer.as_text_snapshot()); - layers[0].2.to_sexp() + layers[0].node.to_sexp() }) } diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 71a85af0c8..2745fa824a 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -108,7 +108,7 @@ pub async fn init(languages: Arc, _executor: Arc) Some(CachedLspAdapter::new(html::HtmlLspAdapter).await), ), ] { - languages.add(Arc::new(language(name, grammar, lsp_adapter))); + languages.add(language(name, grammar, lsp_adapter)); } } @@ -116,7 +116,7 @@ pub(crate) fn language( name: &str, grammar: tree_sitter::Language, lsp_adapter: Option>, -) -> Language { +) -> Arc { let config = toml::from_slice( &LanguageDir::get(&format!("{}/config.toml", name)) .unwrap() @@ -153,7 +153,7 @@ pub(crate) fn language( if let Some(lsp_adapter) = lsp_adapter { language = language.with_lsp_adapter(lsp_adapter) } - language + Arc::new(language) } fn load_query(name: &str, filename_prefix: &str) -> Option> { diff --git a/crates/zed/src/languages/c.rs b/crates/zed/src/languages/c.rs index 6aa750f6a0..712e87101b 100644 --- a/crates/zed/src/languages/c.rs +++ b/crates/zed/src/languages/c.rs @@ -112,7 +112,7 @@ impl super::LspAdapter for CLspAdapter { async fn label_for_completion( &self, completion: &lsp::CompletionItem, - language: &Language, + language: &Arc, ) -> Option { let label = completion .label @@ -190,7 +190,7 @@ impl 
super::LspAdapter for CLspAdapter { &self, name: &str, kind: lsp::SymbolKind, - language: &Language, + language: &Arc, ) -> Option { let (text, filter_range, display_range) = match kind { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { @@ -251,7 +251,6 @@ mod tests { use gpui::MutableAppContext; use language::{AutoindentMode, Buffer}; use settings::Settings; - use std::sync::Arc; #[gpui::test] fn test_c_autoindent(cx: &mut MutableAppContext) { @@ -262,7 +261,7 @@ mod tests { let language = crate::languages::language("c", tree_sitter_c::language(), None); cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); + let mut buffer = Buffer::new(0, "", cx).with_language(language, cx); // empty function buffer.edit([(0..0, "int main() {}")], None, cx); diff --git a/crates/zed/src/languages/elixir.rs b/crates/zed/src/languages/elixir.rs index 4959338522..75b35bb630 100644 --- a/crates/zed/src/languages/elixir.rs +++ b/crates/zed/src/languages/elixir.rs @@ -113,7 +113,7 @@ impl LspAdapter for ElixirLspAdapter { async fn label_for_completion( &self, completion: &lsp::CompletionItem, - language: &Language, + language: &Arc, ) -> Option { match completion.kind.zip(completion.detail.as_ref()) { Some((_, detail)) if detail.starts_with("(function)") => { @@ -168,7 +168,7 @@ impl LspAdapter for ElixirLspAdapter { &self, name: &str, kind: SymbolKind, - language: &Language, + language: &Arc, ) -> Option { let (text, filter_range, display_range) = match kind { SymbolKind::METHOD | SymbolKind::FUNCTION => { diff --git a/crates/zed/src/languages/go.rs b/crates/zed/src/languages/go.rs index 729d39b513..19692fdf44 100644 --- a/crates/zed/src/languages/go.rs +++ b/crates/zed/src/languages/go.rs @@ -134,7 +134,7 @@ impl super::LspAdapter for GoLspAdapter { async fn label_for_completion( &self, completion: &lsp::CompletionItem, - language: &Language, + language: &Arc, ) -> Option { let label = &completion.label; @@ -235,7 +235,7 @@ impl super::LspAdapter for GoLspAdapter { &self, name: &str, kind: lsp::SymbolKind, - language: &Language, + language: &Arc, ) -> Option { let (text, filter_range, display_range) = match kind { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { diff --git a/crates/zed/src/languages/python.rs b/crates/zed/src/languages/python.rs index 274fc3216c..e6e55eeac4 100644 --- a/crates/zed/src/languages/python.rs +++ b/crates/zed/src/languages/python.rs @@ -90,7 +90,7 @@ impl LspAdapter for PythonLspAdapter { async fn label_for_completion( &self, item: &lsp::CompletionItem, - language: &language::Language, + language: &Arc, ) -> Option { let label = &item.label; let grammar = language.grammar()?; @@ -112,7 +112,7 @@ impl LspAdapter for PythonLspAdapter { &self, name: &str, kind: lsp::SymbolKind, - language: &language::Language, + language: &Arc, ) -> Option { let (text, filter_range, display_range) = match kind { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { @@ -149,7 +149,6 @@ mod tests { use gpui::{ModelContext, MutableAppContext}; use language::{AutoindentMode, Buffer}; use settings::Settings; - use std::sync::Arc; #[gpui::test] fn test_python_autoindent(cx: &mut MutableAppContext) { @@ -160,7 +159,7 @@ mod tests { cx.set_global(settings); cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); + let mut buffer = Buffer::new(0, "", cx).with_language(language, cx); let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext| { let ix = buffer.len(); buffer.edit([(ix..ix, 
text)], Some(AutoindentMode::EachLine), cx); diff --git a/crates/zed/src/languages/rust.rs b/crates/zed/src/languages/rust.rs index adbe431279..f5776f3420 100644 --- a/crates/zed/src/languages/rust.rs +++ b/crates/zed/src/languages/rust.rs @@ -119,7 +119,7 @@ impl LspAdapter for RustLspAdapter { async fn label_for_completion( &self, completion: &lsp::CompletionItem, - language: &Language, + language: &Arc, ) -> Option { match completion.kind { Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => { @@ -196,7 +196,7 @@ impl LspAdapter for RustLspAdapter { &self, name: &str, kind: lsp::SymbolKind, - language: &Language, + language: &Arc, ) -> Option { let (text, filter_range, display_range) = match kind { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { @@ -439,7 +439,7 @@ mod tests { cx.set_global(settings); cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); + let mut buffer = Buffer::new(0, "", cx).with_language(language, cx); // indent between braces buffer.set_text("fn a() {}", cx); diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index 85a1bd6400..95f56bce5b 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -115,7 +115,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn label_for_completion( &self, item: &lsp::CompletionItem, - language: &language::Language, + language: &Arc, ) -> Option { use lsp::CompletionItemKind as Kind; let len = item.label.len(); @@ -144,7 +144,6 @@ impl LspAdapter for TypeScriptLspAdapter { #[cfg(test)] mod tests { - use std::sync::Arc; use gpui::MutableAppContext; use unindent::Unindent; @@ -172,9 +171,8 @@ mod tests { "# .unindent(); - let buffer = cx.add_model(|cx| { - language::Buffer::new(0, text, cx).with_language(Arc::new(language), cx) - }); + let buffer = + cx.add_model(|cx| language::Buffer::new(0, text, cx).with_language(language, cx)); let outline = buffer.read(cx).snapshot().outline(None).unwrap(); assert_eq!( outline diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 76bc62e4cb..f86022e39c 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1133,7 +1133,7 @@ mod tests { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "untitled"); assert!(Arc::ptr_eq( - editor.language_at(0, cx).unwrap(), + &editor.language_at(0, cx).unwrap(), &languages::PLAIN_TEXT )); editor.handle_input("hi", cx); @@ -1220,7 +1220,7 @@ mod tests { editor.update(cx, |editor, cx| { assert!(Arc::ptr_eq( - editor.language_at(0, cx).unwrap(), + &editor.language_at(0, cx).unwrap(), &languages::PLAIN_TEXT )); editor.handle_input("hi", cx); From 2b0794f5ae76d1f1fdd6a8d16c3a0db17ddc9cb6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 9 Sep 2022 17:40:34 -0700 Subject: [PATCH 028/140] Restructure autoclosing to account for multi-language documents --- crates/editor/src/editor.rs | 572 +++++++++++----------- crates/zed/src/languages/html/config.toml | 2 +- 2 files changed, 281 insertions(+), 293 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b313d2de55..055b73d7dc 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -410,7 +410,7 @@ pub struct Editor { add_selections_state: Option, select_next_state: Option, selection_history: SelectionHistory, - autoclose_stack: InvalidationStack, + autoclose_regions: Vec, snippet_stack: InvalidationStack, select_larger_syntax_node_stack: Vec]>>, ime_transaction: Option, 
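The hunks below swap the single autoclose_stack for a flat list of per-selection AutocloseRegion entries, so the bracket pair in effect can differ per cursor in a multi-language buffer. What follows is a rough standalone sketch of the pairing rule that the new selections_with_autoclose_regions helper (further down in this patch) relies on; it uses plain byte offsets instead of Zed's anchors, and the Region / innermost_region names are illustrative stand-ins rather than actual editor types.

    #[derive(Debug)]
    struct Region {
        selection_id: usize,
        range: std::ops::Range<usize>,
    }

    // Regions are kept sorted by start position; later entries nest inside earlier ones,
    // so the last match is the innermost region surrounding the cursor.
    fn innermost_region<'a>(
        regions: &'a [Region],
        selection_id: usize,
        cursor: usize,
    ) -> Option<&'a Region> {
        regions
            .iter()
            .filter(|region| region.selection_id == selection_id && region.range.contains(&cursor))
            .last()
    }

    fn main() {
        let regions = vec![
            Region { selection_id: 1, range: 10..20 },
            Region { selection_id: 1, range: 12..18 },
            Region { selection_id: 2, range: 40..44 },
        ];
        assert_eq!(
            innermost_region(&regions, 1, 15).map(|region| region.range.clone()),
            Some(12..18)
        );
        assert!(innermost_region(&regions, 2, 15).is_none());
    }

The real helper additionally walks the sorted region list in lockstep with the (also sorted) selections, so regions that have already been passed are never rescanned.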
@@ -569,8 +569,9 @@ struct SelectNextState { done: bool, } -struct BracketPairState { - ranges: Vec>, +struct AutocloseRegion { + selection_id: usize, + range: Range, pair: BracketPair, } @@ -1010,7 +1011,7 @@ impl Editor { add_selections_state: None, select_next_state: None, selection_history: Default::default(), - autoclose_stack: Default::default(), + autoclose_regions: Default::default(), snippet_stack: Default::default(), select_larger_syntax_node_stack: Vec::new(), ime_transaction: Default::default(), @@ -1401,8 +1402,7 @@ impl Editor { self.add_selections_state = None; self.select_next_state = None; self.select_larger_syntax_node_stack.clear(); - self.autoclose_stack - .invalidate(&self.selections.disjoint_anchors(), buffer); + self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer); self.snippet_stack .invalidate(&self.selections.disjoint_anchors(), buffer); self.take_rename(false, cx); @@ -1849,15 +1849,158 @@ impl Editor { return; } - if !self.skip_autoclose_end(text, cx) { - self.transact(cx, |this, cx| { - if !this.surround_with_bracket_pair(text, cx) { - this.insert(text, cx); - this.autoclose_bracket_pairs(cx); + let text: Arc = text.into(); + let selections = self.selections.all_adjusted(cx); + let mut edits = Vec::new(); + let mut new_selections = Vec::with_capacity(selections.len()); + let mut new_autoclose_regions = Vec::new(); + let snapshot = self.buffer.read(cx).read(cx); + + for (selection, autoclose_region) in + self.selections_with_autoclose_regions(selections, &snapshot) + { + if let Some(language) = snapshot.language_at(selection.head()) { + // Determine if the inserted text matches the opening or closing + // bracket of any of this language's bracket pairs. + let mut bracket_pair = None; + let mut is_bracket_pair_start = false; + for pair in language.brackets() { + if pair.start.ends_with(text.as_ref()) { + bracket_pair = Some(pair.clone()); + is_bracket_pair_start = true; + break; + } else if pair.end.as_str() == text.as_ref() { + bracket_pair = Some(pair.clone()); + break; + } } - }); - self.trigger_completion_on_input(text, cx); + + if let Some(bracket_pair) = bracket_pair { + if selection.is_empty() { + if is_bracket_pair_start { + let prefix_len = bracket_pair.start.len() - text.len(); + + // If the inserted text is a suffix of an opening bracket and the + // selection is preceded by the rest of the opening bracket, then + // insert the closing bracket. + let should_autoclose = selection.start.column > (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &bracket_pair.start[..prefix_len], + ) + && snapshot + .chars_at(selection.start) + .next() + .map_or(true, |c| language.should_autoclose_before(c)); + if should_autoclose { + let anchor = snapshot.anchor_before(selection.end); + new_selections + .push((selection.map(|_| anchor.clone()), text.len())); + new_autoclose_regions.push(( + anchor.clone(), + text.len(), + selection.id, + bracket_pair.clone(), + )); + edits.push(( + selection.range(), + format!("{}{}", text, bracket_pair.end).into(), + )); + continue; + } + } else if let Some(region) = autoclose_region { + // If the selection is followed by an auto-inserted closing bracket, + // then don't insert anything else; just move the selection past the + // closing bracket. 
+ let should_skip = selection.end == region.range.end.to_point(&snapshot); + if should_skip { + let anchor = snapshot.anchor_after(selection.end); + new_selections.push(( + selection.map(|_| anchor.clone()), + region.pair.end.len(), + )); + continue; + } + } + } + // If an opening bracket is typed while text is selected, then + // surround that text with the bracket pair. + else if is_bracket_pair_start { + edits.push((selection.start..selection.start, text.clone())); + edits.push(( + selection.end..selection.end, + bracket_pair.end.as_str().into(), + )); + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(selection.start), + end: snapshot.anchor_before(selection.end), + reversed: selection.reversed, + goal: selection.goal, + }, + 0, + )); + continue; + } + } + } + + // If not handling any auto-close operation, then just replace the selected + // text with the given input and move the selection to the end of the + // newly inserted text. + let anchor = snapshot.anchor_after(selection.end); + new_selections.push((selection.map(|_| anchor.clone()), 0)); + edits.push((selection.start..selection.end, text.clone())); } + + drop(snapshot); + self.transact(cx, |this, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, Some(AutoindentMode::EachLine), cx); + }); + + let new_anchor_selections = new_selections.iter().map(|e| &e.0); + let new_selection_deltas = new_selections.iter().map(|e| e.1); + let snapshot = this.buffer.read(cx).read(cx); + let new_selections = resolve_multiple::(new_anchor_selections, &snapshot) + .zip(new_selection_deltas) + .map(|(selection, delta)| selection.map(|e| e + delta)) + .collect::>(); + + let mut i = 0; + for (position, delta, selection_id, pair) in new_autoclose_regions { + let position = position.to_offset(&snapshot) + delta; + let start = snapshot.anchor_before(position); + let end = snapshot.anchor_after(position); + while let Some(existing_state) = this.autoclose_regions.get(i) { + match existing_state.range.start.cmp(&start, &snapshot) { + Ordering::Less => i += 1, + Ordering::Greater => break, + Ordering::Equal => match end.cmp(&existing_state.range.end, &snapshot) { + Ordering::Less => i += 1, + Ordering::Equal => break, + Ordering::Greater => break, + }, + } + } + this.autoclose_regions.insert( + i, + AutocloseRegion { + selection_id, + range: start..end, + pair, + }, + ); + } + + drop(snapshot); + this.change_selections(None, cx, |s| s.select(new_selections)); + this.trigger_completion_on_input(&text, cx); + }); } pub fn newline(&mut self, _: &Newline, cx: &mut ViewContext) { @@ -2029,232 +2172,89 @@ impl Editor { } } - fn surround_with_bracket_pair(&mut self, text: &str, cx: &mut ViewContext) -> bool { - let snapshot = self.buffer.read(cx).snapshot(cx); - if let Some(pair) = snapshot - .language() - .and_then(|language| language.brackets().iter().find(|b| b.start == text)) - .cloned() - { - if self - .selections - .all::(cx) - .iter() - .any(|selection| selection.is_empty()) - { - return false; - } - - let mut selections = self.selections.disjoint_anchors().to_vec(); - for selection in &mut selections { - selection.end = selection.end.bias_left(&snapshot); - } - drop(snapshot); - - self.buffer.update(cx, |buffer, cx| { - let pair_start: Arc = pair.start.clone().into(); - let pair_end: Arc = pair.end.clone().into(); - buffer.edit( - selections.iter().flat_map(|s| { - [ - (s.start.clone()..s.start.clone(), pair_start.clone()), - (s.end.clone()..s.end.clone(), pair_end.clone()), - ] - }), - None, - cx, - ); 
- }); - - let snapshot = self.buffer.read(cx).read(cx); - for selection in &mut selections { - selection.end = selection.end.bias_right(&snapshot); - } - drop(snapshot); - - self.change_selections(None, cx, |s| s.select_anchors(selections)); - true - } else { - false - } - } - - fn autoclose_bracket_pairs(&mut self, cx: &mut ViewContext) { + /// If any empty selections is touching the start of its innermost containing autoclose + /// region, expand it to select the brackets. + fn select_autoclose_pair(&mut self, cx: &mut ViewContext) { let selections = self.selections.all::(cx); - let mut bracket_pair_state = None; - let mut new_selections = None; - self.buffer.update(cx, |buffer, cx| { - let mut snapshot = buffer.snapshot(cx); - let left_biased_selections = selections - .iter() - .map(|selection| selection.map(|p| snapshot.anchor_before(p))) - .collect::>(); - - let autoclose_pair = snapshot.language().and_then(|language| { - let first_selection_start = selections.first().unwrap().start; - let pair = language.brackets().iter().find(|pair| { - pair.close - && snapshot.contains_str_at( - first_selection_start.saturating_sub(pair.start.len()), - &pair.start, - ) - }); - pair.and_then(|pair| { - let should_autoclose = selections.iter().all(|selection| { - // Ensure all selections are parked at the end of a pair start. - if snapshot.contains_str_at( - selection.start.saturating_sub(pair.start.len()), - &pair.start, - ) { - snapshot - .chars_at(selection.start) - .next() - .map_or(true, |c| language.should_autoclose_before(c)) - } else { - false + let buffer = self.buffer.read(cx).read(cx); + let mut new_selections = Vec::new(); + for (mut selection, region) in self.selections_with_autoclose_regions(selections, &buffer) { + if let (Some(region), true) = (region, selection.is_empty()) { + let mut range = region.range.to_offset(&buffer); + if selection.start == range.start { + if range.start >= region.pair.start.len() { + range.start -= region.pair.start.len(); + if buffer.contains_str_at(range.start, ®ion.pair.start) { + if buffer.contains_str_at(range.end, ®ion.pair.end) { + range.end += region.pair.end.len(); + selection.start = range.start; + selection.end = range.end; + } } - }); - - if should_autoclose { - Some(pair.clone()) - } else { - None } - }) - }); - - if let Some(pair) = autoclose_pair { - let selection_ranges = selections - .iter() - .map(|selection| { - let start = selection.start.to_offset(&snapshot); - start..start - }) - .collect::>(); - - let pair_end: Arc = pair.end.clone().into(); - buffer.edit( - selection_ranges - .iter() - .map(|range| (range.clone(), pair_end.clone())), - None, - cx, - ); - snapshot = buffer.snapshot(cx); - - new_selections = Some( - resolve_multiple::(left_biased_selections.iter(), &snapshot) - .collect::>(), - ); - - if pair.end.len() == 1 { - let mut delta = 0; - bracket_pair_state = Some(BracketPairState { - ranges: selections - .iter() - .map(move |selection| { - let offset = selection.start + delta; - delta += 1; - snapshot.anchor_before(offset)..snapshot.anchor_after(offset) - }) - .collect(), - pair, - }); } } - }); + new_selections.push(selection); + } - if let Some(new_selections) = new_selections { - self.change_selections(None, cx, |s| { - s.select(new_selections); - }); - } - if let Some(bracket_pair_state) = bracket_pair_state { - self.autoclose_stack.push(bracket_pair_state); - } + drop(buffer); + self.change_selections(None, cx, |selections| selections.select(new_selections)); } - fn skip_autoclose_end(&mut self, text: &str, cx: &mut 
ViewContext) -> bool { - let buffer = self.buffer.read(cx).snapshot(cx); - let old_selections = self.selections.all::(cx); - let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { - autoclose_pair - } else { - return false; - }; - if text != autoclose_pair.pair.end { - return false; - } + /// Iterate the given selections, and for each one, find the smallest surrounding + /// autoclose region. This uses the ordering of the selections and the autoclose + /// regions to avoid repeated comparisons. + fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>( + &'a self, + selections: impl IntoIterator>, + buffer: &'a MultiBufferSnapshot, + ) -> impl Iterator, Option<&'a AutocloseRegion>)> { + let mut i = 0; + let mut pair_states = self.autoclose_regions.as_slice(); + selections.into_iter().map(move |selection| { + let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); - debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len()); - - if old_selections - .iter() - .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer))) - .all(|(selection, autoclose_range)| { - let autoclose_range_end = autoclose_range.end.to_offset(&buffer); - selection.is_empty() && selection.start == autoclose_range_end - }) - { - let new_selections = old_selections - .into_iter() - .map(|selection| { - let cursor = selection.start + 1; - Selection { - id: selection.id, - start: cursor, - end: cursor, - reversed: false, - goal: SelectionGoal::None, - } - }) - .collect(); - self.autoclose_stack.pop(); - self.change_selections(Some(Autoscroll::Fit), cx, |s| { - s.select(new_selections); - }); - true - } else { - false - } - } - - fn select_autoclose_pair(&mut self, cx: &mut ViewContext) -> bool { - let buffer = self.buffer.read(cx).snapshot(cx); - let old_selections = self.selections.all::(cx); - let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { - autoclose_pair - } else { - return false; - }; - - debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len()); - - let mut new_selections = Vec::new(); - for (selection, autoclose_range) in old_selections - .iter() - .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer))) - { - if selection.is_empty() - && autoclose_range.is_empty() - && selection.start == autoclose_range.start - { - new_selections.push(Selection { - id: selection.id, - start: selection.start - autoclose_pair.pair.start.len(), - end: selection.end + autoclose_pair.pair.end.len(), - reversed: true, - goal: selection.goal, - }); - } else { - return false; + let mut enclosing = None; + while let Some(pair_state) = pair_states.get(i) { + if pair_state.range.end.to_offset(buffer) < range.start { + pair_states = &pair_states[i + 1..]; + i = 0; + } else if pair_state.range.start.to_offset(buffer) > range.end { + break; + } else if pair_state.selection_id == selection.id { + enclosing = Some(pair_state); + i += 1; + } } - } - self.change_selections(Some(Autoscroll::Fit), cx, |selections| { - selections.select(new_selections) + (selection.clone(), enclosing) + }) + } + + /// Remove any autoclose regions that no longer contain their selection. 
+ fn invalidate_autoclose_regions( + &mut self, + mut selections: &[Selection], + buffer: &MultiBufferSnapshot, + ) { + self.autoclose_regions.retain(|state| { + let mut i = 0; + while let Some(selection) = selections.get(i) { + if selection.end.cmp(&state.range.start, buffer).is_lt() { + selections = &selections[1..]; + continue; + } + if selection.start.cmp(&state.range.end, buffer).is_gt() { + break; + } + if selection.id == state.selection_id { + return true; + } else { + i += 1; + } + } + false }); - true } fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option { @@ -2909,51 +2909,47 @@ impl Editor { pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext) { self.transact(cx, |this, cx| { - if !this.select_autoclose_pair(cx) { - let mut selections = this.selections.all::(cx); - if !this.selections.line_mode { - let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); - for selection in &mut selections { - if selection.is_empty() { - let old_head = selection.head(); - let mut new_head = movement::left( - &display_map, - old_head.to_display_point(&display_map), - ) - .to_point(&display_map); - if let Some((buffer, line_buffer_range)) = display_map - .buffer_snapshot - .buffer_line_for_row(old_head.row) - { - let indent_size = - buffer.indent_size_for_line(line_buffer_range.start.row); - let language_name = - buffer.language().map(|language| language.name()); - let indent_len = match indent_size.kind { - IndentKind::Space => { - cx.global::().tab_size(language_name.as_deref()) - } - IndentKind::Tab => NonZeroU32::new(1).unwrap(), - }; - if old_head.column <= indent_size.len && old_head.column > 0 { - let indent_len = indent_len.get(); - new_head = cmp::min( - new_head, - Point::new( - old_head.row, - ((old_head.column - 1) / indent_len) * indent_len, - ), - ); + this.select_autoclose_pair(cx); + let mut selections = this.selections.all::(cx); + if !this.selections.line_mode { + let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + for selection in &mut selections { + if selection.is_empty() { + let old_head = selection.head(); + let mut new_head = + movement::left(&display_map, old_head.to_display_point(&display_map)) + .to_point(&display_map); + if let Some((buffer, line_buffer_range)) = display_map + .buffer_snapshot + .buffer_line_for_row(old_head.row) + { + let indent_size = + buffer.indent_size_for_line(line_buffer_range.start.row); + let language_name = buffer.language().map(|language| language.name()); + let indent_len = match indent_size.kind { + IndentKind::Space => { + cx.global::().tab_size(language_name.as_deref()) } + IndentKind::Tab => NonZeroU32::new(1).unwrap(), + }; + if old_head.column <= indent_size.len && old_head.column > 0 { + let indent_len = indent_len.get(); + new_head = cmp::min( + new_head, + Point::new( + old_head.row, + ((old_head.column - 1) / indent_len) * indent_len, + ), + ); } - - selection.set_head(new_head, SelectionGoal::None); } + + selection.set_head(new_head, SelectionGoal::None); } } - - this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections)); } + + this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections)); this.insert("", cx); }); } @@ -3957,17 +3953,16 @@ impl Editor { cx: &mut ViewContext, ) { self.transact(cx, |this, cx| { - if !this.select_autoclose_pair(cx) { - this.change_selections(Some(Autoscroll::Fit), cx, |s| { - let line_mode = s.line_mode; - s.move_with(|map, selection| { - if selection.is_empty() && !line_mode { - let 
cursor = movement::previous_word_start(map, selection.head()); - selection.set_head(cursor, SelectionGoal::None); - } - }); + this.select_autoclose_pair(cx); + this.change_selections(Some(Autoscroll::Fit), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::previous_word_start(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } }); - } + }); this.insert("", cx); }); } @@ -3978,17 +3973,16 @@ impl Editor { cx: &mut ViewContext, ) { self.transact(cx, |this, cx| { - if !this.select_autoclose_pair(cx) { - this.change_selections(Some(Autoscroll::Fit), cx, |s| { - let line_mode = s.line_mode; - s.move_with(|map, selection| { - if selection.is_empty() && !line_mode { - let cursor = movement::previous_subword_start(map, selection.head()); - selection.set_head(cursor, SelectionGoal::None); - } - }); + this.select_autoclose_pair(cx); + this.change_selections(Some(Autoscroll::Fit), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::previous_subword_start(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } }); - } + }); this.insert("", cx); }); } @@ -6495,12 +6489,6 @@ impl DerefMut for InvalidationStack { } } -impl InvalidationRegion for BracketPairState { - fn ranges(&self) -> &[Range] { - &self.ranges - } -} - impl InvalidationRegion for SnippetState { fn ranges(&self) -> &[Range] { &self.ranges[self.active_index] diff --git a/crates/zed/src/languages/html/config.toml b/crates/zed/src/languages/html/config.toml index 0680717b2c..80b33b1243 100644 --- a/crates/zed/src/languages/html/config.toml +++ b/crates/zed/src/languages/html/config.toml @@ -1,6 +1,6 @@ name = "HTML" path_suffixes = ["html"] -autoclose_before = ">" +autoclose_before = ">})" brackets = [ { start = "<", end = ">", close = true, newline = true }, { start = "{", end = "}", close = true, newline = true }, From 2da32af340980dc3eddefc924666774fba087710 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Sep 2022 12:36:55 -0700 Subject: [PATCH 029/140] Update EditorTestContext usage to reflect new synchronous constructor --- crates/editor/src/editor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 055b73d7dc..699b442a5d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9782,7 +9782,7 @@ mod tests { #[gpui::test] async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx).await; + let mut cx = EditorTestContext::new(cx); let html_language = Arc::new( Language::new( From 4f44375abd3d8a10ee2820db373d4992ab3df42b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Sep 2022 13:38:54 -0700 Subject: [PATCH 030/140] Make Buffer::language_at fall back to Buffer::language For languages with no grammar (plain text), there will be no layers. 
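Put differently, the innermost syntax layer containing the offset decides the language, and a buffer with no layers (plain text, or any language without a grammar) now reports its base language instead of None. Below is a minimal self-contained sketch of that lookup order; the Buffer, Layer and Language types are illustrative stand-ins rather than Zed's actual buffer or syntax-map API, and the only behavior this commit actually adds is the trailing fallback.

    #[derive(Clone, Debug, PartialEq)]
    struct Language(&'static str);

    struct Layer {
        range: std::ops::Range<usize>,
        language: Language,
    }

    struct Buffer {
        base_language: Option<Language>,
        // Layers are ordered outermost-to-innermost, mirroring layers_for_range.
        layers: Vec<Layer>,
    }

    impl Buffer {
        fn language_at(&self, offset: usize) -> Option<Language> {
            self.layers
                .iter()
                .filter(|layer| layer.range.contains(&offset))
                .last()                                     // innermost matching layer wins
                .map(|layer| layer.language.clone())
                .or_else(|| self.base_language.clone())     // fallback for buffers with no layers
        }
    }

    fn main() {
        let html_with_script = Buffer {
            base_language: Some(Language("HTML")),
            layers: vec![
                Layer { range: 0..100, language: Language("HTML") },
                Layer { range: 20..60, language: Language("JavaScript") },
            ],
        };
        assert_eq!(html_with_script.language_at(5), Some(Language("HTML")));
        assert_eq!(html_with_script.language_at(30), Some(Language("JavaScript")));

        let plain_text = Buffer { base_language: Some(Language("Plain Text")), layers: vec![] };
        assert_eq!(plain_text.language_at(0), Some(Language("Plain Text")));
    }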
--- crates/language/src/buffer.rs | 2 ++ crates/language/src/language.rs | 9 +++++++++ crates/language/src/syntax_map.rs | 1 + 3 files changed, 12 insertions(+) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 372f77cf20..4ff1b002b0 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -648,6 +648,7 @@ impl Buffer { .layers_for_range(offset..offset, &self.text) .last() .map(|info| info.language.clone()) + .or_else(|| self.language.clone()) } pub fn parse_count(&self) -> usize { @@ -1841,6 +1842,7 @@ impl BufferSnapshot { .layers_for_range(offset..offset, &self.text) .last() .map(|info| info.language) + .or(self.language.as_ref()) } pub fn surrounding_word(&self, start: T) -> (Range, Option) { diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 341f70bff9..b8d4ca309f 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -26,6 +26,7 @@ use serde_json::Value; use std::{ any::Any, cell::RefCell, + fmt::Debug, mem, ops::Range, path::{Path, PathBuf}, @@ -866,6 +867,14 @@ impl Language { } } +impl Debug for Language { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Language") + .field("name", &self.config.name) + .finish() + } +} + impl Grammar { pub fn id(&self) -> usize { self.id diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index a7d9101d7b..8983406690 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -92,6 +92,7 @@ struct SyntaxLayer { language: Arc, } +#[derive(Debug)] pub struct SyntaxLayerInfo<'a> { pub depth: usize, pub node: Node<'a>, From 64253e444124a0b2b4810f4e0a091874c582cc7a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Sep 2022 14:16:35 -0700 Subject: [PATCH 031/140] 0.56.0 --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 363ee93c14..b32b6a47a2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7151,7 +7151,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.55.0" +version = "0.56.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index dc2b0abd03..c96163d99e 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.55.0" +version = "0.56.0" [lib] name = "zed" From af7c2b8b4744738ffd70c427406b6657638a700b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Sep 2022 15:21:49 -0700 Subject: [PATCH 032/140] Set minimum user id length in amplitude calls --- crates/client/src/telemetry.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 77aa308f30..8b7be5ba80 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -52,6 +52,12 @@ lazy_static! 
{ struct AmplitudeEventBatch { api_key: &'static str, events: Vec, + options: AmplitudeEventBatchOptions, +} + +#[derive(Serialize)] +struct AmplitudeEventBatchOptions { + min_id_length: usize, } #[derive(Serialize)] @@ -239,7 +245,11 @@ impl Telemetry { } } - let batch = AmplitudeEventBatch { api_key, events }; + let batch = AmplitudeEventBatch { + api_key, + events, + options: AmplitudeEventBatchOptions { min_id_length: 1 }, + }; json_bytes.clear(); serde_json::to_writer(&mut json_bytes, &batch)?; let request = From 0d3486ca82dc6812d05b3c6876240ee62cd1ab9d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Sep 2022 15:30:55 -0700 Subject: [PATCH 033/140] Remove TestTelemetry command --- crates/client/src/client.rs | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 0670add1af..b75be62308 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -15,11 +15,9 @@ use async_tungstenite::tungstenite::{ use db::Db; use futures::{future::LocalBoxFuture, FutureExt, SinkExt, StreamExt, TryStreamExt}; use gpui::{ - actions, - serde_json::{json, Value}, - AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext, - AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext, - ViewHandle, + actions, serde_json::Value, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, + AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, + MutableAppContext, Task, View, ViewContext, ViewHandle, }; use http::HttpClient; use lazy_static::lazy_static; @@ -56,7 +54,7 @@ lazy_static! { pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894"; -actions!(client, [Authenticate, TestTelemetry]); +actions!(client, [Authenticate]); pub fn init(client: Arc, cx: &mut MutableAppContext) { cx.add_global_action({ @@ -69,17 +67,6 @@ pub fn init(client: Arc, cx: &mut MutableAppContext) { .detach(); } }); - cx.add_global_action({ - let client = client.clone(); - move |_: &TestTelemetry, _| { - client.report_event( - "test_telemetry", - json!({ - "test_property": "test_value" - }), - ) - } - }); } pub struct Client { From 2a14af4cdea11693943377964e613ed2b61d664c Mon Sep 17 00:00:00 2001 From: ForLoveOfCats Date: Tue, 30 Aug 2022 11:08:22 -0400 Subject: [PATCH 034/140] Load a file's head text on file load just to get started --- Cargo.lock | 44 ++++++++++++++++++++++++++++++++++ crates/language/src/buffer.rs | 10 ++++++-- crates/project/Cargo.toml | 1 + crates/project/src/fs.rs | 39 ++++++++++++++++++++++++++++++ crates/project/src/worktree.rs | 14 ++++++++--- crates/rpc/proto/zed.proto | 3 ++- 6 files changed, 105 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b32b6a47a2..31f5f30f38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2224,6 +2224,21 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "git2" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2994bee4a3a6a51eb90c218523be382fd7ea09b16380b9312e9dbe955ff7c7d1" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + [[package]] name = "glob" version = "0.3.0" @@ -2894,6 +2909,20 @@ version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" +[[package]] +name = "libgit2-sys" +version = "0.14.0+1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + [[package]] name = "libloading" version = "0.7.3" @@ -2934,6 +2963,20 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libssh2-sys" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b094a36eb4b8b8c8a7b4b8ae43b2944502be3e59cd87687595cf6b0a71b3f4ca" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + [[package]] name = "libz-sys" version = "1.1.8" @@ -3970,6 +4013,7 @@ dependencies = [ "fsevent", "futures", "fuzzy", + "git2", "gpui", "ignore", "language", diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 08843aacfe..ca86f9c172 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -47,6 +47,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, + head_text: Option, file: Option>, saved_version: clock::Global, saved_version_fingerprint: String, @@ -328,17 +329,20 @@ impl Buffer { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), None, + None, ) } pub fn from_file>( replica_id: ReplicaId, base_text: T, + head_text: Option, file: Arc, cx: &mut ModelContext, ) -> Self { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), + head_text.map(|h| h.into()), Some(file), ) } @@ -349,7 +353,7 @@ impl Buffer { file: Option>, ) -> Result { let buffer = TextBuffer::new(replica_id, message.id, message.base_text); - let mut this = Self::build(buffer, file); + let mut this = Self::build(buffer, message.head_text, file); this.text.set_line_ending(proto::deserialize_line_ending( proto::LineEnding::from_i32(message.line_ending) .ok_or_else(|| anyhow!("missing line_ending"))?, @@ -362,6 +366,7 @@ impl Buffer { id: self.remote_id(), file: self.file.as_ref().map(|f| f.to_proto()), base_text: self.base_text().to_string(), + head_text: self.head_text.clone(), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, } } @@ -404,7 +409,7 @@ impl Buffer { self } - fn build(buffer: TextBuffer, file: Option>) -> Self { + fn build(buffer: TextBuffer, head_text: Option, file: Option>) -> Self { let saved_mtime = if let Some(file) = file.as_ref() { file.mtime() } else { @@ -418,6 +423,7 @@ impl Buffer { transaction_depth: 0, was_dirty_before_starting_transaction: None, text: buffer, + head_text, file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index a4ea6f2286..4e7ff2d471 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -52,6 +52,7 @@ smol = "1.2.5" thiserror = "1.0.29" toml = "0.5" rocksdb = "0.18" +git2 = "0.15" [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index f2d62fae87..68d07c891c 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,9 +1,11 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; +use git2::{Repository, RepositoryOpenFlags}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ + ffi::OsStr, io, os::unix::fs::MetadataExt, path::{Component, Path, PathBuf}, @@ -29,6 +31,7 @@ pub trait Fs: Send + 
Sync { async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>; async fn open_sync(&self, path: &Path) -> Result>; async fn load(&self, path: &Path) -> Result; + async fn load_head_text(&self, path: &Path) -> Option; async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()>; async fn canonicalize(&self, path: &Path) -> Result; async fn is_file(&self, path: &Path) -> bool; @@ -161,6 +164,38 @@ impl Fs for RealFs { Ok(text) } + async fn load_head_text(&self, path: &Path) -> Option { + fn logic(path: &Path) -> Result> { + let repo = Repository::open_ext(path, RepositoryOpenFlags::empty(), &[OsStr::new("")])?; + assert!(repo.path().ends_with(".git")); + let repo_root_path = match repo.path().parent() { + Some(root) => root, + None => return Ok(None), + }; + + let relative_path = path.strip_prefix(repo_root_path)?; + let object = repo + .head()? + .peel_to_tree()? + .get_path(relative_path)? + .to_object(&repo)?; + + let content = match object.as_blob() { + Some(blob) => blob.content().to_owned(), + None => return Ok(None), + }; + + let head_text = String::from_utf8(content.to_owned())?; + Ok(Some(head_text)) + } + + match logic(path) { + Ok(value) => return value, + Err(err) => log::error!("Error loading head text: {:?}", err), + } + None + } + async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> { let buffer_size = text.summary().len.min(10 * 1024); let file = smol::fs::File::create(path).await?; @@ -748,6 +783,10 @@ impl Fs for FakeFs { entry.file_content(&path).cloned() } + async fn load_head_text(&self, _: &Path) -> Option { + None + } + async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> { self.simulate_random_delay().await; let path = normalize_path(path); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 74c50e0c5f..42d18eb3bb 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -446,10 +446,10 @@ impl LocalWorktree { ) -> Task>> { let path = Arc::from(path); cx.spawn(move |this, mut cx| async move { - let (file, contents) = this + let (file, contents, head_text) = this .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .await?; - Ok(cx.add_model(|cx| Buffer::from_file(0, contents, Arc::new(file), cx))) + Ok(cx.add_model(|cx| Buffer::from_file(0, contents, head_text, Arc::new(file), cx))) }) } @@ -558,13 +558,19 @@ impl LocalWorktree { } } - fn load(&self, path: &Path, cx: &mut ModelContext) -> Task> { + fn load( + &self, + path: &Path, + cx: &mut ModelContext, + ) -> Task)>> { let handle = cx.handle(); let path = Arc::from(path); let abs_path = self.absolutize(&path); let fs = self.fs.clone(); cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; + let head_text = fs.load_head_text(&abs_path).await; + // Eagerly populate the snapshot with an updated entry for the loaded file let entry = this .update(&mut cx, |this, cx| { @@ -573,6 +579,7 @@ impl LocalWorktree { .refresh_entry(path, abs_path, None, cx) }) .await?; + Ok(( File { entry_id: Some(entry.id), @@ -582,6 +589,7 @@ impl LocalWorktree { is_local: true, }, text, + head_text, )) }) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 7840829b44..818f2cb7e1 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -821,7 +821,8 @@ message BufferState { uint64 id = 1; optional File file = 2; string base_text = 3; - LineEnding line_ending = 4; + optional string head_text = 4; + 
LineEnding line_ending = 5; } message BufferChunk { From 6fa2e62fa41175668fb62479063117682c9ecde9 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 30 Aug 2022 16:29:20 -0400 Subject: [PATCH 035/140] Start asking Editors to update git after a debounced delay --- crates/editor/src/items.rs | 9 ++ crates/workspace/src/workspace.rs | 132 ++++++++++++++++++++++++------ 2 files changed, 115 insertions(+), 26 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f63ffc3d7c..22a069c5c0 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -478,6 +478,15 @@ impl Item for Editor { }) } + fn update_git( + &mut self, + _project: ModelHandle, + _cx: &mut ViewContext, + ) -> Task> { + println!("Editor::update_git"); + Task::ready(Ok(())) + } + fn to_item_events(event: &Self::Event) -> Vec { let mut result = Vec::new(); match event { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b9cface656..3446dc0f0e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -52,7 +52,6 @@ use std::{ cell::RefCell, fmt, future::Future, - mem, ops::Range, path::{Path, PathBuf}, rc::Rc, @@ -318,7 +317,23 @@ pub trait Item: View { project: ModelHandle, cx: &mut ViewContext, ) -> Task>; + fn update_git( + &mut self, + _project: ModelHandle, + _cx: &mut ViewContext, + ) -> Task> { + Task::ready(Ok(())) + } fn to_item_events(event: &Self::Event) -> Vec; + fn should_close_item_on_event(_: &Self::Event) -> bool { + false + } + fn should_update_tab_on_event(_: &Self::Event) -> bool { + false + } + fn is_edit_event(_: &Self::Event) -> bool { + false + } fn act_as_type( &self, type_id: TypeId, @@ -435,6 +450,57 @@ impl FollowableItemHandle for ViewHandle { } } +struct DelayedDebouncedEditAction { + task: Option>, + cancel_channel: Option>, +} + +impl DelayedDebouncedEditAction { + fn new() -> DelayedDebouncedEditAction { + DelayedDebouncedEditAction { + task: None, + cancel_channel: None, + } + } + + fn fire_new( + &mut self, + delay: Duration, + workspace: &Workspace, + cx: &mut ViewContext, + f: F, + ) where + F: FnOnce(ModelHandle, AsyncAppContext) -> Fut + 'static, + Fut: 'static + Future, + { + if let Some(channel) = self.cancel_channel.take() { + _ = channel.send(()); + } + + let project = workspace.project().downgrade(); + + let (sender, mut receiver) = oneshot::channel::<()>(); + self.cancel_channel = Some(sender); + + let previous_task = self.task.take(); + self.task = Some(cx.spawn_weak(|_, cx| async move { + let mut timer = cx.background().timer(delay).fuse(); + if let Some(previous_task) = previous_task { + previous_task.await; + } + + futures::select_biased! 
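            // Debounce: select_biased! polls its arms in the order listed, so
            // the cancel receiver is checked before the timer. A later call to
            // fire_new() sends on cancel_channel, making this still-pending
            // task return without running `f`; only the action queued by the
            // most recent edit survives the delay. Callers hand the work in as
            // an async closure, e.g. the ItemEvent::Edit handler below uses
            // this for both Pane::autosave_item and item.update_git.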
{ + _ = receiver => return, + _ = timer => {} + } + + if let Some(project) = project.upgrade(&cx) { + (f)(project, cx).await; + } + })); + } +} + pub trait ItemHandle: 'static + fmt::Debug { fn subscribe_to_item_events( &self, @@ -473,6 +539,11 @@ pub trait ItemHandle: 'static + fmt::Debug { ) -> Task>; fn reload(&self, project: ModelHandle, cx: &mut MutableAppContext) -> Task>; + fn update_git( + &self, + project: ModelHandle, + cx: &mut MutableAppContext, + ) -> Task>; fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option; fn to_followable_item_handle(&self, cx: &AppContext) -> Option>; fn on_release( @@ -578,8 +649,8 @@ impl ItemHandle for ViewHandle { .insert(self.id(), pane.downgrade()) .is_none() { - let mut pending_autosave = None; - let mut cancel_pending_autosave = oneshot::channel::<()>().0; + let mut pending_autosave = DelayedDebouncedEditAction::new(); + let mut pending_git_update = DelayedDebouncedEditAction::new(); let pending_update = Rc::new(RefCell::new(None)); let pending_update_scheduled = Rc::new(AtomicBool::new(false)); @@ -637,45 +708,46 @@ impl ItemHandle for ViewHandle { .detach_and_log_err(cx); return; } + ItemEvent::UpdateTab => { pane.update(cx, |_, cx| { cx.emit(pane::Event::ChangeItemTitle); cx.notify(); }); } + ItemEvent::Edit => { if let Autosave::AfterDelay { milliseconds } = cx.global::().autosave { - let prev_autosave = pending_autosave - .take() - .unwrap_or_else(|| Task::ready(Some(()))); - let (cancel_tx, mut cancel_rx) = oneshot::channel::<()>(); - let prev_cancel_tx = - mem::replace(&mut cancel_pending_autosave, cancel_tx); - let project = workspace.project.downgrade(); - let _ = prev_cancel_tx.send(()); + let delay = Duration::from_millis(milliseconds); let item = item.clone(); - pending_autosave = - Some(cx.spawn_weak(|_, mut cx| async move { - let mut timer = cx - .background() - .timer(Duration::from_millis(milliseconds)) - .fuse(); - prev_autosave.await; - futures::select_biased! 
{ - _ = cancel_rx => return None, - _ = timer => {} - } - - let project = project.upgrade(&cx)?; + pending_autosave.fire_new( + delay, + workspace, + cx, + |project, mut cx| async move { cx.update(|cx| Pane::autosave_item(&item, project, cx)) .await .log_err(); - None - })); + }, + ); } + + const GIT_DELAY: Duration = Duration::from_millis(800); + let item = item.clone(); + pending_git_update.fire_new( + GIT_DELAY, + workspace, + cx, + |project, mut cx| async move { + cx.update(|cx| item.update_git(project, cx)) + .await + .log_err(); + }, + ); } + _ => {} } } @@ -755,6 +827,14 @@ impl ItemHandle for ViewHandle { self.update(cx, |item, cx| item.reload(project, cx)) } + fn update_git( + &self, + project: ModelHandle, + cx: &mut MutableAppContext, + ) -> Task> { + self.update(cx, |item, cx| item.update_git(project, cx)) + } + fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option { self.read(cx).act_as_type(type_id, self, cx) } From 55ca02351c5e8c662f7f9e09a8ce93a4f69233c5 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 1 Sep 2022 17:22:12 -0400 Subject: [PATCH 036/140] Start painting some sort of hunk info, it's wrong but it's close Co-Authored-By: Max Brunsfeld --- Cargo.lock | 2 +- crates/editor/src/element.rs | 35 ++++++ crates/editor/src/multi_buffer.rs | 16 ++- crates/language/Cargo.toml | 1 + crates/language/src/buffer.rs | 35 ++++++ crates/language/src/git.rs | 197 ++++++++++++++++++++++++++++++ crates/language/src/language.rs | 1 + crates/project/Cargo.toml | 1 - crates/project/src/fs.rs | 2 +- crates/project/src/project.rs | 4 +- 10 files changed, 286 insertions(+), 8 deletions(-) create mode 100644 crates/language/src/git.rs diff --git a/Cargo.lock b/Cargo.lock index 31f5f30f38..2872d83a94 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2855,6 +2855,7 @@ dependencies = [ "env_logger", "futures", "fuzzy", + "git2", "gpui", "lazy_static", "log", @@ -4013,7 +4014,6 @@ dependencies = [ "fsevent", "futures", "fuzzy", - "git2", "gpui", "ignore", "language", diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1e1ab83063..357f15432b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -34,6 +34,7 @@ use gpui::{ WeakViewHandle, }; use json::json; +use language::git::{DiffHunk, DiffHunkStatus}; use language::{Bias, DiagnosticSeverity, OffsetUtf16, Selection}; use project::ProjectPath; use settings::Settings; @@ -543,6 +544,33 @@ impl EditorElement { } } + println!("painting from hunks: {:#?}\n", &layout.diff_hunks); + for hunk in &layout.diff_hunks { + let color = match hunk.status() { + DiffHunkStatus::Added => Color::green(), + DiffHunkStatus::Modified => Color::blue(), + _ => continue, + }; + + let start_row = hunk.buffer_range.start; + let end_row = hunk.buffer_range.end; + + let start_y = start_row as f32 * layout.line_height - (scroll_top % layout.line_height); + let end_y = end_row as f32 * layout.line_height - (scroll_top % layout.line_height) + + layout.line_height; + + let highlight_origin = bounds.origin() + vec2f(0., start_y); + let highlight_size = vec2f(6., end_y - start_y); + let highlight_bounds = RectF::new(highlight_origin, highlight_size); + + cx.scene.push_quad(Quad { + bounds: highlight_bounds, + background: Some(color), + border: Border::new(0., Color::transparent_black()), + corner_radius: 0., + }); + } + if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() { let mut x = bounds.width() - layout.gutter_padding; let mut y = *row as f32 * layout.position_map.line_height - scroll_top; @@ 
-1425,6 +1453,11 @@ impl Element for EditorElement { let line_number_layouts = self.layout_line_numbers(start_row..end_row, &active_rows, &snapshot, cx); + let diff_hunks = snapshot + .buffer_snapshot + .diff_hunks_in_range(start_row..end_row) + .collect(); + let mut max_visible_line_width = 0.0; let line_layouts = self.layout_lines(start_row..end_row, &snapshot, cx); for line in &line_layouts { @@ -1573,6 +1606,7 @@ impl Element for EditorElement { highlighted_rows, highlighted_ranges, line_number_layouts, + diff_hunks, blocks, selections, context_menu, @@ -1710,6 +1744,7 @@ pub struct LayoutState { highlighted_ranges: Vec<(Range, Color)>, selections: Vec<(ReplicaId, Vec)>, context_menu: Option<(DisplayPoint, ElementBox)>, + diff_hunks: Vec>, code_actions_indicator: Option<(u32, ElementBox)>, hover_popovers: Option<(DisplayPoint, Vec)>, } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 4ee9526a67..91de32bac9 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -7,9 +7,10 @@ use collections::{BTreeMap, Bound, HashMap, HashSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; pub use language::Completion; use language::{ - char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, - DiagnosticEntry, Event, File, IndentSize, Language, OffsetRangeExt, Outline, OutlineItem, - Selection, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, TransactionId, + char_kind, git::DiffHunk, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, + Chunk, DiagnosticEntry, Event, File, IndentSize, Language, OffsetRangeExt, Outline, + OutlineItem, Selection, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, + TransactionId, }; use smallvec::SmallVec; use std::{ @@ -2529,6 +2530,15 @@ impl MultiBufferSnapshot { }) } + pub fn diff_hunks_in_range<'a>( + &'a self, + row_range: Range, + ) -> impl 'a + Iterator> { + self.as_singleton() + .into_iter() + .flat_map(move |(_, _, buffer)| buffer.diff_hunks_in_range(row_range.clone())) + } + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { let range = range.start.to_offset(self)..range.end.to_offset(self); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 6e9f368e77..6d347f3595 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -51,6 +51,7 @@ smol = "1.2" tree-sitter = "0.20" tree-sitter-rust = { version = "*", optional = true } tree-sitter-typescript = { version = "*", optional = true } +git2 = "0.15" [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index ca86f9c172..ad3d8978ad 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,3 +1,4 @@ +use crate::git::{BufferDiff, DiffHunk}; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -48,6 +49,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, head_text: Option, + diff: BufferDiff, file: Option>, saved_version: clock::Global, saved_version_fingerprint: String, @@ -74,6 +76,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, + git_hunks: Arc<[DiffHunk]>, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -416,6 +419,11 @@ impl Buffer { UNIX_EPOCH }; + let mut diff = BufferDiff::new(); + if let Some(head_text) = &head_text { + 
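            // Compute the initial diff against the git HEAD text up front, so
            // gutter hunks are available as soon as the buffer is built; the
            // update call below falls back to an empty hunk list if libgit2
            // fails to produce a patch.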
diff.update(head_text, &buffer); + } + Self { saved_mtime, saved_version: buffer.version(), @@ -424,6 +432,7 @@ impl Buffer { was_dirty_before_starting_transaction: None, text: buffer, head_text, + diff, file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, @@ -453,6 +462,7 @@ impl Buffer { BufferSnapshot { text, syntax, + git_hunks: self.diff.hunks(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -2145,6 +2155,30 @@ impl BufferSnapshot { }) } + pub fn diff_hunks_in_range<'a>( + &'a self, + query_row_range: Range, + ) -> impl 'a + Iterator> { + self.git_hunks.iter().filter_map(move |hunk| { + let range = hunk.buffer_range.to_point(&self.text); + + if range.start.row < query_row_range.end && query_row_range.start < range.end.row { + let end_row = if range.end.column > 0 { + range.end.row + 1 + } else { + range.end.row + }; + + Some(DiffHunk { + buffer_range: range.start.row..end_row, + head_range: hunk.head_range.clone(), + }) + } else { + None + } + }) + } + pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, @@ -2218,6 +2252,7 @@ impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), + git_hunks: self.git_hunks.clone(), syntax: self.syntax.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs new file mode 100644 index 0000000000..5445396918 --- /dev/null +++ b/crates/language/src/git.rs @@ -0,0 +1,197 @@ +use std::ops::Range; +use std::sync::Arc; + +use sum_tree::Bias; +use text::{Anchor, Point}; + +pub use git2 as libgit; +use libgit::Patch as GitPatch; + +#[derive(Debug, Clone, Copy)] +pub enum DiffHunkStatus { + Added, + Modified, + Removed, +} + +#[derive(Debug)] +pub struct DiffHunk { + pub buffer_range: Range, + pub head_range: Range, +} + +impl DiffHunk { + pub fn status(&self) -> DiffHunkStatus { + if self.head_range.is_empty() { + DiffHunkStatus::Added + } else if self.buffer_range.is_empty() { + DiffHunkStatus::Removed + } else { + DiffHunkStatus::Modified + } + } +} + +pub struct BufferDiff { + hunks: Arc<[DiffHunk]>, +} + +impl BufferDiff { + pub fn new() -> BufferDiff { + BufferDiff { + hunks: Arc::new([]), + } + } + + pub fn hunks(&self) -> Arc<[DiffHunk]> { + self.hunks.clone() + } + + pub fn update(&mut self, head: &str, buffer: &text::BufferSnapshot) { + let current = buffer.as_rope().to_string().into_bytes(); + let patch = match GitPatch::from_buffers(head.as_bytes(), None, ¤t, None, None) { + Ok(patch) => patch, + Err(_) => { + //Reset hunks in case of failure to avoid showing a stale (potentially erroneous) diff + self.hunks = Arc::new([]); + return; + } + }; + + let mut hunks = Vec::new(); + for index in 0..patch.num_hunks() { + let (hunk, _) = match patch.hunk(index) { + Ok(it) => it, + Err(_) => break, + }; + + let new_start = hunk.new_start(); + let new_end = new_start + hunk.new_lines(); + let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); + let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); + let buffer_range = start_anchor..end_anchor; + + let old_start = hunk.old_start() as usize; + let old_end = old_start + hunk.old_lines() as usize; + let head_range = old_start..old_end; + + hunks.push(DiffHunk { + buffer_range, + head_range, + }); + } + + self.hunks = hunks.into(); + } +} + +#[derive(Debug, Clone, Copy)] +pub enum GitDiffEdit { + Added(u32), + Modified(u32), + 
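    // (Each variant appears to carry the 1-based line number that libgit2
    // reports for the edit; at this point its only consumer is the
    // DiffTracker sketch commented out further down in this file.)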
Removed(u32), +} + +impl GitDiffEdit { + pub fn line(self) -> u32 { + use GitDiffEdit::*; + + match self { + Added(line) | Modified(line) | Removed(line) => line, + } + } +} + +// struct DiffTracker { +// track_line_num: u32, +// edits: Vec, +// } + +// impl DiffTracker { +// fn new() -> DiffTracker { +// DiffTracker { +// track_line_num: 0, +// edits: Vec::new(), +// } +// } + +// fn attempt_finalize_file(&mut self, base_path: &Path) -> Result<()> { +// let relative = if let Some(relative) = self.last_file_path.clone() { +// relative +// } else { +// return Ok(()); +// }; + +// let mut path = base_path.to_path_buf(); +// path.push(relative); +// path = canonicalize(path).map_err(Error::Io)?; + +// self.diffs.push(GitFileDiff { +// path, +// edits: take(&mut self.edits), +// }); + +// Ok(()) +// } + +// fn handle_diff_line( +// &mut self, +// delta: DiffDelta, +// line: DiffLine, +// base_path: &Path, +// ) -> Result<()> { +// let path = match (delta.old_file().path(), delta.new_file().path()) { +// (Some(old), _) => old, +// (_, Some(new)) => new, +// (_, _) => return Err(Error::DeltaMissingPath), +// }; + +// if self.last_file_path.as_deref() != Some(path) { +// self.attempt_finalize_file(base_path)?; +// self.last_file_path = Some(path.to_path_buf()); +// self.track_line_num = 0; +// } + +// match line.origin_value() { +// DiffLineType::Context => { +// self.track_line_num = line.new_lineno().ok_or(Error::ContextMissingLineNum)?; +// } + +// DiffLineType::Deletion => { +// self.track_line_num += 1; +// self.edits.push(GitDiffEdit::Removed(self.track_line_num)); +// } + +// DiffLineType::Addition => { +// let addition_line_num = line.new_lineno().ok_or(Error::AdditionMissingLineNum)?; +// self.track_line_num = addition_line_num; + +// let mut replaced = false; +// for rewind_index in (0..self.edits.len()).rev() { +// let edit = &mut self.edits[rewind_index]; + +// if let GitDiffEdit::Removed(removed_line_num) = *edit { +// match removed_line_num.cmp(&addition_line_num) { +// Ordering::Equal => { +// *edit = GitDiffEdit::Modified(addition_line_num); +// replaced = true; +// break; +// } + +// Ordering::Greater => continue, +// Ordering::Less => break, +// } +// } +// } + +// if !replaced { +// self.edits.push(GitDiffEdit::Added(addition_line_num)); +// } +// } + +// _ => {} +// } + +// Ok(()) +// } +// } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 780f6e75b5..8e2fe601e7 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,5 +1,6 @@ mod buffer; mod diagnostic_set; +pub mod git; mod highlight_map; mod outline; pub mod proto; diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 4e7ff2d471..a4ea6f2286 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -52,7 +52,6 @@ smol = "1.2.5" thiserror = "1.0.29" toml = "0.5" rocksdb = "0.18" -git2 = "0.15" [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 68d07c891c..a983df0f4b 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git2::{Repository, RepositoryOpenFlags}; +use language::git::libgit::{Repository, RepositoryOpenFlags}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 
6841c561d0..8fa1fe9622 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4533,8 +4533,8 @@ impl Project { fn add_worktree(&mut self, worktree: &ModelHandle, cx: &mut ModelContext) { cx.observe(worktree, |_, _, cx| cx.notify()).detach(); if worktree.read(cx).is_local() { - cx.subscribe(worktree, |this, worktree, _, cx| { - this.update_local_worktree_buffers(worktree, cx); + cx.subscribe(worktree, |this, worktree, event, cx| match event { + worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), }) .detach(); } From 641daf0a6eddd3b852886e73ee7a450f3a40359c Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 2 Sep 2022 10:39:32 -0400 Subject: [PATCH 037/140] Correct git gutter indicator scroll position & add rounded corner --- crates/editor/src/element.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 357f15432b..c82860cb72 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -544,7 +544,7 @@ impl EditorElement { } } - println!("painting from hunks: {:#?}\n", &layout.diff_hunks); + println!("painting from hunks: {:#?}\n", layout.diff_hunks); for hunk in &layout.diff_hunks { let color = match hunk.status() { DiffHunkStatus::Added => Color::green(), @@ -555,19 +555,19 @@ impl EditorElement { let start_row = hunk.buffer_range.start; let end_row = hunk.buffer_range.end; - let start_y = start_row as f32 * layout.line_height - (scroll_top % layout.line_height); - let end_y = end_row as f32 * layout.line_height - (scroll_top % layout.line_height) - + layout.line_height; + let start_y = start_row as f32 * layout.line_height - scroll_top; + let end_y = end_row as f32 * layout.line_height + layout.line_height - scroll_top; - let highlight_origin = bounds.origin() + vec2f(0., start_y); - let highlight_size = vec2f(6., end_y - start_y); + let width = 0.22 * layout.line_height; + let highlight_origin = bounds.origin() + vec2f(-width, start_y); + let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); cx.scene.push_quad(Quad { bounds: highlight_bounds, background: Some(color), border: Border::new(0., Color::transparent_black()), - corner_radius: 0., + corner_radius: 0.2 * layout.line_height, }); } From fdda2abb782a36075c825f91f774bf97386726f7 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 2 Sep 2022 14:35:35 -0400 Subject: [PATCH 038/140] Correct start/end of git diff hunks --- crates/editor/src/element.rs | 4 +- crates/language/src/git.rs | 108 +++-------------------------------- 2 files changed, 11 insertions(+), 101 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index c82860cb72..b13a2a6ada 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -549,14 +549,14 @@ impl EditorElement { let color = match hunk.status() { DiffHunkStatus::Added => Color::green(), DiffHunkStatus::Modified => Color::blue(), - _ => continue, + DiffHunkStatus::Removed => continue, }; let start_row = hunk.buffer_range.start; let end_row = hunk.buffer_range.end; let start_y = start_row as f32 * layout.line_height - scroll_top; - let end_y = end_row as f32 * layout.line_height + layout.line_height - scroll_top; + let end_y = end_row as f32 * layout.line_height - scroll_top; let width = 0.22 * layout.line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); diff --git a/crates/language/src/git.rs 
b/crates/language/src/git.rs index 5445396918..73e511ca48 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -5,7 +5,7 @@ use sum_tree::Bias; use text::{Anchor, Point}; pub use git2 as libgit; -use libgit::Patch as GitPatch; +use libgit::{DiffOptions as GitOptions, Patch as GitPatch}; #[derive(Debug, Clone, Copy)] pub enum DiffHunkStatus { @@ -48,8 +48,12 @@ impl BufferDiff { } pub fn update(&mut self, head: &str, buffer: &text::BufferSnapshot) { + let head = head.as_bytes(); let current = buffer.as_rope().to_string().into_bytes(); - let patch = match GitPatch::from_buffers(head.as_bytes(), None, ¤t, None, None) { + + let mut options = GitOptions::default(); + options.context_lines(0); + let patch = match GitPatch::from_buffers(head, None, ¤t, None, Some(&mut options)) { Ok(patch) => patch, Err(_) => { //Reset hunks in case of failure to avoid showing a stale (potentially erroneous) diff @@ -62,16 +66,16 @@ impl BufferDiff { for index in 0..patch.num_hunks() { let (hunk, _) = match patch.hunk(index) { Ok(it) => it, - Err(_) => break, + Err(_) => continue, }; - let new_start = hunk.new_start(); + let new_start = hunk.new_start() - 1; let new_end = new_start + hunk.new_lines(); let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); let buffer_range = start_anchor..end_anchor; - let old_start = hunk.old_start() as usize; + let old_start = hunk.old_start() as usize - 1; let old_end = old_start + hunk.old_lines() as usize; let head_range = old_start..old_end; @@ -101,97 +105,3 @@ impl GitDiffEdit { } } } - -// struct DiffTracker { -// track_line_num: u32, -// edits: Vec, -// } - -// impl DiffTracker { -// fn new() -> DiffTracker { -// DiffTracker { -// track_line_num: 0, -// edits: Vec::new(), -// } -// } - -// fn attempt_finalize_file(&mut self, base_path: &Path) -> Result<()> { -// let relative = if let Some(relative) = self.last_file_path.clone() { -// relative -// } else { -// return Ok(()); -// }; - -// let mut path = base_path.to_path_buf(); -// path.push(relative); -// path = canonicalize(path).map_err(Error::Io)?; - -// self.diffs.push(GitFileDiff { -// path, -// edits: take(&mut self.edits), -// }); - -// Ok(()) -// } - -// fn handle_diff_line( -// &mut self, -// delta: DiffDelta, -// line: DiffLine, -// base_path: &Path, -// ) -> Result<()> { -// let path = match (delta.old_file().path(), delta.new_file().path()) { -// (Some(old), _) => old, -// (_, Some(new)) => new, -// (_, _) => return Err(Error::DeltaMissingPath), -// }; - -// if self.last_file_path.as_deref() != Some(path) { -// self.attempt_finalize_file(base_path)?; -// self.last_file_path = Some(path.to_path_buf()); -// self.track_line_num = 0; -// } - -// match line.origin_value() { -// DiffLineType::Context => { -// self.track_line_num = line.new_lineno().ok_or(Error::ContextMissingLineNum)?; -// } - -// DiffLineType::Deletion => { -// self.track_line_num += 1; -// self.edits.push(GitDiffEdit::Removed(self.track_line_num)); -// } - -// DiffLineType::Addition => { -// let addition_line_num = line.new_lineno().ok_or(Error::AdditionMissingLineNum)?; -// self.track_line_num = addition_line_num; - -// let mut replaced = false; -// for rewind_index in (0..self.edits.len()).rev() { -// let edit = &mut self.edits[rewind_index]; - -// if let GitDiffEdit::Removed(removed_line_num) = *edit { -// match removed_line_num.cmp(&addition_line_num) { -// Ordering::Equal => { -// *edit = GitDiffEdit::Modified(addition_line_num); 
-// replaced = true; -// break; -// } - -// Ordering::Greater => continue, -// Ordering::Less => break, -// } -// } -// } - -// if !replaced { -// self.edits.push(GitDiffEdit::Added(addition_line_num)); -// } -// } - -// _ => {} -// } - -// Ok(()) -// } -// } From 5157c71fa9f825c0d0fd48e5b7015f6baafa86ae Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 2 Sep 2022 15:22:15 -0400 Subject: [PATCH 039/140] Render deletion gutter markers --- crates/editor/src/element.rs | 42 +++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b13a2a6ada..4ee14407b8 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -525,8 +525,9 @@ impl EditorElement { layout: &mut LayoutState, cx: &mut PaintContext, ) { - let scroll_top = - layout.position_map.snapshot.scroll_position().y() * layout.position_map.line_height; + let line_height = layout.position_map.line_height; + let scroll_position = layout.position_map.snapshot.scroll_position(); + let scroll_top = scroll_position.y() * line_height; for (ix, line) in layout.line_number_layouts.iter().enumerate() { if let Some(line) = line { let line_origin = bounds.origin() @@ -544,21 +545,42 @@ impl EditorElement { } } - println!("painting from hunks: {:#?}\n", layout.diff_hunks); for hunk in &layout.diff_hunks { let color = match hunk.status() { DiffHunkStatus::Added => Color::green(), DiffHunkStatus::Modified => Color::blue(), - DiffHunkStatus::Removed => continue, + + //TODO: This rendering is entirely a horrible hack + DiffHunkStatus::Removed => { + let row_above = hunk.buffer_range.start; + + let offset = line_height / 2.; + let start_y = row_above as f32 * line_height + offset - scroll_top; + let end_y = start_y + line_height; + + let width = 0.4 * line_height; + let highlight_origin = bounds.origin() + vec2f(-width, start_y); + let highlight_size = vec2f(width * 2., end_y - start_y); + let highlight_bounds = RectF::new(highlight_origin, highlight_size); + + cx.scene.push_quad(Quad { + bounds: highlight_bounds, + background: Some(Color::red()), + border: Border::new(0., Color::transparent_black()), + corner_radius: 1. 
* line_height, + }); + + continue; + } }; let start_row = hunk.buffer_range.start; let end_row = hunk.buffer_range.end; - let start_y = start_row as f32 * layout.line_height - scroll_top; - let end_y = end_row as f32 * layout.line_height - scroll_top; + let start_y = start_row as f32 * line_height - scroll_top; + let end_y = end_row as f32 * line_height - scroll_top; - let width = 0.22 * layout.line_height; + let width = 0.22 * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); @@ -567,15 +589,15 @@ impl EditorElement { bounds: highlight_bounds, background: Some(color), border: Border::new(0., Color::transparent_black()), - corner_radius: 0.2 * layout.line_height, + corner_radius: 0.2 * line_height, }); } if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() { let mut x = bounds.width() - layout.gutter_padding; - let mut y = *row as f32 * layout.position_map.line_height - scroll_top; + let mut y = *row as f32 * line_height - scroll_top; x += ((layout.gutter_padding + layout.gutter_margin) - indicator.size().x()) / 2.; - y += (layout.position_map.line_height - indicator.size().y()) / 2.; + y += (line_height - indicator.size().y()) / 2.; indicator.paint(bounds.origin() + vec2f(x, y), visible_bounds, cx); } } From 883d5b7a081d640afe7afcfd3729e610ecb874dc Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 6 Sep 2022 17:09:47 -0400 Subject: [PATCH 040/140] Update git gutter status after debounced delay Co-authored-by: Max Brunsfeld --- crates/editor/src/display_map/fold_map.rs | 1 + crates/editor/src/items.rs | 7 +++-- crates/editor/src/multi_buffer.rs | 26 ++++++++++++++++++ crates/language/src/buffer.rs | 33 ++++++++++++++++++----- crates/workspace/src/workspace.rs | 2 +- 5 files changed, 60 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 970910f969..6ab5c6202e 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -274,6 +274,7 @@ impl FoldMap { if buffer.edit_count() != new_buffer.edit_count() || buffer.parse_count() != new_buffer.parse_count() || buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count() + || buffer.diff_update_count() != new_buffer.diff_update_count() || buffer.trailing_excerpt_update_count() != new_buffer.trailing_excerpt_update_count() { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 22a069c5c0..d208fc9c15 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -481,9 +481,12 @@ impl Item for Editor { fn update_git( &mut self, _project: ModelHandle, - _cx: &mut ViewContext, + cx: &mut ViewContext, ) -> Task> { - println!("Editor::update_git"); + self.buffer().update(cx, |multibuffer, cx| { + multibuffer.update_git(cx); + }); + cx.notify(); Task::ready(Ok(())) } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 91de32bac9..1d09b7008f 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -91,6 +91,7 @@ struct BufferState { last_selections_update_count: usize, last_diagnostics_update_count: usize, last_file_update_count: usize, + last_diff_update_count: usize, excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -102,6 +103,7 @@ pub struct MultiBufferSnapshot { parse_count: usize, diagnostics_update_count: usize, 
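    // diff_update_count (added just below) is a monotonically increasing
    // counter bumped whenever a buffer's git diff is recomputed; display-map
    // layers compare it across snapshots (see the fold_map change above) to
    // decide when the gutter hunks need to be re-read.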
trailing_excerpt_update_count: usize, + diff_update_count: usize, edit_count: usize, is_dirty: bool, has_conflict: bool, @@ -203,6 +205,7 @@ impl MultiBuffer { last_selections_update_count: buffer_state.last_selections_update_count, last_diagnostics_update_count: buffer_state.last_diagnostics_update_count, last_file_update_count: buffer_state.last_file_update_count, + last_diff_update_count: buffer_state.last_diff_update_count, excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), @@ -309,6 +312,15 @@ impl MultiBuffer { self.read(cx).symbols_containing(offset, theme) } + pub fn update_git(&mut self, cx: &mut ModelContext) { + let mut buffers = self.buffers.borrow_mut(); + for buffer in buffers.values_mut() { + buffer.buffer.update(cx, |buffer, _| { + buffer.update_git(); + }) + } + } + pub fn edit( &mut self, edits: I, @@ -828,6 +840,7 @@ impl MultiBuffer { last_selections_update_count: buffer_snapshot.selections_update_count(), last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(), last_file_update_count: buffer_snapshot.file_update_count(), + last_diff_update_count: buffer_snapshot.diff_update_count(), excerpts: Default::default(), _subscriptions: [ cx.observe(&buffer, |_, _, cx| cx.notify()), @@ -1250,6 +1263,7 @@ impl MultiBuffer { let mut excerpts_to_edit = Vec::new(); let mut reparsed = false; let mut diagnostics_updated = false; + let mut diff_updated = false; let mut is_dirty = false; let mut has_conflict = false; let mut edited = false; @@ -1261,6 +1275,7 @@ impl MultiBuffer { let selections_update_count = buffer.selections_update_count(); let diagnostics_update_count = buffer.diagnostics_update_count(); let file_update_count = buffer.file_update_count(); + let diff_update_count = buffer.diff_update_count(); let buffer_edited = version.changed_since(&buffer_state.last_version); let buffer_reparsed = parse_count > buffer_state.last_parse_count; @@ -1269,17 +1284,20 @@ impl MultiBuffer { let buffer_diagnostics_updated = diagnostics_update_count > buffer_state.last_diagnostics_update_count; let buffer_file_updated = file_update_count > buffer_state.last_file_update_count; + let buffer_diff_updated = diff_update_count > buffer_state.last_diff_update_count; if buffer_edited || buffer_reparsed || buffer_selections_updated || buffer_diagnostics_updated || buffer_file_updated + || buffer_diff_updated { buffer_state.last_version = version; buffer_state.last_parse_count = parse_count; buffer_state.last_selections_update_count = selections_update_count; buffer_state.last_diagnostics_update_count = diagnostics_update_count; buffer_state.last_file_update_count = file_update_count; + buffer_state.last_diff_update_count = diff_update_count; excerpts_to_edit.extend( buffer_state .excerpts @@ -1291,6 +1309,7 @@ impl MultiBuffer { edited |= buffer_edited; reparsed |= buffer_reparsed; diagnostics_updated |= buffer_diagnostics_updated; + diff_updated |= buffer_diff_updated; is_dirty |= buffer.is_dirty(); has_conflict |= buffer.has_conflict(); } @@ -1303,6 +1322,9 @@ impl MultiBuffer { if diagnostics_updated { snapshot.diagnostics_update_count += 1; } + if diff_updated { + snapshot.diff_update_count += 1; + } snapshot.is_dirty = is_dirty; snapshot.has_conflict = has_conflict; @@ -2480,6 +2502,10 @@ impl MultiBufferSnapshot { self.diagnostics_update_count } + pub fn diff_update_count(&self) -> usize { + self.diff_update_count + } + pub fn trailing_excerpt_update_count(&self) -> usize { 
self.trailing_excerpt_update_count } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index ad3d8978ad..10d7fa5535 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -49,7 +49,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, head_text: Option, - diff: BufferDiff, + git_diff: BufferDiff, file: Option>, saved_version: clock::Global, saved_version_fingerprint: String, @@ -69,6 +69,7 @@ pub struct Buffer { diagnostics_update_count: usize, diagnostics_timestamp: clock::Lamport, file_update_count: usize, + diff_update_count: usize, completion_triggers: Vec, completion_triggers_timestamp: clock::Lamport, deferred_ops: OperationQueue, @@ -76,12 +77,13 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - git_hunks: Arc<[DiffHunk]>, + pub git_hunks: Arc<[DiffHunk]>, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, file_update_count: usize, + diff_update_count: usize, remote_selections: TreeMap, selections_update_count: usize, language: Option>, @@ -419,9 +421,9 @@ impl Buffer { UNIX_EPOCH }; - let mut diff = BufferDiff::new(); + let mut git_diff = BufferDiff::new(); if let Some(head_text) = &head_text { - diff.update(head_text, &buffer); + git_diff.update(head_text, &buffer); } Self { @@ -432,7 +434,7 @@ impl Buffer { was_dirty_before_starting_transaction: None, text: buffer, head_text, - diff, + git_diff, file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, @@ -447,6 +449,7 @@ impl Buffer { diagnostics_update_count: 0, diagnostics_timestamp: Default::default(), file_update_count: 0, + diff_update_count: 0, completion_triggers: Default::default(), completion_triggers_timestamp: Default::default(), deferred_ops: OperationQueue::new(), @@ -462,12 +465,13 @@ impl Buffer { BufferSnapshot { text, syntax, - git_hunks: self.diff.hunks(), + git_hunks: self.git_diff.hunks(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), diagnostics_update_count: self.diagnostics_update_count, file_update_count: self.file_update_count, + diff_update_count: self.diff_update_count, language: self.language.clone(), parse_count: self.parse_count, selections_update_count: self.selections_update_count, @@ -649,6 +653,14 @@ impl Buffer { task } + pub fn update_git(&mut self) { + if let Some(head_text) = &self.head_text { + let snapshot = self.snapshot(); + self.git_diff.update(head_text, &snapshot); + self.diff_update_count += 1; + } + } + pub fn close(&mut self, cx: &mut ModelContext) { cx.emit(Event::Closed); } @@ -673,6 +685,10 @@ impl Buffer { self.file_update_count } + pub fn diff_update_count(&self) -> usize { + self.diff_update_count + } + #[cfg(any(test, feature = "test-support"))] pub fn is_parsing(&self) -> bool { self.parsing_in_background @@ -2226,6 +2242,10 @@ impl BufferSnapshot { pub fn file_update_count(&self) -> usize { self.file_update_count } + + pub fn diff_update_count(&self) -> usize { + self.diff_update_count + } } pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize { @@ -2260,6 +2280,7 @@ impl Clone for BufferSnapshot { selections_update_count: self.selections_update_count, diagnostics_update_count: self.diagnostics_update_count, file_update_count: self.file_update_count, + diff_update_count: self.diff_update_count, language: self.language.clone(), parse_count: self.parse_count, } diff --git 
a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 3446dc0f0e..ad3862c56f 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -734,7 +734,7 @@ impl ItemHandle for ViewHandle { ); } - const GIT_DELAY: Duration = Duration::from_millis(800); + const GIT_DELAY: Duration = Duration::from_millis(600); let item = item.clone(); pending_git_update.fire_new( GIT_DELAY, From a86e93d46fa16b0c45acce1c3ba53ec728141157 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 9 Sep 2022 11:52:42 -0400 Subject: [PATCH 041/140] Checkpoint on incremental diff sumtree shenanigans --- crates/language/src/buffer.rs | 18 +-- crates/language/src/git.rs | 263 ++++++++++++++++++++++++++++------ crates/text/src/rope.rs | 7 + 3 files changed, 233 insertions(+), 55 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 10d7fa5535..5159e316f9 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -35,7 +35,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; -use sum_tree::TreeMap; +use sum_tree::{SumTree, TreeMap}; use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *}; use theme::SyntaxTheme; @@ -48,7 +48,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, - head_text: Option, + head_text: Option, git_diff: BufferDiff, file: Option>, saved_version: clock::Global, @@ -77,7 +77,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - pub git_hunks: Arc<[DiffHunk]>, + pub git_hunks: SumTree>, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -371,7 +371,7 @@ impl Buffer { id: self.remote_id(), file: self.file.as_ref().map(|f| f.to_proto()), base_text: self.base_text().to_string(), - head_text: self.head_text.clone(), + head_text: self.head_text.as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, } } @@ -421,10 +421,8 @@ impl Buffer { UNIX_EPOCH }; - let mut git_diff = BufferDiff::new(); - if let Some(head_text) = &head_text { - git_diff.update(head_text, &buffer); - } + let git_diff = BufferDiff::new(&head_text, &buffer); + let head_text = head_text.map(|h| Rope::from(h.as_str())); Self { saved_mtime, @@ -465,7 +463,7 @@ impl Buffer { BufferSnapshot { text, syntax, - git_hunks: self.git_diff.hunks(), + git_hunks: self.git_diff.hunks().clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -2175,6 +2173,8 @@ impl BufferSnapshot { &'a self, query_row_range: Range, ) -> impl 'a + Iterator> { + println!("{} hunks overall", self.git_hunks.iter().count()); + //This is pretty terrible, find a way to utilize sumtree traversal to accelerate this self.git_hunks.iter().filter_map(move |hunk| { let range = hunk.buffer_range.to_point(&self.text); diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 73e511ca48..4a227c904d 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,8 +1,7 @@ use std::ops::Range; -use std::sync::Arc; -use sum_tree::Bias; -use text::{Anchor, Point}; +use sum_tree::{Bias, SumTree}; +use text::{Anchor, BufferSnapshot, Point, Rope}; pub use git2 as libgit; use libgit::{DiffOptions as GitOptions, Patch as GitPatch}; @@ -14,7 +13,7 @@ pub enum DiffHunkStatus { Removed, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct DiffHunk { pub 
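    // buffer_range is stored as anchors so a hunk keeps tracking the right
    // rows while the live buffer is edited; head_range holds plain row
    // indices into the immutable HEAD text (a later patch in this series
    // switches it to byte offsets).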
buffer_range: Range, pub head_range: Range, @@ -32,60 +31,232 @@ impl DiffHunk { } } +impl sum_tree::Item for DiffHunk { + type Summary = DiffHunkSummary; + + fn summary(&self) -> Self::Summary { + DiffHunkSummary { + head_range: self.head_range.clone(), + } + } +} + +#[derive(Debug, Default, Clone)] +pub struct DiffHunkSummary { + head_range: Range, +} + +impl sum_tree::Summary for DiffHunkSummary { + type Context = (); + + fn add_summary(&mut self, other: &Self, _: &Self::Context) { + self.head_range.start = self.head_range.start.min(other.head_range.start); + self.head_range.end = self.head_range.end.max(other.head_range.end); + } +} + +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +struct HunkHeadEnd(usize); + +impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkHeadEnd { + fn add_summary(&mut self, summary: &'a DiffHunkSummary, _: &()) { + self.0 = summary.head_range.end; + } + + fn from_summary(summary: &'a DiffHunkSummary, _: &()) -> Self { + HunkHeadEnd(summary.head_range.end) + } +} + +struct HunkIter<'a> { + index: usize, + patch: GitPatch<'a>, +} + +impl<'a> HunkIter<'a> { + fn diff(head: &'a [u8], current: &'a [u8]) -> Option { + let mut options = GitOptions::default(); + options.context_lines(0); + let patch = match GitPatch::from_buffers(head, None, current, None, Some(&mut options)) { + Ok(patch) => patch, + Err(_) => return None, + }; + + Some(HunkIter { index: 0, patch }) + } + + fn next(&mut self, buffer: &BufferSnapshot) -> Option> { + if self.index >= self.patch.num_hunks() { + return None; + } + + let (hunk, _) = match self.patch.hunk(self.index) { + Ok(it) => it, + Err(_) => return None, + }; + + let new_start = hunk.new_start() - 1; + let new_end = new_start + hunk.new_lines(); + let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); + let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); + let buffer_range = start_anchor..end_anchor; + + //This is probably wrong? When does this trigger? Should buffer range also do this? 
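        // Note: old_start == 0 happens for hunks that only insert lines before
        // the first line of the old file (unified-diff header `@@ -0,0 +1,N @@`,
        // with old_lines == 0), where the 1-based -> 0-based subtraction would
        // underflow, so an empty 0..0 head range is the right answer. The
        // buffer range can hit the same case: deleting the first lines of the
        // file is reported as `@@ -1,N +0,0 @@`, so `new_start() - 1` above can
        // underflow too and likely wants the same guard.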
+ let head_range = if hunk.old_start() == 0 { + 0..0 + } else { + let old_start = hunk.old_start() as usize - 1; + let old_end = old_start + hunk.old_lines() as usize; + old_start..old_end + }; + + self.index += 1; + Some(DiffHunk { + buffer_range, + head_range, + }) + } +} + pub struct BufferDiff { - hunks: Arc<[DiffHunk]>, + last_update_version: clock::Global, + hunks: SumTree>, } impl BufferDiff { - pub fn new() -> BufferDiff { - BufferDiff { - hunks: Arc::new([]), - } - } - - pub fn hunks(&self) -> Arc<[DiffHunk]> { - self.hunks.clone() - } - - pub fn update(&mut self, head: &str, buffer: &text::BufferSnapshot) { - let head = head.as_bytes(); - let current = buffer.as_rope().to_string().into_bytes(); - - let mut options = GitOptions::default(); - options.context_lines(0); - let patch = match GitPatch::from_buffers(head, None, ¤t, None, Some(&mut options)) { - Ok(patch) => patch, - Err(_) => { - //Reset hunks in case of failure to avoid showing a stale (potentially erroneous) diff - self.hunks = Arc::new([]); - return; + pub fn new(head_text: &Option, buffer: &text::BufferSnapshot) -> BufferDiff { + let hunks = if let Some(head_text) = head_text { + let buffer_string = buffer.as_rope().to_string(); + let buffer_bytes = buffer_string.as_bytes(); + let iter = HunkIter::diff(head_text.as_bytes(), buffer_bytes); + if let Some(mut iter) = iter { + println!("some iter"); + let mut hunks = SumTree::new(); + while let Some(hunk) = iter.next(buffer) { + println!("hunk"); + hunks.push(hunk, &()); + } + hunks + } else { + SumTree::new() } + } else { + SumTree::new() }; - let mut hunks = Vec::new(); - for index in 0..patch.num_hunks() { - let (hunk, _) = match patch.hunk(index) { - Ok(it) => it, - Err(_) => continue, - }; + BufferDiff { + last_update_version: buffer.version().clone(), + hunks, + } + } - let new_start = hunk.new_start() - 1; - let new_end = new_start + hunk.new_lines(); - let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); - let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); - let buffer_range = start_anchor..end_anchor; + pub fn hunks(&self) -> &SumTree> { + &self.hunks + } - let old_start = hunk.old_start() as usize - 1; - let old_end = old_start + hunk.old_lines() as usize; - let head_range = old_start..old_end; + pub fn update(&mut self, head: &Rope, buffer: &text::BufferSnapshot) { + let expand_by = 20; + let combine_distance = 5; - hunks.push(DiffHunk { - buffer_range, - head_range, - }); + struct EditRange { + head_start: u32, + head_end: u32, + buffer_start: u32, + buffer_end: u32, } - self.hunks = hunks.into(); + let mut ranges = Vec::::new(); + + for edit in buffer.edits_since::(&self.last_update_version) { + //This bit is extremely wrong, this is not where these row lines should come from + let head_start = edit.old.start.row.saturating_sub(expand_by); + let head_end = (edit.old.end.row + expand_by).min(head.summary().lines.row + 1); + + let buffer_start = edit.new.start.row.saturating_sub(expand_by); + let buffer_end = (edit.new.end.row + expand_by).min(buffer.row_count()); + + if let Some(last_range) = ranges.last_mut() { + let head_distance = last_range.head_end.abs_diff(head_end); + let buffer_distance = last_range.buffer_end.abs_diff(buffer_end); + + if head_distance <= combine_distance || buffer_distance <= combine_distance { + last_range.head_start = last_range.head_start.min(head_start); + last_range.head_end = last_range.head_end.max(head_end); + + last_range.buffer_start = last_range.buffer_start.min(buffer_start); + 
last_range.buffer_end = last_range.buffer_end.max(buffer_end); + } else { + ranges.push(EditRange { + head_start, + head_end, + buffer_start, + buffer_end, + }); + } + } else { + ranges.push(EditRange { + head_start, + head_end, + buffer_start, + buffer_end, + }); + } + } + + self.last_update_version = buffer.version().clone(); + + let mut new_hunks = SumTree::new(); + let mut cursor = self.hunks.cursor::(); + + for range in ranges { + let head_range = range.head_start..range.head_end; + let head_slice = head.slice_rows(head_range.clone()); + let head_str = head_slice.to_string(); + + let buffer_range = range.buffer_start..range.buffer_end; + let buffer_slice = buffer.as_rope().slice_rows(buffer_range.clone()); + let buffer_str = buffer_slice.to_string(); + + println!("diffing head {:?}, buffer {:?}", head_range, buffer_range); + + let mut iter = match HunkIter::diff(head_str.as_bytes(), buffer_str.as_bytes()) { + Some(iter) => iter, + None => continue, + }; + + while let Some(hunk) = iter.next(buffer) { + println!("hunk"); + let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, &()); + println!("prefix len: {}", prefix.iter().count()); + new_hunks.extend(prefix.iter().cloned(), &()); + + new_hunks.push(hunk.clone(), &()); + + cursor.seek(&HunkHeadEnd(hunk.head_range.end), Bias::Right, &()); + println!("item: {:?}", cursor.item()); + if let Some(item) = cursor.item() { + if item.head_range.end <= hunk.head_range.end { + println!("skipping"); + cursor.next(&()); + } + } + } + } + + new_hunks.extend( + cursor + .suffix(&()) + .iter() + .map(|i| { + println!("extending with {i:?}"); + i + }) + .cloned(), + &(), + ); + drop(cursor); + + self.hunks = new_hunks; } } diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index d35ac46f45..e148c048bb 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -54,6 +54,13 @@ impl Rope { cursor.slice(range.end) } + pub fn slice_rows(&self, range: Range) -> Rope { + //This would be more efficient with a forward advance after the first, but it's fine + let start = self.point_to_offset(Point::new(range.start, 0)); + let end = self.point_to_offset(Point::new(range.end, 0)); + self.slice(start..end) + } + pub fn push(&mut self, text: &str) { let mut new_chunks = SmallVec::<[_; 16]>::new(); let mut new_chunk = ArrayString::new(); From 61ff24edc8f257e0ffbd691351653255cf2f0e66 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 9 Sep 2022 15:40:33 -0400 Subject: [PATCH 042/140] Move cloneable diff state into new snapshot type Co-Authored-By: Max Brunsfeld --- crates/language/src/buffer.rs | 31 ++----- crates/language/src/git.rs | 154 ++++++++++++++++++++++++++------ crates/sum_tree/src/sum_tree.rs | 6 ++ crates/text/src/anchor.rs | 2 +- 4 files changed, 140 insertions(+), 53 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 5159e316f9..37f2151133 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,4 @@ -use crate::git::{BufferDiff, DiffHunk}; +use crate::git::{BufferDiff, BufferDiffSnapshot, DiffHunk}; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -35,7 +35,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; -use sum_tree::{SumTree, TreeMap}; +use sum_tree::TreeMap; use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *}; use theme::SyntaxTheme; @@ -77,7 +77,7 @@ pub struct Buffer { pub 
struct BufferSnapshot { text: text::BufferSnapshot, - pub git_hunks: SumTree>, + pub diff_snapshot: BufferDiffSnapshot, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -463,7 +463,7 @@ impl Buffer { BufferSnapshot { text, syntax, - git_hunks: self.git_diff.hunks().clone(), + diff_snapshot: self.git_diff.snapshot(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -2173,26 +2173,7 @@ impl BufferSnapshot { &'a self, query_row_range: Range, ) -> impl 'a + Iterator> { - println!("{} hunks overall", self.git_hunks.iter().count()); - //This is pretty terrible, find a way to utilize sumtree traversal to accelerate this - self.git_hunks.iter().filter_map(move |hunk| { - let range = hunk.buffer_range.to_point(&self.text); - - if range.start.row < query_row_range.end && query_row_range.start < range.end.row { - let end_row = if range.end.column > 0 { - range.end.row + 1 - } else { - range.end.row - }; - - Some(DiffHunk { - buffer_range: range.start.row..end_row, - head_range: hunk.head_range.clone(), - }) - } else { - None - } - }) + self.diff_snapshot.hunks_in_range(query_row_range, self) } pub fn diagnostics_in_range<'a, T, O>( @@ -2272,7 +2253,7 @@ impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), - git_hunks: self.git_hunks.clone(), + diff_snapshot: self.diff_snapshot.clone(), syntax: self.syntax.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 4a227c904d..09d74ad9f3 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,7 +1,7 @@ use std::ops::Range; use sum_tree::{Bias, SumTree}; -use text::{Anchor, BufferSnapshot, Point, Rope}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToPoint}; pub use git2 as libgit; use libgit::{DiffOptions as GitOptions, Patch as GitPatch}; @@ -13,10 +13,10 @@ pub enum DiffHunkStatus { Removed, } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct DiffHunk { pub buffer_range: Range, - pub head_range: Range, + pub head_range: Range, } impl DiffHunk { @@ -36,6 +36,7 @@ impl sum_tree::Item for DiffHunk { fn summary(&self) -> Self::Summary { DiffHunkSummary { + buffer_range: self.buffer_range.clone(), head_range: self.head_range.clone(), } } @@ -43,11 +44,12 @@ impl sum_tree::Item for DiffHunk { #[derive(Debug, Default, Clone)] pub struct DiffHunkSummary { - head_range: Range, + buffer_range: Range, + head_range: Range, } impl sum_tree::Summary for DiffHunkSummary { - type Context = (); + type Context = text::BufferSnapshot; fn add_summary(&mut self, other: &Self, _: &Self::Context) { self.head_range.start = self.head_range.start.min(other.head_range.start); @@ -55,19 +57,32 @@ impl sum_tree::Summary for DiffHunkSummary { } } -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -struct HunkHeadEnd(usize); +#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct HunkHeadEnd(u32); impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkHeadEnd { - fn add_summary(&mut self, summary: &'a DiffHunkSummary, _: &()) { + fn add_summary(&mut self, summary: &'a DiffHunkSummary, _: &text::BufferSnapshot) { self.0 = summary.head_range.end; } - fn from_summary(summary: &'a DiffHunkSummary, _: &()) -> Self { + fn from_summary(summary: &'a DiffHunkSummary, _: &text::BufferSnapshot) -> Self { HunkHeadEnd(summary.head_range.end) } } 
+#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct HunkBufferEnd(u32); + +impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { + fn add_summary(&mut self, summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) { + self.0 = summary.buffer_range.end.to_point(buffer).row; + } + + fn from_summary(summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) -> Self { + HunkBufferEnd(summary.buffer_range.end.to_point(buffer).row) + } +} + struct HunkIter<'a> { index: usize, patch: GitPatch<'a>, @@ -105,8 +120,8 @@ impl<'a> HunkIter<'a> { let head_range = if hunk.old_start() == 0 { 0..0 } else { - let old_start = hunk.old_start() as usize - 1; - let old_end = old_start + hunk.old_lines() as usize; + let old_start = hunk.old_start() - 1; + let old_end = old_start + hunk.old_lines(); old_start..old_end }; @@ -118,9 +133,48 @@ impl<'a> HunkIter<'a> { } } +#[derive(Clone)] +pub struct BufferDiffSnapshot { + tree: SumTree>, +} + +impl BufferDiffSnapshot { + pub fn hunks_in_range<'a>( + &'a self, + query_row_range: Range, + buffer: &'a BufferSnapshot, + ) -> impl 'a + Iterator> { + println!("{} hunks overall", self.tree.iter().count()); + + self.tree.iter().filter_map(move |hunk| { + let range = hunk.buffer_range.to_point(&buffer); + + if range.start.row < query_row_range.end && query_row_range.start < range.end.row { + let end_row = if range.end.column > 0 { + range.end.row + 1 + } else { + range.end.row + }; + + Some(DiffHunk { + buffer_range: range.start.row..end_row, + head_range: hunk.head_range.clone(), + }) + } else { + None + } + }) + } + + #[cfg(test)] + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + self.hunks_in_range(0..u32::MAX, text) + } +} + pub struct BufferDiff { last_update_version: clock::Global, - hunks: SumTree>, + snapshot: BufferDiffSnapshot, } impl BufferDiff { @@ -128,13 +182,12 @@ impl BufferDiff { let hunks = if let Some(head_text) = head_text { let buffer_string = buffer.as_rope().to_string(); let buffer_bytes = buffer_string.as_bytes(); + let iter = HunkIter::diff(head_text.as_bytes(), buffer_bytes); if let Some(mut iter) = iter { - println!("some iter"); let mut hunks = SumTree::new(); while let Some(hunk) = iter.next(buffer) { - println!("hunk"); - hunks.push(hunk, &()); + hunks.push(hunk, buffer); } hunks } else { @@ -146,12 +199,12 @@ impl BufferDiff { BufferDiff { last_update_version: buffer.version().clone(), - hunks, + snapshot: BufferDiffSnapshot { tree: hunks }, } } - pub fn hunks(&self) -> &SumTree> { - &self.hunks + pub fn snapshot(&self) -> BufferDiffSnapshot { + self.snapshot.clone() } pub fn update(&mut self, head: &Rope, buffer: &text::BufferSnapshot) { @@ -206,7 +259,7 @@ impl BufferDiff { self.last_update_version = buffer.version().clone(); let mut new_hunks = SumTree::new(); - let mut cursor = self.hunks.cursor::(); + let mut cursor = self.snapshot.tree.cursor::(); for range in ranges { let head_range = range.head_start..range.head_end; @@ -226,18 +279,18 @@ impl BufferDiff { while let Some(hunk) = iter.next(buffer) { println!("hunk"); - let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, &()); + let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); println!("prefix len: {}", prefix.iter().count()); - new_hunks.extend(prefix.iter().cloned(), &()); + new_hunks.extend(prefix.iter().cloned(), buffer); - new_hunks.push(hunk.clone(), &()); + new_hunks.push(hunk.clone(), buffer); - cursor.seek(&HunkHeadEnd(hunk.head_range.end), 
Bias::Right, &()); + cursor.seek(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); println!("item: {:?}", cursor.item()); if let Some(item) = cursor.item() { if item.head_range.end <= hunk.head_range.end { println!("skipping"); - cursor.next(&()); + cursor.next(buffer); } } } @@ -245,18 +298,18 @@ impl BufferDiff { new_hunks.extend( cursor - .suffix(&()) + .suffix(buffer) .iter() .map(|i| { println!("extending with {i:?}"); i }) .cloned(), - &(), + buffer, ); drop(cursor); - self.hunks = new_hunks; + self.snapshot.tree = new_hunks; } } @@ -276,3 +329,50 @@ impl GitDiffEdit { } } } + +#[cfg(test)] +mod tests { + use super::*; + use text::Buffer; + use unindent::Unindent as _; + + #[gpui::test] + fn test_buffer_diff_simple() { + let head_text = " + one + two + three + " + .unindent(); + + let buffer_text = " + one + hello + three + " + .unindent(); + + let mut buffer = Buffer::new(0, 0, buffer_text); + let diff = BufferDiff::new(&Some(head_text.clone()), &buffer); + assert_eq!( + diff.snapshot.hunks(&buffer).collect::>(), + &[DiffHunk { + buffer_range: 1..2, + head_range: 1..2 + }] + ); + + buffer.edit([(0..0, "point five\n")]); + assert_eq!( + diff.snapshot.hunks(&buffer).collect::>(), + &[DiffHunk { + buffer_range: 2..3, + head_range: 1..2 + }] + ); + } + + // use rand::rngs::StdRng; + // #[gpui::test(iterations = 100)] + // fn test_buffer_diff_random(mut rng: StdRng) {} +} diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index cb05dff967..7beab3b7c5 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -101,6 +101,12 @@ pub enum Bias { Right, } +impl Default for Bias { + fn default() -> Self { + Bias::Left + } +} + impl PartialOrd for Bias { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index dca95ce5d5..9f70ae1cc7 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -4,7 +4,7 @@ use anyhow::Result; use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::Bias; -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)] pub struct Anchor { pub timestamp: clock::Local, pub offset: usize, From a2e8fc79d9ff47686eb9b6442049f2f9331ba240 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 9 Sep 2022 17:32:19 -0400 Subject: [PATCH 043/140] Switch head range from row range to byte offset range Co-Authored-By: Max Brunsfeld --- crates/language/src/git.rs | 405 +++++++++++++++++++++++-------------- 1 file changed, 255 insertions(+), 150 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 09d74ad9f3..b3e2f7da51 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -16,7 +16,7 @@ pub enum DiffHunkStatus { #[derive(Debug, Clone, PartialEq, Eq)] pub struct DiffHunk { pub buffer_range: Range, - pub head_range: Range, + pub head_range: Range, } impl DiffHunk { @@ -45,7 +45,7 @@ impl sum_tree::Item for DiffHunk { #[derive(Debug, Default, Clone)] pub struct DiffHunkSummary { buffer_range: Range, - head_range: Range, + head_range: Range, } impl sum_tree::Summary for DiffHunkSummary { @@ -58,7 +58,7 @@ impl sum_tree::Summary for DiffHunkSummary { } #[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] -struct HunkHeadEnd(u32); +struct HunkHeadEnd(usize); impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkHeadEnd { fn add_summary(&mut self, summary: &'a DiffHunkSummary, _: &text::BufferSnapshot) { @@ 
-83,55 +83,63 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { } } -struct HunkIter<'a> { - index: usize, - patch: GitPatch<'a>, -} +// struct HunkIter<'a> { +// index: usize, +// patch: GitPatch<'a>, +// } -impl<'a> HunkIter<'a> { - fn diff(head: &'a [u8], current: &'a [u8]) -> Option { - let mut options = GitOptions::default(); - options.context_lines(0); - let patch = match GitPatch::from_buffers(head, None, current, None, Some(&mut options)) { - Ok(patch) => patch, - Err(_) => return None, - }; +// impl<'a> HunkIter<'a> { +// fn diff(head: &'a [u8], current: &'a [u8]) -> Option { +// let mut options = GitOptions::default(); +// options.context_lines(0); +// let patch = match GitPatch::from_buffers(head, None, current, None, Some(&mut options)) { +// Ok(patch) => patch, +// Err(_) => return None, +// }; - Some(HunkIter { index: 0, patch }) - } +// Some(HunkIter { index: 0, patch }) +// } - fn next(&mut self, buffer: &BufferSnapshot) -> Option> { - if self.index >= self.patch.num_hunks() { - return None; - } +// fn next(&mut self, buffer: &BufferSnapshot) -> Option> { +// if self.index >= self.patch.num_hunks() { +// return None; +// } - let (hunk, _) = match self.patch.hunk(self.index) { - Ok(it) => it, - Err(_) => return None, - }; +// let (hunk, _) = match self.patch.hunk(self.index) { +// Ok(it) => it, +// Err(_) => return None, +// }; +// let hunk_line_count = self.patch.num_lines_in_hunk(self.index).unwrap(); - let new_start = hunk.new_start() - 1; - let new_end = new_start + hunk.new_lines(); - let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); - let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); - let buffer_range = start_anchor..end_anchor; +// println!("{hunk:#?}"); +// for index in 0..hunk_line_count { +// println!("{:?}", self.patch.line_in_hunk(self.index, index)); +// } - //This is probably wrong? When does this trigger? Should buffer range also do this? - let head_range = if hunk.old_start() == 0 { - 0..0 - } else { - let old_start = hunk.old_start() - 1; - let old_end = old_start + hunk.old_lines(); - old_start..old_end - }; +// let new_start = hunk.new_start() - 1; +// let new_end = new_start + hunk.new_lines(); +// let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); +// let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); +// let buffer_range = start_anchor..end_anchor; - self.index += 1; - Some(DiffHunk { - buffer_range, - head_range, - }) - } -} +// //This is probably wrong? When does this trigger? Should buffer range also do this? 
+// let head_range = if hunk.old_start() == 0 { +// 0..0 +// } else { +// let old_start = hunk.old_start() - 1; +// let old_end = old_start + hunk.old_lines(); +// old_start..old_end +// }; + +// // let head_start_index = self.patch.line_in_hunk(self.index, 0) + +// self.index += 1; +// Some(DiffHunk { +// buffer_range, +// head_range, +// }) +// } +// } #[derive(Clone)] pub struct BufferDiffSnapshot { @@ -144,7 +152,7 @@ impl BufferDiffSnapshot { query_row_range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - println!("{} hunks overall", self.tree.iter().count()); + // println!("{} hunks overall", self.tree.iter().count()); self.tree.iter().filter_map(move |hunk| { let range = hunk.buffer_range.to_point(&buffer); @@ -183,16 +191,100 @@ impl BufferDiff { let buffer_string = buffer.as_rope().to_string(); let buffer_bytes = buffer_string.as_bytes(); - let iter = HunkIter::diff(head_text.as_bytes(), buffer_bytes); - if let Some(mut iter) = iter { - let mut hunks = SumTree::new(); - while let Some(hunk) = iter.next(buffer) { - hunks.push(hunk, buffer); + let mut options = GitOptions::default(); + options.context_lines(0); + let patch = match GitPatch::from_buffers( + head_text.as_bytes(), + None, + buffer_bytes, + None, + Some(&mut options), + ) { + Ok(patch) => patch, + Err(_) => todo!("This needs to be handled"), + }; + + let mut hunks = SumTree::>::new(); + let mut delta = 0i64; + for i in 0..patch.num_hunks() { + let diff_line_item_count = patch.num_lines_in_hunk(i).unwrap(); + + // if diff_line_item_count == 0 { + // continue; + // } + + // let calc_line_diff_hunk = || { + + // }; + + // let first_line = patch.line_in_hunk(0).unwrap(); + // let mut hunk = + + for j in 0..diff_line_item_count { + let line = patch.line_in_hunk(i, j).unwrap(); + + let hunk = match line.origin_value() { + libgit::DiffLineType::Addition => { + let buffer_start = line.content_offset(); + let buffer_end = buffer_start as usize + line.content().len(); + let head_offset = (buffer_start - delta) as usize; + delta += line.content().len() as i64; + DiffHunk { + buffer_range: buffer.anchor_before(buffer_start as usize) + ..buffer.anchor_after(buffer_end), + head_range: head_offset..head_offset, + } + } + libgit::DiffLineType::Deletion => { + let head_start = line.content_offset(); + let head_end = head_start as usize + line.content().len(); + let buffer_offset = (head_start + delta) as usize; + delta -= line.content().len() as i64; + DiffHunk { + buffer_range: buffer.anchor_before(buffer_offset) + ..buffer.anchor_after(buffer_offset), + head_range: (head_start as usize)..head_end, + } + } + + libgit::DiffLineType::AddEOFNL => todo!(), + libgit::DiffLineType::ContextEOFNL => todo!(), + libgit::DiffLineType::DeleteEOFNL => todo!(), + libgit::DiffLineType::Context => unreachable!(), + libgit::DiffLineType::FileHeader => continue, + libgit::DiffLineType::HunkHeader => continue, + libgit::DiffLineType::Binary => continue, + }; + + let mut combined = false; + hunks.update_last( + |last_hunk| { + if last_hunk.head_range.end == hunk.head_range.start { + last_hunk.head_range.end = hunk.head_range.end; + last_hunk.buffer_range.end = hunk.buffer_range.end; + combined = true; + } + }, + buffer, + ); + if !combined { + hunks.push(hunk, buffer); + } } - hunks - } else { - SumTree::new() } + + // let iter = HunkIter::diff(head_text.as_bytes(), buffer_bytes); + // if let Some(mut iter) = iter { + // let mut hunks = SumTree::new(); + // while let Some(hunk) = iter.next(buffer) { + // hunks.push(hunk, buffer); + // } + 
// println!("========"); + // hunks + // } else { + // SumTree::new() + // } + hunks } else { SumTree::new() }; @@ -208,108 +300,108 @@ impl BufferDiff { } pub fn update(&mut self, head: &Rope, buffer: &text::BufferSnapshot) { - let expand_by = 20; - let combine_distance = 5; + // let expand_by = 20; + // let combine_distance = 5; - struct EditRange { - head_start: u32, - head_end: u32, - buffer_start: u32, - buffer_end: u32, - } + // struct EditRange { + // head_start: u32, + // head_end: u32, + // buffer_start: u32, + // buffer_end: u32, + // } - let mut ranges = Vec::::new(); + // let mut ranges = Vec::::new(); - for edit in buffer.edits_since::(&self.last_update_version) { - //This bit is extremely wrong, this is not where these row lines should come from - let head_start = edit.old.start.row.saturating_sub(expand_by); - let head_end = (edit.old.end.row + expand_by).min(head.summary().lines.row + 1); + // for edit in buffer.edits_since::(&self.last_update_version) { + // //This bit is extremely wrong, this is not where these row lines should come from + // let head_start = edit.old.start.row.saturating_sub(expand_by); + // let head_end = (edit.old.end.row + expand_by).min(head.summary().lines.row + 1); - let buffer_start = edit.new.start.row.saturating_sub(expand_by); - let buffer_end = (edit.new.end.row + expand_by).min(buffer.row_count()); + // let buffer_start = edit.new.start.row.saturating_sub(expand_by); + // let buffer_end = (edit.new.end.row + expand_by).min(buffer.row_count()); - if let Some(last_range) = ranges.last_mut() { - let head_distance = last_range.head_end.abs_diff(head_end); - let buffer_distance = last_range.buffer_end.abs_diff(buffer_end); + // if let Some(last_range) = ranges.last_mut() { + // let head_distance = last_range.head_end.abs_diff(head_end); + // let buffer_distance = last_range.buffer_end.abs_diff(buffer_end); - if head_distance <= combine_distance || buffer_distance <= combine_distance { - last_range.head_start = last_range.head_start.min(head_start); - last_range.head_end = last_range.head_end.max(head_end); + // if head_distance <= combine_distance || buffer_distance <= combine_distance { + // last_range.head_start = last_range.head_start.min(head_start); + // last_range.head_end = last_range.head_end.max(head_end); - last_range.buffer_start = last_range.buffer_start.min(buffer_start); - last_range.buffer_end = last_range.buffer_end.max(buffer_end); - } else { - ranges.push(EditRange { - head_start, - head_end, - buffer_start, - buffer_end, - }); - } - } else { - ranges.push(EditRange { - head_start, - head_end, - buffer_start, - buffer_end, - }); - } - } + // last_range.buffer_start = last_range.buffer_start.min(buffer_start); + // last_range.buffer_end = last_range.buffer_end.max(buffer_end); + // } else { + // ranges.push(EditRange { + // head_start, + // head_end, + // buffer_start, + // buffer_end, + // }); + // } + // } else { + // ranges.push(EditRange { + // head_start, + // head_end, + // buffer_start, + // buffer_end, + // }); + // } + // } - self.last_update_version = buffer.version().clone(); + // self.last_update_version = buffer.version().clone(); - let mut new_hunks = SumTree::new(); - let mut cursor = self.snapshot.tree.cursor::(); + // let mut new_hunks = SumTree::new(); + // let mut cursor = self.snapshot.tree.cursor::(); - for range in ranges { - let head_range = range.head_start..range.head_end; - let head_slice = head.slice_rows(head_range.clone()); - let head_str = head_slice.to_string(); + // for range in ranges { + // let 
head_range = range.head_start..range.head_end; + // let head_slice = head.slice_rows(head_range.clone()); + // let head_str = head_slice.to_string(); - let buffer_range = range.buffer_start..range.buffer_end; - let buffer_slice = buffer.as_rope().slice_rows(buffer_range.clone()); - let buffer_str = buffer_slice.to_string(); + // let buffer_range = range.buffer_start..range.buffer_end; + // let buffer_slice = buffer.as_rope().slice_rows(buffer_range.clone()); + // let buffer_str = buffer_slice.to_string(); - println!("diffing head {:?}, buffer {:?}", head_range, buffer_range); + // println!("diffing head {:?}, buffer {:?}", head_range, buffer_range); - let mut iter = match HunkIter::diff(head_str.as_bytes(), buffer_str.as_bytes()) { - Some(iter) => iter, - None => continue, - }; + // let mut iter = match HunkIter::diff(head_str.as_bytes(), buffer_str.as_bytes()) { + // Some(iter) => iter, + // None => continue, + // }; - while let Some(hunk) = iter.next(buffer) { - println!("hunk"); - let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); - println!("prefix len: {}", prefix.iter().count()); - new_hunks.extend(prefix.iter().cloned(), buffer); + // while let Some(hunk) = iter.next(buffer) { + // println!("hunk"); + // let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); + // println!("prefix len: {}", prefix.iter().count()); + // new_hunks.extend(prefix.iter().cloned(), buffer); - new_hunks.push(hunk.clone(), buffer); + // new_hunks.push(hunk.clone(), buffer); - cursor.seek(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); - println!("item: {:?}", cursor.item()); - if let Some(item) = cursor.item() { - if item.head_range.end <= hunk.head_range.end { - println!("skipping"); - cursor.next(buffer); - } - } - } - } + // cursor.seek(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); + // println!("item: {:?}", cursor.item()); + // if let Some(item) = cursor.item() { + // if item.head_range.end <= hunk.head_range.end { + // println!("skipping"); + // cursor.next(buffer); + // } + // } + // } + // } - new_hunks.extend( - cursor - .suffix(buffer) - .iter() - .map(|i| { - println!("extending with {i:?}"); - i - }) - .cloned(), - buffer, - ); - drop(cursor); + // new_hunks.extend( + // cursor + // .suffix(buffer) + // .iter() + // .map(|i| { + // println!("extending with {i:?}"); + // i + // }) + // .cloned(), + // buffer, + // ); + // drop(cursor); - self.snapshot.tree = new_hunks; + // self.snapshot.tree = new_hunks; } } @@ -354,22 +446,35 @@ mod tests { let mut buffer = Buffer::new(0, 0, buffer_text); let diff = BufferDiff::new(&Some(head_text.clone()), &buffer); - assert_eq!( - diff.snapshot.hunks(&buffer).collect::>(), - &[DiffHunk { - buffer_range: 1..2, - head_range: 1..2 - }] - ); + assert_hunks(&diff, &buffer, &head_text, &[(1..2, "two\n")]); buffer.edit([(0..0, "point five\n")]); + assert_hunks(&diff, &buffer, &head_text, &[(2..3, "two\n")]); + } + + #[track_caller] + fn assert_hunks( + diff: &BufferDiff, + buffer: &BufferSnapshot, + head_text: &str, + expected_hunks: &[(Range, &str)], + ) { + let hunks = diff.snapshot.hunks(buffer).collect::>(); assert_eq!( - diff.snapshot.hunks(&buffer).collect::>(), - &[DiffHunk { - buffer_range: 2..3, - head_range: 1..2 - }] + hunks.len(), + expected_hunks.len(), + "actual hunks are {hunks:#?}" ); + + let diff_iter = hunks.iter().enumerate(); + for ((index, hunk), (expected_range, expected_str)) in diff_iter.zip(expected_hunks) { + assert_eq!(&hunk.buffer_range, expected_range, "for 
hunk {index}"); + assert_eq!( + &head_text[hunk.head_range.clone()], + *expected_str, + "for hunk {index}" + ); + } } // use rand::rngs::StdRng; From 4b2040a7ca3d12f0614ed8ea9a5f0cb78b521343 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 12 Sep 2022 15:38:44 -0400 Subject: [PATCH 044/140] Move diff logic back into `BufferDiff::update` --- crates/language/src/buffer.rs | 3 +- crates/language/src/git.rs | 333 ++++++++-------------------------- 2 files changed, 79 insertions(+), 257 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 37f2151133..e75e17e541 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -48,7 +48,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, - head_text: Option, + head_text: Option, git_diff: BufferDiff, file: Option>, saved_version: clock::Global, @@ -422,7 +422,6 @@ impl Buffer { }; let git_diff = BufferDiff::new(&head_text, &buffer); - let head_text = head_text.map(|h| Rope::from(h.as_str())); Self { saved_mtime, diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index b3e2f7da51..4025a2a42f 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -83,64 +83,6 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { } } -// struct HunkIter<'a> { -// index: usize, -// patch: GitPatch<'a>, -// } - -// impl<'a> HunkIter<'a> { -// fn diff(head: &'a [u8], current: &'a [u8]) -> Option { -// let mut options = GitOptions::default(); -// options.context_lines(0); -// let patch = match GitPatch::from_buffers(head, None, current, None, Some(&mut options)) { -// Ok(patch) => patch, -// Err(_) => return None, -// }; - -// Some(HunkIter { index: 0, patch }) -// } - -// fn next(&mut self, buffer: &BufferSnapshot) -> Option> { -// if self.index >= self.patch.num_hunks() { -// return None; -// } - -// let (hunk, _) = match self.patch.hunk(self.index) { -// Ok(it) => it, -// Err(_) => return None, -// }; -// let hunk_line_count = self.patch.num_lines_in_hunk(self.index).unwrap(); - -// println!("{hunk:#?}"); -// for index in 0..hunk_line_count { -// println!("{:?}", self.patch.line_in_hunk(self.index, index)); -// } - -// let new_start = hunk.new_start() - 1; -// let new_end = new_start + hunk.new_lines(); -// let start_anchor = buffer.anchor_at(Point::new(new_start, 0), Bias::Left); -// let end_anchor = buffer.anchor_at(Point::new(new_end, 0), Bias::Left); -// let buffer_range = start_anchor..end_anchor; - -// //This is probably wrong? When does this trigger? Should buffer range also do this? 
-// let head_range = if hunk.old_start() == 0 { -// 0..0 -// } else { -// let old_start = hunk.old_start() - 1; -// let old_end = old_start + hunk.old_lines(); -// old_start..old_end -// }; - -// // let head_start_index = self.patch.line_in_hunk(self.index, 0) - -// self.index += 1; -// Some(DiffHunk { -// buffer_range, -// head_range, -// }) -// } -// } - #[derive(Clone)] pub struct BufferDiffSnapshot { tree: SumTree>, @@ -187,221 +129,102 @@ pub struct BufferDiff { impl BufferDiff { pub fn new(head_text: &Option, buffer: &text::BufferSnapshot) -> BufferDiff { - let hunks = if let Some(head_text) = head_text { - let buffer_string = buffer.as_rope().to_string(); - let buffer_bytes = buffer_string.as_bytes(); - - let mut options = GitOptions::default(); - options.context_lines(0); - let patch = match GitPatch::from_buffers( - head_text.as_bytes(), - None, - buffer_bytes, - None, - Some(&mut options), - ) { - Ok(patch) => patch, - Err(_) => todo!("This needs to be handled"), - }; - - let mut hunks = SumTree::>::new(); - let mut delta = 0i64; - for i in 0..patch.num_hunks() { - let diff_line_item_count = patch.num_lines_in_hunk(i).unwrap(); - - // if diff_line_item_count == 0 { - // continue; - // } - - // let calc_line_diff_hunk = || { - - // }; - - // let first_line = patch.line_in_hunk(0).unwrap(); - // let mut hunk = - - for j in 0..diff_line_item_count { - let line = patch.line_in_hunk(i, j).unwrap(); - - let hunk = match line.origin_value() { - libgit::DiffLineType::Addition => { - let buffer_start = line.content_offset(); - let buffer_end = buffer_start as usize + line.content().len(); - let head_offset = (buffer_start - delta) as usize; - delta += line.content().len() as i64; - DiffHunk { - buffer_range: buffer.anchor_before(buffer_start as usize) - ..buffer.anchor_after(buffer_end), - head_range: head_offset..head_offset, - } - } - libgit::DiffLineType::Deletion => { - let head_start = line.content_offset(); - let head_end = head_start as usize + line.content().len(); - let buffer_offset = (head_start + delta) as usize; - delta -= line.content().len() as i64; - DiffHunk { - buffer_range: buffer.anchor_before(buffer_offset) - ..buffer.anchor_after(buffer_offset), - head_range: (head_start as usize)..head_end, - } - } - - libgit::DiffLineType::AddEOFNL => todo!(), - libgit::DiffLineType::ContextEOFNL => todo!(), - libgit::DiffLineType::DeleteEOFNL => todo!(), - libgit::DiffLineType::Context => unreachable!(), - libgit::DiffLineType::FileHeader => continue, - libgit::DiffLineType::HunkHeader => continue, - libgit::DiffLineType::Binary => continue, - }; - - let mut combined = false; - hunks.update_last( - |last_hunk| { - if last_hunk.head_range.end == hunk.head_range.start { - last_hunk.head_range.end = hunk.head_range.end; - last_hunk.buffer_range.end = hunk.buffer_range.end; - combined = true; - } - }, - buffer, - ); - if !combined { - hunks.push(hunk, buffer); - } - } - } - - // let iter = HunkIter::diff(head_text.as_bytes(), buffer_bytes); - // if let Some(mut iter) = iter { - // let mut hunks = SumTree::new(); - // while let Some(hunk) = iter.next(buffer) { - // hunks.push(hunk, buffer); - // } - // println!("========"); - // hunks - // } else { - // SumTree::new() - // } - hunks - } else { - SumTree::new() + let mut instance = BufferDiff { + last_update_version: buffer.version().clone(), + snapshot: BufferDiffSnapshot { + tree: SumTree::new(), + }, }; - BufferDiff { - last_update_version: buffer.version().clone(), - snapshot: BufferDiffSnapshot { tree: hunks }, + if let 
Some(head_text) = head_text { + instance.update(head_text, buffer); } + + instance } pub fn snapshot(&self) -> BufferDiffSnapshot { self.snapshot.clone() } - pub fn update(&mut self, head: &Rope, buffer: &text::BufferSnapshot) { - // let expand_by = 20; - // let combine_distance = 5; + pub fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { + let buffer_string = buffer.as_rope().to_string(); + let buffer_bytes = buffer_string.as_bytes(); - // struct EditRange { - // head_start: u32, - // head_end: u32, - // buffer_start: u32, - // buffer_end: u32, - // } + let mut options = GitOptions::default(); + options.context_lines(0); + let patch = match GitPatch::from_buffers( + head_text.as_bytes(), + None, + buffer_bytes, + None, + Some(&mut options), + ) { + Ok(patch) => patch, + Err(_) => todo!("This needs to be handled"), + }; - // let mut ranges = Vec::::new(); + let mut hunks = SumTree::>::new(); + let mut delta = 0i64; + for hunk_index in 0..patch.num_hunks() { + for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { + let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); - // for edit in buffer.edits_since::(&self.last_update_version) { - // //This bit is extremely wrong, this is not where these row lines should come from - // let head_start = edit.old.start.row.saturating_sub(expand_by); - // let head_end = (edit.old.end.row + expand_by).min(head.summary().lines.row + 1); + let hunk = match line.origin_value() { + libgit::DiffLineType::Addition => { + let buffer_start = line.content_offset(); + let buffer_end = buffer_start as usize + line.content().len(); + let head_offset = (buffer_start - delta) as usize; + delta += line.content().len() as i64; + DiffHunk { + buffer_range: buffer.anchor_before(buffer_start as usize) + ..buffer.anchor_after(buffer_end), + head_range: head_offset..head_offset, + } + } - // let buffer_start = edit.new.start.row.saturating_sub(expand_by); - // let buffer_end = (edit.new.end.row + expand_by).min(buffer.row_count()); + libgit::DiffLineType::Deletion => { + let head_start = line.content_offset(); + let head_end = head_start as usize + line.content().len(); + let buffer_offset = (head_start + delta) as usize; + delta -= line.content().len() as i64; + DiffHunk { + buffer_range: buffer.anchor_before(buffer_offset) + ..buffer.anchor_after(buffer_offset), + head_range: (head_start as usize)..head_end, + } + } - // if let Some(last_range) = ranges.last_mut() { - // let head_distance = last_range.head_end.abs_diff(head_end); - // let buffer_distance = last_range.buffer_end.abs_diff(buffer_end); + libgit::DiffLineType::AddEOFNL => todo!(), + libgit::DiffLineType::ContextEOFNL => todo!(), + libgit::DiffLineType::DeleteEOFNL => todo!(), - // if head_distance <= combine_distance || buffer_distance <= combine_distance { - // last_range.head_start = last_range.head_start.min(head_start); - // last_range.head_end = last_range.head_end.max(head_end); + libgit::DiffLineType::FileHeader => continue, + libgit::DiffLineType::HunkHeader => continue, + libgit::DiffLineType::Binary => continue, - // last_range.buffer_start = last_range.buffer_start.min(buffer_start); - // last_range.buffer_end = last_range.buffer_end.max(buffer_end); - // } else { - // ranges.push(EditRange { - // head_start, - // head_end, - // buffer_start, - // buffer_end, - // }); - // } - // } else { - // ranges.push(EditRange { - // head_start, - // head_end, - // buffer_start, - // buffer_end, - // }); - // } - // } + //We specifically tell git to not give us context 
lines + libgit::DiffLineType::Context => unreachable!(), + }; - // self.last_update_version = buffer.version().clone(); + let mut combined = false; + hunks.update_last( + |last_hunk| { + if last_hunk.head_range.end == hunk.head_range.start { + last_hunk.head_range.end = hunk.head_range.end; + last_hunk.buffer_range.end = hunk.buffer_range.end; + combined = true; + } + }, + buffer, + ); + if !combined { + hunks.push(hunk, buffer); + } + } + } - // let mut new_hunks = SumTree::new(); - // let mut cursor = self.snapshot.tree.cursor::(); - - // for range in ranges { - // let head_range = range.head_start..range.head_end; - // let head_slice = head.slice_rows(head_range.clone()); - // let head_str = head_slice.to_string(); - - // let buffer_range = range.buffer_start..range.buffer_end; - // let buffer_slice = buffer.as_rope().slice_rows(buffer_range.clone()); - // let buffer_str = buffer_slice.to_string(); - - // println!("diffing head {:?}, buffer {:?}", head_range, buffer_range); - - // let mut iter = match HunkIter::diff(head_str.as_bytes(), buffer_str.as_bytes()) { - // Some(iter) => iter, - // None => continue, - // }; - - // while let Some(hunk) = iter.next(buffer) { - // println!("hunk"); - // let prefix = cursor.slice(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); - // println!("prefix len: {}", prefix.iter().count()); - // new_hunks.extend(prefix.iter().cloned(), buffer); - - // new_hunks.push(hunk.clone(), buffer); - - // cursor.seek(&HunkHeadEnd(hunk.head_range.end), Bias::Right, buffer); - // println!("item: {:?}", cursor.item()); - // if let Some(item) = cursor.item() { - // if item.head_range.end <= hunk.head_range.end { - // println!("skipping"); - // cursor.next(buffer); - // } - // } - // } - // } - - // new_hunks.extend( - // cursor - // .suffix(buffer) - // .iter() - // .map(|i| { - // println!("extending with {i:?}"); - // i - // }) - // .cloned(), - // buffer, - // ); - // drop(cursor); - - // self.snapshot.tree = new_hunks; + self.snapshot.tree = hunks; } } From e0ea932fa7d345ddfee557cd29b577b98d515f31 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 13 Sep 2022 20:41:38 -0400 Subject: [PATCH 045/140] Checkpoint preparing for a more organized approach to incremental diff --- crates/language/src/git.rs | 380 ++++++++++++++++++++++++++++--------- 1 file changed, 292 insertions(+), 88 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 4025a2a42f..642ed5f297 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,10 +1,14 @@ use std::ops::Range; +use client::proto::create_buffer_for_peer; use sum_tree::{Bias, SumTree}; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToPoint}; pub use git2 as libgit; -use libgit::{DiffOptions as GitOptions, Patch as GitPatch}; +use libgit::{ + DiffLine as GitDiffLine, DiffLineType as GitDiffLineType, DiffOptions as GitOptions, + Patch as GitPatch, +}; #[derive(Debug, Clone, Copy)] pub enum DiffHunkStatus { @@ -16,12 +20,12 @@ pub enum DiffHunkStatus { #[derive(Debug, Clone, PartialEq, Eq)] pub struct DiffHunk { pub buffer_range: Range, - pub head_range: Range, + pub head_byte_range: Range, } impl DiffHunk { pub fn status(&self) -> DiffHunkStatus { - if self.head_range.is_empty() { + if self.head_byte_range.is_empty() { DiffHunkStatus::Added } else if self.buffer_range.is_empty() { DiffHunkStatus::Removed @@ -37,7 +41,7 @@ impl sum_tree::Item for DiffHunk { fn summary(&self) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), - 
head_range: self.head_range.clone(), + head_range: self.head_byte_range.clone(), } } } @@ -70,6 +74,19 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkHeadEnd { } } +#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct HunkBufferStart(u32); + +impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferStart { + fn add_summary(&mut self, summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) { + self.0 = summary.buffer_range.start.to_point(buffer).row; + } + + fn from_summary(summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) -> Self { + HunkBufferStart(summary.buffer_range.start.to_point(buffer).row) + } +} + #[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] struct HunkBufferEnd(u32); @@ -83,6 +100,40 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { } } +struct HunkLineIter<'a, 'b> { + patch: &'a GitPatch<'b>, + hunk_index: usize, + line_index: usize, +} + +impl<'a, 'b> HunkLineIter<'a, 'b> { + fn new(patch: &'a GitPatch<'b>, hunk_index: usize) -> Self { + HunkLineIter { + patch, + hunk_index, + line_index: 0, + } + } +} + +impl<'a, 'b> std::iter::Iterator for HunkLineIter<'a, 'b> { + type Item = GitDiffLine<'b>; + + fn next(&mut self) -> Option { + if self.line_index >= self.patch.num_lines_in_hunk(self.hunk_index).unwrap() { + return None; + } + + let line_index = self.line_index; + self.line_index += 1; + Some( + self.patch + .line_in_hunk(self.hunk_index, line_index) + .unwrap(), + ) + } +} + #[derive(Clone)] pub struct BufferDiffSnapshot { tree: SumTree>, @@ -94,8 +145,6 @@ impl BufferDiffSnapshot { query_row_range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - // println!("{} hunks overall", self.tree.iter().count()); - self.tree.iter().filter_map(move |hunk| { let range = hunk.buffer_range.to_point(&buffer); @@ -108,7 +157,7 @@ impl BufferDiffSnapshot { Some(DiffHunk { buffer_range: range.start.row..end_row, - head_range: hunk.head_range.clone(), + head_byte_range: hunk.head_byte_range.clone(), }) } else { None @@ -129,18 +178,32 @@ pub struct BufferDiff { impl BufferDiff { pub fn new(head_text: &Option, buffer: &text::BufferSnapshot) -> BufferDiff { - let mut instance = BufferDiff { - last_update_version: buffer.version().clone(), - snapshot: BufferDiffSnapshot { - tree: SumTree::new(), - }, - }; + let mut tree = SumTree::new(); if let Some(head_text) = head_text { - instance.update(head_text, buffer); + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(&head_text, &buffer_text); + + if let Some(patch) = patch { + let mut buffer_divergence = 0; + + for hunk_index in 0..patch.num_hunks() { + let patch = Self::process_patch_hunk( + &mut buffer_divergence, + &patch, + hunk_index, + buffer, + ); + + tree.push(patch, buffer); + } + } } - instance + BufferDiff { + last_update_version: buffer.version().clone(), + snapshot: BufferDiffSnapshot { tree }, + } } pub fn snapshot(&self) -> BufferDiffSnapshot { @@ -148,100 +211,241 @@ impl BufferDiff { } pub fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { - let buffer_string = buffer.as_rope().to_string(); - let buffer_bytes = buffer_string.as_bytes(); + // let buffer_string = buffer.as_rope().to_string(); + // let buffer_bytes = buffer_string.as_bytes(); + // let mut options = GitOptions::default(); + // options.context_lines(0); + // let patch = match GitPatch::from_buffers( + // head_text.as_bytes(), + // None, + // buffer_bytes, + // None, + // Some(&mut options), + // ) { + // Ok(patch) 
=> patch, + // Err(_) => todo!("This needs to be handled"), + // }; + + // let mut hunks = SumTree::>::new(); + // let mut delta = 0i64; + // for hunk_index in 0..patch.num_hunks() { + // for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { + // let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); + + // let hunk = match line.origin_value() { + // GitDiffLineType::Addition => { + // let buffer_start = line.content_offset(); + // let buffer_end = buffer_start as usize + line.content().len(); + // let head_offset = (buffer_start - delta) as usize; + // delta += line.content().len() as i64; + // DiffHunk { + // buffer_range: buffer.anchor_before(buffer_start as usize) + // ..buffer.anchor_after(buffer_end), + // head_byte_range: head_offset..head_offset, + // } + // } + + // GitDiffLineType::Deletion => { + // let head_start = line.content_offset(); + // let head_end = head_start as usize + line.content().len(); + // let buffer_offset = (head_start + delta) as usize; + // delta -= line.content().len() as i64; + // DiffHunk { + // buffer_range: buffer.anchor_before(buffer_offset) + // ..buffer.anchor_after(buffer_offset), + // head_byte_range: (head_start as usize)..head_end, + // } + // } + + // _ => continue, + // }; + + // let mut combined = false; + // hunks.update_last( + // |last_hunk| { + // if last_hunk.head_byte_range.end == hunk.head_byte_range.start { + // last_hunk.head_byte_range.end = hunk.head_byte_range.end; + // last_hunk.buffer_range.end = hunk.buffer_range.end; + // combined = true; + // } + // }, + // buffer, + // ); + // if !combined { + // hunks.push(hunk, buffer); + // } + // } + // } + + // println!("====="); + // for hunk in hunks.iter() { + // let buffer_range = hunk.buffer_range.to_point(&buffer); + // println!( + // "hunk in buffer range {buffer_range:?}, head slice {:?}", + // &head_text[hunk.head_byte_range.clone()] + // ); + // } + // println!("====="); + + // self.snapshot.tree = hunks; + } + + pub fn actual_update( + &mut self, + head_text: &str, + buffer: &BufferSnapshot, + ) -> Option> { + for edit_range in self.group_edit_ranges(buffer) { + // let patch = self.diff(head, current)?; + } + + None + } + + fn diff<'a>(head: &'a str, current: &'a str) -> Option> { let mut options = GitOptions::default(); options.context_lines(0); - let patch = match GitPatch::from_buffers( - head_text.as_bytes(), + + let patch = GitPatch::from_buffers( + head.as_bytes(), None, - buffer_bytes, + current.as_bytes(), None, Some(&mut options), - ) { - Ok(patch) => patch, - Err(_) => todo!("This needs to be handled"), - }; + ); - let mut hunks = SumTree::>::new(); - let mut delta = 0i64; - for hunk_index in 0..patch.num_hunks() { - for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { - let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); + match patch { + Ok(patch) => Some(patch), - let hunk = match line.origin_value() { - libgit::DiffLineType::Addition => { - let buffer_start = line.content_offset(); - let buffer_end = buffer_start as usize + line.content().len(); - let head_offset = (buffer_start - delta) as usize; - delta += line.content().len() as i64; - DiffHunk { - buffer_range: buffer.anchor_before(buffer_start as usize) - ..buffer.anchor_after(buffer_end), - head_range: head_offset..head_offset, - } - } + Err(err) => { + log::error!("`GitPatch::from_buffers` failed: {}", err); + None + } + } + } - libgit::DiffLineType::Deletion => { - let head_start = line.content_offset(); - let head_end = head_start as usize + 
line.content().len(); - let buffer_offset = (head_start + delta) as usize; - delta -= line.content().len() as i64; - DiffHunk { - buffer_range: buffer.anchor_before(buffer_offset) - ..buffer.anchor_after(buffer_offset), - head_range: (head_start as usize)..head_end, - } - } + fn group_edit_ranges(&mut self, buffer: &text::BufferSnapshot) -> Vec> { + const EXPAND_BY: u32 = 20; + const COMBINE_DISTANCE: u32 = 5; - libgit::DiffLineType::AddEOFNL => todo!(), - libgit::DiffLineType::ContextEOFNL => todo!(), - libgit::DiffLineType::DeleteEOFNL => todo!(), + // let mut cursor = self.snapshot.tree.cursor::(); - libgit::DiffLineType::FileHeader => continue, - libgit::DiffLineType::HunkHeader => continue, - libgit::DiffLineType::Binary => continue, + let mut ranges = Vec::>::new(); - //We specifically tell git to not give us context lines - libgit::DiffLineType::Context => unreachable!(), - }; + for edit in buffer.edits_since::(&self.last_update_version) { + let buffer_start = edit.new.start.row.saturating_sub(EXPAND_BY); + let buffer_end = (edit.new.end.row + EXPAND_BY).min(buffer.row_count()); - let mut combined = false; - hunks.update_last( - |last_hunk| { - if last_hunk.head_range.end == hunk.head_range.start { - last_hunk.head_range.end = hunk.head_range.end; - last_hunk.buffer_range.end = hunk.buffer_range.end; - combined = true; - } - }, - buffer, - ); - if !combined { - hunks.push(hunk, buffer); + match ranges.last_mut() { + Some(last_range) if last_range.end.abs_diff(buffer_end) <= COMBINE_DISTANCE => { + last_range.start = last_range.start.min(buffer_start); + last_range.end = last_range.end.max(buffer_end); } + + _ => ranges.push(buffer_start..buffer_end), } } - self.snapshot.tree = hunks; + self.last_update_version = buffer.version().clone(); + ranges } -} -#[derive(Debug, Clone, Copy)] -pub enum GitDiffEdit { - Added(u32), - Modified(u32), - Removed(u32), -} + fn process_patch_hunk<'a>( + buffer_divergence: &mut isize, + patch: &GitPatch<'a>, + hunk_index: usize, + buffer: &text::BufferSnapshot, + ) -> DiffHunk { + let mut buffer_byte_range: Option> = None; + let mut head_byte_range: Option> = None; -impl GitDiffEdit { - pub fn line(self) -> u32 { - use GitDiffEdit::*; + for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { + let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); + let kind = line.origin_value(); + println!("line index: {line_index}, kind: {kind:?}"); + let content_offset = line.content_offset() as isize; - match self { - Added(line) | Modified(line) | Removed(line) => line, + match (kind, &mut buffer_byte_range, &mut head_byte_range) { + (GitDiffLineType::Addition, None, _) => { + let start = *buffer_divergence + content_offset; + let end = start + line.content().len() as isize; + buffer_byte_range = Some(start as usize..end as usize); + } + + (GitDiffLineType::Addition, Some(buffer_byte_range), _) => { + buffer_byte_range.end = content_offset as usize; + } + + (GitDiffLineType::Deletion, _, None) => { + let end = content_offset + line.content().len() as isize; + head_byte_range = Some(content_offset as usize..end as usize); + } + + (GitDiffLineType::Deletion, _, Some(head_byte_range)) => { + let end = content_offset + line.content().len() as isize; + head_byte_range.end = end as usize; + } + + _ => {} + } } + + //unwrap_or deletion without addition + let buffer_byte_range = buffer_byte_range.unwrap_or(0..0); + //unwrap_or addition without deletion + let head_byte_range = head_byte_range.unwrap_or(0..0); + + *buffer_divergence += 
buffer_byte_range.len() as isize - head_byte_range.len() as isize; + + DiffHunk { + buffer_range: buffer.anchor_before(buffer_byte_range.start) + ..buffer.anchor_before(buffer_byte_range.end), + head_byte_range, + } + } + + fn name() { + // if self.hunk_index >= self.patch.num_hunks() { + // return None; + // } + + // let mut line_iter = HunkLineIter::new(&self.patch, self.hunk_index); + // let line = line_iter.find(|line| { + // matches!( + // line.origin_value(), + // GitDiffLineType::Addition | GitDiffLineType::Deletion + // ) + // })?; + + // //For the first line of a hunk the content offset is equally valid for an addition or deletion + // let content_offset = line.content_offset() as usize; + + // let mut buffer_range = content_offset..content_offset; + // let mut head_byte_range = match line.origin_value() { + // GitDiffLineType::Addition => content_offset..content_offset, + // GitDiffLineType::Deletion => content_offset..content_offset + line.content().len(), + // _ => unreachable!(), + // }; + + // for line in line_iter { + // match line.origin_value() { + // GitDiffLineType::Addition => { + // // buffer_range.end = + // } + + // GitDiffLineType::Deletion => {} + + // _ => continue, + // } + // } + + // self.hunk_index += 1; + // Some(DiffHunk { + // buffer_range: buffer.anchor_before(buffer_range.start) + // ..buffer.anchor_before(buffer_range.end), + // head_byte_range, + // }) } } @@ -293,7 +497,7 @@ mod tests { for ((index, hunk), (expected_range, expected_str)) in diff_iter.zip(expected_hunks) { assert_eq!(&hunk.buffer_range, expected_range, "for hunk {index}"); assert_eq!( - &head_text[hunk.head_range.clone()], + &head_text[hunk.head_byte_range.clone()], *expected_str, "for hunk {index}" ); From 2f7283fd13e111452dd63b3c3bb5599ff90a2d5e Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 14 Sep 2022 11:18:33 -0400 Subject: [PATCH 046/140] buffer_divergence doesn't seem to be a concept that needs to be tracked --- crates/language/src/git.rs | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 642ed5f297..2ac0400da8 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -185,16 +185,8 @@ impl BufferDiff { let patch = Self::diff(&head_text, &buffer_text); if let Some(patch) = patch { - let mut buffer_divergence = 0; - for hunk_index in 0..patch.num_hunks() { - let patch = Self::process_patch_hunk( - &mut buffer_divergence, - &patch, - hunk_index, - buffer, - ); - + let patch = Self::process_patch_hunk(&patch, hunk_index, buffer); tree.push(patch, buffer); } } @@ -352,7 +344,6 @@ impl BufferDiff { } fn process_patch_hunk<'a>( - buffer_divergence: &mut isize, patch: &GitPatch<'a>, hunk_index: usize, buffer: &text::BufferSnapshot, @@ -363,18 +354,17 @@ impl BufferDiff { for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); let kind = line.origin_value(); - println!("line index: {line_index}, kind: {kind:?}"); let content_offset = line.content_offset() as isize; match (kind, &mut buffer_byte_range, &mut head_byte_range) { (GitDiffLineType::Addition, None, _) => { - let start = *buffer_divergence + content_offset; - let end = start + line.content().len() as isize; - buffer_byte_range = Some(start as usize..end as usize); + let end = content_offset + line.content().len() as isize; + buffer_byte_range = Some(content_offset as usize..end as usize); } (GitDiffLineType::Addition, 
Some(buffer_byte_range), _) => { - buffer_byte_range.end = content_offset as usize; + let end = content_offset + line.content().len() as isize; + buffer_byte_range.end = end as usize; } (GitDiffLineType::Deletion, _, None) => { @@ -396,8 +386,6 @@ impl BufferDiff { //unwrap_or addition without deletion let head_byte_range = head_byte_range.unwrap_or(0..0); - *buffer_divergence += buffer_byte_range.len() as isize - head_byte_range.len() as isize; - DiffHunk { buffer_range: buffer.anchor_before(buffer_byte_range.start) ..buffer.anchor_before(buffer_byte_range.end), From 96917a8007018c08a373a8fc09608abc73cb3c1d Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 14 Sep 2022 11:20:01 -0400 Subject: [PATCH 047/140] Small clean --- crates/language/src/git.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 2ac0400da8..361a44f377 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -355,25 +355,26 @@ impl BufferDiff { let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); let kind = line.origin_value(); let content_offset = line.content_offset() as isize; + let content_len = line.content().len() as isize; match (kind, &mut buffer_byte_range, &mut head_byte_range) { (GitDiffLineType::Addition, None, _) => { - let end = content_offset + line.content().len() as isize; + let end = content_offset + content_len; buffer_byte_range = Some(content_offset as usize..end as usize); } (GitDiffLineType::Addition, Some(buffer_byte_range), _) => { - let end = content_offset + line.content().len() as isize; + let end = content_offset + content_len; buffer_byte_range.end = end as usize; } (GitDiffLineType::Deletion, _, None) => { - let end = content_offset + line.content().len() as isize; + let end = content_offset + content_len; head_byte_range = Some(content_offset as usize..end as usize); } (GitDiffLineType::Deletion, _, Some(head_byte_range)) => { - let end = content_offset + line.content().len() as isize; + let end = content_offset + content_len; head_byte_range.end = end as usize; } From c1249a3d84195869cbff0cfecaeff4ab8b0b2a52 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 14 Sep 2022 12:38:23 -0400 Subject: [PATCH 048/140] Handle deletions more robustly and correctly --- crates/editor/src/element.rs | 4 ++-- crates/language/src/git.rs | 31 ++++++++++++++++++++++++------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4ee14407b8..3a5166e17e 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -552,10 +552,10 @@ impl EditorElement { //TODO: This rendering is entirely a horrible hack DiffHunkStatus::Removed => { - let row_above = hunk.buffer_range.start; + let row = hunk.buffer_range.start as i64 - 1; let offset = line_height / 2.; - let start_y = row_above as f32 * line_height + offset - scroll_top; + let start_y = row as f32 * line_height + offset - scroll_top; let end_y = start_y + line_height; let width = 0.4 * line_height; diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 361a44f377..f2adee42fa 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,8 +1,7 @@ use std::ops::Range; -use client::proto::create_buffer_for_peer; use sum_tree::{Bias, SumTree}; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToPoint}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; pub use git2 as libgit; 
use libgit::{ @@ -148,7 +147,7 @@ impl BufferDiffSnapshot { self.tree.iter().filter_map(move |hunk| { let range = hunk.buffer_range.to_point(&buffer); - if range.start.row < query_row_range.end && query_row_range.start < range.end.row { + if range.start.row <= query_row_range.end && query_row_range.start <= range.end.row { let end_row = if range.end.column > 0 { range.end.row + 1 } else { @@ -186,8 +185,8 @@ impl BufferDiff { if let Some(patch) = patch { for hunk_index in 0..patch.num_hunks() { - let patch = Self::process_patch_hunk(&patch, hunk_index, buffer); - tree.push(patch, buffer); + let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer); + tree.push(hunk, buffer); } } } @@ -348,10 +347,14 @@ impl BufferDiff { hunk_index: usize, buffer: &text::BufferSnapshot, ) -> DiffHunk { + let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); + assert!(line_item_count > 0); + + let mut first_deletion_buffer_row: Option = None; let mut buffer_byte_range: Option> = None; let mut head_byte_range: Option> = None; - for line_index in 0..patch.num_lines_in_hunk(hunk_index).unwrap() { + for line_index in 0..line_item_count { let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); let kind = line.origin_value(); let content_offset = line.content_offset() as isize; @@ -380,10 +383,24 @@ impl BufferDiff { _ => {} } + + if kind == GitDiffLineType::Deletion && first_deletion_buffer_row.is_none() { + //old_lineno is guarenteed to be Some for deletions + //libgit gives us line numbers that are 1-indexed but also returns a 0 for some states + let row = line.old_lineno().unwrap().saturating_sub(1); + first_deletion_buffer_row = Some(row); + } } //unwrap_or deletion without addition - let buffer_byte_range = buffer_byte_range.unwrap_or(0..0); + let buffer_byte_range = buffer_byte_range.unwrap_or_else(|| { + //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk + let row = first_deletion_buffer_row.unwrap(); + let anchor = buffer.anchor_before(Point::new(row, 0)); + let offset = anchor.to_offset(buffer); + offset..offset + }); + //unwrap_or addition without deletion let head_byte_range = head_byte_range.unwrap_or(0..0); From e72e132ce29588f7cfa712c013a209fa5e19af75 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 14 Sep 2022 18:44:00 -0400 Subject: [PATCH 049/140] Clear out commented code & once again perform full file diff on update --- crates/language/src/git.rs | 142 +++---------------------------------- 1 file changed, 11 insertions(+), 131 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index f2adee42fa..02cf3ca141 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -202,96 +202,20 @@ impl BufferDiff { } pub fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { - // let buffer_string = buffer.as_rope().to_string(); - // let buffer_bytes = buffer_string.as_bytes(); + let mut tree = SumTree::new(); - // let mut options = GitOptions::default(); - // options.context_lines(0); - // let patch = match GitPatch::from_buffers( - // head_text.as_bytes(), - // None, - // buffer_bytes, - // None, - // Some(&mut options), - // ) { - // Ok(patch) => patch, - // Err(_) => todo!("This needs to be handled"), - // }; + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(&head_text, &buffer_text); - // let mut hunks = SumTree::>::new(); - // let mut delta = 0i64; - // for hunk_index in 0..patch.num_hunks() { - // for line_index in 
0..patch.num_lines_in_hunk(hunk_index).unwrap() { - // let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); - - // let hunk = match line.origin_value() { - // GitDiffLineType::Addition => { - // let buffer_start = line.content_offset(); - // let buffer_end = buffer_start as usize + line.content().len(); - // let head_offset = (buffer_start - delta) as usize; - // delta += line.content().len() as i64; - // DiffHunk { - // buffer_range: buffer.anchor_before(buffer_start as usize) - // ..buffer.anchor_after(buffer_end), - // head_byte_range: head_offset..head_offset, - // } - // } - - // GitDiffLineType::Deletion => { - // let head_start = line.content_offset(); - // let head_end = head_start as usize + line.content().len(); - // let buffer_offset = (head_start + delta) as usize; - // delta -= line.content().len() as i64; - // DiffHunk { - // buffer_range: buffer.anchor_before(buffer_offset) - // ..buffer.anchor_after(buffer_offset), - // head_byte_range: (head_start as usize)..head_end, - // } - // } - - // _ => continue, - // }; - - // let mut combined = false; - // hunks.update_last( - // |last_hunk| { - // if last_hunk.head_byte_range.end == hunk.head_byte_range.start { - // last_hunk.head_byte_range.end = hunk.head_byte_range.end; - // last_hunk.buffer_range.end = hunk.buffer_range.end; - // combined = true; - // } - // }, - // buffer, - // ); - // if !combined { - // hunks.push(hunk, buffer); - // } - // } - // } - - // println!("====="); - // for hunk in hunks.iter() { - // let buffer_range = hunk.buffer_range.to_point(&buffer); - // println!( - // "hunk in buffer range {buffer_range:?}, head slice {:?}", - // &head_text[hunk.head_byte_range.clone()] - // ); - // } - // println!("====="); - - // self.snapshot.tree = hunks; - } - - pub fn actual_update( - &mut self, - head_text: &str, - buffer: &BufferSnapshot, - ) -> Option> { - for edit_range in self.group_edit_ranges(buffer) { - // let patch = self.diff(head, current)?; + if let Some(patch) = patch { + for hunk_index in 0..patch.num_hunks() { + let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer); + tree.push(hunk, buffer); + } } - None + self.last_update_version = buffer.version().clone(); + self.snapshot.tree = tree; } fn diff<'a>(head: &'a str, current: &'a str) -> Option> { @@ -316,7 +240,7 @@ impl BufferDiff { } } - fn group_edit_ranges(&mut self, buffer: &text::BufferSnapshot) -> Vec> { + fn group_edit_ranges(&self, buffer: &text::BufferSnapshot) -> Vec> { const EXPAND_BY: u32 = 20; const COMBINE_DISTANCE: u32 = 5; @@ -338,7 +262,6 @@ impl BufferDiff { } } - self.last_update_version = buffer.version().clone(); ranges } @@ -410,49 +333,6 @@ impl BufferDiff { head_byte_range, } } - - fn name() { - // if self.hunk_index >= self.patch.num_hunks() { - // return None; - // } - - // let mut line_iter = HunkLineIter::new(&self.patch, self.hunk_index); - // let line = line_iter.find(|line| { - // matches!( - // line.origin_value(), - // GitDiffLineType::Addition | GitDiffLineType::Deletion - // ) - // })?; - - // //For the first line of a hunk the content offset is equally valid for an addition or deletion - // let content_offset = line.content_offset() as usize; - - // let mut buffer_range = content_offset..content_offset; - // let mut head_byte_range = match line.origin_value() { - // GitDiffLineType::Addition => content_offset..content_offset, - // GitDiffLineType::Deletion => content_offset..content_offset + line.content().len(), - // _ => unreachable!(), - // }; - - // for line in line_iter { - // match 
line.origin_value() { - // GitDiffLineType::Addition => { - // // buffer_range.end = - // } - - // GitDiffLineType::Deletion => {} - - // _ => continue, - // } - // } - - // self.hunk_index += 1; - // Some(DiffHunk { - // buffer_range: buffer.anchor_before(buffer_range.start) - // ..buffer.anchor_before(buffer_range.end), - // head_byte_range, - // }) - } } #[cfg(test)] From 03b6f3e0bf2628b238a8e34f40242ddc387a62ca Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Sep 2022 13:57:57 -0400 Subject: [PATCH 050/140] Reorganize for for purely file level invalidation --- crates/language/src/git.rs | 91 +++++--------------------------------- 1 file changed, 10 insertions(+), 81 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 02cf3ca141..c3f43e54e1 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,13 +1,10 @@ use std::ops::Range; -use sum_tree::{Bias, SumTree}; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; +use sum_tree::SumTree; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToOffset, ToPoint}; pub use git2 as libgit; -use libgit::{ - DiffLine as GitDiffLine, DiffLineType as GitDiffLineType, DiffOptions as GitOptions, - Patch as GitPatch, -}; +use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; #[derive(Debug, Clone, Copy)] pub enum DiffHunkStatus { @@ -99,40 +96,6 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { } } -struct HunkLineIter<'a, 'b> { - patch: &'a GitPatch<'b>, - hunk_index: usize, - line_index: usize, -} - -impl<'a, 'b> HunkLineIter<'a, 'b> { - fn new(patch: &'a GitPatch<'b>, hunk_index: usize) -> Self { - HunkLineIter { - patch, - hunk_index, - line_index: 0, - } - } -} - -impl<'a, 'b> std::iter::Iterator for HunkLineIter<'a, 'b> { - type Item = GitDiffLine<'b>; - - fn next(&mut self) -> Option { - if self.line_index >= self.patch.num_lines_in_hunk(self.hunk_index).unwrap() { - return None; - } - - let line_index = self.line_index; - self.line_index += 1; - Some( - self.patch - .line_in_hunk(self.hunk_index, line_index) - .unwrap(), - ) - } -} - #[derive(Clone)] pub struct BufferDiffSnapshot { tree: SumTree>, @@ -171,30 +134,22 @@ impl BufferDiffSnapshot { } pub struct BufferDiff { - last_update_version: clock::Global, snapshot: BufferDiffSnapshot, } impl BufferDiff { pub fn new(head_text: &Option, buffer: &text::BufferSnapshot) -> BufferDiff { - let mut tree = SumTree::new(); + let mut instance = BufferDiff { + snapshot: BufferDiffSnapshot { + tree: SumTree::new(), + }, + }; if let Some(head_text) = head_text { - let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(&head_text, &buffer_text); - - if let Some(patch) = patch { - for hunk_index in 0..patch.num_hunks() { - let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer); - tree.push(hunk, buffer); - } - } + instance.update(head_text, buffer); } - BufferDiff { - last_update_version: buffer.version().clone(), - snapshot: BufferDiffSnapshot { tree }, - } + instance } pub fn snapshot(&self) -> BufferDiffSnapshot { @@ -214,7 +169,6 @@ impl BufferDiff { } } - self.last_update_version = buffer.version().clone(); self.snapshot.tree = tree; } @@ -240,31 +194,6 @@ impl BufferDiff { } } - fn group_edit_ranges(&self, buffer: &text::BufferSnapshot) -> Vec> { - const EXPAND_BY: u32 = 20; - const COMBINE_DISTANCE: u32 = 5; - - // let mut cursor = self.snapshot.tree.cursor::(); - - let mut ranges = Vec::>::new(); - - for edit in 
buffer.edits_since::(&self.last_update_version) { - let buffer_start = edit.new.start.row.saturating_sub(EXPAND_BY); - let buffer_end = (edit.new.end.row + EXPAND_BY).min(buffer.row_count()); - - match ranges.last_mut() { - Some(last_range) if last_range.end.abs_diff(buffer_end) <= COMBINE_DISTANCE => { - last_range.start = last_range.start.min(buffer_start); - last_range.end = last_range.end.max(buffer_end); - } - - _ => ranges.push(buffer_start..buffer_end), - } - } - - ranges - } - fn process_patch_hunk<'a>( patch: &GitPatch<'a>, hunk_index: usize, From 446bf886555c52d9871f7214b35cd5fc1455e6ac Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Sep 2022 16:17:17 -0400 Subject: [PATCH 051/140] Use row range while building buffer range during diff line iteration --- crates/language/src/git.rs | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index c3f43e54e1..040121fcf2 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,7 +1,7 @@ use std::ops::Range; use sum_tree::SumTree; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToOffset, ToPoint}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToPoint}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; @@ -203,7 +203,7 @@ impl BufferDiff { assert!(line_item_count > 0); let mut first_deletion_buffer_row: Option = None; - let mut buffer_byte_range: Option> = None; + let mut buffer_row_range: Option> = None; let mut head_byte_range: Option> = None; for line_index in 0..line_item_count { @@ -212,15 +212,16 @@ impl BufferDiff { let content_offset = line.content_offset() as isize; let content_len = line.content().len() as isize; - match (kind, &mut buffer_byte_range, &mut head_byte_range) { + match (kind, &mut buffer_row_range, &mut head_byte_range) { (GitDiffLineType::Addition, None, _) => { - let end = content_offset + content_len; - buffer_byte_range = Some(content_offset as usize..end as usize); + //guarenteed to be present for additions + let row = line.new_lineno().unwrap().saturating_sub(1); + buffer_row_range = Some(row..row + 1); } (GitDiffLineType::Addition, Some(buffer_byte_range), _) => { - let end = content_offset + content_len; - buffer_byte_range.end = end as usize; + let row = line.new_lineno().unwrap().saturating_sub(1); + buffer_byte_range.end = row + 1; } (GitDiffLineType::Deletion, _, None) => { @@ -245,20 +246,20 @@ impl BufferDiff { } //unwrap_or deletion without addition - let buffer_byte_range = buffer_byte_range.unwrap_or_else(|| { + let buffer_byte_range = buffer_row_range.unwrap_or_else(|| { //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk let row = first_deletion_buffer_row.unwrap(); - let anchor = buffer.anchor_before(Point::new(row, 0)); - let offset = anchor.to_offset(buffer); - offset..offset + row..row }); //unwrap_or addition without deletion let head_byte_range = head_byte_range.unwrap_or(0..0); + let start = Point::new(buffer_byte_range.start, 0); + let end = Point::new(buffer_byte_range.end, 0); + let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); DiffHunk { - buffer_range: buffer.anchor_before(buffer_byte_range.start) - ..buffer.anchor_before(buffer_byte_range.end), + buffer_range, head_byte_range, } } From b9d84df1274a6a75f17715d6b5c32870e6edb2d6 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Sep 2022 17:44:01 -0400 Subject: [PATCH 
052/140] Track buffer row divergence while iterating through diff lines This allows for offsetting head row index of deleted lines to normalize into buffer row space --- crates/editor/src/element.rs | 2 +- crates/language/src/git.rs | 56 +++++++++++++++++------------------- 2 files changed, 27 insertions(+), 31 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3a5166e17e..40b1e62adf 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -552,7 +552,7 @@ impl EditorElement { //TODO: This rendering is entirely a horrible hack DiffHunkStatus::Removed => { - let row = hunk.buffer_range.start as i64 - 1; + let row = hunk.buffer_range.start; let offset = line_height / 2.; let start_y = row as f32 * line_height + offset - scroll_top; diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 040121fcf2..9065ef5606 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -163,8 +163,9 @@ impl BufferDiff { let patch = Self::diff(&head_text, &buffer_text); if let Some(patch) = patch { + let mut divergence = 0; for hunk_index in 0..patch.num_hunks() { - let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer); + let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); tree.push(hunk, buffer); } } @@ -198,6 +199,7 @@ impl BufferDiff { patch: &GitPatch<'a>, hunk_index: usize, buffer: &text::BufferSnapshot, + buffer_row_divergence: &mut i64, ) -> DiffHunk { let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); assert!(line_item_count > 0); @@ -212,41 +214,35 @@ impl BufferDiff { let content_offset = line.content_offset() as isize; let content_len = line.content().len() as isize; - match (kind, &mut buffer_row_range, &mut head_byte_range) { - (GitDiffLineType::Addition, None, _) => { - //guarenteed to be present for additions - let row = line.new_lineno().unwrap().saturating_sub(1); - buffer_row_range = Some(row..row + 1); - } + if kind == GitDiffLineType::Addition { + *buffer_row_divergence += 1; + let row = line.new_lineno().unwrap().saturating_sub(1); - (GitDiffLineType::Addition, Some(buffer_byte_range), _) => { - let row = line.new_lineno().unwrap().saturating_sub(1); - buffer_byte_range.end = row + 1; + match &mut buffer_row_range { + Some(buffer_row_range) => buffer_row_range.end = row + 1, + None => buffer_row_range = Some(row..row + 1), } - - (GitDiffLineType::Deletion, _, None) => { - let end = content_offset + content_len; - head_byte_range = Some(content_offset as usize..end as usize); - } - - (GitDiffLineType::Deletion, _, Some(head_byte_range)) => { - let end = content_offset + content_len; - head_byte_range.end = end as usize; - } - - _ => {} } - if kind == GitDiffLineType::Deletion && first_deletion_buffer_row.is_none() { - //old_lineno is guarenteed to be Some for deletions - //libgit gives us line numbers that are 1-indexed but also returns a 0 for some states - let row = line.old_lineno().unwrap().saturating_sub(1); - first_deletion_buffer_row = Some(row); + if kind == GitDiffLineType::Deletion { + *buffer_row_divergence -= 1; + let end = content_offset + content_len; + + match &mut head_byte_range { + Some(head_byte_range) => head_byte_range.end = end as usize, + None => head_byte_range = Some(content_offset as usize..end as usize), + } + + if first_deletion_buffer_row.is_none() { + let old_row = line.old_lineno().unwrap().saturating_sub(1); + let row = old_row as i64 + *buffer_row_divergence; + first_deletion_buffer_row = Some(row as 
u32); + } } } //unwrap_or deletion without addition - let buffer_byte_range = buffer_row_range.unwrap_or_else(|| { + let buffer_row_range = buffer_row_range.unwrap_or_else(|| { //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk let row = first_deletion_buffer_row.unwrap(); row..row @@ -255,8 +251,8 @@ impl BufferDiff { //unwrap_or addition without deletion let head_byte_range = head_byte_range.unwrap_or(0..0); - let start = Point::new(buffer_byte_range.start, 0); - let end = Point::new(buffer_byte_range.end, 0); + let start = Point::new(buffer_row_range.start, 0); + let end = Point::new(buffer_row_range.end, 0); let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); DiffHunk { buffer_range, From c4da8c46f70ba8e3c2e475547e3c97bba785dec4 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Sep 2022 18:50:31 -0400 Subject: [PATCH 053/140] Disable unnecessary libgit2 cargo features Co-Authored-By: Mikayla Maki --- Cargo.lock | 18 ------------------ crates/language/Cargo.toml | 2 +- 2 files changed, 1 insertion(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2872d83a94..1f60f1d36c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2234,8 +2234,6 @@ dependencies = [ "libc", "libgit2-sys", "log", - "openssl-probe", - "openssl-sys", "url", ] @@ -2918,9 +2916,7 @@ checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b" dependencies = [ "cc", "libc", - "libssh2-sys", "libz-sys", - "openssl-sys", "pkg-config", ] @@ -2964,20 +2960,6 @@ dependencies = [ "zstd-sys", ] -[[package]] -name = "libssh2-sys" -version = "0.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b094a36eb4b8b8c8a7b4b8ae43b2944502be3e59cd87687595cf6b0a71b3f4ca" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", -] - [[package]] name = "libz-sys" version = "1.1.8" diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 6d347f3595..034b10e89c 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -51,7 +51,7 @@ smol = "1.2" tree-sitter = "0.20" tree-sitter-rust = { version = "*", optional = true } tree-sitter-typescript = { version = "*", optional = true } -git2 = "0.15" +git2 = { version = "0.15", default-features = false } [dev-dependencies] client = { path = "../client", features = ["test-support"] } From 9c8295487752b7982a0277a2b949abd98e81e004 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Sep 2022 19:06:45 -0400 Subject: [PATCH 054/140] Changed diffs to be async and dropped git delay --- crates/editor/src/multi_buffer.rs | 12 +++--- crates/language/src/buffer.rs | 44 +++++++++++++++------ crates/language/src/git.rs | 63 ++++++++++++++----------------- crates/workspace/src/workspace.rs | 2 +- 4 files changed, 70 insertions(+), 51 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1d09b7008f..72b88f837d 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -313,11 +313,13 @@ impl MultiBuffer { } pub fn update_git(&mut self, cx: &mut ModelContext) { - let mut buffers = self.buffers.borrow_mut(); - for buffer in buffers.values_mut() { - buffer.buffer.update(cx, |buffer, _| { - buffer.update_git(); - }) + let buffers = self.buffers.borrow(); + for buffer_state in buffers.values() { + if buffer_state.buffer.read(cx).needs_git_update() { + buffer_state + .buffer + .update(cx, |buffer, cx| buffer.update_git(cx)) + } } } diff --git 
a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index e75e17e541..2cff3796bc 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,4 @@ -use crate::git::{BufferDiff, BufferDiffSnapshot, DiffHunk}; +use crate::git::{BufferDiff, DiffHunk}; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -48,7 +48,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, - head_text: Option, + head_text: Option>, git_diff: BufferDiff, file: Option>, saved_version: clock::Global, @@ -77,7 +77,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - pub diff_snapshot: BufferDiffSnapshot, + pub diff_snapshot: BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -347,7 +347,7 @@ impl Buffer { ) -> Self { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), - head_text.map(|h| h.into()), + head_text.map(|h| Arc::new(h.into())), Some(file), ) } @@ -358,7 +358,7 @@ impl Buffer { file: Option>, ) -> Result { let buffer = TextBuffer::new(replica_id, message.id, message.base_text); - let mut this = Self::build(buffer, message.head_text, file); + let mut this = Self::build(buffer, message.head_text.map(|text| Arc::new(text)), file); this.text.set_line_ending(proto::deserialize_line_ending( proto::LineEnding::from_i32(message.line_ending) .ok_or_else(|| anyhow!("missing line_ending"))?, @@ -414,14 +414,18 @@ impl Buffer { self } - fn build(buffer: TextBuffer, head_text: Option, file: Option>) -> Self { + fn build( + buffer: TextBuffer, + head_text: Option>, + file: Option>, + ) -> Self { let saved_mtime = if let Some(file) = file.as_ref() { file.mtime() } else { UNIX_EPOCH }; - let git_diff = BufferDiff::new(&head_text, &buffer); + let git_diff = smol::block_on(BufferDiff::new(head_text.clone(), &buffer)); Self { saved_mtime, @@ -462,7 +466,7 @@ impl Buffer { BufferSnapshot { text, syntax, - diff_snapshot: self.git_diff.snapshot(), + diff_snapshot: self.git_diff.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -650,11 +654,29 @@ impl Buffer { task } - pub fn update_git(&mut self) { - if let Some(head_text) = &self.head_text { + pub fn needs_git_update(&self) -> bool { + self.git_diff.needs_update(self) + } + + pub fn update_git(&mut self, cx: &mut ModelContext) { + if self.head_text.is_some() { let snapshot = self.snapshot(); - self.git_diff.update(head_text, &snapshot); + let head_text = self.head_text.clone(); self.diff_update_count += 1; + + let buffer_diff = cx + .background() + .spawn(async move { BufferDiff::new(head_text, &snapshot).await }); + + cx.spawn_weak(|this, mut cx| async move { + let buffer_diff = buffer_diff.await; + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, _| { + this.git_diff = buffer_diff; + }) + } + }) + .detach() } } diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 9065ef5606..65ac373f7a 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,4 +1,4 @@ -use std::ops::Range; +use std::{ops::Range, sync::Arc}; use sum_tree::SumTree; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToPoint}; @@ -97,11 +97,25 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { } #[derive(Clone)] -pub struct BufferDiffSnapshot { +pub struct BufferDiff { + last_buffer_version: clock::Global, tree: SumTree>, } -impl 
BufferDiffSnapshot { +impl BufferDiff { + pub async fn new(head_text: Option>, buffer: &text::BufferSnapshot) -> BufferDiff { + let mut instance = BufferDiff { + last_buffer_version: buffer.version().clone(), + tree: SumTree::new(), + }; + + if let Some(head_text) = head_text { + instance.update(&*head_text, buffer); + } + + instance + } + pub fn hunks_in_range<'a>( &'a self, query_row_range: Range, @@ -127,36 +141,11 @@ impl BufferDiffSnapshot { }) } - #[cfg(test)] - fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { - self.hunks_in_range(0..u32::MAX, text) - } -} - -pub struct BufferDiff { - snapshot: BufferDiffSnapshot, -} - -impl BufferDiff { - pub fn new(head_text: &Option, buffer: &text::BufferSnapshot) -> BufferDiff { - let mut instance = BufferDiff { - snapshot: BufferDiffSnapshot { - tree: SumTree::new(), - }, - }; - - if let Some(head_text) = head_text { - instance.update(head_text, buffer); - } - - instance + pub fn needs_update(&self, buffer: &text::BufferSnapshot) -> bool { + buffer.version().changed_since(&self.last_buffer_version) } - pub fn snapshot(&self) -> BufferDiffSnapshot { - self.snapshot.clone() - } - - pub fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { + fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { let mut tree = SumTree::new(); let buffer_text = buffer.as_rope().to_string(); @@ -170,7 +159,13 @@ impl BufferDiff { } } - self.snapshot.tree = tree; + self.tree = tree; + self.last_buffer_version = buffer.version().clone(); + } + + #[cfg(test)] + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + self.hunks_in_range(0..u32::MAX, text) } fn diff<'a>(head: &'a str, current: &'a str) -> Option> { @@ -284,7 +279,7 @@ mod tests { .unindent(); let mut buffer = Buffer::new(0, 0, buffer_text); - let diff = BufferDiff::new(&Some(head_text.clone()), &buffer); + let diff = smol::block_on(BufferDiff::new(Some(Arc::new(head_text.clone())), &buffer)); assert_hunks(&diff, &buffer, &head_text, &[(1..2, "two\n")]); buffer.edit([(0..0, "point five\n")]); @@ -298,7 +293,7 @@ mod tests { head_text: &str, expected_hunks: &[(Range, &str)], ) { - let hunks = diff.snapshot.hunks(buffer).collect::>(); + let hunks = diff.hunks(buffer).collect::>(); assert_eq!( hunks.len(), expected_hunks.len(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ad3862c56f..e28e4d66d1 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -734,7 +734,7 @@ impl ItemHandle for ViewHandle { ); } - const GIT_DELAY: Duration = Duration::from_millis(600); + const GIT_DELAY: Duration = Duration::from_millis(10); let item = item.clone(); pending_git_update.fire_new( GIT_DELAY, From 6825b6077aa8120f4b5978c64d68fde122b39419 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 16 Sep 2022 12:20:31 -0400 Subject: [PATCH 055/140] Properly invalidate when async git diff completes --- crates/editor/src/items.rs | 1 - crates/language/src/buffer.rs | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index d208fc9c15..76e1480180 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -486,7 +486,6 @@ impl Item for Editor { self.buffer().update(cx, |multibuffer, cx| { multibuffer.update_git(cx); }); - cx.notify(); Task::ready(Ok(())) } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 2cff3796bc..5ddebcaff6 100644 --- 
a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -662,7 +662,6 @@ impl Buffer { if self.head_text.is_some() { let snapshot = self.snapshot(); let head_text = self.head_text.clone(); - self.diff_update_count += 1; let buffer_diff = cx .background() @@ -671,8 +670,10 @@ impl Buffer { cx.spawn_weak(|this, mut cx| async move { let buffer_diff = buffer_diff.await; if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, _| { + this.update(&mut cx, |this, cx| { this.git_diff = buffer_diff; + this.diff_update_count += 1; + cx.notify(); }) } }) From 6633c0b3287484bd7b051f2de5bb49fba8fc6379 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 16 Sep 2022 14:49:24 -0400 Subject: [PATCH 056/140] Perform initial file load git diff async --- crates/language/src/buffer.rs | 18 +++++++++--------- crates/language/src/git.rs | 28 +++++++++++++--------------- crates/project/src/worktree.rs | 6 +++++- 3 files changed, 27 insertions(+), 25 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 5ddebcaff6..90e86a20c4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -425,8 +425,6 @@ impl Buffer { UNIX_EPOCH }; - let git_diff = smol::block_on(BufferDiff::new(head_text.clone(), &buffer)); - Self { saved_mtime, saved_version: buffer.version(), @@ -435,7 +433,7 @@ impl Buffer { was_dirty_before_starting_transaction: None, text: buffer, head_text, - git_diff, + git_diff: BufferDiff::new(), file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, @@ -659,16 +657,18 @@ impl Buffer { } pub fn update_git(&mut self, cx: &mut ModelContext) { - if self.head_text.is_some() { + if let Some(head_text) = &self.head_text { let snapshot = self.snapshot(); - let head_text = self.head_text.clone(); + let head_text = head_text.clone(); - let buffer_diff = cx - .background() - .spawn(async move { BufferDiff::new(head_text, &snapshot).await }); + let mut diff = self.git_diff.clone(); + let diff = cx.background().spawn(async move { + diff.update(&head_text, &snapshot).await; + diff + }); cx.spawn_weak(|this, mut cx| async move { - let buffer_diff = buffer_diff.await; + let buffer_diff = diff.await; if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { this.git_diff = buffer_diff; diff --git a/crates/language/src/git.rs b/crates/language/src/git.rs index 65ac373f7a..d713dcbc14 100644 --- a/crates/language/src/git.rs +++ b/crates/language/src/git.rs @@ -1,4 +1,4 @@ -use std::{ops::Range, sync::Arc}; +use std::ops::Range; use sum_tree::SumTree; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToPoint}; @@ -98,22 +98,16 @@ impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { #[derive(Clone)] pub struct BufferDiff { - last_buffer_version: clock::Global, + last_buffer_version: Option, tree: SumTree>, } impl BufferDiff { - pub async fn new(head_text: Option>, buffer: &text::BufferSnapshot) -> BufferDiff { - let mut instance = BufferDiff { - last_buffer_version: buffer.version().clone(), + pub fn new() -> BufferDiff { + BufferDiff { + last_buffer_version: None, tree: SumTree::new(), - }; - - if let Some(head_text) = head_text { - instance.update(&*head_text, buffer); } - - instance } pub fn hunks_in_range<'a>( @@ -142,10 +136,13 @@ impl BufferDiff { } pub fn needs_update(&self, buffer: &text::BufferSnapshot) -> bool { - buffer.version().changed_since(&self.last_buffer_version) + match &self.last_buffer_version { + Some(last) => buffer.version().changed_since(last), + None => 
true, + } } - fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { + pub async fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { let mut tree = SumTree::new(); let buffer_text = buffer.as_rope().to_string(); @@ -160,7 +157,7 @@ impl BufferDiff { } self.tree = tree; - self.last_buffer_version = buffer.version().clone(); + self.last_buffer_version = Some(buffer.version().clone()); } #[cfg(test)] @@ -279,7 +276,8 @@ mod tests { .unindent(); let mut buffer = Buffer::new(0, 0, buffer_text); - let diff = smol::block_on(BufferDiff::new(Some(Arc::new(head_text.clone())), &buffer)); + let mut diff = BufferDiff::new(); + smol::block_on(diff.update(&head_text, &buffer)); assert_hunks(&diff, &buffer, &head_text, &[(1..2, "two\n")]); buffer.edit([(0..0, "point five\n")]); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 42d18eb3bb..2ff3e6fe04 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -449,7 +449,11 @@ impl LocalWorktree { let (file, contents, head_text) = this .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .await?; - Ok(cx.add_model(|cx| Buffer::from_file(0, contents, head_text, Arc::new(file), cx))) + Ok(cx.add_model(|cx| { + let mut buffer = Buffer::from_file(0, contents, head_text, Arc::new(file), cx); + buffer.update_git(cx); + buffer + })) }) } From 8edee9b2a8680bc5db074e76eec7a400ca92caf1 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 16 Sep 2022 16:48:43 -0400 Subject: [PATCH 057/140] Async-ify head text loading --- crates/project/src/worktree.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 2ff3e6fe04..79e2ed9da9 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -573,7 +573,13 @@ impl LocalWorktree { let fs = self.fs.clone(); cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let head_text = fs.load_head_text(&abs_path).await; + + let head_text = { + let fs = fs.clone(); + let abs_path = abs_path.clone(); + let task = async move { fs.load_head_text(&abs_path).await }; + cx.background().spawn(task).await + }; // Eagerly populate the snapshot with an updated entry for the loaded file let entry = this From b18dd8fcff7a87f46fbc98b247b9e2d3198abc4f Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 19 Sep 2022 15:16:04 -0400 Subject: [PATCH 058/140] Fully qualify outside git-related code when a diff is a git diff --- crates/editor/src/display_map/fold_map.rs | 2 +- crates/editor/src/element.rs | 2 +- crates/editor/src/multi_buffer.rs | 33 ++++++++++---------- crates/language/src/buffer.rs | 38 +++++++++++------------ 4 files changed, 38 insertions(+), 37 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 6ab5c6202e..c17cfa39f2 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -274,7 +274,7 @@ impl FoldMap { if buffer.edit_count() != new_buffer.edit_count() || buffer.parse_count() != new_buffer.parse_count() || buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count() - || buffer.diff_update_count() != new_buffer.diff_update_count() + || buffer.git_diff_update_count() != new_buffer.git_diff_update_count() || buffer.trailing_excerpt_update_count() != new_buffer.trailing_excerpt_update_count() { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 
40b1e62adf..a293514559 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1477,7 +1477,7 @@ impl Element for EditorElement { let diff_hunks = snapshot .buffer_snapshot - .diff_hunks_in_range(start_row..end_row) + .git_diff_hunks_in_range(start_row..end_row) .collect(); let mut max_visible_line_width = 0.0; diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 72b88f837d..2f93bc5b09 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -91,7 +91,7 @@ struct BufferState { last_selections_update_count: usize, last_diagnostics_update_count: usize, last_file_update_count: usize, - last_diff_update_count: usize, + last_git_diff_update_count: usize, excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -103,7 +103,7 @@ pub struct MultiBufferSnapshot { parse_count: usize, diagnostics_update_count: usize, trailing_excerpt_update_count: usize, - diff_update_count: usize, + git_diff_update_count: usize, edit_count: usize, is_dirty: bool, has_conflict: bool, @@ -205,7 +205,7 @@ impl MultiBuffer { last_selections_update_count: buffer_state.last_selections_update_count, last_diagnostics_update_count: buffer_state.last_diagnostics_update_count, last_file_update_count: buffer_state.last_file_update_count, - last_diff_update_count: buffer_state.last_diff_update_count, + last_git_diff_update_count: buffer_state.last_git_diff_update_count, excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), @@ -842,7 +842,7 @@ impl MultiBuffer { last_selections_update_count: buffer_snapshot.selections_update_count(), last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(), last_file_update_count: buffer_snapshot.file_update_count(), - last_diff_update_count: buffer_snapshot.diff_update_count(), + last_git_diff_update_count: buffer_snapshot.git_diff_update_count(), excerpts: Default::default(), _subscriptions: [ cx.observe(&buffer, |_, _, cx| cx.notify()), @@ -1265,7 +1265,7 @@ impl MultiBuffer { let mut excerpts_to_edit = Vec::new(); let mut reparsed = false; let mut diagnostics_updated = false; - let mut diff_updated = false; + let mut git_diff_updated = false; let mut is_dirty = false; let mut has_conflict = false; let mut edited = false; @@ -1277,7 +1277,7 @@ impl MultiBuffer { let selections_update_count = buffer.selections_update_count(); let diagnostics_update_count = buffer.diagnostics_update_count(); let file_update_count = buffer.file_update_count(); - let diff_update_count = buffer.diff_update_count(); + let git_diff_update_count = buffer.git_diff_update_count(); let buffer_edited = version.changed_since(&buffer_state.last_version); let buffer_reparsed = parse_count > buffer_state.last_parse_count; @@ -1286,20 +1286,21 @@ impl MultiBuffer { let buffer_diagnostics_updated = diagnostics_update_count > buffer_state.last_diagnostics_update_count; let buffer_file_updated = file_update_count > buffer_state.last_file_update_count; - let buffer_diff_updated = diff_update_count > buffer_state.last_diff_update_count; + let buffer_git_diff_updated = + git_diff_update_count > buffer_state.last_git_diff_update_count; if buffer_edited || buffer_reparsed || buffer_selections_updated || buffer_diagnostics_updated || buffer_file_updated - || buffer_diff_updated + || buffer_git_diff_updated { buffer_state.last_version = version; buffer_state.last_parse_count = parse_count; buffer_state.last_selections_update_count = selections_update_count; 
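// Illustrative sketch of the counter-based invalidation pattern used here: each
// subsystem (edits, diagnostics, git diffs) bumps a monotonically increasing
// counter, and sync compares every counter against the last value recorded for
// that buffer. Types below are hypothetical stand-ins, not the actual
// MultiBuffer/BufferState structs.
#[derive(Default)]
struct Counters {
    edit_count: usize,
    git_diff_update_count: usize,
}

#[derive(Default)]
struct LastSeen {
    edit_count: usize,
    git_diff_update_count: usize,
}

impl LastSeen {
    // Returns true if anything changed since the previous sync, then catches up.
    fn sync(&mut self, counters: &Counters) -> bool {
        let changed = counters.edit_count > self.edit_count
            || counters.git_diff_update_count > self.git_diff_update_count;
        self.edit_count = counters.edit_count;
        self.git_diff_update_count = counters.git_diff_update_count;
        changed
    }
}

fn main() {
    let mut counters = Counters::default();
    let mut last_seen = LastSeen::default();
    assert!(!last_seen.sync(&counters));
    counters.git_diff_update_count += 1; // a git diff finished recomputing
    assert!(last_seen.sync(&counters)); // observer notices the change exactly once
    assert!(!last_seen.sync(&counters)); // already caught up
}
// Compared with boolean dirty flags, counters let several observers track the same
// buffer independently, each holding its own last-seen values.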
buffer_state.last_diagnostics_update_count = diagnostics_update_count; buffer_state.last_file_update_count = file_update_count; - buffer_state.last_diff_update_count = diff_update_count; + buffer_state.last_git_diff_update_count = git_diff_update_count; excerpts_to_edit.extend( buffer_state .excerpts @@ -1311,7 +1312,7 @@ impl MultiBuffer { edited |= buffer_edited; reparsed |= buffer_reparsed; diagnostics_updated |= buffer_diagnostics_updated; - diff_updated |= buffer_diff_updated; + git_diff_updated |= buffer_git_diff_updated; is_dirty |= buffer.is_dirty(); has_conflict |= buffer.has_conflict(); } @@ -1324,8 +1325,8 @@ impl MultiBuffer { if diagnostics_updated { snapshot.diagnostics_update_count += 1; } - if diff_updated { - snapshot.diff_update_count += 1; + if git_diff_updated { + snapshot.git_diff_update_count += 1; } snapshot.is_dirty = is_dirty; snapshot.has_conflict = has_conflict; @@ -2504,8 +2505,8 @@ impl MultiBufferSnapshot { self.diagnostics_update_count } - pub fn diff_update_count(&self) -> usize { - self.diff_update_count + pub fn git_diff_update_count(&self) -> usize { + self.git_diff_update_count } pub fn trailing_excerpt_update_count(&self) -> usize { @@ -2558,13 +2559,13 @@ impl MultiBufferSnapshot { }) } - pub fn diff_hunks_in_range<'a>( + pub fn git_diff_hunks_in_range<'a>( &'a self, row_range: Range, ) -> impl 'a + Iterator> { self.as_singleton() .into_iter() - .flat_map(move |(_, _, buffer)| buffer.diff_hunks_in_range(row_range.clone())) + .flat_map(move |(_, _, buffer)| buffer.git_diff_hunks_in_range(row_range.clone())) } pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 90e86a20c4..53bfe4a10c 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,4 @@ -use crate::git::{BufferDiff, DiffHunk}; +use crate::git; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -49,7 +49,7 @@ pub use lsp::DiagnosticSeverity; pub struct Buffer { text: TextBuffer, head_text: Option>, - git_diff: BufferDiff, + git_diff: git::BufferDiff, file: Option>, saved_version: clock::Global, saved_version_fingerprint: String, @@ -69,7 +69,7 @@ pub struct Buffer { diagnostics_update_count: usize, diagnostics_timestamp: clock::Lamport, file_update_count: usize, - diff_update_count: usize, + git_diff_update_count: usize, completion_triggers: Vec, completion_triggers_timestamp: clock::Lamport, deferred_ops: OperationQueue, @@ -77,13 +77,13 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - pub diff_snapshot: BufferDiff, + pub git_diff_snapshot: git::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, file_update_count: usize, - diff_update_count: usize, + git_diff_update_count: usize, remote_selections: TreeMap, selections_update_count: usize, language: Option>, @@ -433,7 +433,7 @@ impl Buffer { was_dirty_before_starting_transaction: None, text: buffer, head_text, - git_diff: BufferDiff::new(), + git_diff: git::BufferDiff::new(), file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, @@ -448,7 +448,7 @@ impl Buffer { diagnostics_update_count: 0, diagnostics_timestamp: Default::default(), file_update_count: 0, - diff_update_count: 0, + git_diff_update_count: 0, completion_triggers: Default::default(), completion_triggers_timestamp: Default::default(), deferred_ops: OperationQueue::new(), @@ -464,13 +464,13 
@@ impl Buffer { BufferSnapshot { text, syntax, - diff_snapshot: self.git_diff.clone(), + git_diff_snapshot: self.git_diff.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), diagnostics_update_count: self.diagnostics_update_count, file_update_count: self.file_update_count, - diff_update_count: self.diff_update_count, + git_diff_update_count: self.git_diff_update_count, language: self.language.clone(), parse_count: self.parse_count, selections_update_count: self.selections_update_count, @@ -672,7 +672,7 @@ impl Buffer { if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { this.git_diff = buffer_diff; - this.diff_update_count += 1; + this.git_diff_update_count += 1; cx.notify(); }) } @@ -705,8 +705,8 @@ impl Buffer { self.file_update_count } - pub fn diff_update_count(&self) -> usize { - self.diff_update_count + pub fn git_diff_update_count(&self) -> usize { + self.git_diff_update_count } #[cfg(any(test, feature = "test-support"))] @@ -2191,11 +2191,11 @@ impl BufferSnapshot { }) } - pub fn diff_hunks_in_range<'a>( + pub fn git_diff_hunks_in_range<'a>( &'a self, query_row_range: Range, - ) -> impl 'a + Iterator> { - self.diff_snapshot.hunks_in_range(query_row_range, self) + ) -> impl 'a + Iterator> { + self.git_diff_snapshot.hunks_in_range(query_row_range, self) } pub fn diagnostics_in_range<'a, T, O>( @@ -2246,8 +2246,8 @@ impl BufferSnapshot { self.file_update_count } - pub fn diff_update_count(&self) -> usize { - self.diff_update_count + pub fn git_diff_update_count(&self) -> usize { + self.git_diff_update_count } } @@ -2275,7 +2275,7 @@ impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), - diff_snapshot: self.diff_snapshot.clone(), + git_diff_snapshot: self.git_diff_snapshot.clone(), syntax: self.syntax.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), @@ -2283,7 +2283,7 @@ impl Clone for BufferSnapshot { selections_update_count: self.selections_update_count, diagnostics_update_count: self.diagnostics_update_count, file_update_count: self.file_update_count, - diff_update_count: self.diff_update_count, + git_diff_update_count: self.git_diff_update_count, language: self.language.clone(), parse_count: self.parse_count, } From a679557e40f7eafd9aa615d2741a266e9c5987b9 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 19 Sep 2022 18:22:39 -0400 Subject: [PATCH 059/140] Avoid racing git diffs & allow for "as fast as possible" diff updating Co-Authored-By: Mikayla Maki --- assets/settings/default.json | 4 +++ crates/language/src/buffer.rs | 38 +++++++++++++++++++------- crates/settings/src/settings.rs | 18 +++++++++++++ crates/workspace/src/workspace.rs | 45 +++++++++++++++++++++++-------- 4 files changed, 85 insertions(+), 20 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index a12cf44d94..4ebc1e702f 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -74,6 +74,10 @@ "hard_tabs": false, // How many columns a tab should occupy. "tab_size": 4, + // Git gutter behavior configuration. Remove this item to disable git gutters entirely. + "git_gutter": { + "files_included": "all" + }, // Settings specific to the terminal "terminal": { // What shell to use when opening a terminal. 
May take 3 values: diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 53bfe4a10c..6ecfbc7e62 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -46,10 +46,16 @@ pub use {tree_sitter_rust, tree_sitter_typescript}; pub use lsp::DiagnosticSeverity; +struct GitDiffStatus { + diff: git::BufferDiff, + update_in_progress: bool, + update_requested: bool, +} + pub struct Buffer { text: TextBuffer, head_text: Option>, - git_diff: git::BufferDiff, + git_diff_status: GitDiffStatus, file: Option>, saved_version: clock::Global, saved_version_fingerprint: String, @@ -77,7 +83,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - pub git_diff_snapshot: git::BufferDiff, + pub git_diff: git::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -433,7 +439,11 @@ impl Buffer { was_dirty_before_starting_transaction: None, text: buffer, head_text, - git_diff: git::BufferDiff::new(), + git_diff_status: GitDiffStatus { + diff: git::BufferDiff::new(), + update_in_progress: false, + update_requested: false, + }, file, syntax_map: Mutex::new(SyntaxMap::new()), parsing_in_background: false, @@ -464,7 +474,7 @@ impl Buffer { BufferSnapshot { text, syntax, - git_diff_snapshot: self.git_diff.clone(), + git_diff: self.git_diff_status.diff.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -653,15 +663,20 @@ impl Buffer { } pub fn needs_git_update(&self) -> bool { - self.git_diff.needs_update(self) + self.git_diff_status.diff.needs_update(self) } pub fn update_git(&mut self, cx: &mut ModelContext) { + if self.git_diff_status.update_in_progress { + self.git_diff_status.update_requested = true; + return; + } + if let Some(head_text) = &self.head_text { let snapshot = self.snapshot(); let head_text = head_text.clone(); - let mut diff = self.git_diff.clone(); + let mut diff = self.git_diff_status.diff.clone(); let diff = cx.background().spawn(async move { diff.update(&head_text, &snapshot).await; diff @@ -671,9 +686,14 @@ impl Buffer { let buffer_diff = diff.await; if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { - this.git_diff = buffer_diff; + this.git_diff_status.diff = buffer_diff; this.git_diff_update_count += 1; cx.notify(); + + this.git_diff_status.update_in_progress = false; + if this.git_diff_status.update_requested { + this.update_git(cx); + } }) } }) @@ -2195,7 +2215,7 @@ impl BufferSnapshot { &'a self, query_row_range: Range, ) -> impl 'a + Iterator> { - self.git_diff_snapshot.hunks_in_range(query_row_range, self) + self.git_diff.hunks_in_range(query_row_range, self) } pub fn diagnostics_in_range<'a, T, O>( @@ -2275,7 +2295,7 @@ impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), - git_diff_snapshot: self.git_diff_snapshot.clone(), + git_diff: self.git_diff.clone(), syntax: self.syntax.clone(), file: self.file.clone(), remote_selections: self.remote_selections.clone(), diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index e346ff60e6..adb2892b36 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -61,6 +61,22 @@ pub struct EditorSettings { pub format_on_save: Option, pub formatter: Option, pub enable_language_server: Option, + pub git_gutter: Option, +} + +#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] +pub struct GitGutterConfig { + pub files_included: GitGutterLevel, + 
pub debounce_delay_millis: Option, +} + +#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum GitGutterLevel { + #[default] + All, + OnlyTracked, + None, } #[derive(Copy, Clone, Debug, Deserialize, PartialEq, Eq, JsonSchema)] @@ -250,6 +266,7 @@ impl Settings { format_on_save: required(defaults.editor.format_on_save), formatter: required(defaults.editor.formatter), enable_language_server: required(defaults.editor.enable_language_server), + git_gutter: defaults.editor.git_gutter, }, editor_overrides: Default::default(), terminal_defaults: Default::default(), @@ -378,6 +395,7 @@ impl Settings { format_on_save: Some(FormatOnSave::On), formatter: Some(Formatter::LanguageServer), enable_language_server: Some(true), + git_gutter: Default::default(), }, editor_overrides: Default::default(), terminal_defaults: Default::default(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e28e4d66d1..9e8338d289 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -734,18 +734,41 @@ impl ItemHandle for ViewHandle { ); } - const GIT_DELAY: Duration = Duration::from_millis(10); + let debounce_delay = cx + .global::() + .editor_overrides + .git_gutter + .unwrap_or_default() + .debounce_delay_millis; let item = item.clone(); - pending_git_update.fire_new( - GIT_DELAY, - workspace, - cx, - |project, mut cx| async move { - cx.update(|cx| item.update_git(project, cx)) - .await - .log_err(); - }, - ); + + if let Some(delay) = debounce_delay { + const MIN_GIT_DELAY: u64 = 50; + + let delay = delay.max(MIN_GIT_DELAY); + let duration = Duration::from_millis(delay); + + pending_git_update.fire_new( + duration, + workspace, + cx, + |project, mut cx| async move { + cx.update(|cx| item.update_git(project, cx)) + .await + .log_err(); + }, + ); + } else { + let project = workspace.project().downgrade(); + cx.spawn_weak(|_, mut cx| async move { + if let Some(project) = project.upgrade(&cx) { + cx.update(|cx| item.update_git(project, cx)) + .await + .log_err(); + } + }) + .detach(); + } } _ => {} From 632f47930f30e175c81e109c448431b156906600 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 19 Sep 2022 18:44:47 -0400 Subject: [PATCH 060/140] Utilize initial file contents as head text by default Co-Authored-By: Mikayla Maki --- crates/project/src/worktree.rs | 24 ++++++++++++++++++++++-- crates/settings/src/settings.rs | 6 +++--- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 79e2ed9da9..4d2b509738 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -32,6 +32,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; +use settings::Settings; use smol::channel::{self, Sender}; use std::{ any::Any, @@ -571,14 +572,33 @@ impl LocalWorktree { let path = Arc::from(path); let abs_path = self.absolutize(&path); let fs = self.fs.clone(); + + let files_included = cx + .global::() + .editor_overrides + .git_gutter + .unwrap_or_default() + .files_included; + cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let head_text = { + let head_text = if matches!( + files_included, + settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked + ) { let fs = fs.clone(); let abs_path = abs_path.clone(); let task = async move { fs.load_head_text(&abs_path).await }; - cx.background().spawn(task).await + let results = cx.background().spawn(task).await; + + if 
files_included == settings::GitFilesIncluded::All { + results.or_else(|| Some(text.clone())) + } else { + results + } + } else { + None }; // Eagerly populate the snapshot with an updated entry for the loaded file diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index adb2892b36..3f4a764c79 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -66,13 +66,13 @@ pub struct EditorSettings { #[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] pub struct GitGutterConfig { - pub files_included: GitGutterLevel, + pub files_included: GitFilesIncluded, pub debounce_delay_millis: Option, } -#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema, PartialEq, Eq)] #[serde(rename_all = "snake_case")] -pub enum GitGutterLevel { +pub enum GitFilesIncluded { #[default] All, OnlyTracked, From 8d2de1074b1b7583c5ed10cf401ec3a92de291bb Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 19 Sep 2022 19:25:59 -0400 Subject: [PATCH 061/140] Pull git indicator colors out of theme Co-Authored-By: Kay Simmons Co-Authored-By: Mikayla Maki --- crates/editor/src/element.rs | 15 ++++++++++++--- crates/theme/src/theme.rs | 1 + styles/src/styleTree/editor.ts | 6 ++++-- styles/src/themes/common/base16.ts | 5 +++++ styles/src/themes/common/theme.ts | 1 + 5 files changed, 23 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a293514559..2e767d72e6 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -545,10 +545,19 @@ impl EditorElement { } } + let (inserted_color, modified_color, deleted_color) = { + let editor = &cx.global::().theme.editor; + ( + editor.diff_background_inserted, + editor.diff_background_modified, + editor.diff_background_deleted, + ) + }; + for hunk in &layout.diff_hunks { let color = match hunk.status() { - DiffHunkStatus::Added => Color::green(), - DiffHunkStatus::Modified => Color::blue(), + DiffHunkStatus::Added => inserted_color, + DiffHunkStatus::Modified => modified_color, //TODO: This rendering is entirely a horrible hack DiffHunkStatus::Removed => { @@ -565,7 +574,7 @@ impl EditorElement { cx.scene.push_quad(Quad { bounds: highlight_bounds, - background: Some(Color::red()), + background: Some(deleted_color), border: Border::new(0., Color::transparent_black()), corner_radius: 1. 
* line_height, }); diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 739a4c7686..1fd586efee 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -490,6 +490,7 @@ pub struct Editor { pub document_highlight_write_background: Color, pub diff_background_deleted: Color, pub diff_background_inserted: Color, + pub diff_background_modified: Color, pub line_number: Color, pub line_number_active: Color, pub guest_selections: Vec, diff --git a/styles/src/styleTree/editor.ts b/styles/src/styleTree/editor.ts index 62f7a0efdf..29d6857964 100644 --- a/styles/src/styleTree/editor.ts +++ b/styles/src/styleTree/editor.ts @@ -7,6 +7,7 @@ import { player, popoverShadow, text, + textColor, TextColor, } from "./components"; import hoverPopover from "./hoverPopover"; @@ -59,8 +60,9 @@ export default function editor(theme: Theme) { indicator: iconColor(theme, "secondary"), verticalScale: 0.618 }, - diffBackgroundDeleted: backgroundColor(theme, "error"), - diffBackgroundInserted: backgroundColor(theme, "ok"), + diffBackgroundDeleted: theme.ramps.red(0.3).hex(), + diffBackgroundInserted: theme.ramps.green(0.3).hex(), + diffBackgroundModified: theme.ramps.blue(0.3).hex(), documentHighlightReadBackground: theme.editor.highlight.occurrence, documentHighlightWriteBackground: theme.editor.highlight.activeOccurrence, errorColor: theme.textColor.error, diff --git a/styles/src/themes/common/base16.ts b/styles/src/themes/common/base16.ts index 7aa72ef137..326928252e 100644 --- a/styles/src/themes/common/base16.ts +++ b/styles/src/themes/common/base16.ts @@ -113,6 +113,11 @@ export function createTheme( hovered: sample(ramps.blue, 0.1), active: sample(ramps.blue, 0.15), }, + on500Ok: { + base: sample(ramps.green, 0.05), + hovered: sample(ramps.green, 0.1), + active: sample(ramps.green, 0.15) + } }; const borderColor = { diff --git a/styles/src/themes/common/theme.ts b/styles/src/themes/common/theme.ts index e01435b846..b93148ae2c 100644 --- a/styles/src/themes/common/theme.ts +++ b/styles/src/themes/common/theme.ts @@ -78,6 +78,7 @@ export default interface Theme { // Hacks for elements on top of the editor on500: BackgroundColorSet; ok: BackgroundColorSet; + on500Ok: BackgroundColorSet; error: BackgroundColorSet; on500Error: BackgroundColorSet; warning: BackgroundColorSet; From bb8798a8444eb96af94236ba41ea0652b91baf59 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 20 Sep 2022 17:50:29 -0400 Subject: [PATCH 062/140] WIP pls amend me Co-Authored-By: Max Brunsfeld Co-Authored-By: Mikayla Maki --- Cargo.lock | 1 + crates/project/Cargo.toml | 1 + crates/project/src/project.rs | 2 +- crates/project/src/worktree.rs | 76 +++++++++++++++++++++++++++++++++- 4 files changed, 78 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1f60f1d36c..040db0fd41 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3996,6 +3996,7 @@ dependencies = [ "fsevent", "futures", "fuzzy", + "git2", "gpui", "ignore", "language", diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index a4ea6f2286..76eef0efa7 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -52,6 +52,7 @@ smol = "1.2.5" thiserror = "1.0.29" toml = "0.5" rocksdb = "0.18" +git2 = { version = "0.15", default-features = false } [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8fa1fe9622..4d16c6ad1f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4538,7 
+4538,7 @@ impl Project { }) .detach(); } - + let push_strong_handle = { let worktree = worktree.read(cx); self.is_shared() || worktree.is_visible() || worktree.is_remote() diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4d2b509738..b054f93328 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -27,7 +27,7 @@ use language::{ Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope, }; use lazy_static::lazy_static; -use parking_lot::Mutex; +use parking_lot::{Mutex, RwLock}; use postage::{ prelude::{Sink as _, Stream as _}, watch, @@ -105,12 +105,20 @@ pub struct Snapshot { pub struct LocalSnapshot { abs_path: Arc, ignores_by_parent_abs_path: HashMap, (Arc, usize)>, + git_repositories: Vec, removed_entry_ids: HashMap, next_entry_id: Arc, snapshot: Snapshot, extension_counts: HashMap, } +#[derive(Clone)] +pub(crate) struct GitRepositoryState { + content_path: Arc, + git_dir_path: Arc, + repository: Arc>, +} + impl Deref for LocalSnapshot { type Target = Snapshot; @@ -143,6 +151,7 @@ struct ShareState { pub enum Event { UpdatedEntries, + UpdatedGitRepositories(Vec), } impl Entity for Worktree { @@ -373,6 +382,7 @@ impl LocalWorktree { let mut snapshot = LocalSnapshot { abs_path, ignores_by_parent_abs_path: Default::default(), + git_repositories: Default::default(), removed_entry_ids: Default::default(), next_entry_id, snapshot: Snapshot { @@ -504,6 +514,7 @@ impl LocalWorktree { fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext) { self.poll_task.take(); + match self.scan_state() { ScanState::Idle => { self.snapshot = self.background_snapshot.lock().clone(); @@ -512,6 +523,7 @@ impl LocalWorktree { } cx.emit(Event::UpdatedEntries); } + ScanState::Initializing => { let is_fake_fs = self.fs.is_fake(); self.snapshot = self.background_snapshot.lock().clone(); @@ -528,12 +540,14 @@ impl LocalWorktree { })); cx.emit(Event::UpdatedEntries); } + _ => { if force { self.snapshot = self.background_snapshot.lock().clone(); } } } + cx.notify(); } @@ -1285,6 +1299,10 @@ impl LocalSnapshot { pub fn extension_counts(&self) -> &HashMap { &self.extension_counts } + + pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option { + None + } #[cfg(test)] pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree { @@ -3042,6 +3060,61 @@ mod tests { assert!(tree.entry_for_path(".git").unwrap().is_ignored); }); } + + #[gpui::test] + async fn test_git_repository_for_path(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + ".git": {}, + "deps": { + "dep1": { + ".git": {}, + "src": { + "a.txt": "" + } + } + }, + "src": { + "b.txt": "" + } + }, + "c.txt": "" + }), + ) + .await; + + let http_client = FakeHttpClient::with_404_response(); + let client = Client::new(http_client); + let tree = Worktree::local( + client, + Arc::from(Path::new("/root")), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.foreground().run_until_parked(); + + tree.read_with(cx, |tree, cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree.git_repository_for_file_path("c.txt".as_ref()).is_none()); + + let repo1 = tree.git_repository_for_file_path("dir1/src/b.txt".as_ref()).unwrap().lock(); + assert_eq!(repo1.content_path.as_ref(), Path::new("dir1")); + assert_eq!(repo1.git_dir_path.as_ref(), Path::new("dir1/.git")); + + let repo2 = 
tree.git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()).unwrap().lock(); + assert_eq!(repo2.content_path.as_ref(), Path::new("dir1/deps/dep1")); + assert_eq!(repo2.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git")); + }); + } #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { @@ -3161,6 +3234,7 @@ mod tests { abs_path: root_dir.path().into(), removed_entry_ids: Default::default(), ignores_by_parent_abs_path: Default::default(), + git_repositories: Default::default(), next_entry_id: next_entry_id.clone(), snapshot: Snapshot { id: WorktreeId::from_usize(0), From 0d1b2a7e4693f38464bbdfe41fb9b10a03d501e8 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 22 Sep 2022 12:50:35 -0700 Subject: [PATCH 063/140] WIP - max & mikayla working on tests --- crates/project/src/project.rs | 3 +- crates/project/src/worktree.rs | 134 +++++++++++++++++++++++++++++---- 2 files changed, 120 insertions(+), 17 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4d16c6ad1f..36c7c6cf81 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4535,10 +4535,11 @@ impl Project { if worktree.read(cx).is_local() { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), + worktree::Event::UpdatedGitRepositories(_) => todo!(), }) .detach(); } - + let push_strong_handle = { let worktree = worktree.read(cx); self.is_shared() || worktree.is_visible() || worktree.is_remote() diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index b054f93328..49dbe06117 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -18,6 +18,7 @@ use futures::{ Stream, StreamExt, }; use fuzzy::CharBag; +use git2::Repository; use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, @@ -27,7 +28,7 @@ use language::{ Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope, }; use lazy_static::lazy_static; -use parking_lot::{Mutex, RwLock}; +use parking_lot::Mutex; use postage::{ prelude::{Sink as _, Stream as _}, watch, @@ -41,6 +42,7 @@ use std::{ ffi::{OsStr, OsString}, fmt, future::Future, + mem, ops::{Deref, DerefMut}, os::unix::prelude::{OsStrExt, OsStringExt}, path::{Path, PathBuf}, @@ -52,6 +54,7 @@ use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use util::{ResultExt, TryFutureExt}; lazy_static! { + static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); } @@ -101,6 +104,24 @@ pub struct Snapshot { is_complete: bool, } +// + +// 'GitResolver' +// File paths <-> Repository Paths -> git_repository_path() -> First .git in an ancestor in a path +// Repository Paths <-> Repository Pointers -> git_repository_open() +// fs.watch() ^ +// +// Folder: where all the git magic happens +// .git IT +// OR it can be a file that points somewhere else + +// 1. Walk through the file tree, looking for .git files or folders +// 2. When we discover them, open and save a libgit2 pointer to the repository +// 2a. 
Use git_repository_path() to start a watch on the repository (if not already watched) +// +// File paths -> Git repository == Ancestor check (is there a .git in an ancestor folder) +// Git repository -> Files == Descendent check (subtracting out any intersecting .git folders) + #[derive(Clone)] pub struct LocalSnapshot { abs_path: Arc, @@ -113,9 +134,10 @@ pub struct LocalSnapshot { } #[derive(Clone)] -pub(crate) struct GitRepositoryState { +pub struct GitRepositoryState { content_path: Arc, git_dir_path: Arc, + scan_id: usize, repository: Arc>, } @@ -1299,11 +1321,34 @@ impl LocalSnapshot { pub fn extension_counts(&self) -> &HashMap { &self.extension_counts } - + pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option { + for repository in self.git_repositories.iter().rev() { + if path.starts_with(&repository.content_path) { + return Some(repository.clone()); + } + } None } + pub(crate) fn git_repository_for_git_data(&self, path: &Path) -> Option { + for repository in self.git_repositories.iter() { + if path.starts_with(&repository.git_dir_path) { + return Some(repository.clone()); + } + } + None + } + + pub(crate) fn does_git_repository_track_file_path( + &self, + repo: &GitRepositoryState, + file_path: &Path, + ) -> bool { + self.git_repository_for_file_path(file_path) + .map_or(false, |r| r.content_path == repo.content_path) + } + #[cfg(test)] pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree { let root_name = self.root_name.clone(); @@ -1403,6 +1448,25 @@ impl LocalSnapshot { ); } } + } else if entry.path.file_name() == Some(&DOT_GIT) { + let abs_path = self.abs_path.join(&entry.path); + let content_path: Arc = entry.path.parent().unwrap().into(); + if let Err(ix) = self + .git_repositories + .binary_search_by_key(&&content_path, |repo| &repo.content_path) + { + if let Some(repository) = Repository::open(&abs_path).log_err() { + self.git_repositories.insert( + ix, + GitRepositoryState { + content_path, + git_dir_path: repository.path().into(), + scan_id: self.scan_id, + repository: Arc::new(Mutex::new(repository)), + }, + ); + } + } } self.reuse_entry_id(&mut entry); @@ -1549,6 +1613,14 @@ impl LocalSnapshot { { *scan_id = self.snapshot.scan_id; } + } else if path.file_name() == Some(&DOT_GIT) { + let parent_path = path.parent().unwrap(); + if let Ok(ix) = self + .git_repositories + .binary_search_by_key(&parent_path, |repo| repo.content_path.as_ref()) + { + self.git_repositories[ix].scan_id = self.snapshot.scan_id; + } } } @@ -2423,6 +2495,7 @@ impl BackgroundScanner { self.snapshot.lock().removed_entry_ids.clear(); self.update_ignore_statuses().await; + self.update_git_repositories().await; true } @@ -2488,6 +2561,16 @@ impl BackgroundScanner { .await; } + async fn update_git_repositories(&self) { + let mut snapshot = self.snapshot(); + let mut git_repositories = mem::take(&mut snapshot.git_repositories); + git_repositories.retain(|git_repository| { + let dot_git_path = git_repository.content_path.join(&*DOT_GIT); + snapshot.entry_for_path(dot_git_path).is_some() + }); + snapshot.git_repositories = git_repositories; + } + async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) { let mut ignore_stack = job.ignore_stack; if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) { @@ -3060,7 +3143,7 @@ mod tests { assert!(tree.entry_for_path(".git").unwrap().is_ignored); }); } - + #[gpui::test] async fn test_git_repository_for_path(cx: &mut TestAppContext) { let fs = 
FakeFs::new(cx.background()); @@ -3068,7 +3151,9 @@ mod tests { "/root", json!({ "dir1": { - ".git": {}, + ".git": { + "HEAD": "abc" + }, "deps": { "dep1": { ".git": {}, @@ -3097,22 +3182,39 @@ mod tests { &mut cx.to_async(), ) .await - .unwrap(); - + .unwrap(); + cx.foreground().run_until_parked(); - + tree.read_with(cx, |tree, cx| { let tree = tree.as_local().unwrap(); - - assert!(tree.git_repository_for_file_path("c.txt".as_ref()).is_none()); - let repo1 = tree.git_repository_for_file_path("dir1/src/b.txt".as_ref()).unwrap().lock(); - assert_eq!(repo1.content_path.as_ref(), Path::new("dir1")); - assert_eq!(repo1.git_dir_path.as_ref(), Path::new("dir1/.git")); + assert!(tree + .git_repository_for_file_path("c.txt".as_ref()) + .is_none()); - let repo2 = tree.git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()).unwrap().lock(); - assert_eq!(repo2.content_path.as_ref(), Path::new("dir1/deps/dep1")); - assert_eq!(repo2.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git")); + let repo = tree + .git_repository_for_file_path("dir1/src/b.txt".as_ref()) + .unwrap(); + + // Need to update the file system for anything involving git + // Goal: Make this test pass + // Up Next: Invalidating git repos! + assert_eq!(repo.content_path.as_ref(), Path::new("dir1")); + assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git")); + + let repo = tree + .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()) + .unwrap(); + + assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1")); + assert_eq!( repo = tree .git_repository_for_git_data("dir/.git/HEAD".as_ref()) + .unwrap(); + assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1")); + + assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref())); + assert!(!tree + .does_git_repository_track_file_path(&repo, "dir1/deps/dep1/src/a.txt".as_ref())); }); } From 6ac9308a034cd480357355c2566c44464aaf9058 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 22 Sep 2022 16:55:24 -0700 Subject: [PATCH 064/140] Added git repository type infrastructure and moved git file system stuff into fs abstraction so we can test without touching the file system. 
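
The worktree now keeps its repositories behind the `GitRepository` trait and asks the `Fs` implementation to open them through `open_git_repository`, so tests can substitute a `FakeGitRepository` instead of a real libgit2 handle. As a rough, illustrative sketch only (the helper name `repo_containing` is invented here; the trait, `is_path_managed_by`, `boxed_clone`, and the reverse-lexicographic lookup all come from the worktree code in this patch), a caller resolves the repository that owns a path roughly like this:

    use crate::git_repository::GitRepository;
    use std::path::Path;

    // Picks the most specific repository containing `path`. The slice is
    // assumed to be ordered lexicographically by content path, so scanning
    // in reverse finds the deepest match first.
    fn repo_containing(
        repos: &[Box<dyn GitRepository>],
        path: &Path,
    ) -> Option<Box<dyn GitRepository>> {
        repos
            .iter()
            .rev()
            .find(|repo| repo.is_path_managed_by(path))
            .map(|repo| repo.boxed_clone())
    }
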
Co-Authored-By: kay@zed.dev --- crates/project/src/fs.rs | 27 ++++++ crates/project/src/git_repository.rs | 132 ++++++++++++++++++++++++++ crates/project/src/project.rs | 1 + crates/project/src/worktree.rs | 135 ++++++++++++++------------- 4 files changed, 230 insertions(+), 65 deletions(-) create mode 100644 crates/project/src/git_repository.rs diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index a983df0f4b..70d1879886 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -12,6 +12,7 @@ use std::{ pin::Pin, time::{Duration, SystemTime}, }; + use text::Rope; #[cfg(any(test, feature = "test-support"))] @@ -21,6 +22,8 @@ use futures::lock::Mutex; #[cfg(any(test, feature = "test-support"))] use std::sync::{Arc, Weak}; +use crate::git_repository::{FakeGitRepository, GitRepository, RealGitRepository}; + #[async_trait::async_trait] pub trait Fs: Send + Sync { async fn create_dir(&self, path: &Path) -> Result<()>; @@ -45,6 +48,11 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; + fn open_git_repository( + &self, + abs_dotgit_path: &Path, + content_path: &Arc, + ) -> Option>; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; @@ -270,6 +278,14 @@ impl Fs for RealFs { }))) } + fn open_git_repository( + &self, + abs_dotgit_path: &Path, + content_path: &Arc, + ) -> Option> { + RealGitRepository::open(abs_dotgit_path, content_path) + } + fn is_fake(&self) -> bool { false } @@ -885,6 +901,17 @@ impl Fs for FakeFs { })) } + fn open_git_repository( + &self, + abs_dotgit_path: &Path, + content_path: &Arc, + ) -> Option> { + Some(Box::new(FakeGitRepository::new( + abs_dotgit_path, + content_path, + ))) + } + fn is_fake(&self) -> bool { true } diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs new file mode 100644 index 0000000000..fe7747be9b --- /dev/null +++ b/crates/project/src/git_repository.rs @@ -0,0 +1,132 @@ +use git2::Repository; +use parking_lot::Mutex; +use std::{path::Path, sync::Arc}; +use util::ResultExt; + +pub trait GitRepository: Send + Sync { + fn boxed_clone(&self) -> Box; + fn is_path_managed_by(&self, path: &Path) -> bool; + fn is_path_in_git_folder(&self, path: &Path) -> bool; + fn content_path(&self) -> &Path; + fn git_dir_path(&self) -> &Path; + fn last_scan_id(&self) -> usize; + fn set_scan_id(&mut self, scan_id: usize); +} + +#[derive(Clone)] +pub struct RealGitRepository { + // Path to folder containing the .git file or directory + content_path: Arc, + // Path to the actual .git folder. 
+ // Note: if .git is a file, this points to the folder indicated by the .git file + git_dir_path: Arc, + last_scan_id: usize, + libgit_repository: Arc>, +} + +impl RealGitRepository { + pub fn open( + abs_dotgit_path: &Path, + content_path: &Arc, + ) -> Option> { + Repository::open(&abs_dotgit_path) + .log_err() + .map::, _>(|libgit_repository| { + Box::new(Self { + content_path: content_path.clone(), + git_dir_path: libgit_repository.path().into(), + last_scan_id: 0, + libgit_repository: Arc::new(parking_lot::Mutex::new(libgit_repository)), + }) + }) + } +} + +impl GitRepository for RealGitRepository { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) + } + + fn is_path_managed_by(&self, path: &Path) -> bool { + path.starts_with(&self.content_path) + } + + fn is_path_in_git_folder(&self, path: &Path) -> bool { + path.starts_with(&self.git_dir_path) + } + + fn content_path(&self) -> &Path { + self.content_path.as_ref() + } + + fn git_dir_path(&self) -> &Path { + self.git_dir_path.as_ref() + } + + fn last_scan_id(&self) -> usize { + self.last_scan_id + } + + fn set_scan_id(&mut self, scan_id: usize) { + self.last_scan_id = scan_id; + } +} + +impl PartialEq for &Box { + fn eq(&self, other: &Self) -> bool { + self.content_path() == other.content_path() + } +} +impl Eq for &Box {} + +#[cfg(any(test, feature = "test-support"))] +#[derive(Clone)] +pub struct FakeGitRepository { + // Path to folder containing the .git file or directory + content_path: Arc, + // Path to the actual .git folder. + // Note: if .git is a file, this points to the folder indicated by the .git file + git_dir_path: Arc, + last_scan_id: usize, +} + +impl FakeGitRepository { + pub fn new(abs_dotgit_path: &Path, content_path: &Arc) -> FakeGitRepository { + Self { + content_path: content_path.clone(), + git_dir_path: abs_dotgit_path.into(), + last_scan_id: 0, + } + } +} + +#[cfg(any(test, feature = "test-support"))] +impl GitRepository for FakeGitRepository { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) + } + + fn is_path_managed_by(&self, path: &Path) -> bool { + path.starts_with(&self.content_path) + } + + fn is_path_in_git_folder(&self, path: &Path) -> bool { + path.starts_with(&self.git_dir_path) + } + + fn content_path(&self) -> &Path { + self.content_path.as_ref() + } + + fn git_dir_path(&self) -> &Path { + self.git_dir_path.as_ref() + } + + fn last_scan_id(&self) -> usize { + self.last_scan_id + } + + fn set_scan_id(&mut self, scan_id: usize) { + self.last_scan_id = scan_id; + } +} diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 36c7c6cf81..78a500585a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1,4 +1,5 @@ pub mod fs; +mod git_repository; mod ignore; mod lsp_command; pub mod search; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 49dbe06117..5ae8bf542c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1,4 +1,4 @@ -use crate::{copy_recursive, ProjectEntryId, RemoveOptions}; +use crate::{copy_recursive, git_repository::GitRepository, ProjectEntryId, RemoveOptions}; use super::{ fs::{self, Fs}, @@ -18,7 +18,6 @@ use futures::{ Stream, StreamExt, }; use fuzzy::CharBag; -use git2::Repository; use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, @@ -104,41 +103,32 @@ pub struct Snapshot { is_complete: bool, } -// - -// 'GitResolver' -// File paths <-> Repository Paths -> git_repository_path() -> First .git 
in an ancestor in a path -// Repository Paths <-> Repository Pointers -> git_repository_open() -// fs.watch() ^ -// -// Folder: where all the git magic happens -// .git IT -// OR it can be a file that points somewhere else - -// 1. Walk through the file tree, looking for .git files or folders -// 2. When we discover them, open and save a libgit2 pointer to the repository -// 2a. Use git_repository_path() to start a watch on the repository (if not already watched) -// -// File paths -> Git repository == Ancestor check (is there a .git in an ancestor folder) -// Git repository -> Files == Descendent check (subtracting out any intersecting .git folders) - -#[derive(Clone)] pub struct LocalSnapshot { abs_path: Arc, ignores_by_parent_abs_path: HashMap, (Arc, usize)>, - git_repositories: Vec, + git_repositories: Vec>, removed_entry_ids: HashMap, next_entry_id: Arc, snapshot: Snapshot, extension_counts: HashMap, } -#[derive(Clone)] -pub struct GitRepositoryState { - content_path: Arc, - git_dir_path: Arc, - scan_id: usize, - repository: Arc>, +impl Clone for LocalSnapshot { + fn clone(&self) -> Self { + Self { + abs_path: self.abs_path.clone(), + ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(), + git_repositories: self + .git_repositories + .iter() + .map(|repo| repo.boxed_clone()) + .collect(), + removed_entry_ids: self.removed_entry_ids.clone(), + next_entry_id: self.next_entry_id.clone(), + snapshot: self.snapshot.clone(), + extension_counts: self.extension_counts.clone(), + } + } } impl Deref for LocalSnapshot { @@ -173,7 +163,7 @@ struct ShareState { pub enum Event { UpdatedEntries, - UpdatedGitRepositories(Vec), + UpdatedGitRepositories(Vec>), } impl Entity for Worktree { @@ -1322,31 +1312,47 @@ impl LocalSnapshot { &self.extension_counts } - pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option { - for repository in self.git_repositories.iter().rev() { - if path.starts_with(&repository.content_path) { - return Some(repository.clone()); - } - } - None + // Gives the most specific git repository for a given path + pub(crate) fn git_repository_for_file_path( + &self, + path: &Path, + ) -> Option> { + self.git_repositories + .iter() + .rev() //git_repository is ordered lexicographically + .find(|repo| repo.is_path_managed_by(path)) + .map(|repo| repo.boxed_clone()) } - pub(crate) fn git_repository_for_git_data(&self, path: &Path) -> Option { - for repository in self.git_repositories.iter() { - if path.starts_with(&repository.git_dir_path) { - return Some(repository.clone()); - } - } - None + // ~/zed: + // - src + // - crates + // - .git -> /usr/.git + pub(crate) fn git_repository_for_git_data( + &self, + path: &Path, + ) -> Option> { + self.git_repositories + .iter() + .find(|repo| repo.is_path_in_git_folder(path)) + .map(|repo| repo.boxed_clone()) } pub(crate) fn does_git_repository_track_file_path( &self, - repo: &GitRepositoryState, + repo: &Box, file_path: &Path, ) -> bool { + // /zed + // - .git + // - a.txt + // - /dep + // - b.txt + // - .git + + // Depends on git_repository_for_file_path returning the most specific git repository for a given path self.git_repository_for_file_path(file_path) - .map_or(false, |r| r.content_path == repo.content_path) + .map_or(false, |r| &r == repo) } #[cfg(test)] @@ -1431,7 +1437,7 @@ impl LocalSnapshot { } fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry { - if !entry.is_dir() && entry.path.file_name() == Some(&GITIGNORE) { + if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) { let 
abs_path = self.abs_path.join(&entry.path); match smol::block_on(build_gitignore(&abs_path, fs)) { Ok(ignore) => { @@ -1453,18 +1459,10 @@ impl LocalSnapshot { let content_path: Arc = entry.path.parent().unwrap().into(); if let Err(ix) = self .git_repositories - .binary_search_by_key(&&content_path, |repo| &repo.content_path) + .binary_search_by_key(&content_path.as_ref(), |repo| repo.content_path()) { - if let Some(repository) = Repository::open(&abs_path).log_err() { - self.git_repositories.insert( - ix, - GitRepositoryState { - content_path, - git_dir_path: repository.path().into(), - scan_id: self.scan_id, - repository: Arc::new(Mutex::new(repository)), - }, - ); + if let Some(repository) = fs.open_git_repository(&abs_path, &content_path) { + self.git_repositories.insert(ix, repository); } } } @@ -1617,9 +1615,9 @@ impl LocalSnapshot { let parent_path = path.parent().unwrap(); if let Ok(ix) = self .git_repositories - .binary_search_by_key(&parent_path, |repo| repo.content_path.as_ref()) + .binary_search_by_key(&parent_path, |repo| repo.content_path().as_ref()) { - self.git_repositories[ix].scan_id = self.snapshot.scan_id; + self.git_repositories[ix].set_scan_id(self.snapshot.scan_id); } } } @@ -2565,7 +2563,7 @@ impl BackgroundScanner { let mut snapshot = self.snapshot(); let mut git_repositories = mem::take(&mut snapshot.git_repositories); git_repositories.retain(|git_repository| { - let dot_git_path = git_repository.content_path.join(&*DOT_GIT); + let dot_git_path = git_repository.content_path().join(&*DOT_GIT); snapshot.entry_for_path(dot_git_path).is_some() }); snapshot.git_repositories = git_repositories; @@ -2925,6 +2923,7 @@ mod tests { fmt::Write, time::{SystemTime, UNIX_EPOCH}, }; + use util::test::temp_tree; #[gpui::test] @@ -3147,6 +3146,7 @@ mod tests { #[gpui::test] async fn test_git_repository_for_path(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background()); + fs.insert_tree( "/root", json!({ @@ -3200,17 +3200,22 @@ mod tests { // Need to update the file system for anything involving git // Goal: Make this test pass // Up Next: Invalidating git repos! 
- assert_eq!(repo.content_path.as_ref(), Path::new("dir1")); - assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git")); + assert_eq!(repo.content_path(), Path::new("dir1")); + assert_eq!(repo.git_dir_path(), Path::new("dir1/.git")); let repo = tree .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1")); - assert_eq!( repo = tree .git_repository_for_git_data("dir/.git/HEAD".as_ref()) + assert_eq!(repo.content_path(), Path::new("dir1/deps/dep1")); + assert_eq!(repo.git_dir_path(), Path::new("dir1/deps/dep1")); + + let repo = tree + .git_repository_for_git_data("dir1/.git/HEAD".as_ref()) .unwrap(); - assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1")); + + assert_eq!(repo.content_path(), Path::new("dir1")); + assert_eq!(repo.git_dir_path(), Path::new("dir1/.git")); assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref())); assert!(!tree From c8e63d76a41596a56cd15d0804d0e4cba631b509 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 26 Sep 2022 07:59:51 -0700 Subject: [PATCH 065/140] Get the test to failing,,, correctly --- Cargo.lock | 2 + crates/project/Cargo.toml | 1 + crates/project/src/git_repository.rs | 10 ++++ crates/project/src/worktree.rs | 68 ++++++++++++++-------------- crates/util/Cargo.toml | 6 ++- crates/util/src/lib.rs | 7 +++ crates/util/src/test.rs | 8 ++++ 7 files changed, 68 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 040db0fd41..8157327cf2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6359,6 +6359,8 @@ version = "0.1.0" dependencies = [ "anyhow", "futures", + "git2", + "lazy_static", "log", "rand 0.8.5", "serde_json", diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 76eef0efa7..8ca01eac2c 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -54,6 +54,7 @@ toml = "0.5" rocksdb = "0.18" git2 = { version = "0.15", default-features = false } + [dev-dependencies] client = { path = "../client", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] } diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index fe7747be9b..47849bf644 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -11,6 +11,7 @@ pub trait GitRepository: Send + Sync { fn git_dir_path(&self) -> &Path; fn last_scan_id(&self) -> usize; fn set_scan_id(&mut self, scan_id: usize); + fn with_repo(&mut self, f: Box); } #[derive(Clone)] @@ -70,6 +71,11 @@ impl GitRepository for RealGitRepository { fn set_scan_id(&mut self, scan_id: usize) { self.last_scan_id = scan_id; } + + fn with_repo(&mut self, f: Box) { + let mut git2 = self.libgit_repository.lock(); + f(&mut git2) + } } impl PartialEq for &Box { @@ -129,4 +135,8 @@ impl GitRepository for FakeGitRepository { fn set_scan_id(&mut self, scan_id: usize) { self.last_scan_id = scan_id; } + + fn with_repo(&mut self, _: Box) { + unimplemented!(); + } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5ae8bf542c..6fd00aabbd 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -26,7 +26,6 @@ use language::{ proto::{deserialize_version, serialize_line_ending, serialize_version}, Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope, }; -use lazy_static::lazy_static; use parking_lot::Mutex; use postage::{ prelude::{Sink as _, Stream as _}, @@ -50,12 +49,7 @@ use std::{ 
time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{ResultExt, TryFutureExt}; - -lazy_static! { - static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); - static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); -} +use util::{ResultExt, TryFutureExt, DOT_GIT, GITIGNORE}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -1317,6 +1311,13 @@ impl LocalSnapshot { &self, path: &Path, ) -> Option> { + let repos = self + .git_repositories + .iter() + .map(|repo| repo.content_path().to_str().unwrap().to_string()) + .collect::>(); + dbg!(repos); + self.git_repositories .iter() .rev() //git_repository is ordered lexicographically @@ -1437,6 +1438,7 @@ impl LocalSnapshot { } fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry { + dbg!(&entry.path); if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) { let abs_path = self.abs_path.join(&entry.path); match smol::block_on(build_gitignore(&abs_path, fs)) { @@ -1455,6 +1457,8 @@ impl LocalSnapshot { } } } else if entry.path.file_name() == Some(&DOT_GIT) { + dbg!(&entry.path); + let abs_path = self.abs_path.join(&entry.path); let content_path: Arc = entry.path.parent().unwrap().into(); if let Err(ix) = self @@ -2223,6 +2227,7 @@ impl BackgroundScanner { if ignore_stack.is_all() { if let Some(mut root_entry) = snapshot.root_entry().cloned() { root_entry.is_ignored = true; + dbg!("scan dirs entry"); snapshot.insert_entry(root_entry, self.fs.as_ref()); } } @@ -2445,6 +2450,7 @@ impl BackgroundScanner { snapshot.root_char_bag, ); fs_entry.is_ignored = ignore_stack.is_all(); + dbg!("process_events entry"); snapshot.insert_entry(fs_entry, self.fs.as_ref()); let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); @@ -3145,50 +3151,46 @@ mod tests { #[gpui::test] async fn test_git_repository_for_path(cx: &mut TestAppContext) { - let fs = FakeFs::new(cx.background()); - - fs.insert_tree( - "/root", - json!({ - "dir1": { - ".git": { - "HEAD": "abc" - }, - "deps": { - "dep1": { - ".git": {}, - "src": { - "a.txt": "" - } + let root = temp_tree(json!({ + "dir1": { + ".git": {}, + "deps": { + "dep1": { + ".git": {}, + "src": { + "a.txt": "" } - }, - "src": { - "b.txt": "" } }, - "c.txt": "" - }), - ) - .await; + "src": { + "b.txt": "" + } + }, + "c.txt": "" + })); let http_client = FakeHttpClient::with_404_response(); let client = Client::new(http_client); let tree = Worktree::local( client, - Arc::from(Path::new("/root")), + root.path(), true, - fs.clone(), + Arc::new(RealFs), Default::default(), &mut cx.to_async(), ) .await .unwrap(); - cx.foreground().run_until_parked(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; - tree.read_with(cx, |tree, cx| { + tree.read_with(cx, |tree, _cx| { let tree = tree.as_local().unwrap(); + dbg!(tree); + assert!(tree .git_repository_for_file_path("c.txt".as_ref()) .is_none()); diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 4ec214fef1..78416aa5b5 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -7,17 +7,21 @@ edition = "2021" doctest = false [features] -test-support = ["rand", "serde_json", "tempdir"] +test-support = ["rand", "serde_json", "tempdir", "git2"] [dependencies] anyhow = "1.0.38" futures = "0.3" log = { version = "0.4.16", features = ["kv_unstable_serde"] } +lazy_static = "1.4.0" rand = { version = "0.8", optional = true } tempdir = { version = "0.3.7", optional = 
true } serde_json = { version = "1.0", features = ["preserve_order"], optional = true } +git2 = { version = "0.15", default-features = false, optional = true } + [dev-dependencies] rand = { version = "0.8" } tempdir = { version = "0.3.7" } serde_json = { version = "1.0", features = ["preserve_order"] } +git2 = { version = "0.15", default-features = false } diff --git a/crates/util/src/lib.rs b/crates/util/src/lib.rs index 97f409f410..52bf70e3a7 100644 --- a/crates/util/src/lib.rs +++ b/crates/util/src/lib.rs @@ -2,13 +2,20 @@ pub mod test; use futures::Future; +use lazy_static::lazy_static; use std::{ cmp::Ordering, + ffi::OsStr, ops::AddAssign, pin::Pin, task::{Context, Poll}, }; +lazy_static! { + pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); + pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); +} + pub fn truncate(s: &str, max_chars: usize) -> &str { match s.char_indices().nth(max_chars) { None => s, diff --git a/crates/util/src/test.rs b/crates/util/src/test.rs index 7b2e00d57b..4e4716434e 100644 --- a/crates/util/src/test.rs +++ b/crates/util/src/test.rs @@ -1,12 +1,15 @@ mod assertions; mod marked_text; +use git2; use std::path::{Path, PathBuf}; use tempdir::TempDir; pub use assertions::*; pub use marked_text::*; +use crate::DOT_GIT; + pub fn temp_tree(tree: serde_json::Value) -> TempDir { let dir = TempDir::new("").unwrap(); write_tree(dir.path(), tree); @@ -24,6 +27,11 @@ fn write_tree(path: &Path, tree: serde_json::Value) { match contents { Value::Object(_) => { fs::create_dir(&path).unwrap(); + + if path.file_name() == Some(&DOT_GIT) { + git2::Repository::init(&path.parent().unwrap()).unwrap(); + } + write_tree(&path, contents); } Value::Null => { From 4251e0f5f13978e6d6a779f30797948e2ac37382 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 26 Sep 2022 16:57:31 -0400 Subject: [PATCH 066/140] Find repos under worktree & return correct results for repo queries Co-Authored-By: Mikayla Maki --- crates/project/src/fs.rs | 27 ++---- crates/project/src/git_repository.rs | 121 +++++---------------------- crates/project/src/worktree.rs | 98 +++++++++------------- 3 files changed, 66 insertions(+), 180 deletions(-) diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 70d1879886..e675ddf8e5 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -22,7 +22,7 @@ use futures::lock::Mutex; #[cfg(any(test, feature = "test-support"))] use std::sync::{Arc, Weak}; -use crate::git_repository::{FakeGitRepository, GitRepository, RealGitRepository}; +use crate::git_repository::GitRepository; #[async_trait::async_trait] pub trait Fs: Send + Sync { @@ -48,11 +48,7 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; - fn open_git_repository( - &self, - abs_dotgit_path: &Path, - content_path: &Arc, - ) -> Option>; + fn open_git_repository(&self, abs_dotgit_path: &Path) -> Option; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; @@ -278,12 +274,8 @@ impl Fs for RealFs { }))) } - fn open_git_repository( - &self, - abs_dotgit_path: &Path, - content_path: &Arc, - ) -> Option> { - RealGitRepository::open(abs_dotgit_path, content_path) + fn open_git_repository(&self, abs_dotgit_path: &Path) -> Option { + GitRepository::open(abs_dotgit_path) } fn is_fake(&self) -> bool { @@ -901,15 +893,8 @@ impl Fs for FakeFs { })) } - fn open_git_repository( - &self, - abs_dotgit_path: &Path, - content_path: &Arc, - ) -> Option> { - Some(Box::new(FakeGitRepository::new( - abs_dotgit_path, 
- content_path, - ))) + fn open_git_repository(&self, _: &Path) -> Option { + None } fn is_fake(&self) -> bool { diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index 47849bf644..d1df841fe7 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -3,19 +3,8 @@ use parking_lot::Mutex; use std::{path::Path, sync::Arc}; use util::ResultExt; -pub trait GitRepository: Send + Sync { - fn boxed_clone(&self) -> Box; - fn is_path_managed_by(&self, path: &Path) -> bool; - fn is_path_in_git_folder(&self, path: &Path) -> bool; - fn content_path(&self) -> &Path; - fn git_dir_path(&self) -> &Path; - fn last_scan_id(&self) -> usize; - fn set_scan_id(&mut self, scan_id: usize); - fn with_repo(&mut self, f: Box); -} - #[derive(Clone)] -pub struct RealGitRepository { +pub struct GitRepository { // Path to folder containing the .git file or directory content_path: Arc, // Path to the actual .git folder. @@ -25,118 +14,50 @@ pub struct RealGitRepository { libgit_repository: Arc>, } -impl RealGitRepository { - pub fn open( - abs_dotgit_path: &Path, - content_path: &Arc, - ) -> Option> { - Repository::open(&abs_dotgit_path) +impl GitRepository { + pub fn open(dotgit_path: &Path) -> Option { + Repository::open(&dotgit_path) .log_err() - .map::, _>(|libgit_repository| { - Box::new(Self { - content_path: content_path.clone(), - git_dir_path: libgit_repository.path().into(), + .and_then(|libgit_repository| { + Some(Self { + content_path: libgit_repository.workdir()?.into(), + git_dir_path: dotgit_path.canonicalize().log_err()?.into(), last_scan_id: 0, libgit_repository: Arc::new(parking_lot::Mutex::new(libgit_repository)), }) }) } -} -impl GitRepository for RealGitRepository { - fn boxed_clone(&self) -> Box { - Box::new(self.clone()) + pub fn is_path_managed_by(&self, path: &Path) -> bool { + path.canonicalize() + .map(|path| path.starts_with(&self.content_path)) + .unwrap_or(false) } - fn is_path_managed_by(&self, path: &Path) -> bool { - path.starts_with(&self.content_path) + pub fn is_path_in_git_folder(&self, path: &Path) -> bool { + path.canonicalize() + .map(|path| path.starts_with(&self.git_dir_path)) + .unwrap_or(false) } - fn is_path_in_git_folder(&self, path: &Path) -> bool { - path.starts_with(&self.git_dir_path) - } - - fn content_path(&self) -> &Path { + pub fn content_path(&self) -> &Path { self.content_path.as_ref() } - fn git_dir_path(&self) -> &Path { + pub fn git_dir_path(&self) -> &Path { self.git_dir_path.as_ref() } - fn last_scan_id(&self) -> usize { + pub fn last_scan_id(&self) -> usize { self.last_scan_id } - fn set_scan_id(&mut self, scan_id: usize) { + pub fn set_scan_id(&mut self, scan_id: usize) { self.last_scan_id = scan_id; } - fn with_repo(&mut self, f: Box) { + pub fn with_repo(&mut self, f: Box) { let mut git2 = self.libgit_repository.lock(); f(&mut git2) } } - -impl PartialEq for &Box { - fn eq(&self, other: &Self) -> bool { - self.content_path() == other.content_path() - } -} -impl Eq for &Box {} - -#[cfg(any(test, feature = "test-support"))] -#[derive(Clone)] -pub struct FakeGitRepository { - // Path to folder containing the .git file or directory - content_path: Arc, - // Path to the actual .git folder. 
- // Note: if .git is a file, this points to the folder indicated by the .git file - git_dir_path: Arc, - last_scan_id: usize, -} - -impl FakeGitRepository { - pub fn new(abs_dotgit_path: &Path, content_path: &Arc) -> FakeGitRepository { - Self { - content_path: content_path.clone(), - git_dir_path: abs_dotgit_path.into(), - last_scan_id: 0, - } - } -} - -#[cfg(any(test, feature = "test-support"))] -impl GitRepository for FakeGitRepository { - fn boxed_clone(&self) -> Box { - Box::new(self.clone()) - } - - fn is_path_managed_by(&self, path: &Path) -> bool { - path.starts_with(&self.content_path) - } - - fn is_path_in_git_folder(&self, path: &Path) -> bool { - path.starts_with(&self.git_dir_path) - } - - fn content_path(&self) -> &Path { - self.content_path.as_ref() - } - - fn git_dir_path(&self) -> &Path { - self.git_dir_path.as_ref() - } - - fn last_scan_id(&self) -> usize { - self.last_scan_id - } - - fn set_scan_id(&mut self, scan_id: usize) { - self.last_scan_id = scan_id; - } - - fn with_repo(&mut self, _: Box) { - unimplemented!(); - } -} diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6fd00aabbd..ee54fdb394 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -100,7 +100,7 @@ pub struct Snapshot { pub struct LocalSnapshot { abs_path: Arc, ignores_by_parent_abs_path: HashMap, (Arc, usize)>, - git_repositories: Vec>, + git_repositories: Vec, removed_entry_ids: HashMap, next_entry_id: Arc, snapshot: Snapshot, @@ -115,7 +115,7 @@ impl Clone for LocalSnapshot { git_repositories: self .git_repositories .iter() - .map(|repo| repo.boxed_clone()) + .map(|repo| repo.clone()) .collect(), removed_entry_ids: self.removed_entry_ids.clone(), next_entry_id: self.next_entry_id.clone(), @@ -157,7 +157,7 @@ struct ShareState { pub enum Event { UpdatedEntries, - UpdatedGitRepositories(Vec>), + UpdatedGitRepositories(Vec), } impl Entity for Worktree { @@ -1307,53 +1307,35 @@ impl LocalSnapshot { } // Gives the most specific git repository for a given path - pub(crate) fn git_repository_for_file_path( - &self, - path: &Path, - ) -> Option> { - let repos = self - .git_repositories - .iter() - .map(|repo| repo.content_path().to_str().unwrap().to_string()) - .collect::>(); - dbg!(repos); - + pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option { self.git_repositories .iter() .rev() //git_repository is ordered lexicographically - .find(|repo| repo.is_path_managed_by(path)) - .map(|repo| repo.boxed_clone()) + .find(|repo| { + repo.is_path_managed_by(&self.abs_path.join(path)) + }) + .map(|repo| repo.clone()) } // ~/zed: // - src // - crates // - .git -> /usr/.git - pub(crate) fn git_repository_for_git_data( - &self, - path: &Path, - ) -> Option> { + pub(crate) fn git_repository_for_git_data(&self, path: &Path) -> Option { self.git_repositories .iter() - .find(|repo| repo.is_path_in_git_folder(path)) - .map(|repo| repo.boxed_clone()) + .find(|repo| repo.is_path_in_git_folder(&self.abs_path.join(path))) + .map(|repo| repo.clone()) } pub(crate) fn does_git_repository_track_file_path( &self, - repo: &Box, + repo: &GitRepository, file_path: &Path, ) -> bool { - // /zed - // - .git - // - a.txt - // - /dep - // - b.txt - // - .git - // Depends on git_repository_for_file_path returning the most specific git repository for a given path - self.git_repository_for_file_path(file_path) - .map_or(false, |r| &r == repo) + self.git_repository_for_file_path(&self.abs_path.join(file_path)) + .map_or(false, |r| r.git_dir_path() == 
repo.git_dir_path()) } #[cfg(test)] @@ -1438,7 +1420,6 @@ impl LocalSnapshot { } fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry { - dbg!(&entry.path); if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) { let abs_path = self.abs_path.join(&entry.path); match smol::block_on(build_gitignore(&abs_path, fs)) { @@ -1456,19 +1437,6 @@ impl LocalSnapshot { ); } } - } else if entry.path.file_name() == Some(&DOT_GIT) { - dbg!(&entry.path); - - let abs_path = self.abs_path.join(&entry.path); - let content_path: Arc = entry.path.parent().unwrap().into(); - if let Err(ix) = self - .git_repositories - .binary_search_by_key(&content_path.as_ref(), |repo| repo.content_path()) - { - if let Some(repository) = fs.open_git_repository(&abs_path, &content_path) { - self.git_repositories.insert(ix, repository); - } - } } self.reuse_entry_id(&mut entry); @@ -1506,6 +1474,7 @@ impl LocalSnapshot { parent_path: Arc, entries: impl IntoIterator, ignore: Option>, + fs: &dyn Fs, ) { let mut parent_entry = if let Some(parent_entry) = self.entries_by_path.get(&PathKey(parent_path.clone()), &()) @@ -1531,6 +1500,18 @@ impl LocalSnapshot { unreachable!(); } + if parent_path.file_name() == Some(&DOT_GIT) { + let abs_path = self.abs_path.join(&parent_path); + if let Err(ix) = self + .git_repositories + .binary_search_by_key(&abs_path.as_path(), |repo| repo.git_dir_path()) + { + if let Some(repository) = fs.open_git_repository(&abs_path) { + self.git_repositories.insert(ix, repository); + } + } + } + let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)]; let mut entries_by_id_edits = Vec::new(); @@ -2227,7 +2208,6 @@ impl BackgroundScanner { if ignore_stack.is_all() { if let Some(mut root_entry) = snapshot.root_entry().cloned() { root_entry.is_ignored = true; - dbg!("scan dirs entry"); snapshot.insert_entry(root_entry, self.fs.as_ref()); } } @@ -2375,9 +2355,12 @@ impl BackgroundScanner { new_entries.push(child_entry); } - self.snapshot - .lock() - .populate_dir(job.path.clone(), new_entries, new_ignore); + self.snapshot.lock().populate_dir( + job.path.clone(), + new_entries, + new_ignore, + self.fs.as_ref(), + ); for new_job in new_jobs { job.scan_queue.send(new_job).await.unwrap(); } @@ -2450,7 +2433,6 @@ impl BackgroundScanner { snapshot.root_char_bag, ); fs_entry.is_ignored = ignore_stack.is_all(); - dbg!("process_events entry"); snapshot.insert_entry(fs_entry, self.fs.as_ref()); let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); @@ -3189,8 +3171,6 @@ mod tests { tree.read_with(cx, |tree, _cx| { let tree = tree.as_local().unwrap(); - dbg!(tree); - assert!(tree .git_repository_for_file_path("c.txt".as_ref()) .is_none()); @@ -3202,22 +3182,22 @@ mod tests { // Need to update the file system for anything involving git // Goal: Make this test pass // Up Next: Invalidating git repos! 
- assert_eq!(repo.content_path(), Path::new("dir1")); - assert_eq!(repo.git_dir_path(), Path::new("dir1/.git")); + assert_eq!(repo.content_path(), root.path().join("dir1").canonicalize().unwrap()); + assert_eq!(repo.git_dir_path(), root.path().join("dir1/.git").canonicalize().unwrap()); let repo = tree .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!(repo.content_path(), Path::new("dir1/deps/dep1")); - assert_eq!(repo.git_dir_path(), Path::new("dir1/deps/dep1")); + assert_eq!(repo.content_path(), root.path().join("dir1/deps/dep1").canonicalize().unwrap()); + assert_eq!(repo.git_dir_path(), root.path().join("dir1/deps/dep1/.git").canonicalize().unwrap()); let repo = tree .git_repository_for_git_data("dir1/.git/HEAD".as_ref()) .unwrap(); - assert_eq!(repo.content_path(), Path::new("dir1")); - assert_eq!(repo.git_dir_path(), Path::new("dir1/.git")); + assert_eq!(repo.content_path(), root.path().join("dir1").canonicalize().unwrap()); + assert_eq!(repo.git_dir_path(), root.path().join("dir1/.git").canonicalize().unwrap()); assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref())); assert!(!tree From d2b18790a0d7b9d597d3015380ea82e66229086c Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 27 Sep 2022 14:07:53 -0400 Subject: [PATCH 067/140] Remove git repos from worktree when deleted on storage Co-Authored-By: Mikayla Maki --- crates/project/src/git_repository.rs | 2 +- crates/project/src/worktree.rs | 68 ++++++++++++++++++++-------- 2 files changed, 50 insertions(+), 20 deletions(-) diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index d1df841fe7..73f7130e56 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -56,7 +56,7 @@ impl GitRepository { self.last_scan_id = scan_id; } - pub fn with_repo(&mut self, f: Box) { + pub fn with_repo(&mut self, f: F) { let mut git2 = self.libgit_repository.lock(); f(&mut git2) } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index ee54fdb394..a9ebfd8612 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1311,9 +1311,7 @@ impl LocalSnapshot { self.git_repositories .iter() .rev() //git_repository is ordered lexicographically - .find(|repo| { - repo.is_path_managed_by(&self.abs_path.join(path)) - }) + .find(|repo| repo.is_path_managed_by(&self.abs_path.join(path))) .map(|repo| repo.clone()) } @@ -2548,13 +2546,16 @@ impl BackgroundScanner { } async fn update_git_repositories(&self) { - let mut snapshot = self.snapshot(); - let mut git_repositories = mem::take(&mut snapshot.git_repositories); - git_repositories.retain(|git_repository| { - let dot_git_path = git_repository.content_path().join(&*DOT_GIT); - snapshot.entry_for_path(dot_git_path).is_some() - }); - snapshot.git_repositories = git_repositories; + let mut snapshot = self.snapshot.lock(); + + let new_repos = snapshot + .git_repositories + .iter() + .cloned() + .filter(|repo| git2::Repository::open(repo.git_dir_path()).is_ok()) + .collect(); + + snapshot.git_repositories = new_repos; } async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) { @@ -3179,30 +3180,59 @@ mod tests { .git_repository_for_file_path("dir1/src/b.txt".as_ref()) .unwrap(); - // Need to update the file system for anything involving git - // Goal: Make this test pass - // Up Next: Invalidating git repos! 
- assert_eq!(repo.content_path(), root.path().join("dir1").canonicalize().unwrap()); - assert_eq!(repo.git_dir_path(), root.path().join("dir1/.git").canonicalize().unwrap()); + assert_eq!( + repo.content_path(), + root.path().join("dir1").canonicalize().unwrap() + ); + assert_eq!( + repo.git_dir_path(), + root.path().join("dir1/.git").canonicalize().unwrap() + ); let repo = tree .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!(repo.content_path(), root.path().join("dir1/deps/dep1").canonicalize().unwrap()); - assert_eq!(repo.git_dir_path(), root.path().join("dir1/deps/dep1/.git").canonicalize().unwrap()); + assert_eq!( + repo.content_path(), + root.path().join("dir1/deps/dep1").canonicalize().unwrap() + ); + assert_eq!( + repo.git_dir_path(), + root.path() + .join("dir1/deps/dep1/.git") + .canonicalize() + .unwrap() + ); let repo = tree .git_repository_for_git_data("dir1/.git/HEAD".as_ref()) .unwrap(); - assert_eq!(repo.content_path(), root.path().join("dir1").canonicalize().unwrap()); - assert_eq!(repo.git_dir_path(), root.path().join("dir1/.git").canonicalize().unwrap()); + assert_eq!( + repo.content_path(), + root.path().join("dir1").canonicalize().unwrap() + ); + assert_eq!( + repo.git_dir_path(), + root.path().join("dir1/.git").canonicalize().unwrap() + ); assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref())); assert!(!tree .does_git_repository_track_file_path(&repo, "dir1/deps/dep1/src/a.txt".as_ref())); }); + + std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree + .git_repository_for_file_path("dir1/src/b.txt".as_ref()) + .is_none()); + }); } #[gpui::test] From 759b7f1e07257b431c048d381e26b858b92933ce Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 27 Sep 2022 14:37:33 -0400 Subject: [PATCH 068/140] Update repo scan id when files under dot git dir events Co-Authored-By: Mikayla Maki --- crates/project/src/git_repository.rs | 16 +++--- crates/project/src/worktree.rs | 74 ++++++++++++---------------- 2 files changed, 39 insertions(+), 51 deletions(-) diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index 73f7130e56..eab031da17 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -10,7 +10,7 @@ pub struct GitRepository { // Path to the actual .git folder. 
// Note: if .git is a file, this points to the folder indicated by the .git file git_dir_path: Arc, - last_scan_id: usize, + scan_id: usize, libgit_repository: Arc>, } @@ -22,19 +22,19 @@ impl GitRepository { Some(Self { content_path: libgit_repository.workdir()?.into(), git_dir_path: dotgit_path.canonicalize().log_err()?.into(), - last_scan_id: 0, + scan_id: 0, libgit_repository: Arc::new(parking_lot::Mutex::new(libgit_repository)), }) }) } - pub fn is_path_managed_by(&self, path: &Path) -> bool { + pub fn manages(&self, path: &Path) -> bool { path.canonicalize() .map(|path| path.starts_with(&self.content_path)) .unwrap_or(false) } - pub fn is_path_in_git_folder(&self, path: &Path) -> bool { + pub fn in_dot_git(&self, path: &Path) -> bool { path.canonicalize() .map(|path| path.starts_with(&self.git_dir_path)) .unwrap_or(false) @@ -48,12 +48,12 @@ impl GitRepository { self.git_dir_path.as_ref() } - pub fn last_scan_id(&self) -> usize { - self.last_scan_id + pub fn scan_id(&self) -> usize { + self.scan_id } - pub fn set_scan_id(&mut self, scan_id: usize) { - self.last_scan_id = scan_id; + pub(super) fn set_scan_id(&mut self, scan_id: usize) { + self.scan_id = scan_id; } pub fn with_repo(&mut self, f: F) { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index a9ebfd8612..aead63102b 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -40,7 +40,6 @@ use std::{ ffi::{OsStr, OsString}, fmt, future::Future, - mem, ops::{Deref, DerefMut}, os::unix::prelude::{OsStrExt, OsStringExt}, path::{Path, PathBuf}, @@ -1307,32 +1306,24 @@ impl LocalSnapshot { } // Gives the most specific git repository for a given path - pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option { + pub(crate) fn repo_for(&self, path: &Path) -> Option { self.git_repositories .iter() .rev() //git_repository is ordered lexicographically - .find(|repo| repo.is_path_managed_by(&self.abs_path.join(path))) + .find(|repo| repo.manages(&self.abs_path.join(path))) .map(|repo| repo.clone()) } - // ~/zed: - // - src - // - crates - // - .git -> /usr/.git - pub(crate) fn git_repository_for_git_data(&self, path: &Path) -> Option { + pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepository> { self.git_repositories - .iter() - .find(|repo| repo.is_path_in_git_folder(&self.abs_path.join(path))) - .map(|repo| repo.clone()) + .iter_mut() + .rev() //git_repository is ordered lexicographically + .find(|repo| repo.in_dot_git(&self.abs_path.join(path))) } - pub(crate) fn does_git_repository_track_file_path( - &self, - repo: &GitRepository, - file_path: &Path, - ) -> bool { + pub(crate) fn tracks_filepath(&self, repo: &GitRepository, file_path: &Path) -> bool { // Depends on git_repository_for_file_path returning the most specific git repository for a given path - self.git_repository_for_file_path(&self.abs_path.join(file_path)) + self.repo_for(&self.abs_path.join(file_path)) .map_or(false, |r| r.git_dir_path() == repo.git_dir_path()) } @@ -2433,6 +2424,11 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_all(); snapshot.insert_entry(fs_entry, self.fs.as_ref()); + let scan_id = snapshot.scan_id; + if let Some(repo) = snapshot.in_dot_git(&abs_path) { + repo.set_scan_id(scan_id); + } + let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) { ancestor_inodes.insert(metadata.inode); @@ -3172,13 +3168,9 @@ mod tests { tree.read_with(cx, |tree, _cx| { let tree = 
tree.as_local().unwrap(); - assert!(tree - .git_repository_for_file_path("c.txt".as_ref()) - .is_none()); + assert!(tree.repo_for("c.txt".as_ref()).is_none()); - let repo = tree - .git_repository_for_file_path("dir1/src/b.txt".as_ref()) - .unwrap(); + let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap(); assert_eq!( repo.content_path(), @@ -3189,9 +3181,7 @@ mod tests { root.path().join("dir1/.git").canonicalize().unwrap() ); - let repo = tree - .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref()) - .unwrap(); + let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap(); assert_eq!( repo.content_path(), @@ -3204,23 +3194,23 @@ mod tests { .canonicalize() .unwrap() ); + }); - let repo = tree - .git_repository_for_git_data("dir1/.git/HEAD".as_ref()) - .unwrap(); + let original_scan_id = tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id() + }); - assert_eq!( - repo.content_path(), - root.path().join("dir1").canonicalize().unwrap() + std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id(); + assert_ne!( + original_scan_id, new_scan_id, + "original {original_scan_id}, new {new_scan_id}" ); - assert_eq!( - repo.git_dir_path(), - root.path().join("dir1/.git").canonicalize().unwrap() - ); - - assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref())); - assert!(!tree - .does_git_repository_track_file_path(&repo, "dir1/deps/dep1/src/a.txt".as_ref())); }); std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); @@ -3229,9 +3219,7 @@ mod tests { tree.read_with(cx, |tree, _cx| { let tree = tree.as_local().unwrap(); - assert!(tree - .git_repository_for_file_path("dir1/src/b.txt".as_ref()) - .is_none()); + assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none()); }); } From 7e5d49487be16511f1246048b221997e9956d8f2 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 27 Sep 2022 20:06:18 -0400 Subject: [PATCH 069/140] WIP Notifying buffers of head text change Co-Authored-By: Mikayla Maki --- crates/editor/src/items.rs | 4 +- crates/editor/src/multi_buffer.rs | 6 +-- crates/language/src/buffer.rs | 14 +++++-- crates/project/src/fs.rs | 46 ---------------------- crates/project/src/git_repository.rs | 44 +++++++++++++++++++-- crates/project/src/project.rs | 29 +++++++++++++- crates/project/src/worktree.rs | 57 +++++++++++++++++++++++++--- crates/workspace/src/workspace.rs | 12 +++--- 8 files changed, 142 insertions(+), 70 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 76e1480180..c1082020e5 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -478,13 +478,13 @@ impl Item for Editor { }) } - fn update_git( + fn git_diff_recalc( &mut self, _project: ModelHandle, cx: &mut ViewContext, ) -> Task> { self.buffer().update(cx, |multibuffer, cx| { - multibuffer.update_git(cx); + multibuffer.git_diff_recalc(cx); }); Task::ready(Ok(())) } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 2f93bc5b09..76093e0496 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -312,13 +312,13 @@ impl MultiBuffer { self.read(cx).symbols_containing(offset, theme) } - pub fn update_git(&mut self, cx: &mut ModelContext) { + pub fn 
git_diff_recalc(&mut self, cx: &mut ModelContext) { let buffers = self.buffers.borrow(); for buffer_state in buffers.values() { - if buffer_state.buffer.read(cx).needs_git_update() { + if buffer_state.buffer.read(cx).needs_git_diff_recalc() { buffer_state .buffer - .update(cx, |buffer, cx| buffer.update_git(cx)) + .update(cx, |buffer, cx| buffer.git_diff_recalc(cx)) } } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 6ecfbc7e62..d1dfb9ec22 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -613,6 +613,7 @@ impl Buffer { cx, ); } + self.update_git(cx); cx.emit(Event::Reloaded); cx.notify(); } @@ -661,12 +662,19 @@ impl Buffer { self.file = Some(new_file); task } + + pub fn update_git(&mut self, cx: &mut ModelContext) { + //Grab head text + - pub fn needs_git_update(&self) -> bool { + self.git_diff_recalc(cx); + } + + pub fn needs_git_diff_recalc(&self) -> bool { self.git_diff_status.diff.needs_update(self) } - pub fn update_git(&mut self, cx: &mut ModelContext) { + pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) { if self.git_diff_status.update_in_progress { self.git_diff_status.update_requested = true; return; @@ -692,7 +700,7 @@ impl Buffer { this.git_diff_status.update_in_progress = false; if this.git_diff_status.update_requested { - this.update_git(cx); + this.git_diff_recalc(cx); } }) } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index e675ddf8e5..8542030cb7 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -34,7 +34,6 @@ pub trait Fs: Send + Sync { async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>; async fn open_sync(&self, path: &Path) -> Result>; async fn load(&self, path: &Path) -> Result; - async fn load_head_text(&self, path: &Path) -> Option; async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()>; async fn canonicalize(&self, path: &Path) -> Result; async fn is_file(&self, path: &Path) -> bool; @@ -48,7 +47,6 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; - fn open_git_repository(&self, abs_dotgit_path: &Path) -> Option; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; @@ -168,38 +166,6 @@ impl Fs for RealFs { Ok(text) } - async fn load_head_text(&self, path: &Path) -> Option { - fn logic(path: &Path) -> Result> { - let repo = Repository::open_ext(path, RepositoryOpenFlags::empty(), &[OsStr::new("")])?; - assert!(repo.path().ends_with(".git")); - let repo_root_path = match repo.path().parent() { - Some(root) => root, - None => return Ok(None), - }; - - let relative_path = path.strip_prefix(repo_root_path)?; - let object = repo - .head()? - .peel_to_tree()? - .get_path(relative_path)? 
- .to_object(&repo)?; - - let content = match object.as_blob() { - Some(blob) => blob.content().to_owned(), - None => return Ok(None), - }; - - let head_text = String::from_utf8(content.to_owned())?; - Ok(Some(head_text)) - } - - match logic(path) { - Ok(value) => return value, - Err(err) => log::error!("Error loading head text: {:?}", err), - } - None - } - async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> { let buffer_size = text.summary().len.min(10 * 1024); let file = smol::fs::File::create(path).await?; @@ -274,10 +240,6 @@ impl Fs for RealFs { }))) } - fn open_git_repository(&self, abs_dotgit_path: &Path) -> Option { - GitRepository::open(abs_dotgit_path) - } - fn is_fake(&self) -> bool { false } @@ -791,10 +753,6 @@ impl Fs for FakeFs { entry.file_content(&path).cloned() } - async fn load_head_text(&self, _: &Path) -> Option { - None - } - async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> { self.simulate_random_delay().await; let path = normalize_path(path); @@ -893,10 +851,6 @@ impl Fs for FakeFs { })) } - fn open_git_repository(&self, _: &Path) -> Option { - None - } - fn is_fake(&self) -> bool { true } diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index eab031da17..c27b1ba385 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -1,6 +1,7 @@ -use git2::Repository; +use anyhow::Result; +use git2::{Repository as LibGitRepository, RepositoryOpenFlags as LibGitRepositoryOpenFlags}; use parking_lot::Mutex; -use std::{path::Path, sync::Arc}; +use std::{path::Path, sync::Arc, ffi::OsStr}; use util::ResultExt; #[derive(Clone)] @@ -11,12 +12,12 @@ pub struct GitRepository { // Note: if .git is a file, this points to the folder indicated by the .git file git_dir_path: Arc, scan_id: usize, - libgit_repository: Arc>, + libgit_repository: Arc>, } impl GitRepository { pub fn open(dotgit_path: &Path) -> Option { - Repository::open(&dotgit_path) + LibGitRepository::open(&dotgit_path) .log_err() .and_then(|libgit_repository| { Some(Self { @@ -60,4 +61,39 @@ impl GitRepository { let mut git2 = self.libgit_repository.lock(); f(&mut git2) } + + pub async fn load_head_text(&self, file_path: &Path) -> Option { + fn logic(repo: &LibGitRepository, file_path: &Path) -> Result> { + let object = repo + .head()? + .peel_to_tree()? + .get_path(file_path)? 
+ .to_object(&repo)?; + + let content = match object.as_blob() { + Some(blob) => blob.content().to_owned(), + None => return Ok(None), + }; + + let head_text = String::from_utf8(content.to_owned())?; + Ok(Some(head_text)) + } + + match logic(&self.libgit_repository.lock(), file_path) { + Ok(value) => return value, + Err(err) => log::error!("Error loading head text: {:?}", err), + } + None + } +} + +impl std::fmt::Debug for GitRepository { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("GitRepository") + .field("content_path", &self.content_path) + .field("git_dir_path", &self.git_dir_path) + .field("scan_id", &self.scan_id) + .field("libgit_repository", &"LibGitRepository") + .finish() + } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 78a500585a..4aa3a89d86 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -13,6 +13,7 @@ use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt}; +use git_repository::GitRepository; use gpui::{ AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, @@ -4536,7 +4537,9 @@ impl Project { if worktree.read(cx).is_local() { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), - worktree::Event::UpdatedGitRepositories(_) => todo!(), + worktree::Event::UpdatedGitRepositories(updated_repos) => { + this.update_local_worktree_buffers_git_repos(updated_repos, cx) + } }) .detach(); } @@ -4644,6 +4647,30 @@ impl Project { } } + fn update_local_worktree_buffers_git_repos( + &mut self, + updated_repos: &[GitRepository], + cx: &mut ModelContext, + ) { + for (buffer_id, buffer) in &self.opened_buffers { + if let Some(buffer) = buffer.upgrade(cx) { + buffer.update(cx, |buffer, cx| { + let updated = updated_repos.iter().any(|repo| { + buffer + .file() + .and_then(|file| file.as_local()) + .map(|file| repo.manages(&file.abs_path(cx))) + .unwrap_or(false) + }); + + if updated { + buffer.update_git(cx); + } + }); + } + } + } + pub fn set_active_path(&mut self, entry: Option, cx: &mut ModelContext) { let new_active_entry = entry.and_then(|project_path| { let worktree = self.worktree_for_id(project_path.worktree_id, cx)?; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index aead63102b..beef854470 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -467,7 +467,7 @@ impl LocalWorktree { .await?; Ok(cx.add_model(|cx| { let mut buffer = Buffer::from_file(0, contents, head_text, Arc::new(file), cx); - buffer.update_git(cx); + buffer.git_diff_recalc(cx); buffer })) }) @@ -522,16 +522,28 @@ impl LocalWorktree { match self.scan_state() { ScanState::Idle => { - self.snapshot = self.background_snapshot.lock().clone(); + let new_snapshot = self.background_snapshot.lock().clone(); + let updated_repos = self.list_updated_repos(&new_snapshot); + self.snapshot = new_snapshot; + if let Some(share) = self.share.as_mut() { *share.snapshots_tx.borrow_mut() = self.snapshot.clone(); } + cx.emit(Event::UpdatedEntries); + + if !updated_repos.is_empty() { + cx.emit(Event::UpdatedGitRepositories(updated_repos)); + } } ScanState::Initializing => { let is_fake_fs = self.fs.is_fake(); 
- self.snapshot = self.background_snapshot.lock().clone(); + + let new_snapshot = self.background_snapshot.lock().clone(); + let updated_repos = self.list_updated_repos(&new_snapshot); + self.snapshot = new_snapshot; + self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move { if is_fake_fs { #[cfg(any(test, feature = "test-support"))] @@ -543,7 +555,12 @@ impl LocalWorktree { this.update(&mut cx, |this, cx| this.poll_snapshot(cx)); } })); + cx.emit(Event::UpdatedEntries); + + if !updated_repos.is_empty() { + cx.emit(Event::UpdatedGitRepositories(updated_repos)); + } } _ => { @@ -556,6 +573,34 @@ impl LocalWorktree { cx.notify(); } + fn list_updated_repos(&self, new_snapshot: &LocalSnapshot) -> Vec { + let old_snapshot = &self.snapshot; + + fn diff<'a>( + a: &'a LocalSnapshot, + b: &'a LocalSnapshot, + updated: &mut HashMap<&'a Path, GitRepository>, + ) { + for a_repo in &a.git_repositories { + let matched = b.git_repositories.iter().find(|b_repo| { + a_repo.git_dir_path() == b_repo.git_dir_path() + && a_repo.scan_id() == b_repo.scan_id() + }); + + if matched.is_some() { + updated.insert(a_repo.git_dir_path(), a_repo.clone()); + } + } + } + + let mut updated = HashMap::<&Path, GitRepository>::default(); + + diff(old_snapshot, new_snapshot, &mut updated); + diff(new_snapshot, old_snapshot, &mut updated); + + updated.into_values().collect() + } + pub fn scan_complete(&self) -> impl Future { let mut scan_state_rx = self.last_scan_state_rx.clone(); async move { @@ -606,9 +651,11 @@ impl LocalWorktree { files_included, settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked ) { + + let fs = fs.clone(); let abs_path = abs_path.clone(); - let task = async move { fs.load_head_text(&abs_path).await }; + let opt_future = async move { fs.load_head_text(&abs_path).await }; let results = cx.background().spawn(task).await; if files_included == settings::GitFilesIncluded::All { @@ -1495,7 +1542,7 @@ impl LocalSnapshot { .git_repositories .binary_search_by_key(&abs_path.as_path(), |repo| repo.git_dir_path()) { - if let Some(repository) = fs.open_git_repository(&abs_path) { + if let Some(repository) = GitRepository::open(&abs_path) { self.git_repositories.insert(ix, repository); } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 9e8338d289..921fb2de20 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -317,7 +317,7 @@ pub trait Item: View { project: ModelHandle, cx: &mut ViewContext, ) -> Task>; - fn update_git( + fn git_diff_recalc( &mut self, _project: ModelHandle, _cx: &mut ViewContext, @@ -539,7 +539,7 @@ pub trait ItemHandle: 'static + fmt::Debug { ) -> Task>; fn reload(&self, project: ModelHandle, cx: &mut MutableAppContext) -> Task>; - fn update_git( + fn git_diff_recalc( &self, project: ModelHandle, cx: &mut MutableAppContext, @@ -753,7 +753,7 @@ impl ItemHandle for ViewHandle { workspace, cx, |project, mut cx| async move { - cx.update(|cx| item.update_git(project, cx)) + cx.update(|cx| item.git_diff_recalc(project, cx)) .await .log_err(); }, @@ -762,7 +762,7 @@ impl ItemHandle for ViewHandle { let project = workspace.project().downgrade(); cx.spawn_weak(|_, mut cx| async move { if let Some(project) = project.upgrade(&cx) { - cx.update(|cx| item.update_git(project, cx)) + cx.update(|cx| item.git_diff_recalc(project, cx)) .await .log_err(); } @@ -850,12 +850,12 @@ impl ItemHandle for ViewHandle { self.update(cx, |item, cx| item.reload(project, cx)) } - fn update_git( + fn git_diff_recalc( &self, 
project: ModelHandle, cx: &mut MutableAppContext, ) -> Task> { - self.update(cx, |item, cx| item.update_git(project, cx)) + self.update(cx, |item, cx| item.git_diff_recalc(project, cx)) } fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option { From bf3b3da6edbf654dbda2e0d4f553aa8460da92a2 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 28 Sep 2022 10:26:30 -0400 Subject: [PATCH 070/140] Build again --- crates/language/src/buffer.rs | 3 +-- crates/project/src/git_repository.rs | 2 +- crates/project/src/worktree.rs | 12 ++++++------ 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d1dfb9ec22..9d386c14ad 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -662,10 +662,9 @@ impl Buffer { self.file = Some(new_file); task } - + pub fn update_git(&mut self, cx: &mut ModelContext) { //Grab head text - self.git_diff_recalc(cx); } diff --git a/crates/project/src/git_repository.rs b/crates/project/src/git_repository.rs index c27b1ba385..4b46b18391 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/project/src/git_repository.rs @@ -1,7 +1,7 @@ use anyhow::Result; use git2::{Repository as LibGitRepository, RepositoryOpenFlags as LibGitRepositoryOpenFlags}; use parking_lot::Mutex; -use std::{path::Path, sync::Arc, ffi::OsStr}; +use std::{ffi::OsStr, path::Path, sync::Arc}; use util::ResultExt; #[derive(Clone)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index beef854470..4885ce104a 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -636,6 +636,7 @@ impl LocalWorktree { let path = Arc::from(path); let abs_path = self.absolutize(&path); let fs = self.fs.clone(); + let snapshot = self.snapshot(); let files_included = cx .global::() @@ -651,12 +652,11 @@ impl LocalWorktree { files_included, settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked ) { - - - let fs = fs.clone(); - let abs_path = abs_path.clone(); - let opt_future = async move { fs.load_head_text(&abs_path).await }; - let results = cx.background().spawn(task).await; + let results = if let Some(repo) = snapshot.repo_for(&abs_path) { + repo.load_head_text(&abs_path).await + } else { + None + }; if files_included == settings::GitFilesIncluded::All { results.or_else(|| Some(text.clone())) From d5fd531743680c568e64faa75e1059f20b215453 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 28 Sep 2022 11:43:33 -0400 Subject: [PATCH 071/140] Move git related things into specialized git crate Co-Authored-By: Mikayla Maki --- Cargo.lock | 22 +++++++- crates/editor/Cargo.toml | 1 + crates/editor/src/element.rs | 2 +- crates/editor/src/multi_buffer.rs | 8 +-- crates/git/Cargo.toml | 22 ++++++++ .../{language/src/git.rs => git/src/diff.rs} | 6 +-- crates/git/src/git.rs | 12 +++++ .../src/repository.rs} | 20 +++---- crates/language/Cargo.toml | 2 +- crates/language/src/buffer.rs | 33 ++++++------ crates/language/src/language.rs | 1 - crates/project/Cargo.toml | 3 +- crates/project/src/fs.rs | 4 -- crates/project/src/project.rs | 45 ++++++++++------ crates/project/src/worktree.rs | 54 +++++++++++-------- crates/util/src/lib.rs | 7 --- crates/util/src/test.rs | 9 ++-- 17 files changed, 151 insertions(+), 100 deletions(-) create mode 100644 crates/git/Cargo.toml rename crates/{language/src/git.rs => git/src/diff.rs} (98%) create mode 100644 crates/git/src/git.rs rename crates/{project/src/git_repository.rs => git/src/repository.rs} (80%) diff --git 
a/Cargo.lock b/Cargo.lock index 8157327cf2..c8918158be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1697,6 +1697,7 @@ dependencies = [ "env_logger", "futures", "fuzzy", + "git", "gpui", "indoc", "itertools", @@ -2224,6 +2225,23 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "git" +version = "0.1.0" +dependencies = [ + "anyhow", + "clock", + "git2", + "lazy_static", + "log", + "parking_lot 0.11.2", + "smol", + "sum_tree", + "text", + "unindent", + "util", +] + [[package]] name = "git2" version = "0.15.0" @@ -2853,7 +2871,7 @@ dependencies = [ "env_logger", "futures", "fuzzy", - "git2", + "git", "gpui", "lazy_static", "log", @@ -3996,7 +4014,7 @@ dependencies = [ "fsevent", "futures", "fuzzy", - "git2", + "git", "gpui", "ignore", "language", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index dfd4938742..2ea7473b59 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -25,6 +25,7 @@ clock = { path = "../clock" } collections = { path = "../collections" } context_menu = { path = "../context_menu" } fuzzy = { path = "../fuzzy" } +git = { path = "../git" } gpui = { path = "../gpui" } language = { path = "../language" } lsp = { path = "../lsp" } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 2e767d72e6..4bc9f9a10b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -16,6 +16,7 @@ use crate::{ }; use clock::ReplicaId; use collections::{BTreeMap, HashMap}; +use git::diff::{DiffHunk, DiffHunkStatus}; use gpui::{ color::Color, elements::*, @@ -34,7 +35,6 @@ use gpui::{ WeakViewHandle, }; use json::json; -use language::git::{DiffHunk, DiffHunkStatus}; use language::{Bias, DiagnosticSeverity, OffsetUtf16, Selection}; use project::ProjectPath; use settings::Settings; diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 76093e0496..b4e302e3c3 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -4,13 +4,13 @@ pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; +use git::diff::DiffHunk; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; pub use language::Completion; use language::{ - char_kind, git::DiffHunk, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, - Chunk, DiagnosticEntry, Event, File, IndentSize, Language, OffsetRangeExt, Outline, - OutlineItem, Selection, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, - TransactionId, + char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, + DiagnosticEntry, Event, File, IndentSize, Language, OffsetRangeExt, Outline, OutlineItem, + Selection, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, TransactionId, }; use smallvec::SmallVec; use std::{ diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml new file mode 100644 index 0000000000..79ac56d098 --- /dev/null +++ b/crates/git/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "git" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/git.rs" + +[dependencies] +anyhow = "1.0.38" +clock = { path = "../clock" } +git2 = { version = "0.15", default-features = false } +lazy_static = "1.4.0" +sum_tree = { path = "../sum_tree" } +text = { path = "../text" } +util = { path = "../util" } +log = { version = "0.4.16", features = ["kv_unstable_serde"] } +smol = "1.2" +parking_lot = "0.11.1" + +[dev-dependencies] +unindent = "0.1.7" diff --git 
a/crates/language/src/git.rs b/crates/git/src/diff.rs similarity index 98% rename from crates/language/src/git.rs rename to crates/git/src/diff.rs index d713dcbc14..ddaddb7289 100644 --- a/crates/language/src/git.rs +++ b/crates/git/src/diff.rs @@ -259,7 +259,7 @@ mod tests { use text::Buffer; use unindent::Unindent as _; - #[gpui::test] + #[test] fn test_buffer_diff_simple() { let head_text = " one @@ -308,8 +308,4 @@ mod tests { ); } } - - // use rand::rngs::StdRng; - // #[gpui::test(iterations = 100)] - // fn test_buffer_diff_random(mut rng: StdRng) {} } diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs new file mode 100644 index 0000000000..36f54e706a --- /dev/null +++ b/crates/git/src/git.rs @@ -0,0 +1,12 @@ +use std::ffi::OsStr; + +pub use git2 as libgit; +pub use lazy_static::lazy_static; + +pub mod diff; +pub mod repository; + +lazy_static! { + pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); + pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); +} diff --git a/crates/project/src/git_repository.rs b/crates/git/src/repository.rs similarity index 80% rename from crates/project/src/git_repository.rs rename to crates/git/src/repository.rs index 4b46b18391..a38d13ef0d 100644 --- a/crates/project/src/git_repository.rs +++ b/crates/git/src/repository.rs @@ -1,7 +1,7 @@ use anyhow::Result; -use git2::{Repository as LibGitRepository, RepositoryOpenFlags as LibGitRepositoryOpenFlags}; +use git2::Repository as LibGitRepository; use parking_lot::Mutex; -use std::{ffi::OsStr, path::Path, sync::Arc}; +use std::{path::Path, sync::Arc}; use util::ResultExt; #[derive(Clone)] @@ -53,21 +53,17 @@ impl GitRepository { self.scan_id } - pub(super) fn set_scan_id(&mut self, scan_id: usize) { + pub fn set_scan_id(&mut self, scan_id: usize) { + println!("setting scan id"); self.scan_id = scan_id; } - pub fn with_repo(&mut self, f: F) { - let mut git2 = self.libgit_repository.lock(); - f(&mut git2) - } - - pub async fn load_head_text(&self, file_path: &Path) -> Option { - fn logic(repo: &LibGitRepository, file_path: &Path) -> Result> { + pub async fn load_head_text(&self, relative_file_path: &Path) -> Option { + fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { let object = repo .head()? .peel_to_tree()? - .get_path(file_path)? + .get_path(relative_file_path)? 
.to_object(&repo)?; let content = match object.as_blob() { @@ -79,7 +75,7 @@ impl GitRepository { Ok(Some(head_text)) } - match logic(&self.libgit_repository.lock(), file_path) { + match logic(&self.libgit_repository.as_ref().lock(), relative_file_path) { Ok(value) => return value, Err(err) => log::error!("Error loading head text: {:?}", err), } diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 034b10e89c..7a218acc8e 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -25,6 +25,7 @@ client = { path = "../client" } clock = { path = "../clock" } collections = { path = "../collections" } fuzzy = { path = "../fuzzy" } +git = { path = "../git" } gpui = { path = "../gpui" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } @@ -51,7 +52,6 @@ smol = "1.2" tree-sitter = "0.20" tree-sitter-rust = { version = "*", optional = true } tree-sitter-typescript = { version = "*", optional = true } -git2 = { version = "0.15", default-features = false } [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 9d386c14ad..13fe6daed5 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,3 @@ -use crate::git; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -47,14 +46,14 @@ pub use {tree_sitter_rust, tree_sitter_typescript}; pub use lsp::DiagnosticSeverity; struct GitDiffStatus { - diff: git::BufferDiff, + diff: git::diff::BufferDiff, update_in_progress: bool, update_requested: bool, } pub struct Buffer { text: TextBuffer, - head_text: Option>, + head_text: Option, git_diff_status: GitDiffStatus, file: Option>, saved_version: clock::Global, @@ -83,7 +82,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, - pub git_diff: git::BufferDiff, + pub git_diff: git::diff::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, diagnostics: DiagnosticSet, @@ -353,7 +352,7 @@ impl Buffer { ) -> Self { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), - head_text.map(|h| Arc::new(h.into())), + head_text.map(|h| h.into().into_boxed_str().into()), Some(file), ) } @@ -364,7 +363,11 @@ impl Buffer { file: Option>, ) -> Result { let buffer = TextBuffer::new(replica_id, message.id, message.base_text); - let mut this = Self::build(buffer, message.head_text.map(|text| Arc::new(text)), file); + let mut this = Self::build( + buffer, + message.head_text.map(|text| text.into_boxed_str().into()), + file, + ); this.text.set_line_ending(proto::deserialize_line_ending( proto::LineEnding::from_i32(message.line_ending) .ok_or_else(|| anyhow!("missing line_ending"))?, @@ -420,11 +423,7 @@ impl Buffer { self } - fn build( - buffer: TextBuffer, - head_text: Option>, - file: Option>, - ) -> Self { + fn build(buffer: TextBuffer, head_text: Option, file: Option>) -> Self { let saved_mtime = if let Some(file) = file.as_ref() { file.mtime() } else { @@ -440,7 +439,7 @@ impl Buffer { text: buffer, head_text, git_diff_status: GitDiffStatus { - diff: git::BufferDiff::new(), + diff: git::diff::BufferDiff::new(), update_in_progress: false, update_requested: false, }, @@ -613,7 +612,7 @@ impl Buffer { cx, ); } - self.update_git(cx); + self.git_diff_recalc(cx); cx.emit(Event::Reloaded); cx.notify(); } @@ -663,9 +662,8 @@ impl Buffer { task } - pub fn update_git(&mut self, cx: &mut ModelContext) { - //Grab head text - + pub fn update_head_text(&mut self, 
head_text: Option, cx: &mut ModelContext) { + self.head_text = head_text; self.git_diff_recalc(cx); } @@ -674,6 +672,7 @@ impl Buffer { } pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) { + println!("recalc"); if self.git_diff_status.update_in_progress { self.git_diff_status.update_requested = true; return; @@ -2221,7 +2220,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_in_range<'a>( &'a self, query_row_range: Range, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator> { self.git_diff.hunks_in_range(query_row_range, self) } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 8e2fe601e7..780f6e75b5 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,6 +1,5 @@ mod buffer; mod diagnostic_set; -pub mod git; mod highlight_map; mod outline; pub mod proto; diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 8ca01eac2c..1e45e3c6ed 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -24,6 +24,7 @@ collections = { path = "../collections" } db = { path = "../db" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } +git = { path = "../git" } gpui = { path = "../gpui" } language = { path = "../language" } lsp = { path = "../lsp" } @@ -52,8 +53,6 @@ smol = "1.2.5" thiserror = "1.0.29" toml = "0.5" rocksdb = "0.18" -git2 = { version = "0.15", default-features = false } - [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 8542030cb7..6a496910a0 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,11 +1,9 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use language::git::libgit::{Repository, RepositoryOpenFlags}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ - ffi::OsStr, io, os::unix::fs::MetadataExt, path::{Component, Path, PathBuf}, @@ -22,8 +20,6 @@ use futures::lock::Mutex; #[cfg(any(test, feature = "test-support"))] use std::sync::{Arc, Weak}; -use crate::git_repository::GitRepository; - #[async_trait::async_trait] pub trait Fs: Send + Sync { async fn create_dir(&self, path: &Path) -> Result<()>; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4aa3a89d86..57af588c68 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1,5 +1,4 @@ pub mod fs; -mod git_repository; mod ignore; mod lsp_command; pub mod search; @@ -13,7 +12,7 @@ use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt}; -use git_repository::GitRepository; +use git::repository::GitRepository; use gpui::{ AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, @@ -4538,6 +4537,7 @@ impl Project { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), worktree::Event::UpdatedGitRepositories(updated_repos) => { + println!("{updated_repos:#?}"); this.update_local_worktree_buffers_git_repos(updated_repos, cx) } }) @@ -4649,24 +4649,35 @@ impl Project { fn update_local_worktree_buffers_git_repos( &mut self, - updated_repos: &[GitRepository], + repos: &[GitRepository], cx: 
&mut ModelContext, ) { - for (buffer_id, buffer) in &self.opened_buffers { - if let Some(buffer) = buffer.upgrade(cx) { - buffer.update(cx, |buffer, cx| { - let updated = updated_repos.iter().any(|repo| { - buffer - .file() - .and_then(|file| file.as_local()) - .map(|file| repo.manages(&file.abs_path(cx))) - .unwrap_or(false) - }); + //TODO: Produce protos - if updated { - buffer.update_git(cx); - } - }); + for (_, buffer) in &self.opened_buffers { + if let Some(buffer) = buffer.upgrade(cx) { + let file = match buffer.read(cx).file().and_then(|file| file.as_local()) { + Some(file) => file, + None => return, + }; + let path = file.path().clone(); + let abs_path = file.abs_path(cx); + println!("got file"); + + let repo = match repos.iter().find(|repo| repo.manages(&abs_path)) { + Some(repo) => repo.clone(), + None => return, + }; + println!("got repo"); + + cx.spawn(|_, mut cx| async move { + let head_text = repo.load_head_text(&path).await; + buffer.update(&mut cx, |buffer, cx| { + println!("got calling update"); + buffer.update_head_text(head_text, cx); + }); + }) + .detach(); } } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4885ce104a..7fd37dc016 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1,10 +1,9 @@ -use crate::{copy_recursive, git_repository::GitRepository, ProjectEntryId, RemoveOptions}; - use super::{ fs::{self, Fs}, ignore::IgnoreStack, DiagnosticSummary, }; +use crate::{copy_recursive, ProjectEntryId, RemoveOptions}; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; use client::{proto, Client}; @@ -18,6 +17,8 @@ use futures::{ Stream, StreamExt, }; use fuzzy::CharBag; +use git::repository::GitRepository; +use git::{DOT_GIT, GITIGNORE}; use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, @@ -48,7 +49,7 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{ResultExt, TryFutureExt, DOT_GIT, GITIGNORE}; +use util::{ResultExt, TryFutureExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -523,7 +524,10 @@ impl LocalWorktree { match self.scan_state() { ScanState::Idle => { let new_snapshot = self.background_snapshot.lock().clone(); - let updated_repos = self.list_updated_repos(&new_snapshot); + let updated_repos = Self::list_updated_repos( + &self.snapshot.git_repositories, + &new_snapshot.git_repositories, + ); self.snapshot = new_snapshot; if let Some(share) = self.share.as_mut() { @@ -541,7 +545,10 @@ impl LocalWorktree { let is_fake_fs = self.fs.is_fake(); let new_snapshot = self.background_snapshot.lock().clone(); - let updated_repos = self.list_updated_repos(&new_snapshot); + let updated_repos = Self::list_updated_repos( + &self.snapshot.git_repositories, + &new_snapshot.git_repositories, + ); self.snapshot = new_snapshot; self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move { @@ -573,16 +580,20 @@ impl LocalWorktree { cx.notify(); } - fn list_updated_repos(&self, new_snapshot: &LocalSnapshot) -> Vec { - let old_snapshot = &self.snapshot; + fn list_updated_repos( + old_repos: &[GitRepository], + new_repos: &[GitRepository], + ) -> Vec { + println!("old repos: {:#?}", old_repos); + println!("new repos: {:#?}", new_repos); fn diff<'a>( - a: &'a LocalSnapshot, - b: &'a LocalSnapshot, + a: &'a [GitRepository], + b: &'a [GitRepository], updated: &mut HashMap<&'a Path, 
GitRepository>, ) { - for a_repo in &a.git_repositories { - let matched = b.git_repositories.iter().find(|b_repo| { + for a_repo in a { + let matched = b.iter().find(|b_repo| { a_repo.git_dir_path() == b_repo.git_dir_path() && a_repo.scan_id() == b_repo.scan_id() }); @@ -595,10 +606,10 @@ impl LocalWorktree { let mut updated = HashMap::<&Path, GitRepository>::default(); - diff(old_snapshot, new_snapshot, &mut updated); - diff(new_snapshot, old_snapshot, &mut updated); + diff(old_repos, new_repos, &mut updated); + diff(new_repos, old_repos, &mut updated); - updated.into_values().collect() + dbg!(updated.into_values().collect()) } pub fn scan_complete(&self) -> impl Future { @@ -653,7 +664,7 @@ impl LocalWorktree { settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked ) { let results = if let Some(repo) = snapshot.repo_for(&abs_path) { - repo.load_head_text(&abs_path).await + repo.load_head_text(&path).await } else { None }; @@ -1362,6 +1373,7 @@ impl LocalSnapshot { } pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepository> { + println!("chechking {path:?}"); self.git_repositories .iter_mut() .rev() //git_repository is ordered lexicographically @@ -1510,7 +1522,6 @@ impl LocalSnapshot { parent_path: Arc, entries: impl IntoIterator, ignore: Option>, - fs: &dyn Fs, ) { let mut parent_entry = if let Some(parent_entry) = self.entries_by_path.get(&PathKey(parent_path.clone()), &()) @@ -2391,12 +2402,9 @@ impl BackgroundScanner { new_entries.push(child_entry); } - self.snapshot.lock().populate_dir( - job.path.clone(), - new_entries, - new_ignore, - self.fs.as_ref(), - ); + self.snapshot + .lock() + .populate_dir(job.path.clone(), new_entries, new_ignore); for new_job in new_jobs { job.scan_queue.send(new_job).await.unwrap(); } @@ -2595,7 +2603,7 @@ impl BackgroundScanner { .git_repositories .iter() .cloned() - .filter(|repo| git2::Repository::open(repo.git_dir_path()).is_ok()) + .filter(|repo| git::libgit::Repository::open(repo.git_dir_path()).is_ok()) .collect(); snapshot.git_repositories = new_repos; diff --git a/crates/util/src/lib.rs b/crates/util/src/lib.rs index 52bf70e3a7..97f409f410 100644 --- a/crates/util/src/lib.rs +++ b/crates/util/src/lib.rs @@ -2,20 +2,13 @@ pub mod test; use futures::Future; -use lazy_static::lazy_static; use std::{ cmp::Ordering, - ffi::OsStr, ops::AddAssign, pin::Pin, task::{Context, Poll}, }; -lazy_static! 
{ - pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); - pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); -} - pub fn truncate(s: &str, max_chars: usize) -> &str { match s.char_indices().nth(max_chars) { None => s, diff --git a/crates/util/src/test.rs b/crates/util/src/test.rs index 4e4716434e..96d13f4c81 100644 --- a/crates/util/src/test.rs +++ b/crates/util/src/test.rs @@ -2,14 +2,15 @@ mod assertions; mod marked_text; use git2; -use std::path::{Path, PathBuf}; +use std::{ + ffi::OsStr, + path::{Path, PathBuf}, +}; use tempdir::TempDir; pub use assertions::*; pub use marked_text::*; -use crate::DOT_GIT; - pub fn temp_tree(tree: serde_json::Value) -> TempDir { let dir = TempDir::new("").unwrap(); write_tree(dir.path(), tree); @@ -28,7 +29,7 @@ fn write_tree(path: &Path, tree: serde_json::Value) { Value::Object(_) => { fs::create_dir(&path).unwrap(); - if path.file_name() == Some(&DOT_GIT) { + if path.file_name() == Some(&OsStr::new(".git")) { git2::Repository::init(&path.parent().unwrap()).unwrap(); } From 71b2126eca51cd3a5c9796a86aad6800a33e9184 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 28 Sep 2022 11:42:22 -0700 Subject: [PATCH 072/140] WIP, re-doing fs and fake git repos --- Cargo.lock | 1 + crates/git/Cargo.toml | 2 + crates/git/src/repository.rs | 123 ++++++++++++++++++++++++++++----- crates/language/src/buffer.rs | 1 - crates/project/src/fs.rs | 10 +++ crates/project/src/project.rs | 8 +-- crates/project/src/worktree.rs | 50 +++++++------- 7 files changed, 145 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c8918158be..3c87f336de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2230,6 +2230,7 @@ name = "git" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", "clock", "git2", "lazy_static", diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 79ac56d098..7ef9a953ba 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -17,6 +17,8 @@ util = { path = "../util" } log = { version = "0.4.16", features = ["kv_unstable_serde"] } smol = "1.2" parking_lot = "0.11.1" +async-trait = "0.1" + [dev-dependencies] unindent = "0.1.7" diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index a38d13ef0d..19ba0d1238 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -4,8 +4,29 @@ use parking_lot::Mutex; use std::{path::Path, sync::Arc}; use util::ResultExt; +#[async_trait::async_trait] +pub trait GitRepository: Send + Sync + std::fmt::Debug { + fn manages(&self, path: &Path) -> bool; + + fn in_dot_git(&self, path: &Path) -> bool; + + fn content_path(&self) -> &Path; + + fn git_dir_path(&self) -> &Path; + + fn scan_id(&self) -> usize; + + fn set_scan_id(&mut self, scan_id: usize); + + fn git_repo(&self) -> Arc>; + + fn boxed_clone(&self) -> Box; + + async fn load_head_text(&self, relative_file_path: &Path) -> Option; +} + #[derive(Clone)] -pub struct GitRepository { +pub struct RealGitRepository { // Path to folder containing the .git file or directory content_path: Arc, // Path to the actual .git folder. 
@@ -15,50 +36,48 @@ pub struct GitRepository { libgit_repository: Arc>, } -impl GitRepository { - pub fn open(dotgit_path: &Path) -> Option { +impl RealGitRepository { + pub fn open(dotgit_path: &Path) -> Option> { LibGitRepository::open(&dotgit_path) .log_err() - .and_then(|libgit_repository| { - Some(Self { + .and_then::, _>(|libgit_repository| { + Some(Box::new(Self { content_path: libgit_repository.workdir()?.into(), git_dir_path: dotgit_path.canonicalize().log_err()?.into(), scan_id: 0, libgit_repository: Arc::new(parking_lot::Mutex::new(libgit_repository)), - }) + })) }) } +} - pub fn manages(&self, path: &Path) -> bool { +#[async_trait::async_trait] +impl GitRepository for RealGitRepository { + fn manages(&self, path: &Path) -> bool { path.canonicalize() .map(|path| path.starts_with(&self.content_path)) .unwrap_or(false) } - pub fn in_dot_git(&self, path: &Path) -> bool { + fn in_dot_git(&self, path: &Path) -> bool { path.canonicalize() .map(|path| path.starts_with(&self.git_dir_path)) .unwrap_or(false) } - pub fn content_path(&self) -> &Path { + fn content_path(&self) -> &Path { self.content_path.as_ref() } - pub fn git_dir_path(&self) -> &Path { + fn git_dir_path(&self) -> &Path { self.git_dir_path.as_ref() } - pub fn scan_id(&self) -> usize { + fn scan_id(&self) -> usize { self.scan_id } - pub fn set_scan_id(&mut self, scan_id: usize) { - println!("setting scan id"); - self.scan_id = scan_id; - } - - pub async fn load_head_text(&self, relative_file_path: &Path) -> Option { + async fn load_head_text(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { let object = repo .head()? @@ -81,9 +100,21 @@ impl GitRepository { } None } + + fn git_repo(&self) -> Arc> { + self.libgit_repository.clone() + } + + fn set_scan_id(&mut self, scan_id: usize) { + self.scan_id = scan_id; + } + + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) + } } -impl std::fmt::Debug for GitRepository { +impl std::fmt::Debug for RealGitRepository { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("GitRepository") .field("content_path", &self.content_path) @@ -93,3 +124,59 @@ impl std::fmt::Debug for GitRepository { .finish() } } + +#[derive(Debug, Clone)] +pub struct FakeGitRepository { + content_path: Arc, + git_dir_path: Arc, + scan_id: usize, +} + +impl FakeGitRepository { + pub fn open(dotgit_path: &Path, scan_id: usize) -> Box { + Box::new(FakeGitRepository { + content_path: dotgit_path.parent().unwrap().into(), + git_dir_path: dotgit_path.into(), + scan_id, + }) + } +} + +#[async_trait::async_trait] +impl GitRepository for FakeGitRepository { + fn manages(&self, path: &Path) -> bool { + path.starts_with(self.content_path()) + } + + fn in_dot_git(&self, path: &Path) -> bool { + path.starts_with(self.git_dir_path()) + } + + fn content_path(&self) -> &Path { + &self.content_path + } + + fn git_dir_path(&self) -> &Path { + &self.git_dir_path + } + + fn scan_id(&self) -> usize { + self.scan_id + } + + async fn load_head_text(&self, _: &Path) -> Option { + unimplemented!() + } + + fn git_repo(&self) -> Arc> { + unimplemented!() + } + + fn set_scan_id(&mut self, scan_id: usize) { + self.scan_id = scan_id; + } + + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) + } +} diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 13fe6daed5..0268f1cc68 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -672,7 +672,6 @@ impl Buffer { } pub fn 
git_diff_recalc(&mut self, cx: &mut ModelContext) { - println!("recalc"); if self.git_diff_status.update_in_progress { self.git_diff_status.update_requested = true; return; diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 6a496910a0..4b27a23856 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,6 +1,7 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; +use git::repository::{FakeGitRepository, GitRepository, RealGitRepository}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ @@ -43,6 +44,7 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; + async fn open_repo(&self, abs_dot_git: &Path) -> Option>; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; @@ -236,6 +238,10 @@ impl Fs for RealFs { }))) } + fn open_repo(&self, abs_dot_git: &Path) -> Option> { + RealGitRepository::open(&abs_dot_git) + } + fn is_fake(&self) -> bool { false } @@ -847,6 +853,10 @@ impl Fs for FakeFs { })) } + fn open_repo(&self, abs_dot_git: &Path) -> Option> { + Some(FakeGitRepository::open(abs_dot_git.into(), 0)) + } + fn is_fake(&self) -> bool { true } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 57af588c68..a2a49c9c93 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4537,7 +4537,6 @@ impl Project { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), worktree::Event::UpdatedGitRepositories(updated_repos) => { - println!("{updated_repos:#?}"); this.update_local_worktree_buffers_git_repos(updated_repos, cx) } }) @@ -4649,7 +4648,7 @@ impl Project { fn update_local_worktree_buffers_git_repos( &mut self, - repos: &[GitRepository], + repos: &[Box], cx: &mut ModelContext, ) { //TODO: Produce protos @@ -4662,18 +4661,15 @@ impl Project { }; let path = file.path().clone(); let abs_path = file.abs_path(cx); - println!("got file"); let repo = match repos.iter().find(|repo| repo.manages(&abs_path)) { - Some(repo) => repo.clone(), + Some(repo) => repo.boxed_clone(), None => return, }; - println!("got repo"); cx.spawn(|_, mut cx| async move { let head_text = repo.load_head_text(&path).await; buffer.update(&mut cx, |buffer, cx| { - println!("got calling update"); buffer.update_head_text(head_text, cx); }); }) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 7fd37dc016..ae55659f98 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -100,7 +100,7 @@ pub struct Snapshot { pub struct LocalSnapshot { abs_path: Arc, ignores_by_parent_abs_path: HashMap, (Arc, usize)>, - git_repositories: Vec, + git_repositories: Vec>, removed_entry_ids: HashMap, next_entry_id: Arc, snapshot: Snapshot, @@ -115,7 +115,7 @@ impl Clone for LocalSnapshot { git_repositories: self .git_repositories .iter() - .map(|repo| repo.clone()) + .map(|repo| repo.boxed_clone()) .collect(), removed_entry_ids: self.removed_entry_ids.clone(), next_entry_id: self.next_entry_id.clone(), @@ -157,7 +157,7 @@ struct ShareState { pub enum Event { UpdatedEntries, - UpdatedGitRepositories(Vec), + UpdatedGitRepositories(Vec>), } impl Entity for Worktree { @@ -581,16 +581,13 @@ impl LocalWorktree { } fn list_updated_repos( - old_repos: &[GitRepository], - new_repos: &[GitRepository], - ) -> Vec { - println!("old repos: {:#?}", old_repos); - 
println!("new repos: {:#?}", new_repos); - + old_repos: &[Box], + new_repos: &[Box], + ) -> Vec> { fn diff<'a>( - a: &'a [GitRepository], - b: &'a [GitRepository], - updated: &mut HashMap<&'a Path, GitRepository>, + a: &'a [Box], + b: &'a [Box], + updated: &mut HashMap<&'a Path, Box>, ) { for a_repo in a { let matched = b.iter().find(|b_repo| { @@ -599,17 +596,17 @@ impl LocalWorktree { }); if matched.is_some() { - updated.insert(a_repo.git_dir_path(), a_repo.clone()); + updated.insert(a_repo.git_dir_path(), a_repo.boxed_clone()); } } } - let mut updated = HashMap::<&Path, GitRepository>::default(); + let mut updated = HashMap::<&Path, Box>::default(); diff(old_repos, new_repos, &mut updated); diff(new_repos, old_repos, &mut updated); - dbg!(updated.into_values().collect()) + updated.into_values().collect() } pub fn scan_complete(&self) -> impl Future { @@ -1364,23 +1361,22 @@ impl LocalSnapshot { } // Gives the most specific git repository for a given path - pub(crate) fn repo_for(&self, path: &Path) -> Option { + pub(crate) fn repo_for(&self, path: &Path) -> Option> { self.git_repositories .iter() .rev() //git_repository is ordered lexicographically .find(|repo| repo.manages(&self.abs_path.join(path))) - .map(|repo| repo.clone()) + .map(|repo| repo.boxed_clone()) } - pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepository> { - println!("chechking {path:?}"); + pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut Box> { self.git_repositories .iter_mut() .rev() //git_repository is ordered lexicographically .find(|repo| repo.in_dot_git(&self.abs_path.join(path))) } - pub(crate) fn tracks_filepath(&self, repo: &GitRepository, file_path: &Path) -> bool { + pub(crate) fn _tracks_filepath(&self, repo: &dyn GitRepository, file_path: &Path) -> bool { // Depends on git_repository_for_file_path returning the most specific git repository for a given path self.repo_for(&self.abs_path.join(file_path)) .map_or(false, |r| r.git_dir_path() == repo.git_dir_path()) @@ -1522,6 +1518,7 @@ impl LocalSnapshot { parent_path: Arc, entries: impl IntoIterator, ignore: Option>, + fs: &dyn Fs, ) { let mut parent_entry = if let Some(parent_entry) = self.entries_by_path.get(&PathKey(parent_path.clone()), &()) @@ -1553,7 +1550,7 @@ impl LocalSnapshot { .git_repositories .binary_search_by_key(&abs_path.as_path(), |repo| repo.git_dir_path()) { - if let Some(repository) = GitRepository::open(&abs_path) { + if let Some(repository) = fs.open_repo(abs_path.as_path()) { self.git_repositories.insert(ix, repository); } } @@ -2402,9 +2399,12 @@ impl BackgroundScanner { new_entries.push(child_entry); } - self.snapshot - .lock() - .populate_dir(job.path.clone(), new_entries, new_ignore); + self.snapshot.lock().populate_dir( + job.path.clone(), + new_entries, + new_ignore, + self.fs.as_ref(), + ); for new_job in new_jobs { job.scan_queue.send(new_job).await.unwrap(); } @@ -2602,7 +2602,7 @@ impl BackgroundScanner { let new_repos = snapshot .git_repositories .iter() - .cloned() + .map(|repo| repo.boxed_clone()) .filter(|repo| git::libgit::Repository::open(repo.git_dir_path()).is_ok()) .collect(); From f7714a25d1556fe3ac3efa49cc4abbd883fb24b7 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 28 Sep 2022 16:52:24 -0400 Subject: [PATCH 073/140] Don't pretend this is async --- crates/project/src/fs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 4b27a23856..cc1f6101f4 100644 --- a/crates/project/src/fs.rs +++ 
b/crates/project/src/fs.rs @@ -44,7 +44,7 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; - async fn open_repo(&self, abs_dot_git: &Path) -> Option>; + fn open_repo(&self, abs_dot_git: &Path) -> Option>; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; From 113d3b88d0e1aeb31d8488fe1296bc563c4d842e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 28 Sep 2022 14:16:21 -0700 Subject: [PATCH 074/140] Added test, and fix, for changed_repos method on LocalWorktree --- crates/project/src/worktree.rs | 52 +++++++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 4 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index ae55659f98..81eec4987f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -524,7 +524,7 @@ impl LocalWorktree { match self.scan_state() { ScanState::Idle => { let new_snapshot = self.background_snapshot.lock().clone(); - let updated_repos = Self::list_updated_repos( + let updated_repos = Self::changed_repos( &self.snapshot.git_repositories, &new_snapshot.git_repositories, ); @@ -545,7 +545,7 @@ impl LocalWorktree { let is_fake_fs = self.fs.is_fake(); let new_snapshot = self.background_snapshot.lock().clone(); - let updated_repos = Self::list_updated_repos( + let updated_repos = Self::changed_repos( &self.snapshot.git_repositories, &new_snapshot.git_repositories, ); @@ -580,7 +580,7 @@ impl LocalWorktree { cx.notify(); } - fn list_updated_repos( + fn changed_repos( old_repos: &[Box], new_repos: &[Box], ) -> Vec> { @@ -595,7 +595,7 @@ impl LocalWorktree { && a_repo.scan_id() == b_repo.scan_id() }); - if matched.is_some() { + if matched.is_none() { updated.insert(a_repo.git_dir_path(), a_repo.boxed_clone()); } } @@ -2955,6 +2955,7 @@ mod tests { use anyhow::Result; use client::test::FakeHttpClient; use fs::RealFs; + use git::repository::FakeGitRepository; use gpui::{executor::Deterministic, TestAppContext}; use rand::prelude::*; use serde_json::json; @@ -3278,6 +3279,49 @@ mod tests { }); } + #[test] + fn test_changed_repos() { + let prev_repos: Vec> = vec![ + FakeGitRepository::open(Path::new("/.git"), 0), + FakeGitRepository::open(Path::new("/a/.git"), 0), + FakeGitRepository::open(Path::new("/a/b/.git"), 0), + ]; + + let new_repos: Vec> = vec![ + FakeGitRepository::open(Path::new("/a/.git"), 1), + FakeGitRepository::open(Path::new("/a/b/.git"), 0), + FakeGitRepository::open(Path::new("/a/c/.git"), 0), + ]; + + let res = LocalWorktree::changed_repos(&prev_repos, &new_repos); + + dbg!(&res); + + // Deletion retained + assert!(res + .iter() + .find(|repo| repo.git_dir_path() == Path::new("/.git") && repo.scan_id() == 0) + .is_some()); + + // Update retained + assert!(res + .iter() + .find(|repo| repo.git_dir_path() == Path::new("/a/.git") && repo.scan_id() == 1) + .is_some()); + + // Addition retained + assert!(res + .iter() + .find(|repo| repo.git_dir_path() == Path::new("/a/c/.git") && repo.scan_id() == 0) + .is_some()); + + // Nochange, not retained + assert!(res + .iter() + .find(|repo| repo.git_dir_path() == Path::new("/a/b/.git") && repo.scan_id() == 0) + .is_none()); + } + #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { let dir = temp_tree(json!({ From 8a2430090b11cb01d6c94d78a9def3f96d8a9a2d Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Wed, 21 Sep 2022 10:39:03 -0400 Subject: [PATCH 075/140] WIP Git gutter styling --- crates/editor/src/element.rs | 6 +++--- styles/src/styleTree/editor.ts | 
6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4bc9f9a10b..82bd260819 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -567,7 +567,7 @@ impl EditorElement { let start_y = row as f32 * line_height + offset - scroll_top; let end_y = start_y + line_height; - let width = 0.4 * line_height; + let width = 0.275 * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); @@ -589,7 +589,7 @@ impl EditorElement { let start_y = start_row as f32 * line_height - scroll_top; let end_y = end_row as f32 * line_height - scroll_top; - let width = 0.22 * line_height; + let width = 0.12 * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); @@ -598,7 +598,7 @@ impl EditorElement { bounds: highlight_bounds, background: Some(color), border: Border::new(0., Color::transparent_black()), - corner_radius: 0.2 * line_height, + corner_radius: 0.05 * line_height, }); } diff --git a/styles/src/styleTree/editor.ts b/styles/src/styleTree/editor.ts index 29d6857964..bd01c3b845 100644 --- a/styles/src/styleTree/editor.ts +++ b/styles/src/styleTree/editor.ts @@ -60,9 +60,9 @@ export default function editor(theme: Theme) { indicator: iconColor(theme, "secondary"), verticalScale: 0.618 }, - diffBackgroundDeleted: theme.ramps.red(0.3).hex(), - diffBackgroundInserted: theme.ramps.green(0.3).hex(), - diffBackgroundModified: theme.ramps.blue(0.3).hex(), + diffBackgroundDeleted: theme.iconColor.error, + diffBackgroundInserted: theme.iconColor.ok, + diffBackgroundModified: theme.iconColor.warning, documentHighlightReadBackground: theme.editor.highlight.occurrence, documentHighlightWriteBackground: theme.editor.highlight.activeOccurrence, errorColor: theme.textColor.error, From b395fbb3f2214564b1f4e5cf8afcadbbe43748b2 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Wed, 21 Sep 2022 15:39:51 -0400 Subject: [PATCH 076/140] wip --- crates/editor/src/element.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 82bd260819..b8731f8707 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -589,7 +589,7 @@ impl EditorElement { let start_y = start_row as f32 * line_height - scroll_top; let end_y = end_row as f32 * line_height - scroll_top; - let width = 0.12 * line_height; + let width = 0.16 * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); From 9fe6a5e83e1c6a8cdfbe3669a25576fed0a4dbbe Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 28 Sep 2022 14:58:52 -0700 Subject: [PATCH 077/140] made git stuff slightly more themable --- crates/editor/src/element.rs | 18 ++++++++++++++---- crates/theme/src/theme.rs | 3 +++ styles/src/styleTree/editor.ts | 3 +++ 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b8731f8707..57ee919288 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -545,12 +545,22 @@ impl EditorElement { } } - let (inserted_color, modified_color, 
deleted_color) = { + let ( + inserted_color, + modified_color, + deleted_color, + width_multiplier, + corner_radius, + removed_width_mult, + ) = { let editor = &cx.global::().theme.editor; ( editor.diff_background_inserted, editor.diff_background_modified, editor.diff_background_deleted, + editor.diff_indicator_width_multiplier, + editor.diff_indicator_corner_radius, + editor.removed_diff_width_multiplier, ) }; @@ -567,7 +577,7 @@ impl EditorElement { let start_y = row as f32 * line_height + offset - scroll_top; let end_y = start_y + line_height; - let width = 0.275 * line_height; + let width = removed_width_mult * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); @@ -589,7 +599,7 @@ impl EditorElement { let start_y = start_row as f32 * line_height - scroll_top; let end_y = end_row as f32 * line_height - scroll_top; - let width = 0.16 * line_height; + let width = width_multiplier * line_height; let highlight_origin = bounds.origin() + vec2f(-width, start_y); let highlight_size = vec2f(width * 2., end_y - start_y); let highlight_bounds = RectF::new(highlight_origin, highlight_size); @@ -598,7 +608,7 @@ impl EditorElement { bounds: highlight_bounds, background: Some(color), border: Border::new(0., Color::transparent_black()), - corner_radius: 0.05 * line_height, + corner_radius: corner_radius * line_height, }); } diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 1fd586efee..0d0c94ea8d 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -491,6 +491,9 @@ pub struct Editor { pub diff_background_deleted: Color, pub diff_background_inserted: Color, pub diff_background_modified: Color, + pub removed_diff_width_multiplier: f32, + pub diff_indicator_width_multiplier: f32, + pub diff_indicator_corner_radius: f32, pub line_number: Color, pub line_number_active: Color, pub guest_selections: Vec, diff --git a/styles/src/styleTree/editor.ts b/styles/src/styleTree/editor.ts index bd01c3b845..6e52c620ee 100644 --- a/styles/src/styleTree/editor.ts +++ b/styles/src/styleTree/editor.ts @@ -63,6 +63,9 @@ export default function editor(theme: Theme) { diffBackgroundDeleted: theme.iconColor.error, diffBackgroundInserted: theme.iconColor.ok, diffBackgroundModified: theme.iconColor.warning, + removedDiffWidthMultiplier: 0.275, + diffIndicatorWidthMultiplier: 0.16, + diffIndicatorCornerRadius: 0.05, documentHighlightReadBackground: theme.editor.highlight.occurrence, documentHighlightWriteBackground: theme.editor.highlight.activeOccurrence, errorColor: theme.textColor.error, From e865b85d9cd88001624d3e869e16fc8600067f07 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 29 Sep 2022 13:10:39 -0400 Subject: [PATCH 078/140] Track index instead of head for diffs --- crates/git/src/repository.rs | 32 +++++++++++++++++++++++--------- crates/project/src/fs.rs | 7 +++++-- crates/project/src/worktree.rs | 8 +++++++- 3 files changed, 35 insertions(+), 12 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 19ba0d1238..37b79fa10d 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -18,6 +18,8 @@ pub trait GitRepository: Send + Sync + std::fmt::Debug { fn set_scan_id(&mut self, scan_id: usize); + fn reopen_git_repo(&mut self) -> bool; + fn git_repo(&self) -> Arc>; fn boxed_clone(&self) -> Box; @@ -79,18 +81,15 @@ impl GitRepository for RealGitRepository { async fn 
load_head_text(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { - let object = repo - .head()? - .peel_to_tree()? - .get_path(relative_file_path)? - .to_object(&repo)?; - - let content = match object.as_blob() { - Some(blob) => blob.content().to_owned(), + const STAGE_NORMAL: i32 = 0; + let index = repo.index()?; + let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { + Some(entry) => entry.id, None => return Ok(None), }; - let head_text = String::from_utf8(content.to_owned())?; + let content = repo.find_blob(oid)?.content().to_owned(); + let head_text = String::from_utf8(content)?; Ok(Some(head_text)) } @@ -101,6 +100,17 @@ impl GitRepository for RealGitRepository { None } + fn reopen_git_repo(&mut self) -> bool { + match LibGitRepository::open(&self.git_dir_path) { + Ok(repo) => { + self.libgit_repository = Arc::new(Mutex::new(repo)); + true + } + + Err(_) => false, + } + } + fn git_repo(&self) -> Arc> { self.libgit_repository.clone() } @@ -168,6 +178,10 @@ impl GitRepository for FakeGitRepository { unimplemented!() } + fn reopen_git_repo(&mut self) -> bool { + unimplemented!() + } + fn git_repo(&self) -> Arc> { unimplemented!() } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index cc1f6101f4..c14edcd5e4 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git::repository::{FakeGitRepository, GitRepository, RealGitRepository}; +use git::repository::{GitRepository, RealGitRepository}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ @@ -854,7 +854,10 @@ impl Fs for FakeFs { } fn open_repo(&self, abs_dot_git: &Path) -> Option> { - Some(FakeGitRepository::open(abs_dot_git.into(), 0)) + Some(git::repository::FakeGitRepository::open( + abs_dot_git.into(), + 0, + )) } fn is_fake(&self) -> bool { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 81eec4987f..1d47c843c5 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2603,7 +2603,13 @@ impl BackgroundScanner { .git_repositories .iter() .map(|repo| repo.boxed_clone()) - .filter(|repo| git::libgit::Repository::open(repo.git_dir_path()).is_ok()) + .filter_map(|mut repo| { + if repo.reopen_git_repo() { + Some(repo) + } else { + None + } + }) .collect(); snapshot.git_repositories = new_repos; From fcf11b118109a90bcb2167ec309e0afa6d9878f0 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 29 Sep 2022 13:12:49 -0400 Subject: [PATCH 079/140] Bump protocol version to be ahead of main --- crates/rpc/src/rpc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index b9f6e6a739..2c28462ee3 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -6,4 +6,4 @@ pub use conn::Connection; pub use peer::*; mod macros; -pub const PROTOCOL_VERSION: u32 = 32; +pub const PROTOCOL_VERSION: u32 = 33; From 5d09083a7d59129b350af583d731fd5039406086 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 29 Sep 2022 12:32:25 -0700 Subject: [PATCH 080/140] Identify users in amplitude via a separate 'metrics_id' UUID --- crates/client/src/client.rs | 4 +- crates/client/src/telemetry.rs | 10 +- crates/client/src/user.rs | 10 +- .../20220913211150_create_signups.down.sql | 6 - ....sql => 20220913211150_create_signups.sql} | 0 .../20220929182110_add_metrics_id.sql | 
2 + crates/collab/src/api.rs | 82 ++-- crates/collab/src/db.rs | 56 ++- crates/collab/src/db_tests.rs | 349 +++++++++--------- crates/collab/src/integration_tests.rs | 7 +- crates/collab/src/rpc.rs | 17 +- crates/rpc/proto/zed.proto | 9 + crates/rpc/src/proto.rs | 3 + 13 files changed, 316 insertions(+), 239 deletions(-) delete mode 100644 crates/collab/migrations/20220913211150_create_signups.down.sql rename crates/collab/migrations/{20220913211150_create_signups.up.sql => 20220913211150_create_signups.sql} (100%) create mode 100644 crates/collab/migrations/20220929182110_add_metrics_id.sql diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index b75be62308..9ec24abae5 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -320,11 +320,9 @@ impl Client { log::info!("set status on client {}: {:?}", self.id, status); let mut state = self.state.write(); *state.status.0.borrow_mut() = status; - let user_id = state.credentials.as_ref().map(|c| c.user_id); match status { Status::Connected { .. } => { - self.telemetry.set_user_id(user_id); state._reconnect_task = None; } Status::ConnectionLost => { @@ -353,7 +351,7 @@ impl Client { })); } Status::SignedOut | Status::UpgradeRequired => { - self.telemetry.set_user_id(user_id); + self.telemetry.set_metrics_id(None); state._reconnect_task.take(); } _ => {} diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 8b7be5ba80..c9b5665e9e 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -29,7 +29,7 @@ pub struct Telemetry { #[derive(Default)] struct TelemetryState { - user_id: Option>, + metrics_id: Option>, device_id: Option>, app_version: Option>, os_version: Option>, @@ -115,7 +115,7 @@ impl Telemetry { flush_task: Default::default(), next_event_id: 0, log_file: None, - user_id: None, + metrics_id: None, }), }); @@ -176,8 +176,8 @@ impl Telemetry { .detach(); } - pub fn set_user_id(&self, user_id: Option) { - self.state.lock().user_id = user_id.map(|id| id.to_string().into()); + pub fn set_metrics_id(&self, metrics_id: Option) { + self.state.lock().metrics_id = metrics_id.map(|s| s.into()); } pub fn report_event(self: &Arc, kind: &str, properties: Value) { @@ -199,7 +199,7 @@ impl Telemetry { None }, user_properties: None, - user_id: state.user_id.clone(), + user_id: state.metrics_id.clone(), device_id: state.device_id.clone(), os_name: state.os_name, os_version: state.os_version.clone(), diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 149d22e77a..b31cda94b3 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -142,10 +142,14 @@ impl UserStore { match status { Status::Connected { .. 
} => { if let Some((this, user_id)) = this.upgrade(&cx).zip(client.user_id()) { - let user = this + let fetch_user = this .update(&mut cx, |this, cx| this.fetch_user(user_id, cx)) - .log_err() - .await; + .log_err(); + let fetch_metrics_id = + client.request(proto::GetPrivateUserInfo {}).log_err(); + let (user, info) = futures::join!(fetch_user, fetch_metrics_id); + client.telemetry.set_metrics_id(info.map(|i| i.metrics_id)); + client.telemetry.report_event("sign in", Default::default()); current_user_tx.send(user).await.ok(); } } diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql deleted file mode 100644 index 5504bbb8dc..0000000000 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE signups; - -ALTER TABLE users - DROP COLUMN github_user_id; - -DROP INDEX index_users_on_email_address; diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.sql similarity index 100% rename from crates/collab/migrations/20220913211150_create_signups.up.sql rename to crates/collab/migrations/20220913211150_create_signups.sql diff --git a/crates/collab/migrations/20220929182110_add_metrics_id.sql b/crates/collab/migrations/20220929182110_add_metrics_id.sql new file mode 100644 index 0000000000..665d6323bf --- /dev/null +++ b/crates/collab/migrations/20220929182110_add_metrics_id.sql @@ -0,0 +1,2 @@ +ALTER TABLE "users" + ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid(); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 0a9d8106ce..08dfa91ba9 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -24,6 +24,7 @@ use tracing::instrument; pub fn routes(rpc_server: &Arc, state: Arc) -> Router { Router::new() + .route("/user", get(get_authenticated_user)) .route("/users", get(get_users).post(create_user)) .route("/users/:id", put(update_user).delete(destroy_user)) .route("/users/:id/access_tokens", post(create_access_token)) @@ -85,10 +86,33 @@ pub async fn validate_api_token(req: Request, next: Next) -> impl IntoR Ok::<_, Error>(next.run(req).await) } +#[derive(Debug, Deserialize)] +struct AuthenticatedUserParams { + github_user_id: i32, + github_login: String, +} + +#[derive(Debug, Serialize)] +struct AuthenticatedUserResponse { + user: User, + metrics_id: String, +} + +async fn get_authenticated_user( + Query(params): Query, + Extension(app): Extension>, +) -> Result> { + let user = app + .db + .get_user_by_github_account(¶ms.github_login, Some(params.github_user_id)) + .await? + .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "user not found".into()))?; + let metrics_id = app.db.get_user_metrics_id(user.id).await?; + return Ok(Json(AuthenticatedUserResponse { user, metrics_id })); +} + #[derive(Debug, Deserialize)] struct GetUsersQueryParams { - github_user_id: Option, - github_login: Option, query: Option, page: Option, limit: Option, @@ -98,14 +122,6 @@ async fn get_users( Query(params): Query, Extension(app): Extension>, ) -> Result>> { - if let Some(github_login) = ¶ms.github_login { - let user = app - .db - .get_user_by_github_account(github_login, params.github_user_id) - .await?; - return Ok(Json(Vec::from_iter(user))); - } - let limit = params.limit.unwrap_or(100); let users = if let Some(query) = params.query { app.db.fuzzy_search_users(&query, limit).await? 
@@ -124,6 +140,8 @@ struct CreateUserParams {
     email_address: String,
     email_confirmation_code: Option,
     #[serde(default)]
+    admin: bool,
+    #[serde(default)]
     invite_count: i32,
 }
@@ -131,6 +149,7 @@ struct CreateUserResponse {
     user: User,
     signup_device_id: Option,
+    metrics_id: String,
 }

 async fn create_user(
@@ -143,12 +162,10 @@ async fn create_user(
         github_user_id: params.github_user_id,
         invite_count: params.invite_count,
     };
-    let user_id;
-    let signup_device_id;
+
     // Creating a user via the normal signup process
-    if let Some(email_confirmation_code) = params.email_confirmation_code {
-        let result = app
-            .db
+    let result = if let Some(email_confirmation_code) = params.email_confirmation_code {
+        app.db
             .create_user_from_invite(
                 &Invite {
                     email_address: params.email_address,
@@ -156,34 +173,37 @@ async fn create_user(
                 },
                 user,
             )
-            .await?;
-        user_id = result.user_id;
-        signup_device_id = result.signup_device_id;
-        if let Some(inviter_id) = result.inviting_user_id {
-            rpc_server
-                .invite_code_redeemed(inviter_id, user_id)
-                .await
-                .trace_err();
-        }
+            .await?
     }
     // Creating a user as an admin
-    else {
-        user_id = app
-            .db
+    else if params.admin {
+        app.db
             .create_user(&params.email_address, false, user)
-            .await?;
-        signup_device_id = None;
+            .await?
+    } else {
+        Err(Error::Http(
+            StatusCode::UNPROCESSABLE_ENTITY,
+            "email confirmation code is required".into(),
+        ))?
+    };
+
+    if let Some(inviter_id) = result.inviting_user_id {
+        rpc_server
+            .invite_code_redeemed(inviter_id, result.user_id)
+            .await
+            .trace_err();
+    }

     let user = app
         .db
-        .get_user_by_id(user_id)
+        .get_user_by_id(result.user_id)
         .await?
         .ok_or_else(|| anyhow!("couldn't find the user we just created"))?;
     Ok(Json(CreateUserResponse {
         user,
-        signup_device_id,
+        metrics_id: result.metrics_id,
+        signup_device_id: result.signup_device_id,
     }))
 }
diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs
index 8b01cdf971..a12f6a4f89 100644
--- a/crates/collab/src/db.rs
+++ b/crates/collab/src/db.rs
@@ -17,10 +17,11 @@ pub trait Db: Send + Sync {
         email_address: &str,
         admin: bool,
         params: NewUserParams,
-    ) -> Result;
+    ) -> Result;
     async fn get_all_users(&self, page: u32, limit: u32) -> Result>;
     async fn fuzzy_search_users(&self, query: &str, limit: u32) -> Result>;
     async fn get_user_by_id(&self, id: UserId) -> Result>;
+    async fn get_user_metrics_id(&self, id: UserId) -> Result;
     async fn get_users_by_ids(&self, ids: Vec) -> Result>;
     async fn get_users_with_no_invites(&self, invited_by_another_user: bool) -> Result>;
     async fn get_user_by_github_account(
@@ -208,21 +209,26 @@ impl Db for PostgresDb {
         email_address: &str,
         admin: bool,
         params: NewUserParams,
-    ) -> Result {
+    ) -> Result {
         let query = "
            INSERT INTO users (email_address, github_login, github_user_id, admin)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
-           RETURNING id
+           RETURNING id, metrics_id::text
        ";
-        Ok(sqlx::query_scalar(query)
+        let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query)
            .bind(email_address)
            .bind(params.github_login)
            .bind(params.github_user_id)
            .bind(admin)
            .fetch_one(&self.pool)
-            .await
-            .map(UserId)?)
+ .await?; + Ok(NewUserResult { + user_id, + metrics_id, + signup_device_id: None, + inviting_user_id: None, + }) } async fn get_all_users(&self, page: u32, limit: u32) -> Result> { @@ -256,6 +262,18 @@ impl Db for PostgresDb { Ok(users.into_iter().next()) } + async fn get_user_metrics_id(&self, id: UserId) -> Result { + let query = " + SELECT metrics_id::text + FROM users + WHERE id = $1 + "; + Ok(sqlx::query_scalar(query) + .bind(id) + .fetch_one(&self.pool) + .await?) + } + async fn get_users_by_ids(&self, ids: Vec) -> Result> { let ids = ids.into_iter().map(|id| id.0).collect::>(); let query = " @@ -493,13 +511,13 @@ impl Db for PostgresDb { ))?; } - let user_id: UserId = sqlx::query_scalar( + let (user_id, metrics_id): (UserId, String) = sqlx::query_as( " INSERT INTO users (email_address, github_login, github_user_id, admin, invite_count, invite_code) VALUES ($1, $2, $3, 'f', $4, $5) - RETURNING id + RETURNING id, metrics_id::text ", ) .bind(&invite.email_address) @@ -559,6 +577,7 @@ impl Db for PostgresDb { tx.commit().await?; Ok(NewUserResult { user_id, + metrics_id, inviting_user_id, signup_device_id, }) @@ -1722,6 +1741,7 @@ pub struct NewUserParams { #[derive(Debug)] pub struct NewUserResult { pub user_id: UserId, + pub metrics_id: String, pub inviting_user_id: Option, pub signup_device_id: Option, } @@ -1808,15 +1828,15 @@ mod test { email_address: &str, admin: bool, params: NewUserParams, - ) -> Result { + ) -> Result { self.background.simulate_random_delay().await; let mut users = self.users.lock(); - if let Some(user) = users + let user_id = if let Some(user) = users .values() .find(|user| user.github_login == params.github_login) { - Ok(user.id) + user.id } else { let id = post_inc(&mut *self.next_user_id.lock()); let user_id = UserId(id); @@ -1833,8 +1853,14 @@ mod test { connected_once: false, }, ); - Ok(user_id) - } + user_id + }; + Ok(NewUserResult { + user_id, + metrics_id: "the-metrics-id".to_string(), + inviting_user_id: None, + signup_device_id: None, + }) } async fn get_all_users(&self, _page: u32, _limit: u32) -> Result> { @@ -1850,6 +1876,10 @@ mod test { Ok(self.get_users_by_ids(vec![id]).await?.into_iter().next()) } + async fn get_user_metrics_id(&self, _id: UserId) -> Result { + Ok("the-metrics-id".to_string()) + } + async fn get_users_by_ids(&self, ids: Vec) -> Result> { self.background.simulate_random_delay().await; let users = self.users.lock(); diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 1e48b4b754..e063b97eb6 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -12,89 +12,56 @@ async fn test_get_users_by_ids() { ] { let db = test_db.db(); - let user1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 2, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 3, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user4 = db - .create_user( - "u4@example.com", - false, - NewUserParams { - github_login: "u4".into(), - github_user_id: 4, - invite_count: 0, - }, - ) - .await - .unwrap(); + let mut user_ids = Vec::new(); + for i in 1..=4 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { 
+ github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } assert_eq!( - db.get_users_by_ids(vec![user1, user2, user3, user4]) - .await - .unwrap(), + db.get_users_by_ids(user_ids.clone()).await.unwrap(), vec![ User { - id: user1, - github_login: "u1".to_string(), + id: user_ids[0], + github_login: "user1".to_string(), github_user_id: Some(1), - email_address: Some("u1@example.com".to_string()), + email_address: Some("user1@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user2, - github_login: "u2".to_string(), + id: user_ids[1], + github_login: "user2".to_string(), github_user_id: Some(2), - email_address: Some("u2@example.com".to_string()), + email_address: Some("user2@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user3, - github_login: "u3".to_string(), + id: user_ids[2], + github_login: "user3".to_string(), github_user_id: Some(3), - email_address: Some("u3@example.com".to_string()), + email_address: Some("user3@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user4, - github_login: "u4".to_string(), + id: user_ids[3], + github_login: "user4".to_string(), github_user_id: Some(4), - email_address: Some("u4@example.com".to_string()), + email_address: Some("user4@example.com".to_string()), admin: false, ..Default::default() } @@ -121,7 +88,8 @@ async fn test_get_user_by_github_account() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let user_id2 = db .create_user( "user2@example.com", @@ -133,7 +101,8 @@ async fn test_get_user_by_github_account() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let user = db .get_user_by_github_account("login1", None) @@ -177,7 +146,8 @@ async fn test_worktree_extensions() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let project = db.register_project(user).await.unwrap(); db.update_worktree_extensions(project, 100, Default::default()) @@ -237,43 +207,25 @@ async fn test_user_activity() { let test_db = TestDb::postgres().await; let db = test_db.db(); - let user_1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let project_1 = db.register_project(user_1).await.unwrap(); + let mut user_ids = Vec::new(); + for i in 0..=2 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } + + let project_1 = db.register_project(user_ids[0]).await.unwrap(); db.update_worktree_extensions( project_1, 1, @@ -281,34 +233,37 @@ async fn test_user_activity() { ) .await .unwrap(); - let project_2 = db.register_project(user_2).await.unwrap(); + let project_2 = db.register_project(user_ids[1]).await.unwrap(); let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60); // User 2 opens a project let t1 = t0 + Duration::from_secs(10); - db.record_user_activity(t0..t1, &[(user_2, project_2)]) + db.record_user_activity(t0..t1, &[(user_ids[1], project_2)]) .await 
.unwrap(); let t2 = t1 + Duration::from_secs(10); - db.record_user_activity(t1..t2, &[(user_2, project_2)]) + db.record_user_activity(t1..t2, &[(user_ids[1], project_2)]) .await .unwrap(); // User 1 joins the project let t3 = t2 + Duration::from_secs(10); - db.record_user_activity(t2..t3, &[(user_2, project_2), (user_1, project_2)]) - .await - .unwrap(); + db.record_user_activity( + t2..t3, + &[(user_ids[1], project_2), (user_ids[0], project_2)], + ) + .await + .unwrap(); // User 1 opens another project let t4 = t3 + Duration::from_secs(10); db.record_user_activity( t3..t4, &[ - (user_2, project_2), - (user_1, project_2), - (user_1, project_1), + (user_ids[1], project_2), + (user_ids[0], project_2), + (user_ids[0], project_1), ], ) .await @@ -319,10 +274,10 @@ async fn test_user_activity() { db.record_user_activity( t4..t5, &[ - (user_2, project_2), - (user_1, project_2), - (user_1, project_1), - (user_3, project_1), + (user_ids[1], project_2), + (user_ids[0], project_2), + (user_ids[0], project_1), + (user_ids[2], project_1), ], ) .await @@ -330,13 +285,16 @@ async fn test_user_activity() { // User 2 leaves let t6 = t5 + Duration::from_secs(5); - db.record_user_activity(t5..t6, &[(user_1, project_1), (user_3, project_1)]) - .await - .unwrap(); + db.record_user_activity( + t5..t6, + &[(user_ids[0], project_1), (user_ids[2], project_1)], + ) + .await + .unwrap(); let t7 = t6 + Duration::from_secs(60); let t8 = t7 + Duration::from_secs(10); - db.record_user_activity(t7..t8, &[(user_1, project_1)]) + db.record_user_activity(t7..t8, &[(user_ids[0], project_1)]) .await .unwrap(); @@ -344,8 +302,8 @@ async fn test_user_activity() { db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(), &[ UserActivitySummary { - id: user_1, - github_login: "u1".to_string(), + id: user_ids[0], + github_login: "user0".to_string(), project_activity: vec![ ProjectActivitySummary { id: project_1, @@ -360,8 +318,8 @@ async fn test_user_activity() { ] }, UserActivitySummary { - id: user_2, - github_login: "u2".to_string(), + id: user_ids[1], + github_login: "user1".to_string(), project_activity: vec![ProjectActivitySummary { id: project_2, duration: Duration::from_secs(50), @@ -369,8 +327,8 @@ async fn test_user_activity() { }] }, UserActivitySummary { - id: user_3, - github_login: "u3".to_string(), + id: user_ids[2], + github_login: "user2".to_string(), project_activity: vec![ProjectActivitySummary { id: project_1, duration: Duration::from_secs(15), @@ -442,7 +400,9 @@ async fn test_user_activity() { ); assert_eq!( - db.get_user_activity_timeline(t3..t6, user_1).await.unwrap(), + db.get_user_activity_timeline(t3..t6, user_ids[0]) + .await + .unwrap(), &[ UserActivityPeriod { project_id: project_1, @@ -459,7 +419,9 @@ async fn test_user_activity() { ] ); assert_eq!( - db.get_user_activity_timeline(t0..t8, user_1).await.unwrap(), + db.get_user_activity_timeline(t0..t8, user_ids[0]) + .await + .unwrap(), &[ UserActivityPeriod { project_id: project_2, @@ -501,7 +463,8 @@ async fn test_recent_channel_messages() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); for i in 0..10 { @@ -545,7 +508,8 @@ async fn test_channel_message_nonces() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); @@ -587,7 +551,8 @@ async fn test_create_access_tokens() { }, ) .await - .unwrap(); + 
.unwrap() + .user_id; db.create_access_token_hash(user, "h1", 3).await.unwrap(); db.create_access_token_hash(user, "h2", 3).await.unwrap(); @@ -678,42 +643,27 @@ async fn test_add_contacts() { ] { let db = test_db.db(); - let user_1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 2, - invite_count: 0, - }, - ) - .await - .unwrap(); + let mut user_ids = Vec::new(); + for i in 0..3 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } + + let user_1 = user_ids[0]; + let user_2 = user_ids[1]; + let user_3 = user_ids[2]; // User starts with no contacts assert_eq!( @@ -927,12 +877,12 @@ async fn test_add_contacts() { async fn test_invite_codes() { let postgres = TestDb::postgres().await; let db = postgres.db(); - let user1 = db + let NewUserResult { user_id: user1, .. } = db .create_user( - "u1@example.com", + "user1@example.com", false, NewUserParams { - github_login: "u1".into(), + github_login: "user1".into(), github_user_id: 0, invite_count: 0, }, @@ -954,13 +904,14 @@ async fn test_invite_codes() { // User 2 redeems the invite code and becomes a contact of user 1. let user2_invite = db - .create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) + .create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id")) .await .unwrap(); let NewUserResult { user_id: user2, inviting_user_id, signup_device_id, + metrics_id, } = db .create_user_from_invite( &user2_invite, @@ -976,6 +927,7 @@ async fn test_invite_codes() { assert_eq!(invite_count, 1); assert_eq!(inviting_user_id, Some(user1)); assert_eq!(signup_device_id.unwrap(), "user-2-device-id"); + assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id); assert_eq!( db.get_contacts(user1).await.unwrap(), [ @@ -1009,13 +961,14 @@ async fn test_invite_codes() { // User 3 redeems the invite code and becomes a contact of user 1. let user3_invite = db - .create_invite_from_code(&invite_code, "u3@example.com", None) + .create_invite_from_code(&invite_code, "user3@example.com", None) .await .unwrap(); let NewUserResult { user_id: user3, inviting_user_id, signup_device_id, + .. } = db .create_user_from_invite( &user3_invite, @@ -1067,7 +1020,7 @@ async fn test_invite_codes() { ); // Trying to reedem the code for the third time results in an error. - db.create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) + db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id")) .await .unwrap_err(); @@ -1079,7 +1032,7 @@ async fn test_invite_codes() { // User 4 can now redeem the invite code and becomes a contact of user 1. let user4_invite = db - .create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) + .create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id")) .await .unwrap(); let user4 = db @@ -1137,7 +1090,7 @@ async fn test_invite_codes() { ); // An existing user cannot redeem invite codes. 
- db.create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) + db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id")) .await .unwrap_err(); let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); @@ -1232,6 +1185,7 @@ async fn test_signups() { user_id, inviting_user_id, signup_device_id, + .. } = db .create_user_from_invite( &Invite { @@ -1284,6 +1238,51 @@ async fn test_signups() { .unwrap_err(); } +#[tokio::test(flavor = "multi_thread")] +async fn test_metrics_id() { + let postgres = TestDb::postgres().await; + let db = postgres.db(); + + let NewUserResult { + user_id: user1, + metrics_id: metrics_id1, + .. + } = db + .create_user( + "person1@example.com", + false, + NewUserParams { + github_login: "person1".into(), + github_user_id: 101, + invite_count: 5, + }, + ) + .await + .unwrap(); + let NewUserResult { + user_id: user2, + metrics_id: metrics_id2, + .. + } = db + .create_user( + "person2@example.com", + false, + NewUserParams { + github_login: "person2".into(), + github_user_id: 102, + invite_count: 5, + }, + ) + .await + .unwrap(); + + assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1); + assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2); + assert_eq!(metrics_id1.len(), 36); + assert_eq!(metrics_id2.len(), 36); + assert_ne!(metrics_id1, metrics_id2); +} + fn build_background_executor() -> Arc { Deterministic::new(0).build_background() } diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 3c9886dc16..e9643d3deb 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -4663,7 +4663,8 @@ async fn test_random_collaboration( }, ) .await - .unwrap(); + .unwrap() + .user_id; let mut available_guests = vec![ "guest-1".to_string(), "guest-2".to_string(), @@ -4683,7 +4684,8 @@ async fn test_random_collaboration( }, ) .await - .unwrap(); + .unwrap() + .user_id; assert_eq!(*username, format!("guest-{}", guest_user_id)); server .app_state @@ -5206,6 +5208,7 @@ impl TestServer { ) .await .unwrap() + .user_id }; let client_name = name.to_string(); let mut client = cx.read(|cx| Client::new(http.clone(), cx)); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 5f27352c5a..467ec174ab 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -205,7 +205,8 @@ impl Server { .add_request_handler(Server::follow) .add_message_handler(Server::unfollow) .add_message_handler(Server::update_followers) - .add_request_handler(Server::get_channel_messages); + .add_request_handler(Server::get_channel_messages) + .add_request_handler(Server::get_private_user_info); Arc::new(server) } @@ -1727,6 +1728,20 @@ impl Server { Ok(()) } + async fn get_private_user_info( + self: Arc, + request: TypedEnvelope, + response: Response, + ) -> Result<()> { + let user_id = self + .store() + .await + .user_id_for_connection(request.sender_id)?; + let metrics_id = self.app_state.db.get_user_metrics_id(user_id).await?; + response.send(proto::GetPrivateUserInfoResponse { metrics_id })?; + Ok(()) + } + pub(crate) async fn store(&self) -> StoreGuard<'_> { #[cfg(test)] tokio::task::yield_now().await; diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 7840829b44..6a48ad1b97 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -108,6 +108,9 @@ message Envelope { FollowResponse follow_response = 93; UpdateFollowers update_followers = 94; Unfollow 
unfollow = 95; + + GetPrivateUserInfo get_private_user_info = 96; + GetPrivateUserInfoResponse get_private_user_info_response = 97; } } @@ -748,6 +751,12 @@ message Unfollow { uint32 leader_id = 2; } +message GetPrivateUserInfo {} + +message GetPrivateUserInfoResponse { + string metrics_id = 1; +} + // Entities message UpdateActiveView { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 2ba3fa18ba..001753c709 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -167,6 +167,8 @@ messages!( (UpdateProject, Foreground), (UpdateWorktree, Foreground), (UpdateWorktreeExtensions, Background), + (GetPrivateUserInfo, Foreground), + (GetPrivateUserInfoResponse, Foreground), ); request_messages!( @@ -189,6 +191,7 @@ request_messages!( (GetTypeDefinition, GetTypeDefinitionResponse), (GetDocumentHighlights, GetDocumentHighlightsResponse), (GetReferences, GetReferencesResponse), + (GetPrivateUserInfo, GetPrivateUserInfoResponse), (GetProjectSymbols, GetProjectSymbolsResponse), (FuzzySearchUsers, UsersResponse), (GetUsers, UsersResponse), From 35a537dae012a73c085c24d4a554e7b199de2d86 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 29 Sep 2022 13:51:17 -0700 Subject: [PATCH 081/140] Fix FakeServer to expect new GetPrivateUserInfo request --- crates/client/src/test.rs | 64 ++++++++++++++------- crates/contacts_panel/src/contacts_panel.rs | 11 ++++ 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index c634978a57..56d3d80b63 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -6,7 +6,10 @@ use anyhow::{anyhow, Result}; use futures::{future::BoxFuture, stream::BoxStream, Future, StreamExt}; use gpui::{executor, ModelHandle, TestAppContext}; use parking_lot::Mutex; -use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope}; +use rpc::{ + proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse}, + ConnectionId, Peer, Receipt, TypedEnvelope, +}; use std::{fmt, rc::Rc, sync::Arc}; pub struct FakeServer { @@ -93,6 +96,7 @@ impl FakeServer { .authenticate_and_connect(false, &cx.to_async()) .await .unwrap(); + server } @@ -126,26 +130,44 @@ impl FakeServer { #[allow(clippy::await_holding_lock)] pub async fn receive(&self) -> Result> { self.executor.start_waiting(); - let message = self - .state - .lock() - .incoming - .as_mut() - .expect("not connected") - .next() - .await - .ok_or_else(|| anyhow!("other half hung up"))?; - self.executor.finish_waiting(); - let type_name = message.payload_type_name(); - Ok(*message - .into_any() - .downcast::>() - .unwrap_or_else(|_| { - panic!( - "fake server received unexpected message type: {:?}", - type_name - ); - })) + + loop { + let message = self + .state + .lock() + .incoming + .as_mut() + .expect("not connected") + .next() + .await + .ok_or_else(|| anyhow!("other half hung up"))?; + self.executor.finish_waiting(); + let type_name = message.payload_type_name(); + let message = message.into_any(); + + if message.is::>() { + return Ok(*message.downcast().unwrap()); + } + + if message.is::>() { + self.respond( + message + .downcast::>() + .unwrap() + .receipt(), + GetPrivateUserInfoResponse { + metrics_id: "the-metrics-id".into(), + }, + ) + .await; + continue; + } + + panic!( + "fake server received unexpected message type: {:?}", + type_name + ); + } } pub async fn respond( diff --git a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index 7dcfb8cea4..91b86aaf0e 100644 --- 
a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -1220,6 +1220,17 @@ mod tests { let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake())); let server = FakeServer::for_client(current_user_id, &client, cx).await; + + let request = server.receive::().await.unwrap(); + server + .respond( + request.receipt(), + proto::GetPrivateUserInfoResponse { + metrics_id: "the-metrics-id".into(), + }, + ) + .await; + let fs = FakeFs::new(cx.background()); fs.insert_tree("/private_dir", json!({ "one.rs": "" })) .await; From a977593f3d58bad7c90c8bbc662ebfb4f309a5b5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 29 Sep 2022 16:47:20 -0700 Subject: [PATCH 082/140] 0.57.0 --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b32b6a47a2..e99fa91008 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7151,7 +7151,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.56.0" +version = "0.57.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index c96163d99e..48a84a5831 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.56.0" +version = "0.57.0" [lib] name = "zed" From 25bba396efa28139fc05ae867a10f9ca40206129 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 30 Sep 2022 09:51:03 +0200 Subject: [PATCH 083/140] Cache `CGEventSource` and avoid leaking `CGEvent` when handling events --- crates/gpui/src/platform/mac/event.rs | 83 +++++++++++++++------------ 1 file changed, 47 insertions(+), 36 deletions(-) diff --git a/crates/gpui/src/platform/mac/event.rs b/crates/gpui/src/platform/mac/event.rs index 51524f4b15..ea2b492b27 100644 --- a/crates/gpui/src/platform/mac/event.rs +++ b/crates/gpui/src/platform/mac/event.rs @@ -14,8 +14,10 @@ use core_graphics::{ event::{CGEvent, CGEventFlags, CGKeyCode}, event_source::{CGEventSource, CGEventSourceStateID}, }; +use ctor::ctor; +use foreign_types::ForeignType; use objc::{class, msg_send, sel, sel_impl}; -use std::{borrow::Cow, ffi::CStr, os::raw::c_char}; +use std::{borrow::Cow, ffi::CStr, mem, os::raw::c_char, ptr}; const BACKSPACE_KEY: u16 = 0x7f; const SPACE_KEY: u16 = b' ' as u16; @@ -25,6 +27,15 @@ const ESCAPE_KEY: u16 = 0x1b; const TAB_KEY: u16 = 0x09; const SHIFT_TAB_KEY: u16 = 0x19; +static mut EVENT_SOURCE: core_graphics::sys::CGEventSourceRef = ptr::null_mut(); + +#[ctor] +unsafe fn build_event_source() { + let source = CGEventSource::new(CGEventSourceStateID::Private).unwrap(); + EVENT_SOURCE = source.as_ptr(); + mem::forget(source); +} + pub fn key_to_native(key: &str) -> Cow { use cocoa::appkit::*; let code = match key { @@ -228,7 +239,8 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { let mut chars_ignoring_modifiers = CStr::from_ptr(native_event.charactersIgnoringModifiers().UTF8String() as *mut c_char) .to_str() - .unwrap(); + .unwrap() + .to_string(); let first_char = chars_ignoring_modifiers.chars().next().map(|ch| ch as u16); let modifiers = native_event.modifierFlags(); @@ -243,31 +255,31 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { #[allow(non_upper_case_globals)] let key = match first_char { - Some(SPACE_KEY) => "space", - Some(BACKSPACE_KEY) => 
"backspace", - Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter", - Some(ESCAPE_KEY) => "escape", - Some(TAB_KEY) => "tab", - Some(SHIFT_TAB_KEY) => "tab", - Some(NSUpArrowFunctionKey) => "up", - Some(NSDownArrowFunctionKey) => "down", - Some(NSLeftArrowFunctionKey) => "left", - Some(NSRightArrowFunctionKey) => "right", - Some(NSPageUpFunctionKey) => "pageup", - Some(NSPageDownFunctionKey) => "pagedown", - Some(NSDeleteFunctionKey) => "delete", - Some(NSF1FunctionKey) => "f1", - Some(NSF2FunctionKey) => "f2", - Some(NSF3FunctionKey) => "f3", - Some(NSF4FunctionKey) => "f4", - Some(NSF5FunctionKey) => "f5", - Some(NSF6FunctionKey) => "f6", - Some(NSF7FunctionKey) => "f7", - Some(NSF8FunctionKey) => "f8", - Some(NSF9FunctionKey) => "f9", - Some(NSF10FunctionKey) => "f10", - Some(NSF11FunctionKey) => "f11", - Some(NSF12FunctionKey) => "f12", + Some(SPACE_KEY) => "space".to_string(), + Some(BACKSPACE_KEY) => "backspace".to_string(), + Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter".to_string(), + Some(ESCAPE_KEY) => "escape".to_string(), + Some(TAB_KEY) => "tab".to_string(), + Some(SHIFT_TAB_KEY) => "tab".to_string(), + Some(NSUpArrowFunctionKey) => "up".to_string(), + Some(NSDownArrowFunctionKey) => "down".to_string(), + Some(NSLeftArrowFunctionKey) => "left".to_string(), + Some(NSRightArrowFunctionKey) => "right".to_string(), + Some(NSPageUpFunctionKey) => "pageup".to_string(), + Some(NSPageDownFunctionKey) => "pagedown".to_string(), + Some(NSDeleteFunctionKey) => "delete".to_string(), + Some(NSF1FunctionKey) => "f1".to_string(), + Some(NSF2FunctionKey) => "f2".to_string(), + Some(NSF3FunctionKey) => "f3".to_string(), + Some(NSF4FunctionKey) => "f4".to_string(), + Some(NSF5FunctionKey) => "f5".to_string(), + Some(NSF6FunctionKey) => "f6".to_string(), + Some(NSF7FunctionKey) => "f7".to_string(), + Some(NSF8FunctionKey) => "f8".to_string(), + Some(NSF9FunctionKey) => "f9".to_string(), + Some(NSF10FunctionKey) => "f10".to_string(), + Some(NSF11FunctionKey) => "f11".to_string(), + Some(NSF12FunctionKey) => "f12".to_string(), _ => { let mut chars_ignoring_modifiers_and_shift = chars_for_modified_key(native_event.keyCode(), false, false); @@ -303,21 +315,19 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { shift, cmd, function, - key: key.into(), + key, } } -fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a str { +fn chars_for_modified_key(code: CGKeyCode, cmd: bool, shift: bool) -> String { // Ideally, we would use `[NSEvent charactersByApplyingModifiers]` but that // always returns an empty string with certain keyboards, e.g. Japanese. Synthesizing // an event with the given flags instead lets us access `characters`, which always // returns a valid string. 
- let event = CGEvent::new_keyboard_event( - CGEventSource::new(CGEventSourceStateID::Private).unwrap(), - code, - true, - ) - .unwrap(); + let source = unsafe { core_graphics::event_source::CGEventSource::from_ptr(EVENT_SOURCE) }; + let event = CGEvent::new_keyboard_event(source.clone(), code, true).unwrap(); + mem::forget(source); + let mut flags = CGEventFlags::empty(); if cmd { flags |= CGEventFlags::CGEventFlagCommand; @@ -327,10 +337,11 @@ fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a st } event.set_flags(flags); - let event: id = unsafe { msg_send![class!(NSEvent), eventWithCGEvent: event] }; unsafe { + let event: id = msg_send![class!(NSEvent), eventWithCGEvent: &*event]; CStr::from_ptr(event.characters().UTF8String()) .to_str() .unwrap() + .to_string() } } From bce25918a039fe1e7706258b5c60e23a5e6368a2 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 11:13:22 -0400 Subject: [PATCH 084/140] Fix test build --- crates/project/src/worktree.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 1d47c843c5..0d2594475c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3211,7 +3211,7 @@ mod tests { })); let http_client = FakeHttpClient::with_404_response(); - let client = Client::new(http_client); + let client = cx.read(|cx| Client::new(http_client, cx)); let tree = Worktree::local( client, root.path(), From 1c5d15b85e785f0f87a50df14160295e3109185f Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 13:32:54 -0400 Subject: [PATCH 085/140] Use sumtree instead of iterator linear search for diff hunks in range Co-Authored-By: Max Brunsfeld Co-Authored-By: Mikayla Maki --- crates/git/src/diff.rs | 200 +++++++++++++++++++++++--------------- crates/text/src/anchor.rs | 2 +- 2 files changed, 123 insertions(+), 79 deletions(-) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index ddaddb7289..4d12ca90d1 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -1,7 +1,7 @@ use std::ops::Range; use sum_tree::SumTree; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, ToPoint}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; @@ -37,7 +37,6 @@ impl sum_tree::Item for DiffHunk { fn summary(&self) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), - head_range: self.head_byte_range.clone(), } } } @@ -45,54 +44,17 @@ impl sum_tree::Item for DiffHunk { #[derive(Debug, Default, Clone)] pub struct DiffHunkSummary { buffer_range: Range, - head_range: Range, } impl sum_tree::Summary for DiffHunkSummary { type Context = text::BufferSnapshot; - fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.head_range.start = self.head_range.start.min(other.head_range.start); - self.head_range.end = self.head_range.end.max(other.head_range.end); - } -} - -#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] -struct HunkHeadEnd(usize); - -impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkHeadEnd { - fn add_summary(&mut self, summary: &'a DiffHunkSummary, _: &text::BufferSnapshot) { - self.0 = summary.head_range.end; - } - - fn from_summary(summary: &'a DiffHunkSummary, _: &text::BufferSnapshot) -> Self { - HunkHeadEnd(summary.head_range.end) - } -} - -#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] -struct 
HunkBufferStart(u32); - -impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferStart { - fn add_summary(&mut self, summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) { - self.0 = summary.buffer_range.start.to_point(buffer).row; - } - - fn from_summary(summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) -> Self { - HunkBufferStart(summary.buffer_range.start.to_point(buffer).row) - } -} - -#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)] -struct HunkBufferEnd(u32); - -impl<'a> sum_tree::Dimension<'a, DiffHunkSummary> for HunkBufferEnd { - fn add_summary(&mut self, summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) { - self.0 = summary.buffer_range.end.to_point(buffer).row; - } - - fn from_summary(summary: &'a DiffHunkSummary, buffer: &text::BufferSnapshot) -> Self { - HunkBufferEnd(summary.buffer_range.end.to_point(buffer).row) + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + self.buffer_range.start = self + .buffer_range + .start + .min(&other.buffer_range.start, buffer); + self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer); } } @@ -115,23 +77,30 @@ impl BufferDiff { query_row_range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - self.tree.iter().filter_map(move |hunk| { - let range = hunk.buffer_range.to_point(&buffer); + let start = buffer.anchor_before(Point::new(query_row_range.start, 0)); + let end = buffer.anchor_after(Point::new(query_row_range.end, 0)); - if range.start.row <= query_row_range.end && query_row_range.start <= range.end.row { - let end_row = if range.end.column > 0 { - range.end.row + 1 - } else { - range.end.row - }; + let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { + let before_start = summary.buffer_range.end.cmp(&start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&end, buffer).is_gt(); + !before_start && !after_end + }); - Some(DiffHunk { - buffer_range: range.start.row..end_row, - head_byte_range: hunk.head_byte_range.clone(), - }) + std::iter::from_fn(move || { + cursor.next(buffer); + let hunk = cursor.item()?; + + let range = hunk.buffer_range.to_point(buffer); + let end_row = if range.end.column > 0 { + range.end.row + 1 } else { - None - } + range.end.row + }; + + Some(DiffHunk { + buffer_range: range.start.row..end_row, + head_byte_range: hunk.head_byte_range.clone(), + }) }) } @@ -270,7 +239,7 @@ mod tests { let buffer_text = " one - hello + HELLO three " .unindent(); @@ -278,10 +247,78 @@ mod tests { let mut buffer = Buffer::new(0, 0, buffer_text); let mut diff = BufferDiff::new(); smol::block_on(diff.update(&head_text, &buffer)); - assert_hunks(&diff, &buffer, &head_text, &[(1..2, "two\n")]); + assert_hunks( + &diff, + &buffer, + &head_text, + &[(1..2, "two\n", "HELLO\n")], + None, + ); buffer.edit([(0..0, "point five\n")]); - assert_hunks(&diff, &buffer, &head_text, &[(2..3, "two\n")]); + smol::block_on(diff.update(&head_text, &buffer)); + assert_hunks( + &diff, + &buffer, + &head_text, + &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")], + None, + ); + } + + #[test] + fn test_buffer_diff_range() { + let head_text = " + one + two + three + four + five + six + seven + eight + nine + ten + " + .unindent(); + + let buffer_text = " + A + one + B + two + C + three + HELLO + four + five + SIXTEEN + seven + eight + WORLD + nine + + ten + + " + .unindent(); + + let buffer = Buffer::new(0, 0, buffer_text); + let mut diff = BufferDiff::new(); + smol::block_on(diff.update(&head_text, 
&buffer)); + assert_eq!(diff.hunks(&buffer).count(), 8); + + assert_hunks( + &diff, + &buffer, + &head_text, + &[ + (6..7, "", "HELLO\n"), + (9..10, "six\n", "SIXTEEN\n"), + (12..13, "", "WORLD\n"), + ], + Some(7..12), + ); } #[track_caller] @@ -289,23 +326,30 @@ mod tests { diff: &BufferDiff, buffer: &BufferSnapshot, head_text: &str, - expected_hunks: &[(Range, &str)], + expected_hunks: &[(Range, &str, &str)], + range: Option>, ) { - let hunks = diff.hunks(buffer).collect::>(); - assert_eq!( - hunks.len(), - expected_hunks.len(), - "actual hunks are {hunks:#?}" - ); + let actual_hunks = diff + .hunks_in_range(range.unwrap_or(0..u32::MAX), buffer) + .map(|hunk| { + ( + hunk.buffer_range.clone(), + &head_text[hunk.head_byte_range], + buffer + .text_for_range( + Point::new(hunk.buffer_range.start, 0) + ..Point::new(hunk.buffer_range.end, 0), + ) + .collect::(), + ) + }) + .collect::>(); - let diff_iter = hunks.iter().enumerate(); - for ((index, hunk), (expected_range, expected_str)) in diff_iter.zip(expected_hunks) { - assert_eq!(&hunk.buffer_range, expected_range, "for hunk {index}"); - assert_eq!( - &head_text[hunk.head_byte_range.clone()], - *expected_str, - "for hunk {index}" - ); - } + let expected_hunks: Vec<_> = expected_hunks + .iter() + .map(|(r, s, h)| (r.clone(), *s, h.to_string())) + .collect(); + + assert_eq!(actual_hunks, expected_hunks); } } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 9f70ae1cc7..ab0e1eeabc 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -26,7 +26,7 @@ impl Anchor { bias: Bias::Right, buffer_id: None, }; - + pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering { let fragment_id_comparison = if self.timestamp == other.timestamp { Ordering::Equal From 6540936970916c98d67e540f692e17615f902f80 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 13:51:54 -0400 Subject: [PATCH 086/140] Fix some panics in tests --- crates/git/src/repository.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 37b79fa10d..f834ebc219 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -175,11 +175,11 @@ impl GitRepository for FakeGitRepository { } async fn load_head_text(&self, _: &Path) -> Option { - unimplemented!() + None } fn reopen_git_repo(&mut self) -> bool { - unimplemented!() + false } fn git_repo(&self) -> Arc> { From ce7f6dd0829fb183e05708f15f93977d9e9c650c Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 15:50:55 -0400 Subject: [PATCH 087/140] Start a test for remote git data updating Co-Authored-By: Mikayla Maki Co-Authored-By: Max Brunsfeld --- Cargo.lock | 3 + crates/collab/Cargo.toml | 5 +- crates/collab/src/integration_tests.rs | 138 +++++++++++++++++++++++++ crates/git/Cargo.toml | 5 +- crates/git/src/diff.rs | 75 +++++++------- crates/git/src/repository.rs | 26 ++++- crates/language/src/buffer.rs | 9 ++ crates/project/src/fs.rs | 54 +++++++++- crates/project/src/worktree.rs | 12 +-- crates/text/src/anchor.rs | 2 +- 10 files changed, 272 insertions(+), 57 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3c87f336de..75dd5530c9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1031,6 +1031,7 @@ dependencies = [ "env_logger", "envy", "futures", + "git", "gpui", "hyper", "language", @@ -1061,6 +1062,7 @@ dependencies = [ "tracing", "tracing-log", "tracing-subscriber", + "unindent", "util", "workspace", ] @@ -2232,6 +2234,7 @@ dependencies = [ "anyhow", "async-trait", 
"clock", + "collections", "git2", "lazy_static", "log", diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 9b3603e6e4..47c86e0fe7 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -1,5 +1,5 @@ [package] -authors = ["Nathan Sobo "] +authors = ["Nathan Sobo "] default-run = "collab" edition = "2021" name = "collab" @@ -26,6 +26,7 @@ base64 = "0.13" clap = { version = "3.1", features = ["derive"], optional = true } envy = "0.4.2" futures = "0.3" +git = { path = "../git" } hyper = "0.14" lazy_static = "1.4" lipsum = { version = "0.8", optional = true } @@ -65,11 +66,13 @@ project = { path = "../project", features = ["test-support"] } settings = { path = "../settings", features = ["test-support"] } theme = { path = "../theme" } workspace = { path = "../workspace", features = ["test-support"] } +git = { path = "../git", features = ["test-support"] } ctor = "0.1" env_logger = "0.9" util = { path = "../util" } lazy_static = "1.4" serde_json = { version = "1.0", features = ["preserve_order"] } +unindent = "0.1" [features] seed-support = ["clap", "lipsum", "reqwest"] diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 3c9886dc16..586d988ef1 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -51,6 +51,7 @@ use std::{ time::Duration, }; use theme::ThemeRegistry; +use unindent::Unindent as _; use workspace::{Item, SplitDirection, ToggleFollow, Workspace}; #[ctor::ctor] @@ -946,6 +947,143 @@ async fn test_propagate_saves_and_fs_changes( .await; } +#[gpui::test(iterations = 10)] +async fn test_git_head_text( + executor: Arc, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + executor.forbid_parking(); + let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .make_contacts(vec![(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + client_a + .fs + .insert_tree( + "/dir", + json!({ + ".git": {}, + "a.txt": " + one + two + three + ".unindent(), + }), + ) + .await; + + let head_text = " + one + three + " + .unindent(); + + let new_head_text = " + 1 + two + three + " + .unindent(); + + client_a + .fs + .as_fake() + .set_head_state_for_git_repository( + Path::new("/dir/.git"), + &[(Path::new("a.txt"), head_text.clone())], + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_b = client_b.build_remote_project(&project_a, cx_a, cx_b).await; + + // Create the buffer + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "/dir/a.txt"), cx)) + .await + .unwrap(); + + // Wait for it to catch up to the new diff + buffer_a + .condition(cx_a, |buffer, _| !buffer.is_recalculating_git_diff()) + .await; + + // Smoke test diffing + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.head_text(), Some(head_text.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &head_text, + &[(1..2, "", "two\n")], + ); + }); + + // Create remote buffer + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "/dir/a.txt"), cx)) + .await + .unwrap(); + + //TODO: WAIT FOR REMOTE UPDATES TO FINISH + + // Smoke test diffing + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.head_text(), Some(head_text.as_ref())); + git::diff::assert_hunks( + 
buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &head_text, + &[(1..2, "", "two\n")], + ); + }); + + // TODO: Create a dummy file event + client_a + .fs + .as_fake() + .set_head_state_for_git_repository( + Path::new("/dir/.git"), + &[(Path::new("a.txt"), new_head_text.clone())], + ) + .await; + + // TODO: Flush this file event + + // Wait for buffer_a to receive it + buffer_a + .condition(cx_a, |buffer, _| !buffer.is_recalculating_git_diff()) + .await; + + // Smoke test new diffing + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.head_text(), Some(new_head_text.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &head_text, + &[(0..1, "1", "one\n")], + ); + }); + + //TODO: WAIT FOR REMOTE UPDATES TO FINISH on B + + // Smoke test B + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.head_text(), Some(new_head_text.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &head_text, + &[(0..1, "1", "one\n")], + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_fs_operations( executor: Arc, diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 7ef9a953ba..744fdc8b99 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -13,12 +13,15 @@ git2 = { version = "0.15", default-features = false } lazy_static = "1.4.0" sum_tree = { path = "../sum_tree" } text = { path = "../text" } +collections = { path = "../collections" } util = { path = "../util" } log = { version = "0.4.16", features = ["kv_unstable_serde"] } smol = "1.2" parking_lot = "0.11.1" async-trait = "0.1" - [dev-dependencies] unindent = "0.1.7" + +[features] +test-support = [] diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 4d12ca90d1..6c904d44d1 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -222,6 +222,40 @@ impl BufferDiff { } } +/// Range (crossing new lines), old, new +#[cfg(any(test, feature = "test-support"))] +#[track_caller] +pub fn assert_hunks( + diff_hunks: Iter, + buffer: &BufferSnapshot, + head_text: &str, + expected_hunks: &[(Range, &str, &str)], +) where + Iter: Iterator>, +{ + let actual_hunks = diff_hunks + .map(|hunk| { + ( + hunk.buffer_range.clone(), + &head_text[hunk.head_byte_range], + buffer + .text_for_range( + Point::new(hunk.buffer_range.start, 0) + ..Point::new(hunk.buffer_range.end, 0), + ) + .collect::(), + ) + }) + .collect::>(); + + let expected_hunks: Vec<_> = expected_hunks + .iter() + .map(|(r, s, h)| (r.clone(), *s, h.to_string())) + .collect(); + + assert_eq!(actual_hunks, expected_hunks); +} + #[cfg(test)] mod tests { use super::*; @@ -248,21 +282,19 @@ mod tests { let mut diff = BufferDiff::new(); smol::block_on(diff.update(&head_text, &buffer)); assert_hunks( - &diff, + diff.hunks(&buffer), &buffer, &head_text, &[(1..2, "two\n", "HELLO\n")], - None, ); buffer.edit([(0..0, "point five\n")]); smol::block_on(diff.update(&head_text, &buffer)); assert_hunks( - &diff, + diff.hunks(&buffer), &buffer, &head_text, &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")], - None, ); } @@ -309,7 +341,7 @@ mod tests { assert_eq!(diff.hunks(&buffer).count(), 8); assert_hunks( - &diff, + diff.hunks_in_range(7..12, &buffer), &buffer, &head_text, &[ @@ -317,39 +349,6 @@ mod tests { (9..10, "six\n", "SIXTEEN\n"), (12..13, "", "WORLD\n"), ], - Some(7..12), ); } - - #[track_caller] - fn assert_hunks( - diff: &BufferDiff, - buffer: &BufferSnapshot, - head_text: &str, - expected_hunks: &[(Range, &str, &str)], - range: 
Option>, - ) { - let actual_hunks = diff - .hunks_in_range(range.unwrap_or(0..u32::MAX), buffer) - .map(|hunk| { - ( - hunk.buffer_range.clone(), - &head_text[hunk.head_byte_range], - buffer - .text_for_range( - Point::new(hunk.buffer_range.start, 0) - ..Point::new(hunk.buffer_range.end, 0), - ) - .collect::(), - ) - }) - .collect::>(); - - let expected_hunks: Vec<_> = expected_hunks - .iter() - .map(|(r, s, h)| (r.clone(), *s, h.to_string())) - .collect(); - - assert_eq!(actual_hunks, expected_hunks); - } } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index f834ebc219..fb43e44561 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,7 +1,11 @@ use anyhow::Result; +use collections::HashMap; use git2::Repository as LibGitRepository; use parking_lot::Mutex; -use std::{path::Path, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use util::ResultExt; #[async_trait::async_trait] @@ -140,14 +144,25 @@ pub struct FakeGitRepository { content_path: Arc, git_dir_path: Arc, scan_id: usize, + state: Arc>, +} + +#[derive(Debug, Clone, Default)] +pub struct FakeGitRepositoryState { + pub index_contents: HashMap, } impl FakeGitRepository { - pub fn open(dotgit_path: &Path, scan_id: usize) -> Box { + pub fn open( + dotgit_path: &Path, + scan_id: usize, + state: Arc>, + ) -> Box { Box::new(FakeGitRepository { content_path: dotgit_path.parent().unwrap().into(), git_dir_path: dotgit_path.into(), scan_id, + state, }) } } @@ -174,12 +189,13 @@ impl GitRepository for FakeGitRepository { self.scan_id } - async fn load_head_text(&self, _: &Path) -> Option { - None + async fn load_head_text(&self, path: &Path) -> Option { + let state = self.state.lock(); + state.index_contents.get(path).cloned() } fn reopen_git_repo(&mut self) -> bool { - false + true } fn git_repo(&self) -> Arc> { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 0268f1cc68..831236ad5d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -662,6 +662,11 @@ impl Buffer { task } + #[cfg(any(test, feature = "test-support"))] + pub fn head_text(&self) -> Option<&str> { + self.head_text.as_deref() + } + pub fn update_head_text(&mut self, head_text: Option, cx: &mut ModelContext) { self.head_text = head_text; self.git_diff_recalc(cx); @@ -671,6 +676,10 @@ impl Buffer { self.git_diff_status.diff.needs_update(self) } + pub fn is_recalculating_git_diff(&self) -> bool { + self.git_diff_status.update_in_progress + } + pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) { if self.git_diff_status.update_in_progress { self.git_diff_status.update_requested = true; diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index c14edcd5e4..2b914ae373 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git::repository::{GitRepository, RealGitRepository}; +use git::repository::{FakeGitRepositoryState, GitRepository, RealGitRepository}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ @@ -277,6 +277,7 @@ enum FakeFsEntry { inode: u64, mtime: SystemTime, entries: BTreeMap>>, + git_repo_state: Option>>, }, Symlink { target: PathBuf, @@ -391,6 +392,7 @@ impl FakeFs { inode: 0, mtime: SystemTime::now(), entries: Default::default(), + git_repo_state: None, })), next_inode: 1, event_txs: Default::default(), @@ -480,6 +482,31 @@ impl FakeFs { .boxed() } 
+ pub async fn set_head_state_for_git_repository( + &self, + dot_git: &Path, + head_state: &[(&Path, String)], + ) { + let content_path = dot_git.parent().unwrap(); + let state = self.state.lock().await; + let entry = state.read_path(dot_git).await.unwrap(); + let mut entry = entry.lock().await; + + if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry { + let repo_state = git_repo_state.get_or_insert_with(Default::default); + let mut repo_state = repo_state.lock(); + + repo_state.index_contents.clear(); + repo_state.index_contents.extend( + head_state + .iter() + .map(|(path, content)| (content_path.join(path), content.clone())), + ); + } else { + panic!("not a directory"); + } + } + pub async fn files(&self) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); @@ -569,6 +596,7 @@ impl Fs for FakeFs { inode, mtime: SystemTime::now(), entries: Default::default(), + git_repo_state: None, })) }); Ok(()) @@ -854,10 +882,26 @@ impl Fs for FakeFs { } fn open_repo(&self, abs_dot_git: &Path) -> Option> { - Some(git::repository::FakeGitRepository::open( - abs_dot_git.into(), - 0, - )) + let executor = self.executor.upgrade().unwrap(); + executor.block(async move { + let state = self.state.lock().await; + let entry = state.read_path(abs_dot_git).await.unwrap(); + let mut entry = entry.lock().await; + if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry { + let state = git_repo_state + .get_or_insert_with(|| { + Arc::new(parking_lot::Mutex::new(FakeGitRepositoryState::default())) + }) + .clone(); + Some(git::repository::FakeGitRepository::open( + abs_dot_git.into(), + 0, + state, + )) + } else { + None + } + }) } fn is_fake(&self) -> bool { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 0d2594475c..d3a5f710e0 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3288,15 +3288,15 @@ mod tests { #[test] fn test_changed_repos() { let prev_repos: Vec> = vec![ - FakeGitRepository::open(Path::new("/.git"), 0), - FakeGitRepository::open(Path::new("/a/.git"), 0), - FakeGitRepository::open(Path::new("/a/b/.git"), 0), + FakeGitRepository::open(Path::new("/.git"), 0, Default::default()), + FakeGitRepository::open(Path::new("/a/.git"), 0, Default::default()), + FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()), ]; let new_repos: Vec> = vec![ - FakeGitRepository::open(Path::new("/a/.git"), 1), - FakeGitRepository::open(Path::new("/a/b/.git"), 0), - FakeGitRepository::open(Path::new("/a/c/.git"), 0), + FakeGitRepository::open(Path::new("/a/.git"), 1, Default::default()), + FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()), + FakeGitRepository::open(Path::new("/a/c/.git"), 0, Default::default()), ]; let res = LocalWorktree::changed_repos(&prev_repos, &new_repos); diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index ab0e1eeabc..9f70ae1cc7 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -26,7 +26,7 @@ impl Anchor { bias: Bias::Right, buffer_id: None, }; - + pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering { let fragment_id_comparison = if self.timestamp == other.timestamp { Ordering::Equal From 42b7820dbbd1095c4e5f66e6a74d984c7843dbfa Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 18:05:09 -0400 Subject: [PATCH 088/140] Perform git diff on remote buffer open --- crates/project/src/project.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/crates/project/src/project.rs b/crates/project/src/project.rs index a2a49c9c93..1e8567d4d4 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5816,7 +5816,7 @@ impl Project { cx: &mut ModelContext, ) -> Task>> { let mut opened_buffer_rx = self.opened_buffer.1.clone(); - cx.spawn(|this, cx| async move { + cx.spawn(|this, mut cx| async move { let buffer = loop { let buffer = this.read_with(&cx, |this, cx| { this.opened_buffers @@ -5834,6 +5834,7 @@ impl Project { .await .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?; }; + buffer.update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx)); Ok(buffer) }) } From c95646a298d718096019a120b6f7e4ed890c63ce Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 30 Sep 2022 18:25:25 -0400 Subject: [PATCH 089/140] WIP Start refactoring separation of concerns for repo metadata Co-Authored-By: Max Brunsfeld Co-Authored-By: Mikayla Maki --- crates/collab/src/integration_tests.rs | 12 +- crates/git/src/repository.rs | 147 +++++-------------------- crates/language/src/buffer.rs | 4 - crates/project/src/fs.rs | 5 +- crates/project/src/worktree.rs | 53 +++++++-- 5 files changed, 73 insertions(+), 148 deletions(-) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 586d988ef1..d5a4c56b7d 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -1008,9 +1008,7 @@ async fn test_git_head_text( .unwrap(); // Wait for it to catch up to the new diff - buffer_a - .condition(cx_a, |buffer, _| !buffer.is_recalculating_git_diff()) - .await; + executor.run_until_parked(); // Smoke test diffing buffer_a.read_with(cx_a, |buffer, _| { @@ -1029,7 +1027,8 @@ async fn test_git_head_text( .await .unwrap(); - //TODO: WAIT FOR REMOTE UPDATES TO FINISH + // Wait remote buffer to catch up to the new diff + executor.run_until_parked(); // Smoke test diffing buffer_b.read_with(cx_b, |buffer, _| { @@ -1055,9 +1054,7 @@ async fn test_git_head_text( // TODO: Flush this file event // Wait for buffer_a to receive it - buffer_a - .condition(cx_a, |buffer, _| !buffer.is_recalculating_git_diff()) - .await; + executor.run_until_parked(); // Smoke test new diffing buffer_a.read_with(cx_a, |buffer, _| { @@ -1071,6 +1068,7 @@ async fn test_git_head_text( }); //TODO: WAIT FOR REMOTE UPDATES TO FINISH on B + executor.run_until_parked(); // Smoke test B buffer_b.read_with(cx_b, |buffer, _| { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index fb43e44561..ba8faa4b2b 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -2,88 +2,39 @@ use anyhow::Result; use collections::HashMap; use git2::Repository as LibGitRepository; use parking_lot::Mutex; -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; use util::ResultExt; +use std::{path::{Path, PathBuf}, sync::Arc}; #[async_trait::async_trait] -pub trait GitRepository: Send + Sync + std::fmt::Debug { - fn manages(&self, path: &Path) -> bool; +pub trait GitRepository: Send { + // fn manages(&self, path: &Path) -> bool; + // fn reopen_git_repo(&mut self) -> bool; + // fn git_repo(&self) -> Arc>; + // fn boxed_clone(&self) -> Box; - fn in_dot_git(&self, path: &Path) -> bool; - - fn content_path(&self) -> &Path; - - fn git_dir_path(&self) -> &Path; - - fn scan_id(&self) -> usize; - - fn set_scan_id(&mut self, scan_id: usize); - - fn reopen_git_repo(&mut self) -> bool; - - fn git_repo(&self) -> Arc>; - - fn boxed_clone(&self) -> Box; - - async fn load_head_text(&self, 
relative_file_path: &Path) -> Option; -} - -#[derive(Clone)] -pub struct RealGitRepository { - // Path to folder containing the .git file or directory - content_path: Arc, - // Path to the actual .git folder. - // Note: if .git is a file, this points to the folder indicated by the .git file - git_dir_path: Arc, - scan_id: usize, - libgit_repository: Arc>, -} - -impl RealGitRepository { - pub fn open(dotgit_path: &Path) -> Option> { + fn load_head_text(&self, relative_file_path: &Path) -> Option; + + fn open_real(dotgit_path: &Path) -> Option>> + where Self: Sized + { LibGitRepository::open(&dotgit_path) .log_err() - .and_then::, _>(|libgit_repository| { - Some(Box::new(Self { - content_path: libgit_repository.workdir()?.into(), - git_dir_path: dotgit_path.canonicalize().log_err()?.into(), - scan_id: 0, - libgit_repository: Arc::new(parking_lot::Mutex::new(libgit_repository)), - })) + .and_then::>, _>(|libgit_repository| { + Some(Arc::new(Mutex::new(libgit_repository))) }) } } #[async_trait::async_trait] -impl GitRepository for RealGitRepository { - fn manages(&self, path: &Path) -> bool { - path.canonicalize() - .map(|path| path.starts_with(&self.content_path)) - .unwrap_or(false) - } +impl GitRepository for LibGitRepository { + // fn manages(&self, path: &Path) -> bool { + // path.canonicalize() + // .map(|path| path.starts_with(&self.content_path)) + // .unwrap_or(false) + // } - fn in_dot_git(&self, path: &Path) -> bool { - path.canonicalize() - .map(|path| path.starts_with(&self.git_dir_path)) - .unwrap_or(false) - } - fn content_path(&self) -> &Path { - self.content_path.as_ref() - } - - fn git_dir_path(&self) -> &Path { - self.git_dir_path.as_ref() - } - - fn scan_id(&self) -> usize { - self.scan_id - } - - async fn load_head_text(&self, relative_file_path: &Path) -> Option { + fn load_head_text(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; @@ -97,53 +48,18 @@ impl GitRepository for RealGitRepository { Ok(Some(head_text)) } - match logic(&self.libgit_repository.as_ref().lock(), relative_file_path) { + match logic(&self, relative_file_path) { Ok(value) => return value, Err(err) => log::error!("Error loading head text: {:?}", err), } None } - - fn reopen_git_repo(&mut self) -> bool { - match LibGitRepository::open(&self.git_dir_path) { - Ok(repo) => { - self.libgit_repository = Arc::new(Mutex::new(repo)); - true - } - - Err(_) => false, - } - } - - fn git_repo(&self) -> Arc> { - self.libgit_repository.clone() - } - - fn set_scan_id(&mut self, scan_id: usize) { - self.scan_id = scan_id; - } - - fn boxed_clone(&self) -> Box { - Box::new(self.clone()) - } -} - -impl std::fmt::Debug for RealGitRepository { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("GitRepository") - .field("content_path", &self.content_path) - .field("git_dir_path", &self.git_dir_path) - .field("scan_id", &self.scan_id) - .field("libgit_repository", &"LibGitRepository") - .finish() - } } #[derive(Debug, Clone)] pub struct FakeGitRepository { content_path: Arc, git_dir_path: Arc, - scan_id: usize, state: Arc>, } @@ -153,15 +69,10 @@ pub struct FakeGitRepositoryState { } impl FakeGitRepository { - pub fn open( - dotgit_path: &Path, - scan_id: usize, - state: Arc>, - ) -> Box { + pub fn open(dotgit_path: &Path, state: Arc>) -> Box { Box::new(FakeGitRepository { content_path: dotgit_path.parent().unwrap().into(), git_dir_path: dotgit_path.into(), - 
scan_id, state, }) } @@ -173,9 +84,9 @@ impl GitRepository for FakeGitRepository { path.starts_with(self.content_path()) } - fn in_dot_git(&self, path: &Path) -> bool { - path.starts_with(self.git_dir_path()) - } + // fn in_dot_git(&self, path: &Path) -> bool { + // path.starts_with(self.git_dir_path()) + // } fn content_path(&self) -> &Path { &self.content_path @@ -185,10 +96,6 @@ impl GitRepository for FakeGitRepository { &self.git_dir_path } - fn scan_id(&self) -> usize { - self.scan_id - } - async fn load_head_text(&self, path: &Path) -> Option { let state = self.state.lock(); state.index_contents.get(path).cloned() @@ -202,10 +109,6 @@ impl GitRepository for FakeGitRepository { unimplemented!() } - fn set_scan_id(&mut self, scan_id: usize) { - self.scan_id = scan_id; - } - fn boxed_clone(&self) -> Box { Box::new(self.clone()) } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 831236ad5d..22706ab1b5 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -676,10 +676,6 @@ impl Buffer { self.git_diff_status.diff.needs_update(self) } - pub fn is_recalculating_git_diff(&self) -> bool { - self.git_diff_status.update_in_progress - } - pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) { if self.git_diff_status.update_in_progress { self.git_diff_status.update_requested = true; diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 2b914ae373..1280fcb8bc 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git::repository::{FakeGitRepositoryState, GitRepository, RealGitRepository}; +use git::repository::{FakeGitRepositoryState, GitRepository, Git2Repo}; use language::LineEnding; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ @@ -239,7 +239,7 @@ impl Fs for RealFs { } fn open_repo(&self, abs_dot_git: &Path) -> Option> { - RealGitRepository::open(&abs_dot_git) + Git2Repo::open(&abs_dot_git) } fn is_fake(&self) -> bool { @@ -895,7 +895,6 @@ impl Fs for FakeFs { .clone(); Some(git::repository::FakeGitRepository::open( abs_dot_git.into(), - 0, state, )) } else { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d3a5f710e0..4f14ab6ad1 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -97,10 +97,39 @@ pub struct Snapshot { is_complete: bool, } +#[derive(Clone)] +struct GitRepositoryEntry { + repo: Arc>, + + // repo: Box, + scan_id: usize, + // Path to folder containing the .git file or directory + content_path: Arc, + // Path to the actual .git folder. 
+ // Note: if .git is a file, this points to the folder indicated by the .git file
+ git_dir_path: Arc,
+}
+
+impl std::fmt::Debug for GitRepositoryEntry {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("GitRepositoryEntry")
+ .field("content_path", &self.content_path)
+ .field("git_dir_path", &self.git_dir_path)
+ .field("libgit_repository", &"LibGitRepository")
+ .finish()
+ }
+}
+
+// impl Clone for GitRepositoryEntry {
+// fn clone(&self) -> Self {
+// GitRepositoryEntry { repo: self.repo.boxed_clone(), scan_id: self.scan_id }
+// }
+// }
+
pub struct LocalSnapshot {
abs_path: Arc,
ignores_by_parent_abs_path: HashMap, (Arc, usize)>,
- git_repositories: Vec>,
+ git_repositories: Vec,
removed_entry_ids: HashMap,
next_entry_id: Arc,
snapshot: Snapshot,
@@ -115,7 +144,7 @@ impl Clone for LocalSnapshot {
git_repositories: self
.git_repositories
.iter()
- .map(|repo| repo.boxed_clone())
+ .cloned()
.collect(),
removed_entry_ids: self.removed_entry_ids.clone(),
next_entry_id: self.next_entry_id.clone(),
@@ -3287,17 +3316,17 @@ mod tests {
#[test]
fn test_changed_repos() {
- let prev_repos: Vec> = vec![
- FakeGitRepository::open(Path::new("/.git"), 0, Default::default()),
- FakeGitRepository::open(Path::new("/a/.git"), 0, Default::default()),
- FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()),
- ];
+ // let prev_repos: Vec> = vec![
+ // FakeGitRepository::open(Path::new("/.git"), 0, Default::default()),
+ // FakeGitRepository::open(Path::new("/a/.git"), 0, Default::default()),
+ // FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()),
+ // ];
- let new_repos: Vec> = vec![
- FakeGitRepository::open(Path::new("/a/.git"), 1, Default::default()),
- FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()),
- FakeGitRepository::open(Path::new("/a/c/.git"), 0, Default::default()),
- ];
+ // let new_repos: Vec> = vec![
+ // FakeGitRepository::open(Path::new("/a/.git"), 1, Default::default()),
+ // FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()),
+ // FakeGitRepository::open(Path::new("/a/c/.git"), 0, Default::default()),
+ // ];
let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);

From af0974264cd61f21ae46f8b0cf8eee3025314d5b Mon Sep 17 00:00:00 2001
From: Mikayla Maki
Date: Fri, 30 Sep 2022 17:33:34 -0700
Subject: [PATCH 090/140] Refactored git repository code to separate out repository entry tracking data and git2 mocking code.
Co-authored-by: Max Co-authored-by: Julia --- Cargo.lock | 1 + crates/collab/src/integration_tests.rs | 12 +- crates/git/Cargo.toml | 1 + crates/git/src/repository.rs | 73 ++-------- crates/project/src/fs.rs | 29 ++-- crates/project/src/project.rs | 11 +- crates/project/src/worktree.rs | 191 +++++++++++++------------ 7 files changed, 143 insertions(+), 175 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75dd5530c9..fa8f8acbdc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2235,6 +2235,7 @@ dependencies = [ "async-trait", "clock", "collections", + "futures", "git2", "lazy_static", "log", diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index d5a4c56b7d..168231a6b4 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -966,7 +966,8 @@ async fn test_git_head_text( .insert_tree( "/dir", json!({ - ".git": {}, + ".git": { + }, "a.txt": " one two @@ -983,9 +984,8 @@ async fn test_git_head_text( .unindent(); let new_head_text = " - 1 + one two - three " .unindent(); @@ -1041,7 +1041,6 @@ async fn test_git_head_text( ); }); - // TODO: Create a dummy file event client_a .fs .as_fake() @@ -1051,19 +1050,18 @@ async fn test_git_head_text( ) .await; - // TODO: Flush this file event - // Wait for buffer_a to receive it executor.run_until_parked(); // Smoke test new diffing buffer_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.head_text(), Some(new_head_text.as_ref())); + git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, &head_text, - &[(0..1, "1", "one\n")], + &[(2..3, "", "three\n")], ); }); diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 744fdc8b99..b8f3aac0b9 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -19,6 +19,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] } smol = "1.2" parking_lot = "0.11.1" async-trait = "0.1" +futures = "0.3" [dev-dependencies] unindent = "0.1.7" diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ba8faa4b2b..a49a1e0b60 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,39 +1,20 @@ use anyhow::Result; use collections::HashMap; -use git2::Repository as LibGitRepository; use parking_lot::Mutex; -use util::ResultExt; -use std::{path::{Path, PathBuf}, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; + +pub use git2::Repository as LibGitRepository; #[async_trait::async_trait] pub trait GitRepository: Send { - // fn manages(&self, path: &Path) -> bool; - // fn reopen_git_repo(&mut self) -> bool; - // fn git_repo(&self) -> Arc>; - // fn boxed_clone(&self) -> Box; - fn load_head_text(&self, relative_file_path: &Path) -> Option; - - fn open_real(dotgit_path: &Path) -> Option>> - where Self: Sized - { - LibGitRepository::open(&dotgit_path) - .log_err() - .and_then::>, _>(|libgit_repository| { - Some(Arc::new(Mutex::new(libgit_repository))) - }) - } } #[async_trait::async_trait] impl GitRepository for LibGitRepository { - // fn manages(&self, path: &Path) -> bool { - // path.canonicalize() - // .map(|path| path.starts_with(&self.content_path)) - // .unwrap_or(false) - // } - - fn load_head_text(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; @@ -56,10 +37,8 @@ impl GitRepository for LibGitRepository { } } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Default)] pub struct FakeGitRepository { - content_path: Arc, - 
git_dir_path: Arc, state: Arc>, } @@ -69,47 +48,15 @@ pub struct FakeGitRepositoryState { } impl FakeGitRepository { - pub fn open(dotgit_path: &Path, state: Arc>) -> Box { - Box::new(FakeGitRepository { - content_path: dotgit_path.parent().unwrap().into(), - git_dir_path: dotgit_path.into(), - state, - }) + pub fn open(state: Arc>) -> Arc> { + Arc::new(Mutex::new(FakeGitRepository { state })) } } #[async_trait::async_trait] impl GitRepository for FakeGitRepository { - fn manages(&self, path: &Path) -> bool { - path.starts_with(self.content_path()) - } - - // fn in_dot_git(&self, path: &Path) -> bool { - // path.starts_with(self.git_dir_path()) - // } - - fn content_path(&self) -> &Path { - &self.content_path - } - - fn git_dir_path(&self) -> &Path { - &self.git_dir_path - } - - async fn load_head_text(&self, path: &Path) -> Option { + fn load_head_text(&self, path: &Path) -> Option { let state = self.state.lock(); state.index_contents.get(path).cloned() } - - fn reopen_git_repo(&mut self) -> bool { - true - } - - fn git_repo(&self) -> Arc> { - unimplemented!() - } - - fn boxed_clone(&self) -> Box { - Box::new(self.clone()) - } } diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 1280fcb8bc..d0e549c0b5 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,8 +1,9 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git::repository::{FakeGitRepositoryState, GitRepository, Git2Repo}; +use git::repository::{FakeGitRepositoryState, GitRepository, LibGitRepository}; use language::LineEnding; +use parking_lot::Mutex as SyncMutex; use smol::io::{AsyncReadExt, AsyncWriteExt}; use std::{ io, @@ -11,6 +12,7 @@ use std::{ pin::Pin, time::{Duration, SystemTime}, }; +use util::ResultExt; use text::Rope; @@ -44,7 +46,7 @@ pub trait Fs: Send + Sync { path: &Path, latency: Duration, ) -> Pin>>>; - fn open_repo(&self, abs_dot_git: &Path) -> Option>; + fn open_repo(&self, abs_dot_git: &Path) -> Option>>; fn is_fake(&self) -> bool; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> &FakeFs; @@ -238,8 +240,12 @@ impl Fs for RealFs { }))) } - fn open_repo(&self, abs_dot_git: &Path) -> Option> { - Git2Repo::open(&abs_dot_git) + fn open_repo(&self, dotgit_path: &Path) -> Option>> { + LibGitRepository::open(&dotgit_path) + .log_err() + .and_then::>, _>(|libgit_repository| { + Some(Arc::new(SyncMutex::new(libgit_repository))) + }) } fn is_fake(&self) -> bool { @@ -277,7 +283,7 @@ enum FakeFsEntry { inode: u64, mtime: SystemTime, entries: BTreeMap>>, - git_repo_state: Option>>, + git_repo_state: Option>>, }, Symlink { target: PathBuf, @@ -488,7 +494,7 @@ impl FakeFs { head_state: &[(&Path, String)], ) { let content_path = dot_git.parent().unwrap(); - let state = self.state.lock().await; + let mut state = self.state.lock().await; let entry = state.read_path(dot_git).await.unwrap(); let mut entry = entry.lock().await; @@ -502,6 +508,8 @@ impl FakeFs { .iter() .map(|(path, content)| (content_path.join(path), content.clone())), ); + + state.emit_event([dot_git]); } else { panic!("not a directory"); } @@ -881,7 +889,7 @@ impl Fs for FakeFs { })) } - fn open_repo(&self, abs_dot_git: &Path) -> Option> { + fn open_repo(&self, abs_dot_git: &Path) -> Option>> { let executor = self.executor.upgrade().unwrap(); executor.block(async move { let state = self.state.lock().await; @@ -890,13 +898,10 @@ impl Fs for FakeFs { if let FakeFsEntry::Dir { git_repo_state, .. 
} = &mut *entry { let state = git_repo_state .get_or_insert_with(|| { - Arc::new(parking_lot::Mutex::new(FakeGitRepositoryState::default())) + Arc::new(SyncMutex::new(FakeGitRepositoryState::default())) }) .clone(); - Some(git::repository::FakeGitRepository::open( - abs_dot_git.into(), - state, - )) + Some(git::repository::FakeGitRepository::open(state)) } else { None } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 1e8567d4d4..f1aa98c4e0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -12,7 +12,7 @@ use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt}; -use git::repository::GitRepository; + use gpui::{ AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, @@ -4648,7 +4648,7 @@ impl Project { fn update_local_worktree_buffers_git_repos( &mut self, - repos: &[Box], + repos: &[GitRepositoryEntry], cx: &mut ModelContext, ) { //TODO: Produce protos @@ -4663,12 +4663,15 @@ impl Project { let abs_path = file.abs_path(cx); let repo = match repos.iter().find(|repo| repo.manages(&abs_path)) { - Some(repo) => repo.boxed_clone(), + Some(repo) => repo.clone(), None => return, }; cx.spawn(|_, mut cx| async move { - let head_text = repo.load_head_text(&path).await; + let head_text = cx + .background() + .spawn(async move { repo.repo.lock().load_head_text(&path) }) + .await; buffer.update(&mut cx, |buffer, cx| { buffer.update_head_text(head_text, cx); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4f14ab6ad1..560f23d147 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -98,16 +98,15 @@ pub struct Snapshot { } #[derive(Clone)] -struct GitRepositoryEntry { - repo: Arc>, - - // repo: Box, - scan_id: usize, +pub struct GitRepositoryEntry { + pub(crate) repo: Arc>, + + pub(crate) scan_id: usize, // Path to folder containing the .git file or directory - content_path: Arc, + pub(crate) content_path: Arc, // Path to the actual .git folder. 
// Note: if .git is a file, this points to the folder indicated by the .git file - git_dir_path: Arc, + pub(crate) git_dir_path: Arc, } impl std::fmt::Debug for GitRepositoryEntry { @@ -141,11 +140,7 @@ impl Clone for LocalSnapshot { Self { abs_path: self.abs_path.clone(), ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(), - git_repositories: self - .git_repositories - .iter() - .cloned() - .collect(), + git_repositories: self.git_repositories.iter().cloned().collect(), removed_entry_ids: self.removed_entry_ids.clone(), next_entry_id: self.next_entry_id.clone(), snapshot: self.snapshot.clone(), @@ -186,7 +181,7 @@ struct ShareState { pub enum Event { UpdatedEntries, - UpdatedGitRepositories(Vec>), + UpdatedGitRepositories(Vec), } impl Entity for Worktree { @@ -610,27 +605,26 @@ impl LocalWorktree { } fn changed_repos( - old_repos: &[Box], - new_repos: &[Box], - ) -> Vec> { + old_repos: &[GitRepositoryEntry], + new_repos: &[GitRepositoryEntry], + ) -> Vec { fn diff<'a>( - a: &'a [Box], - b: &'a [Box], - updated: &mut HashMap<&'a Path, Box>, + a: &'a [GitRepositoryEntry], + b: &'a [GitRepositoryEntry], + updated: &mut HashMap<&'a Path, GitRepositoryEntry>, ) { for a_repo in a { let matched = b.iter().find(|b_repo| { - a_repo.git_dir_path() == b_repo.git_dir_path() - && a_repo.scan_id() == b_repo.scan_id() + a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id }); if matched.is_none() { - updated.insert(a_repo.git_dir_path(), a_repo.boxed_clone()); + updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone()); } } } - let mut updated = HashMap::<&Path, Box>::default(); + let mut updated = HashMap::<&Path, GitRepositoryEntry>::default(); diff(old_repos, new_repos, &mut updated); diff(new_repos, old_repos, &mut updated); @@ -690,7 +684,12 @@ impl LocalWorktree { settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked ) { let results = if let Some(repo) = snapshot.repo_for(&abs_path) { - repo.load_head_text(&path).await + cx.background() + .spawn({ + let path = path.clone(); + async move { repo.repo.lock().load_head_text(&path) } + }) + .await } else { None }; @@ -1390,25 +1389,19 @@ impl LocalSnapshot { } // Gives the most specific git repository for a given path - pub(crate) fn repo_for(&self, path: &Path) -> Option> { + pub(crate) fn repo_for(&self, path: &Path) -> Option { self.git_repositories .iter() .rev() //git_repository is ordered lexicographically - .find(|repo| repo.manages(&self.abs_path.join(path))) - .map(|repo| repo.boxed_clone()) + .find(|repo| repo.manages(path)) + .cloned() } - pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut Box> { + pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> { + // Git repositories cannot be nested, so we don't need to reverse the order self.git_repositories .iter_mut() - .rev() //git_repository is ordered lexicographically - .find(|repo| repo.in_dot_git(&self.abs_path.join(path))) - } - - pub(crate) fn _tracks_filepath(&self, repo: &dyn GitRepository, file_path: &Path) -> bool { - // Depends on git_repository_for_file_path returning the most specific git repository for a given path - self.repo_for(&self.abs_path.join(file_path)) - .map_or(false, |r| r.git_dir_path() == repo.git_dir_path()) + .find(|repo| repo.in_dot_git(path)) } #[cfg(test)] @@ -1575,12 +1568,21 @@ impl LocalSnapshot { if parent_path.file_name() == Some(&DOT_GIT) { let abs_path = self.abs_path.join(&parent_path); + let content_path: Arc = 
parent_path.parent().unwrap().into(); if let Err(ix) = self .git_repositories - .binary_search_by_key(&abs_path.as_path(), |repo| repo.git_dir_path()) + .binary_search_by_key(&&content_path, |repo| &repo.content_path) { - if let Some(repository) = fs.open_repo(abs_path.as_path()) { - self.git_repositories.insert(ix, repository); + if let Some(repo) = fs.open_repo(abs_path.as_path()) { + self.git_repositories.insert( + ix, + GitRepositoryEntry { + repo, + scan_id: 0, + content_path, + git_dir_path: parent_path, + }, + ); } } } @@ -1673,9 +1675,9 @@ impl LocalSnapshot { let parent_path = path.parent().unwrap(); if let Ok(ix) = self .git_repositories - .binary_search_by_key(&parent_path, |repo| repo.content_path().as_ref()) + .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref()) { - self.git_repositories[ix].set_scan_id(self.snapshot.scan_id); + self.git_repositories[ix].scan_id = self.snapshot.scan_id; } } } @@ -1716,6 +1718,25 @@ impl LocalSnapshot { ignore_stack } + + pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] { + &self.git_repositories + } +} +// Worktree root +// | +// git_dir_path: c/d/.git +//in_dot_git Query: c/d/.git/HEAD +// Manages Query: c/d/e/f/a.txt + +impl GitRepositoryEntry { + pub(crate) fn manages(&self, path: &Path) -> bool { + path.starts_with(self.content_path.as_ref()) + } + + pub(crate) fn in_dot_git(&self, path: &Path) -> bool { + path.starts_with(self.git_dir_path.as_ref()) + } } async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { @@ -2509,8 +2530,8 @@ impl BackgroundScanner { snapshot.insert_entry(fs_entry, self.fs.as_ref()); let scan_id = snapshot.scan_id; - if let Some(repo) = snapshot.in_dot_git(&abs_path) { - repo.set_scan_id(scan_id); + if let Some(repo) = snapshot.in_dot_git(&path) { + repo.scan_id = scan_id; } let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path); @@ -2625,19 +2646,21 @@ impl BackgroundScanner { .await; } + // TODO: Clarify what is going on here because re-loading every git repository + // on every file system event seems wrong async fn update_git_repositories(&self) { let mut snapshot = self.snapshot.lock(); let new_repos = snapshot .git_repositories .iter() - .map(|repo| repo.boxed_clone()) - .filter_map(|mut repo| { - if repo.reopen_git_repo() { - Some(repo) - } else { - None - } + .cloned() + .filter_map(|mut repo_entry| { + let repo = self + .fs + .open_repo(&snapshot.abs_path.join(&repo_entry.git_dir_path))?; + repo_entry.repo = repo; + Some(repo_entry) }) .collect(); @@ -3262,34 +3285,17 @@ mod tests { assert!(tree.repo_for("c.txt".as_ref()).is_none()); let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap(); - - assert_eq!( - repo.content_path(), - root.path().join("dir1").canonicalize().unwrap() - ); - assert_eq!( - repo.git_dir_path(), - root.path().join("dir1/.git").canonicalize().unwrap() - ); + assert_eq!(repo.content_path.as_ref(), Path::new("dir1")); + assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git")); let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap(); - - assert_eq!( - repo.content_path(), - root.path().join("dir1/deps/dep1").canonicalize().unwrap() - ); - assert_eq!( - repo.git_dir_path(), - root.path() - .join("dir1/deps/dep1/.git") - .canonicalize() - .unwrap() - ); + assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1")); + assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),); }); let original_scan_id = tree.read_with(cx, |tree, _cx| { let tree = tree.as_local().unwrap(); - 
tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id() + tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id }); std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); @@ -3297,7 +3303,7 @@ mod tests { tree.read_with(cx, |tree, _cx| { let tree = tree.as_local().unwrap(); - let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id(); + let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id; assert_ne!( original_scan_id, new_scan_id, "original {original_scan_id}, new {new_scan_id}" @@ -3316,44 +3322,51 @@ mod tests { #[test] fn test_changed_repos() { - // let prev_repos: Vec> = vec![ - // FakeGitRepository::open(Path::new("/.git"), 0, Default::default()), - // FakeGitRepository::open(Path::new("/a/.git"), 0, Default::default()), - // FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()), - // ]; + fn fake_entry(git_dir_path: impl AsRef, scan_id: usize) -> GitRepositoryEntry { + GitRepositoryEntry { + repo: Arc::new(Mutex::new(FakeGitRepository::default())), + scan_id, + content_path: git_dir_path.as_ref().parent().unwrap().into(), + git_dir_path: git_dir_path.as_ref().into(), + } + } - // let new_repos: Vec> = vec![ - // FakeGitRepository::open(Path::new("/a/.git"), 1, Default::default()), - // FakeGitRepository::open(Path::new("/a/b/.git"), 0, Default::default()), - // FakeGitRepository::open(Path::new("/a/c/.git"), 0, Default::default()), - // ]; + let prev_repos: Vec = vec![ + fake_entry("/.git", 0), + fake_entry("/a/.git", 0), + fake_entry("/a/b/.git", 0), + ]; + + let new_repos: Vec = vec![ + fake_entry("/a/.git", 1), + fake_entry("/a/b/.git", 0), + fake_entry("/a/c/.git", 0), + ]; let res = LocalWorktree::changed_repos(&prev_repos, &new_repos); - dbg!(&res); - // Deletion retained assert!(res .iter() - .find(|repo| repo.git_dir_path() == Path::new("/.git") && repo.scan_id() == 0) + .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0) .is_some()); // Update retained assert!(res .iter() - .find(|repo| repo.git_dir_path() == Path::new("/a/.git") && repo.scan_id() == 1) + .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1) .is_some()); // Addition retained assert!(res .iter() - .find(|repo| repo.git_dir_path() == Path::new("/a/c/.git") && repo.scan_id() == 0) + .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0) .is_some()); // Nochange, not retained assert!(res .iter() - .find(|repo| repo.git_dir_path() == Path::new("/a/b/.git") && repo.scan_id() == 0) + .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0) .is_none()); } From a1299d9b68b2b6bf3ec01b311daabf2f54d6b1e5 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 30 Sep 2022 17:34:14 -0700 Subject: [PATCH 091/140] Fixed 1 test --- crates/collab/src/integration_tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 168231a6b4..eb3fbc3dc8 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -1075,7 +1075,7 @@ async fn test_git_head_text( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, &head_text, - &[(0..1, "1", "one\n")], + &[(2..3, "", "three\n")], ); }); } From 8c24c858c9efee31e0acb55ef61ceb7fe4f0cd43 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 30 Sep 2022 17:36:22 -0700 Subject: [PATCH 092/140] Touched up comments --- 
crates/project/src/worktree.rs | 13 ++-----------
1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs
index 560f23d147..40efeee1d1 100644
--- a/crates/project/src/worktree.rs
+++ b/crates/project/src/worktree.rs
@@ -119,12 +119,6 @@ impl std::fmt::Debug for GitRepositoryEntry {
}
}
-// impl Clone for GitRepositoryEntry {
-// fn clone(&self) -> Self {
-// GitRepositoryEntry { repo: self.repo.boxed_clone(), scan_id: self.scan_id }
-// }
-// }
-
pub struct LocalSnapshot {
abs_path: Arc,
ignores_by_parent_abs_path: HashMap, (Arc, usize)>,
@@ -1723,17 +1717,14 @@ impl LocalSnapshot {
&self.git_repositories
}
}
-// Worktree root
-// |
-// git_dir_path: c/d/.git
-//in_dot_git Query: c/d/.git/HEAD
-// Manages Query: c/d/e/f/a.txt
impl GitRepositoryEntry {
+ // Note that these paths should be relative to the worktree root.
pub(crate) fn manages(&self, path: &Path) -> bool {
path.starts_with(self.content_path.as_ref())
}
+ // Note that this path should be relative to the worktree root.
pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
path.starts_with(self.git_dir_path.as_ref())
}

From 512f817e2f50a5917f954971c96ee763ae16b33d Mon Sep 17 00:00:00 2001
From: Mikayla Maki
Date: Sat, 1 Oct 2022 18:18:35 -0700
Subject: [PATCH 093/140] Added proto messages for updating the head text

---
crates/collab/src/integration_tests.rs | 3 --
crates/collab/src/rpc.rs | 18 +++++++++-
crates/project/src/project.rs | 46 ++++++++++++++++++++++++--
crates/rpc/proto/zed.proto | 7 ++++
crates/rpc/src/proto.rs | 2 ++
5 files changed, 69 insertions(+), 7 deletions(-)

diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs
index eb3fbc3dc8..422c9fd0bb 100644
--- a/crates/collab/src/integration_tests.rs
+++ b/crates/collab/src/integration_tests.rs
@@ -1065,9 +1065,6 @@ async fn test_git_head_text(
);
});
- //TODO: WAIT FOR REMOTE UPDATES TO FINISH on B
- executor.run_until_parked();
-
// Smoke test B
buffer_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.head_text(), Some(new_head_text.as_ref()));

diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs
index 5f27352c5a..318555b7ed 100644
--- a/crates/collab/src/rpc.rs
+++ b/crates/collab/src/rpc.rs
@@ -205,7 +205,8 @@ impl Server {
.add_request_handler(Server::follow)
.add_message_handler(Server::unfollow)
.add_message_handler(Server::update_followers)
- .add_request_handler(Server::get_channel_messages);
+ .add_request_handler(Server::get_channel_messages)
+ .add_message_handler(Server::update_head_text);
Arc::new(server)
}
@@ -1727,6 +1728,21 @@ impl Server {
Ok(())
}
+ async fn update_head_text(
+ self: Arc,
+ request: TypedEnvelope,
+ ) -> Result<()> {
+ let receiver_ids = self.store().await.project_connection_ids(
+ ProjectId::from_proto(request.payload.project_id),
+ request.sender_id,
+ )?;
+ broadcast(request.sender_id, receiver_ids, |connection_id| {
+ self.peer
+ .forward_send(request.sender_id, connection_id, request.payload.clone())
+ });
+ Ok(())
+ }
+
pub(crate) async fn store(&self) -> StoreGuard<'_> {
#[cfg(test)]
tokio::task::yield_now().await;

diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs
index f1aa98c4e0..1064d05fe9 100644
--- a/crates/project/src/project.rs
+++ b/crates/project/src/project.rs
@@ -8,7 +8,10 @@ pub mod worktree;
mod project_tests;
use anyhow::{anyhow, Context, Result};
-use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
+use client::{
+ proto::{self},
+ Client, PeerId,
TypedEnvelope, User, UserStore, +}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt}; @@ -421,6 +424,7 @@ impl Project { client.add_model_request_handler(Self::handle_open_buffer_by_id); client.add_model_request_handler(Self::handle_open_buffer_by_path); client.add_model_request_handler(Self::handle_save_buffer); + client.add_model_message_handler(Self::handle_update_head_text); } pub fn local( @@ -4667,14 +4671,29 @@ impl Project { None => return, }; + let shared_remote_id = self.shared_remote_id(); + let client = self.client.clone(); + cx.spawn(|_, mut cx| async move { let head_text = cx .background() .spawn(async move { repo.repo.lock().load_head_text(&path) }) .await; - buffer.update(&mut cx, |buffer, cx| { - buffer.update_head_text(head_text, cx); + + let buffer_id = buffer.update(&mut cx, |buffer, cx| { + buffer.update_head_text(head_text.clone(), cx); + buffer.remote_id() }); + + if let Some(project_id) = shared_remote_id { + client + .send(proto::UpdateHeadText { + project_id, + buffer_id: buffer_id as u64, + head_text, + }) + .log_err(); + } }) .detach(); } @@ -5253,6 +5272,27 @@ impl Project { }) } + async fn handle_update_head_text( + this: ModelHandle, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + let buffer_id = envelope.payload.buffer_id; + let head_text = envelope.payload.head_text; + let buffer = this + .opened_buffers + .get_mut(&buffer_id) + .and_then(|b| b.upgrade(cx)) + .ok_or_else(|| anyhow!("No such buffer {}", buffer_id))?; + + buffer.update(cx, |buffer, cx| buffer.update_head_text(head_text, cx)); + + Ok(()) + }) + } + async fn handle_update_buffer_file( this: ModelHandle, envelope: TypedEnvelope, diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 818f2cb7e1..d6604383da 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -108,6 +108,7 @@ message Envelope { FollowResponse follow_response = 93; UpdateFollowers update_followers = 94; Unfollow unfollow = 95; + UpdateHeadText update_head_text = 96; } } @@ -992,3 +993,9 @@ message WorktreeMetadata { string root_name = 2; bool visible = 3; } + +message UpdateHeadText { + uint64 project_id = 1; + uint64 buffer_id = 2; + optional string head_text = 3; +} diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 2ba3fa18ba..e91a9fd558 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -167,6 +167,7 @@ messages!( (UpdateProject, Foreground), (UpdateWorktree, Foreground), (UpdateWorktreeExtensions, Background), + (UpdateHeadText, Background), ); request_messages!( @@ -263,6 +264,7 @@ entity_messages!( UpdateProject, UpdateWorktree, UpdateWorktreeExtensions, + UpdateHeadText ); entity_messages!(channel_id, ChannelMessageSent); From 7f84abaf13c1e0720de5929bf242b3a51fb525b2 Mon Sep 17 00:00:00 2001 From: Julia Date: Sun, 2 Oct 2022 14:11:35 -0400 Subject: [PATCH 094/140] Increment protocol version again for previous commit --- crates/rpc/src/rpc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index 2c28462ee3..640271d4a2 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -6,4 +6,4 @@ pub use conn::Connection; pub use peer::*; mod macros; -pub const PROTOCOL_VERSION: u32 = 33; +pub const PROTOCOL_VERSION: u32 = 34; From 5769cdc3543f953ed38573d566355e7107e8494e Mon Sep 17 00:00:00 
2001 From: Mikayla Maki Date: Sun, 2 Oct 2022 17:56:09 -0700 Subject: [PATCH 095/140] made git diff rendering respect line wrap --- crates/editor/src/element.rs | 237 ++++++++++++++++++++++----------- crates/git/src/diff.rs | 2 +- crates/project/src/fs.rs | 10 +- crates/theme/src/theme.rs | 17 ++- styles/src/styleTree/editor.ts | 14 +- 5 files changed, 185 insertions(+), 95 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 57ee919288..5d83051567 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -46,6 +46,7 @@ use std::{ ops::Range, sync::Arc, }; +use theme::DiffStyle; struct SelectionLayout { head: DisplayPoint, @@ -525,98 +526,156 @@ impl EditorElement { layout: &mut LayoutState, cx: &mut PaintContext, ) { - let line_height = layout.position_map.line_height; - let scroll_position = layout.position_map.snapshot.scroll_position(); - let scroll_top = scroll_position.y() * line_height; + struct GutterLayout { + line_height: f32, + // scroll_position: Vector2F, + scroll_top: f32, + bounds: RectF, + } + + struct DiffLayout<'a> { + buffer_line: usize, + last_diff: Option<(&'a DiffHunk, usize)>, + } + + fn diff_quad( + status: DiffHunkStatus, + layout_range: Range, + gutter_layout: &GutterLayout, + diff_style: &DiffStyle, + ) -> Quad { + let color = match status { + DiffHunkStatus::Added => diff_style.inserted, + DiffHunkStatus::Modified => diff_style.modified, + + //TODO: This rendering is entirely a horrible hack + DiffHunkStatus::Removed => { + let row = layout_range.start; + + let offset = gutter_layout.line_height / 2.; + let start_y = + row as f32 * gutter_layout.line_height + offset - gutter_layout.scroll_top; + let end_y = start_y + gutter_layout.line_height; + + let width = diff_style.removed_width_em * gutter_layout.line_height; + let highlight_origin = gutter_layout.bounds.origin() + vec2f(-width, start_y); + let highlight_size = vec2f(width * 2., end_y - start_y); + let highlight_bounds = RectF::new(highlight_origin, highlight_size); + + return Quad { + bounds: highlight_bounds, + background: Some(diff_style.deleted), + border: Border::new(0., Color::transparent_black()), + corner_radius: 1. 
* gutter_layout.line_height, + }; + } + }; + + let start_row = layout_range.start; + let end_row = layout_range.end; + + let start_y = start_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top; + let end_y = end_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top; + + let width = diff_style.width_em * gutter_layout.line_height; + let highlight_origin = gutter_layout.bounds.origin() + vec2f(-width, start_y); + let highlight_size = vec2f(width * 2., end_y - start_y); + let highlight_bounds = RectF::new(highlight_origin, highlight_size); + + Quad { + bounds: highlight_bounds, + background: Some(color), + border: Border::new(0., Color::transparent_black()), + corner_radius: diff_style.corner_radius * gutter_layout.line_height, + } + } + + let gutter_layout = { + let scroll_position = layout.position_map.snapshot.scroll_position(); + let line_height = layout.position_map.line_height; + GutterLayout { + scroll_top: scroll_position.y() * line_height, + // scroll_position, + line_height, + bounds, + } + }; + + let mut diff_layout = DiffLayout { + buffer_line: 0, + last_diff: None, + }; + + let diff_style = &cx.global::().theme.editor.diff.clone(); + // dbg!("***************"); + // dbg!(&layout.diff_hunks); + // dbg!("***************"); + + // line is `None` when there's a line wrap for (ix, line) in layout.line_number_layouts.iter().enumerate() { + // dbg!(ix); if let Some(line) = line { let line_origin = bounds.origin() + vec2f( bounds.width() - line.width() - layout.gutter_padding, - ix as f32 * layout.position_map.line_height - - (scroll_top % layout.position_map.line_height), + ix as f32 * gutter_layout.line_height + - (gutter_layout.scroll_top % gutter_layout.line_height), ); - line.paint( - line_origin, - visible_bounds, - layout.position_map.line_height, - cx, - ); + + line.paint(line_origin, visible_bounds, gutter_layout.line_height, cx); + + //This line starts a buffer line, so let's do the diff calculation + let new_hunk = get_hunk(diff_layout.buffer_line, &layout.diff_hunks); + + // This + the unwraps are annoying, but at least it's legible + let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) { + (None, None) => (false, false), + (None, Some(_)) => (false, true), + (Some(_), None) => (true, false), + (Some((old_hunk, _)), Some(new_hunk)) if new_hunk == old_hunk => (false, false), + (Some(_), Some(_)) => (true, true), + }; + + // dbg!(diff_layout.buffer_line, is_starting); + + if is_ending { + let (last_hunk, start_line) = diff_layout.last_diff.take().unwrap(); + // dbg!("ending"); + // dbg!(start_line..ix); + cx.scene.push_quad(diff_quad( + last_hunk.status(), + start_line..ix, + &gutter_layout, + diff_style, + )); + } + + if is_starting { + let new_hunk = new_hunk.unwrap(); + + diff_layout.last_diff = Some((new_hunk, ix)); + }; + + diff_layout.buffer_line += 1; } } - let ( - inserted_color, - modified_color, - deleted_color, - width_multiplier, - corner_radius, - removed_width_mult, - ) = { - let editor = &cx.global::().theme.editor; - ( - editor.diff_background_inserted, - editor.diff_background_modified, - editor.diff_background_deleted, - editor.diff_indicator_width_multiplier, - editor.diff_indicator_corner_radius, - editor.removed_diff_width_multiplier, - ) - }; - - for hunk in &layout.diff_hunks { - let color = match hunk.status() { - DiffHunkStatus::Added => inserted_color, - DiffHunkStatus::Modified => modified_color, - - //TODO: This rendering is entirely a horrible hack - DiffHunkStatus::Removed => { - let row = 
hunk.buffer_range.start; - - let offset = line_height / 2.; - let start_y = row as f32 * line_height + offset - scroll_top; - let end_y = start_y + line_height; - - let width = removed_width_mult * line_height; - let highlight_origin = bounds.origin() + vec2f(-width, start_y); - let highlight_size = vec2f(width * 2., end_y - start_y); - let highlight_bounds = RectF::new(highlight_origin, highlight_size); - - cx.scene.push_quad(Quad { - bounds: highlight_bounds, - background: Some(deleted_color), - border: Border::new(0., Color::transparent_black()), - corner_radius: 1. * line_height, - }); - - continue; - } - }; - - let start_row = hunk.buffer_range.start; - let end_row = hunk.buffer_range.end; - - let start_y = start_row as f32 * line_height - scroll_top; - let end_y = end_row as f32 * line_height - scroll_top; - - let width = width_multiplier * line_height; - let highlight_origin = bounds.origin() + vec2f(-width, start_y); - let highlight_size = vec2f(width * 2., end_y - start_y); - let highlight_bounds = RectF::new(highlight_origin, highlight_size); - - cx.scene.push_quad(Quad { - bounds: highlight_bounds, - background: Some(color), - border: Border::new(0., Color::transparent_black()), - corner_radius: corner_radius * line_height, - }); + // If we ran out with a diff hunk still being prepped, paint it now + if let Some((last_hunk, start_line)) = diff_layout.last_diff { + let end_line = layout.line_number_layouts.len(); + cx.scene.push_quad(diff_quad( + last_hunk.status(), + start_line..end_line, + &gutter_layout, + diff_style, + )) } if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() { let mut x = bounds.width() - layout.gutter_padding; - let mut y = *row as f32 * line_height - scroll_top; + let mut y = *row as f32 * gutter_layout.line_height - gutter_layout.scroll_top; x += ((layout.gutter_padding + layout.gutter_margin) - indicator.size().x()) / 2.; - y += (line_height - indicator.size().y()) / 2.; + y += (gutter_layout.line_height - indicator.size().y()) / 2.; indicator.paint(bounds.origin() + vec2f(x, y), visible_bounds, cx); } } @@ -1321,6 +1380,28 @@ impl EditorElement { } } +/// Get the hunk that contains buffer_line, starting from start_idx +/// Returns none if there is none found, and +fn get_hunk(buffer_line: usize, hunks: &[DiffHunk]) -> Option<&DiffHunk> { + for i in 0..hunks.len() { + // Safety: Index out of bounds is handled by the check above + let hunk = hunks.get(i).unwrap(); + if hunk.buffer_range.contains(&(buffer_line as u32)) { + return Some(hunk); + } else if hunk.status() == DiffHunkStatus::Removed + && buffer_line == hunk.buffer_range.start as usize + { + return Some(hunk); + } else if hunk.buffer_range.start > buffer_line as u32 { + // If we've passed the buffer_line, just stop + return None; + } + } + + // We reached the end of the array without finding a hunk, just return none. 
+ return None; +} + impl Element for EditorElement { type LayoutState = LayoutState; type PaintState = (); diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 6c904d44d1..48630fc91c 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -6,7 +6,7 @@ use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum DiffHunkStatus { Added, Modified, diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index d0e549c0b5..2b7aca642d 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -1,10 +1,11 @@ use anyhow::{anyhow, Result}; use fsevent::EventStream; use futures::{future::BoxFuture, Stream, StreamExt}; -use git::repository::{FakeGitRepositoryState, GitRepository, LibGitRepository}; +use git::repository::{GitRepository, LibGitRepository}; use language::LineEnding; use parking_lot::Mutex as SyncMutex; use smol::io::{AsyncReadExt, AsyncWriteExt}; +use std::sync::Arc; use std::{ io, os::unix::fs::MetadataExt, @@ -12,16 +13,17 @@ use std::{ pin::Pin, time::{Duration, SystemTime}, }; -use util::ResultExt; - use text::Rope; +use util::ResultExt; #[cfg(any(test, feature = "test-support"))] use collections::{btree_map, BTreeMap}; #[cfg(any(test, feature = "test-support"))] use futures::lock::Mutex; #[cfg(any(test, feature = "test-support"))] -use std::sync::{Arc, Weak}; +use git::repository::FakeGitRepositoryState; +#[cfg(any(test, feature = "test-support"))] +use std::sync::Weak; #[async_trait::async_trait] pub trait Fs: Send + Sync { diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 0d0c94ea8d..d8c8296481 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -488,12 +488,7 @@ pub struct Editor { pub rename_fade: f32, pub document_highlight_read_background: Color, pub document_highlight_write_background: Color, - pub diff_background_deleted: Color, - pub diff_background_inserted: Color, - pub diff_background_modified: Color, - pub removed_diff_width_multiplier: f32, - pub diff_indicator_width_multiplier: f32, - pub diff_indicator_corner_radius: f32, + pub diff: DiffStyle, pub line_number: Color, pub line_number_active: Color, pub guest_selections: Vec, @@ -577,6 +572,16 @@ pub struct CodeActions { pub vertical_scale: f32, } +#[derive(Clone, Deserialize, Default)] +pub struct DiffStyle { + pub inserted: Color, + pub modified: Color, + pub deleted: Color, + pub removed_width_em: f32, + pub width_em: f32, + pub corner_radius: f32, +} + #[derive(Debug, Default, Clone, Copy)] pub struct Interactive { pub default: T, diff --git a/styles/src/styleTree/editor.ts b/styles/src/styleTree/editor.ts index 6e52c620ee..04a5bafbd5 100644 --- a/styles/src/styleTree/editor.ts +++ b/styles/src/styleTree/editor.ts @@ -60,12 +60,14 @@ export default function editor(theme: Theme) { indicator: iconColor(theme, "secondary"), verticalScale: 0.618 }, - diffBackgroundDeleted: theme.iconColor.error, - diffBackgroundInserted: theme.iconColor.ok, - diffBackgroundModified: theme.iconColor.warning, - removedDiffWidthMultiplier: 0.275, - diffIndicatorWidthMultiplier: 0.16, - diffIndicatorCornerRadius: 0.05, + diff: { + deleted: theme.iconColor.error, + inserted: theme.iconColor.ok, + modified: theme.iconColor.warning, + removedWidthEm: 0.275, + widthEm: 0.16, + cornerRadius: 0.05, + }, documentHighlightReadBackground: 
theme.editor.highlight.occurrence, documentHighlightWriteBackground: theme.editor.highlight.activeOccurrence, errorColor: theme.textColor.error, From 52dbf2f9b8246bb5b1258ab990939acb74efc527 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 2 Oct 2022 18:01:37 -0700 Subject: [PATCH 096/140] add proto stuff --- crates/client/src/client.rs | 4 +- crates/client/src/telemetry.rs | 10 +- crates/client/src/user.rs | 10 +- .../20220913211150_create_signups.down.sql | 6 - ....sql => 20220913211150_create_signups.sql} | 0 .../20220929182110_add_metrics_id.sql | 2 + crates/collab/src/api.rs | 82 ++-- crates/collab/src/db.rs | 56 ++- crates/collab/src/db_tests.rs | 349 +++++++++--------- crates/collab/src/integration_tests.rs | 7 +- crates/collab/src/rpc.rs | 17 + crates/rpc/proto/zed.proto | 10 +- crates/rpc/src/proto.rs | 3 + 13 files changed, 317 insertions(+), 239 deletions(-) delete mode 100644 crates/collab/migrations/20220913211150_create_signups.down.sql rename crates/collab/migrations/{20220913211150_create_signups.up.sql => 20220913211150_create_signups.sql} (100%) create mode 100644 crates/collab/migrations/20220929182110_add_metrics_id.sql diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index b75be62308..9ec24abae5 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -320,11 +320,9 @@ impl Client { log::info!("set status on client {}: {:?}", self.id, status); let mut state = self.state.write(); *state.status.0.borrow_mut() = status; - let user_id = state.credentials.as_ref().map(|c| c.user_id); match status { Status::Connected { .. } => { - self.telemetry.set_user_id(user_id); state._reconnect_task = None; } Status::ConnectionLost => { @@ -353,7 +351,7 @@ impl Client { })); } Status::SignedOut | Status::UpgradeRequired => { - self.telemetry.set_user_id(user_id); + self.telemetry.set_metrics_id(None); state._reconnect_task.take(); } _ => {} diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 8b7be5ba80..c9b5665e9e 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -29,7 +29,7 @@ pub struct Telemetry { #[derive(Default)] struct TelemetryState { - user_id: Option>, + metrics_id: Option>, device_id: Option>, app_version: Option>, os_version: Option>, @@ -115,7 +115,7 @@ impl Telemetry { flush_task: Default::default(), next_event_id: 0, log_file: None, - user_id: None, + metrics_id: None, }), }); @@ -176,8 +176,8 @@ impl Telemetry { .detach(); } - pub fn set_user_id(&self, user_id: Option) { - self.state.lock().user_id = user_id.map(|id| id.to_string().into()); + pub fn set_metrics_id(&self, metrics_id: Option) { + self.state.lock().metrics_id = metrics_id.map(|s| s.into()); } pub fn report_event(self: &Arc, kind: &str, properties: Value) { @@ -199,7 +199,7 @@ impl Telemetry { None }, user_properties: None, - user_id: state.user_id.clone(), + user_id: state.metrics_id.clone(), device_id: state.device_id.clone(), os_name: state.os_name, os_version: state.os_version.clone(), diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 149d22e77a..b31cda94b3 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -142,10 +142,14 @@ impl UserStore { match status { Status::Connected { .. 
} => { if let Some((this, user_id)) = this.upgrade(&cx).zip(client.user_id()) { - let user = this + let fetch_user = this .update(&mut cx, |this, cx| this.fetch_user(user_id, cx)) - .log_err() - .await; + .log_err(); + let fetch_metrics_id = + client.request(proto::GetPrivateUserInfo {}).log_err(); + let (user, info) = futures::join!(fetch_user, fetch_metrics_id); + client.telemetry.set_metrics_id(info.map(|i| i.metrics_id)); + client.telemetry.report_event("sign in", Default::default()); current_user_tx.send(user).await.ok(); } } diff --git a/crates/collab/migrations/20220913211150_create_signups.down.sql b/crates/collab/migrations/20220913211150_create_signups.down.sql deleted file mode 100644 index 5504bbb8dc..0000000000 --- a/crates/collab/migrations/20220913211150_create_signups.down.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE signups; - -ALTER TABLE users - DROP COLUMN github_user_id; - -DROP INDEX index_users_on_email_address; diff --git a/crates/collab/migrations/20220913211150_create_signups.up.sql b/crates/collab/migrations/20220913211150_create_signups.sql similarity index 100% rename from crates/collab/migrations/20220913211150_create_signups.up.sql rename to crates/collab/migrations/20220913211150_create_signups.sql diff --git a/crates/collab/migrations/20220929182110_add_metrics_id.sql b/crates/collab/migrations/20220929182110_add_metrics_id.sql new file mode 100644 index 0000000000..665d6323bf --- /dev/null +++ b/crates/collab/migrations/20220929182110_add_metrics_id.sql @@ -0,0 +1,2 @@ +ALTER TABLE "users" + ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid(); diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 0a9d8106ce..08dfa91ba9 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -24,6 +24,7 @@ use tracing::instrument; pub fn routes(rpc_server: &Arc, state: Arc) -> Router { Router::new() + .route("/user", get(get_authenticated_user)) .route("/users", get(get_users).post(create_user)) .route("/users/:id", put(update_user).delete(destroy_user)) .route("/users/:id/access_tokens", post(create_access_token)) @@ -85,10 +86,33 @@ pub async fn validate_api_token(req: Request, next: Next) -> impl IntoR Ok::<_, Error>(next.run(req).await) } +#[derive(Debug, Deserialize)] +struct AuthenticatedUserParams { + github_user_id: i32, + github_login: String, +} + +#[derive(Debug, Serialize)] +struct AuthenticatedUserResponse { + user: User, + metrics_id: String, +} + +async fn get_authenticated_user( + Query(params): Query, + Extension(app): Extension>, +) -> Result> { + let user = app + .db + .get_user_by_github_account(¶ms.github_login, Some(params.github_user_id)) + .await? + .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "user not found".into()))?; + let metrics_id = app.db.get_user_metrics_id(user.id).await?; + return Ok(Json(AuthenticatedUserResponse { user, metrics_id })); +} + #[derive(Debug, Deserialize)] struct GetUsersQueryParams { - github_user_id: Option, - github_login: Option, query: Option, page: Option, limit: Option, @@ -98,14 +122,6 @@ async fn get_users( Query(params): Query, Extension(app): Extension>, ) -> Result>> { - if let Some(github_login) = ¶ms.github_login { - let user = app - .db - .get_user_by_github_account(github_login, params.github_user_id) - .await?; - return Ok(Json(Vec::from_iter(user))); - } - let limit = params.limit.unwrap_or(100); let users = if let Some(query) = params.query { app.db.fuzzy_search_users(&query, limit).await? 
@@ -124,6 +140,8 @@ struct CreateUserParams { email_address: String, email_confirmation_code: Option, #[serde(default)] + admin: bool, + #[serde(default)] invite_count: i32, } @@ -131,6 +149,7 @@ struct CreateUserParams { struct CreateUserResponse { user: User, signup_device_id: Option, + metrics_id: String, } async fn create_user( @@ -143,12 +162,10 @@ async fn create_user( github_user_id: params.github_user_id, invite_count: params.invite_count, }; - let user_id; - let signup_device_id; + // Creating a user via the normal signup process - if let Some(email_confirmation_code) = params.email_confirmation_code { - let result = app - .db + let result = if let Some(email_confirmation_code) = params.email_confirmation_code { + app.db .create_user_from_invite( &Invite { email_address: params.email_address, @@ -156,34 +173,37 @@ async fn create_user( }, user, ) - .await?; - user_id = result.user_id; - signup_device_id = result.signup_device_id; - if let Some(inviter_id) = result.inviting_user_id { - rpc_server - .invite_code_redeemed(inviter_id, user_id) - .await - .trace_err(); - } + .await? } // Creating a user as an admin - else { - user_id = app - .db + else if params.admin { + app.db .create_user(¶ms.email_address, false, user) - .await?; - signup_device_id = None; + .await? + } else { + Err(Error::Http( + StatusCode::UNPROCESSABLE_ENTITY, + "email confirmation code is required".into(), + ))? + }; + + if let Some(inviter_id) = result.inviting_user_id { + rpc_server + .invite_code_redeemed(inviter_id, result.user_id) + .await + .trace_err(); } let user = app .db - .get_user_by_id(user_id) + .get_user_by_id(result.user_id) .await? .ok_or_else(|| anyhow!("couldn't find the user we just created"))?; Ok(Json(CreateUserResponse { user, - signup_device_id, + metrics_id: result.metrics_id, + signup_device_id: result.signup_device_id, })) } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 8b01cdf971..a12f6a4f89 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -17,10 +17,11 @@ pub trait Db: Send + Sync { email_address: &str, admin: bool, params: NewUserParams, - ) -> Result; + ) -> Result; async fn get_all_users(&self, page: u32, limit: u32) -> Result>; async fn fuzzy_search_users(&self, query: &str, limit: u32) -> Result>; async fn get_user_by_id(&self, id: UserId) -> Result>; + async fn get_user_metrics_id(&self, id: UserId) -> Result; async fn get_users_by_ids(&self, ids: Vec) -> Result>; async fn get_users_with_no_invites(&self, invited_by_another_user: bool) -> Result>; async fn get_user_by_github_account( @@ -208,21 +209,26 @@ impl Db for PostgresDb { email_address: &str, admin: bool, params: NewUserParams, - ) -> Result { + ) -> Result { let query = " INSERT INTO users (email_address, github_login, github_user_id, admin) VALUES ($1, $2, $3, $4) ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login - RETURNING id + RETURNING id, metrics_id::text "; - Ok(sqlx::query_scalar(query) + let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query) .bind(email_address) .bind(params.github_login) .bind(params.github_user_id) .bind(admin) .fetch_one(&self.pool) - .await - .map(UserId)?) 
+ .await?; + Ok(NewUserResult { + user_id, + metrics_id, + signup_device_id: None, + inviting_user_id: None, + }) } async fn get_all_users(&self, page: u32, limit: u32) -> Result> { @@ -256,6 +262,18 @@ impl Db for PostgresDb { Ok(users.into_iter().next()) } + async fn get_user_metrics_id(&self, id: UserId) -> Result { + let query = " + SELECT metrics_id::text + FROM users + WHERE id = $1 + "; + Ok(sqlx::query_scalar(query) + .bind(id) + .fetch_one(&self.pool) + .await?) + } + async fn get_users_by_ids(&self, ids: Vec) -> Result> { let ids = ids.into_iter().map(|id| id.0).collect::>(); let query = " @@ -493,13 +511,13 @@ impl Db for PostgresDb { ))?; } - let user_id: UserId = sqlx::query_scalar( + let (user_id, metrics_id): (UserId, String) = sqlx::query_as( " INSERT INTO users (email_address, github_login, github_user_id, admin, invite_count, invite_code) VALUES ($1, $2, $3, 'f', $4, $5) - RETURNING id + RETURNING id, metrics_id::text ", ) .bind(&invite.email_address) @@ -559,6 +577,7 @@ impl Db for PostgresDb { tx.commit().await?; Ok(NewUserResult { user_id, + metrics_id, inviting_user_id, signup_device_id, }) @@ -1722,6 +1741,7 @@ pub struct NewUserParams { #[derive(Debug)] pub struct NewUserResult { pub user_id: UserId, + pub metrics_id: String, pub inviting_user_id: Option, pub signup_device_id: Option, } @@ -1808,15 +1828,15 @@ mod test { email_address: &str, admin: bool, params: NewUserParams, - ) -> Result { + ) -> Result { self.background.simulate_random_delay().await; let mut users = self.users.lock(); - if let Some(user) = users + let user_id = if let Some(user) = users .values() .find(|user| user.github_login == params.github_login) { - Ok(user.id) + user.id } else { let id = post_inc(&mut *self.next_user_id.lock()); let user_id = UserId(id); @@ -1833,8 +1853,14 @@ mod test { connected_once: false, }, ); - Ok(user_id) - } + user_id + }; + Ok(NewUserResult { + user_id, + metrics_id: "the-metrics-id".to_string(), + inviting_user_id: None, + signup_device_id: None, + }) } async fn get_all_users(&self, _page: u32, _limit: u32) -> Result> { @@ -1850,6 +1876,10 @@ mod test { Ok(self.get_users_by_ids(vec![id]).await?.into_iter().next()) } + async fn get_user_metrics_id(&self, _id: UserId) -> Result { + Ok("the-metrics-id".to_string()) + } + async fn get_users_by_ids(&self, ids: Vec) -> Result> { self.background.simulate_random_delay().await; let users = self.users.lock(); diff --git a/crates/collab/src/db_tests.rs b/crates/collab/src/db_tests.rs index 1e48b4b754..e063b97eb6 100644 --- a/crates/collab/src/db_tests.rs +++ b/crates/collab/src/db_tests.rs @@ -12,89 +12,56 @@ async fn test_get_users_by_ids() { ] { let db = test_db.db(); - let user1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 2, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 3, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user4 = db - .create_user( - "u4@example.com", - false, - NewUserParams { - github_login: "u4".into(), - github_user_id: 4, - invite_count: 0, - }, - ) - .await - .unwrap(); + let mut user_ids = Vec::new(); + for i in 1..=4 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { 
+ github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } assert_eq!( - db.get_users_by_ids(vec![user1, user2, user3, user4]) - .await - .unwrap(), + db.get_users_by_ids(user_ids.clone()).await.unwrap(), vec![ User { - id: user1, - github_login: "u1".to_string(), + id: user_ids[0], + github_login: "user1".to_string(), github_user_id: Some(1), - email_address: Some("u1@example.com".to_string()), + email_address: Some("user1@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user2, - github_login: "u2".to_string(), + id: user_ids[1], + github_login: "user2".to_string(), github_user_id: Some(2), - email_address: Some("u2@example.com".to_string()), + email_address: Some("user2@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user3, - github_login: "u3".to_string(), + id: user_ids[2], + github_login: "user3".to_string(), github_user_id: Some(3), - email_address: Some("u3@example.com".to_string()), + email_address: Some("user3@example.com".to_string()), admin: false, ..Default::default() }, User { - id: user4, - github_login: "u4".to_string(), + id: user_ids[3], + github_login: "user4".to_string(), github_user_id: Some(4), - email_address: Some("u4@example.com".to_string()), + email_address: Some("user4@example.com".to_string()), admin: false, ..Default::default() } @@ -121,7 +88,8 @@ async fn test_get_user_by_github_account() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let user_id2 = db .create_user( "user2@example.com", @@ -133,7 +101,8 @@ async fn test_get_user_by_github_account() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let user = db .get_user_by_github_account("login1", None) @@ -177,7 +146,8 @@ async fn test_worktree_extensions() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let project = db.register_project(user).await.unwrap(); db.update_worktree_extensions(project, 100, Default::default()) @@ -237,43 +207,25 @@ async fn test_user_activity() { let test_db = TestDb::postgres().await; let db = test_db.db(); - let user_1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let project_1 = db.register_project(user_1).await.unwrap(); + let mut user_ids = Vec::new(); + for i in 0..=2 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } + + let project_1 = db.register_project(user_ids[0]).await.unwrap(); db.update_worktree_extensions( project_1, 1, @@ -281,34 +233,37 @@ async fn test_user_activity() { ) .await .unwrap(); - let project_2 = db.register_project(user_2).await.unwrap(); + let project_2 = db.register_project(user_ids[1]).await.unwrap(); let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60); // User 2 opens a project let t1 = t0 + Duration::from_secs(10); - db.record_user_activity(t0..t1, &[(user_2, project_2)]) + db.record_user_activity(t0..t1, &[(user_ids[1], project_2)]) .await 
.unwrap(); let t2 = t1 + Duration::from_secs(10); - db.record_user_activity(t1..t2, &[(user_2, project_2)]) + db.record_user_activity(t1..t2, &[(user_ids[1], project_2)]) .await .unwrap(); // User 1 joins the project let t3 = t2 + Duration::from_secs(10); - db.record_user_activity(t2..t3, &[(user_2, project_2), (user_1, project_2)]) - .await - .unwrap(); + db.record_user_activity( + t2..t3, + &[(user_ids[1], project_2), (user_ids[0], project_2)], + ) + .await + .unwrap(); // User 1 opens another project let t4 = t3 + Duration::from_secs(10); db.record_user_activity( t3..t4, &[ - (user_2, project_2), - (user_1, project_2), - (user_1, project_1), + (user_ids[1], project_2), + (user_ids[0], project_2), + (user_ids[0], project_1), ], ) .await @@ -319,10 +274,10 @@ async fn test_user_activity() { db.record_user_activity( t4..t5, &[ - (user_2, project_2), - (user_1, project_2), - (user_1, project_1), - (user_3, project_1), + (user_ids[1], project_2), + (user_ids[0], project_2), + (user_ids[0], project_1), + (user_ids[2], project_1), ], ) .await @@ -330,13 +285,16 @@ async fn test_user_activity() { // User 2 leaves let t6 = t5 + Duration::from_secs(5); - db.record_user_activity(t5..t6, &[(user_1, project_1), (user_3, project_1)]) - .await - .unwrap(); + db.record_user_activity( + t5..t6, + &[(user_ids[0], project_1), (user_ids[2], project_1)], + ) + .await + .unwrap(); let t7 = t6 + Duration::from_secs(60); let t8 = t7 + Duration::from_secs(10); - db.record_user_activity(t7..t8, &[(user_1, project_1)]) + db.record_user_activity(t7..t8, &[(user_ids[0], project_1)]) .await .unwrap(); @@ -344,8 +302,8 @@ async fn test_user_activity() { db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(), &[ UserActivitySummary { - id: user_1, - github_login: "u1".to_string(), + id: user_ids[0], + github_login: "user0".to_string(), project_activity: vec![ ProjectActivitySummary { id: project_1, @@ -360,8 +318,8 @@ async fn test_user_activity() { ] }, UserActivitySummary { - id: user_2, - github_login: "u2".to_string(), + id: user_ids[1], + github_login: "user1".to_string(), project_activity: vec![ProjectActivitySummary { id: project_2, duration: Duration::from_secs(50), @@ -369,8 +327,8 @@ async fn test_user_activity() { }] }, UserActivitySummary { - id: user_3, - github_login: "u3".to_string(), + id: user_ids[2], + github_login: "user2".to_string(), project_activity: vec![ProjectActivitySummary { id: project_1, duration: Duration::from_secs(15), @@ -442,7 +400,9 @@ async fn test_user_activity() { ); assert_eq!( - db.get_user_activity_timeline(t3..t6, user_1).await.unwrap(), + db.get_user_activity_timeline(t3..t6, user_ids[0]) + .await + .unwrap(), &[ UserActivityPeriod { project_id: project_1, @@ -459,7 +419,9 @@ async fn test_user_activity() { ] ); assert_eq!( - db.get_user_activity_timeline(t0..t8, user_1).await.unwrap(), + db.get_user_activity_timeline(t0..t8, user_ids[0]) + .await + .unwrap(), &[ UserActivityPeriod { project_id: project_2, @@ -501,7 +463,8 @@ async fn test_recent_channel_messages() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); for i in 0..10 { @@ -545,7 +508,8 @@ async fn test_channel_message_nonces() { }, ) .await - .unwrap(); + .unwrap() + .user_id; let org = db.create_org("org", "org").await.unwrap(); let channel = db.create_org_channel(org, "channel").await.unwrap(); @@ -587,7 +551,8 @@ async fn test_create_access_tokens() { }, ) .await - .unwrap(); + 
.unwrap() + .user_id; db.create_access_token_hash(user, "h1", 3).await.unwrap(); db.create_access_token_hash(user, "h2", 3).await.unwrap(); @@ -678,42 +643,27 @@ async fn test_add_contacts() { ] { let db = test_db.db(); - let user_1 = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_2 = db - .create_user( - "u2@example.com", - false, - NewUserParams { - github_login: "u2".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user_3 = db - .create_user( - "u3@example.com", - false, - NewUserParams { - github_login: "u3".into(), - github_user_id: 2, - invite_count: 0, - }, - ) - .await - .unwrap(); + let mut user_ids = Vec::new(); + for i in 0..3 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id, + ); + } + + let user_1 = user_ids[0]; + let user_2 = user_ids[1]; + let user_3 = user_ids[2]; // User starts with no contacts assert_eq!( @@ -927,12 +877,12 @@ async fn test_add_contacts() { async fn test_invite_codes() { let postgres = TestDb::postgres().await; let db = postgres.db(); - let user1 = db + let NewUserResult { user_id: user1, .. } = db .create_user( - "u1@example.com", + "user1@example.com", false, NewUserParams { - github_login: "u1".into(), + github_login: "user1".into(), github_user_id: 0, invite_count: 0, }, @@ -954,13 +904,14 @@ async fn test_invite_codes() { // User 2 redeems the invite code and becomes a contact of user 1. let user2_invite = db - .create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) + .create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id")) .await .unwrap(); let NewUserResult { user_id: user2, inviting_user_id, signup_device_id, + metrics_id, } = db .create_user_from_invite( &user2_invite, @@ -976,6 +927,7 @@ async fn test_invite_codes() { assert_eq!(invite_count, 1); assert_eq!(inviting_user_id, Some(user1)); assert_eq!(signup_device_id.unwrap(), "user-2-device-id"); + assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id); assert_eq!( db.get_contacts(user1).await.unwrap(), [ @@ -1009,13 +961,14 @@ async fn test_invite_codes() { // User 3 redeems the invite code and becomes a contact of user 1. let user3_invite = db - .create_invite_from_code(&invite_code, "u3@example.com", None) + .create_invite_from_code(&invite_code, "user3@example.com", None) .await .unwrap(); let NewUserResult { user_id: user3, inviting_user_id, signup_device_id, + .. } = db .create_user_from_invite( &user3_invite, @@ -1067,7 +1020,7 @@ async fn test_invite_codes() { ); // Trying to reedem the code for the third time results in an error. - db.create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) + db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id")) .await .unwrap_err(); @@ -1079,7 +1032,7 @@ async fn test_invite_codes() { // User 4 can now redeem the invite code and becomes a contact of user 1. let user4_invite = db - .create_invite_from_code(&invite_code, "u4@example.com", Some("user-4-device-id")) + .create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id")) .await .unwrap(); let user4 = db @@ -1137,7 +1090,7 @@ async fn test_invite_codes() { ); // An existing user cannot redeem invite codes. 
- db.create_invite_from_code(&invite_code, "u2@example.com", Some("user-2-device-id")) + db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id")) .await .unwrap_err(); let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap(); @@ -1232,6 +1185,7 @@ async fn test_signups() { user_id, inviting_user_id, signup_device_id, + .. } = db .create_user_from_invite( &Invite { @@ -1284,6 +1238,51 @@ async fn test_signups() { .unwrap_err(); } +#[tokio::test(flavor = "multi_thread")] +async fn test_metrics_id() { + let postgres = TestDb::postgres().await; + let db = postgres.db(); + + let NewUserResult { + user_id: user1, + metrics_id: metrics_id1, + .. + } = db + .create_user( + "person1@example.com", + false, + NewUserParams { + github_login: "person1".into(), + github_user_id: 101, + invite_count: 5, + }, + ) + .await + .unwrap(); + let NewUserResult { + user_id: user2, + metrics_id: metrics_id2, + .. + } = db + .create_user( + "person2@example.com", + false, + NewUserParams { + github_login: "person2".into(), + github_user_id: 102, + invite_count: 5, + }, + ) + .await + .unwrap(); + + assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1); + assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2); + assert_eq!(metrics_id1.len(), 36); + assert_eq!(metrics_id2.len(), 36); + assert_ne!(metrics_id1, metrics_id2); +} + fn build_background_executor() -> Arc { Deterministic::new(0).build_background() } diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 422c9fd0bb..a13a013e7a 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -4794,7 +4794,8 @@ async fn test_random_collaboration( }, ) .await - .unwrap(); + .unwrap() + .user_id; let mut available_guests = vec![ "guest-1".to_string(), "guest-2".to_string(), @@ -4814,7 +4815,8 @@ async fn test_random_collaboration( }, ) .await - .unwrap(); + .unwrap() + .user_id; assert_eq!(*username, format!("guest-{}", guest_user_id)); server .app_state @@ -5337,6 +5339,7 @@ impl TestServer { ) .await .unwrap() + .user_id }; let client_name = name.to_string(); let mut client = cx.read(|cx| Client::new(http.clone(), cx)); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 318555b7ed..15748a52cf 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -206,7 +206,11 @@ impl Server { .add_message_handler(Server::unfollow) .add_message_handler(Server::update_followers) .add_request_handler(Server::get_channel_messages) +<<<<<<< HEAD .add_message_handler(Server::update_head_text); +======= + .add_request_handler(Server::get_private_user_info); +>>>>>>> 5d09083a (Identify users in amplitude via a separate 'metrics_id' UUID) Arc::new(server) } @@ -1742,6 +1746,19 @@ impl Server { }); Ok(()) } + async fn get_private_user_info( + self: Arc, + request: TypedEnvelope, + response: Response, + ) -> Result<()> { + let user_id = self + .store() + .await + .user_id_for_connection(request.sender_id)?; + let metrics_id = self.app_state.db.get_user_metrics_id(user_id).await?; + response.send(proto::GetPrivateUserInfoResponse { metrics_id })?; + Ok(()) + } pub(crate) async fn store(&self) -> StoreGuard<'_> { #[cfg(test)] diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index d6604383da..832c5bb6bd 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -108,7 +108,9 @@ message Envelope { FollowResponse follow_response = 93; UpdateFollowers 
update_followers = 94; Unfollow unfollow = 95; - UpdateHeadText update_head_text = 96; + GetPrivateUserInfo get_private_user_info = 96; + GetPrivateUserInfoResponse get_private_user_info_response = 97; + UpdateHeadText update_head_text = 98; } } @@ -749,6 +751,12 @@ message Unfollow { uint32 leader_id = 2; } +message GetPrivateUserInfo {} + +message GetPrivateUserInfoResponse { + string metrics_id = 1; +} + // Entities message UpdateActiveView { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index e91a9fd558..8c5832c15f 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -168,6 +168,8 @@ messages!( (UpdateWorktree, Foreground), (UpdateWorktreeExtensions, Background), (UpdateHeadText, Background), + (GetPrivateUserInfo, Foreground), + (GetPrivateUserInfoResponse, Foreground), ); request_messages!( @@ -190,6 +192,7 @@ request_messages!( (GetTypeDefinition, GetTypeDefinitionResponse), (GetDocumentHighlights, GetDocumentHighlightsResponse), (GetReferences, GetReferencesResponse), + (GetPrivateUserInfo, GetPrivateUserInfoResponse), (GetProjectSymbols, GetProjectSymbolsResponse), (FuzzySearchUsers, UsersResponse), (GetUsers, UsersResponse), From 1aa554f4c98d45b7eec5646b22d65e540b4a751c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 29 Sep 2022 13:51:17 -0700 Subject: [PATCH 097/140] Fix FakeServer to expect new GetPrivateUserInfo request --- crates/client/src/test.rs | 64 ++++++++++++++------- crates/contacts_panel/src/contacts_panel.rs | 11 ++++ 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index c634978a57..56d3d80b63 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -6,7 +6,10 @@ use anyhow::{anyhow, Result}; use futures::{future::BoxFuture, stream::BoxStream, Future, StreamExt}; use gpui::{executor, ModelHandle, TestAppContext}; use parking_lot::Mutex; -use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope}; +use rpc::{ + proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse}, + ConnectionId, Peer, Receipt, TypedEnvelope, +}; use std::{fmt, rc::Rc, sync::Arc}; pub struct FakeServer { @@ -93,6 +96,7 @@ impl FakeServer { .authenticate_and_connect(false, &cx.to_async()) .await .unwrap(); + server } @@ -126,26 +130,44 @@ impl FakeServer { #[allow(clippy::await_holding_lock)] pub async fn receive(&self) -> Result> { self.executor.start_waiting(); - let message = self - .state - .lock() - .incoming - .as_mut() - .expect("not connected") - .next() - .await - .ok_or_else(|| anyhow!("other half hung up"))?; - self.executor.finish_waiting(); - let type_name = message.payload_type_name(); - Ok(*message - .into_any() - .downcast::>() - .unwrap_or_else(|_| { - panic!( - "fake server received unexpected message type: {:?}", - type_name - ); - })) + + loop { + let message = self + .state + .lock() + .incoming + .as_mut() + .expect("not connected") + .next() + .await + .ok_or_else(|| anyhow!("other half hung up"))?; + self.executor.finish_waiting(); + let type_name = message.payload_type_name(); + let message = message.into_any(); + + if message.is::>() { + return Ok(*message.downcast().unwrap()); + } + + if message.is::>() { + self.respond( + message + .downcast::>() + .unwrap() + .receipt(), + GetPrivateUserInfoResponse { + metrics_id: "the-metrics-id".into(), + }, + ) + .await; + continue; + } + + panic!( + "fake server received unexpected message type: {:?}", + type_name + ); + } } pub async fn respond( diff --git 
a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index 7dcfb8cea4..91b86aaf0e 100644 --- a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -1220,6 +1220,17 @@ mod tests { let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake())); let server = FakeServer::for_client(current_user_id, &client, cx).await; + + let request = server.receive::().await.unwrap(); + server + .respond( + request.receipt(), + proto::GetPrivateUserInfoResponse { + metrics_id: "the-metrics-id".into(), + }, + ) + .await; + let fs = FakeFs::new(cx.background()); fs.insert_tree("/private_dir", json!({ "one.rs": "" })) .await; From 34926abe83a181aa547d1a9fbd8ef640d1db63f7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 29 Sep 2022 16:47:20 -0700 Subject: [PATCH 098/140] 0.57.0 --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fa8f8acbdc..8d359bf728 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7203,7 +7203,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.56.0" +version = "0.57.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index c96163d99e..48a84a5831 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.56.0" +version = "0.57.0" [lib] name = "zed" From fd42811ef1544d56ef4d9eb7887ab7a4ee69efb8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 30 Sep 2022 09:51:03 +0200 Subject: [PATCH 099/140] Cache `CGEventSource` and avoid leaking `CGEvent` when handling events --- crates/gpui/src/platform/mac/event.rs | 83 +++++++++++++++------------ 1 file changed, 47 insertions(+), 36 deletions(-) diff --git a/crates/gpui/src/platform/mac/event.rs b/crates/gpui/src/platform/mac/event.rs index 51524f4b15..ea2b492b27 100644 --- a/crates/gpui/src/platform/mac/event.rs +++ b/crates/gpui/src/platform/mac/event.rs @@ -14,8 +14,10 @@ use core_graphics::{ event::{CGEvent, CGEventFlags, CGKeyCode}, event_source::{CGEventSource, CGEventSourceStateID}, }; +use ctor::ctor; +use foreign_types::ForeignType; use objc::{class, msg_send, sel, sel_impl}; -use std::{borrow::Cow, ffi::CStr, os::raw::c_char}; +use std::{borrow::Cow, ffi::CStr, mem, os::raw::c_char, ptr}; const BACKSPACE_KEY: u16 = 0x7f; const SPACE_KEY: u16 = b' ' as u16; @@ -25,6 +27,15 @@ const ESCAPE_KEY: u16 = 0x1b; const TAB_KEY: u16 = 0x09; const SHIFT_TAB_KEY: u16 = 0x19; +static mut EVENT_SOURCE: core_graphics::sys::CGEventSourceRef = ptr::null_mut(); + +#[ctor] +unsafe fn build_event_source() { + let source = CGEventSource::new(CGEventSourceStateID::Private).unwrap(); + EVENT_SOURCE = source.as_ptr(); + mem::forget(source); +} + pub fn key_to_native(key: &str) -> Cow { use cocoa::appkit::*; let code = match key { @@ -228,7 +239,8 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { let mut chars_ignoring_modifiers = CStr::from_ptr(native_event.charactersIgnoringModifiers().UTF8String() as *mut c_char) .to_str() - .unwrap(); + .unwrap() + .to_string(); let first_char = chars_ignoring_modifiers.chars().next().map(|ch| ch as u16); let modifiers = native_event.modifierFlags(); @@ -243,31 +255,31 @@ unsafe fn parse_keystroke(native_event: id) -> 
Keystroke { #[allow(non_upper_case_globals)] let key = match first_char { - Some(SPACE_KEY) => "space", - Some(BACKSPACE_KEY) => "backspace", - Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter", - Some(ESCAPE_KEY) => "escape", - Some(TAB_KEY) => "tab", - Some(SHIFT_TAB_KEY) => "tab", - Some(NSUpArrowFunctionKey) => "up", - Some(NSDownArrowFunctionKey) => "down", - Some(NSLeftArrowFunctionKey) => "left", - Some(NSRightArrowFunctionKey) => "right", - Some(NSPageUpFunctionKey) => "pageup", - Some(NSPageDownFunctionKey) => "pagedown", - Some(NSDeleteFunctionKey) => "delete", - Some(NSF1FunctionKey) => "f1", - Some(NSF2FunctionKey) => "f2", - Some(NSF3FunctionKey) => "f3", - Some(NSF4FunctionKey) => "f4", - Some(NSF5FunctionKey) => "f5", - Some(NSF6FunctionKey) => "f6", - Some(NSF7FunctionKey) => "f7", - Some(NSF8FunctionKey) => "f8", - Some(NSF9FunctionKey) => "f9", - Some(NSF10FunctionKey) => "f10", - Some(NSF11FunctionKey) => "f11", - Some(NSF12FunctionKey) => "f12", + Some(SPACE_KEY) => "space".to_string(), + Some(BACKSPACE_KEY) => "backspace".to_string(), + Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter".to_string(), + Some(ESCAPE_KEY) => "escape".to_string(), + Some(TAB_KEY) => "tab".to_string(), + Some(SHIFT_TAB_KEY) => "tab".to_string(), + Some(NSUpArrowFunctionKey) => "up".to_string(), + Some(NSDownArrowFunctionKey) => "down".to_string(), + Some(NSLeftArrowFunctionKey) => "left".to_string(), + Some(NSRightArrowFunctionKey) => "right".to_string(), + Some(NSPageUpFunctionKey) => "pageup".to_string(), + Some(NSPageDownFunctionKey) => "pagedown".to_string(), + Some(NSDeleteFunctionKey) => "delete".to_string(), + Some(NSF1FunctionKey) => "f1".to_string(), + Some(NSF2FunctionKey) => "f2".to_string(), + Some(NSF3FunctionKey) => "f3".to_string(), + Some(NSF4FunctionKey) => "f4".to_string(), + Some(NSF5FunctionKey) => "f5".to_string(), + Some(NSF6FunctionKey) => "f6".to_string(), + Some(NSF7FunctionKey) => "f7".to_string(), + Some(NSF8FunctionKey) => "f8".to_string(), + Some(NSF9FunctionKey) => "f9".to_string(), + Some(NSF10FunctionKey) => "f10".to_string(), + Some(NSF11FunctionKey) => "f11".to_string(), + Some(NSF12FunctionKey) => "f12".to_string(), _ => { let mut chars_ignoring_modifiers_and_shift = chars_for_modified_key(native_event.keyCode(), false, false); @@ -303,21 +315,19 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { shift, cmd, function, - key: key.into(), + key, } } -fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a str { +fn chars_for_modified_key(code: CGKeyCode, cmd: bool, shift: bool) -> String { // Ideally, we would use `[NSEvent charactersByApplyingModifiers]` but that // always returns an empty string with certain keyboards, e.g. Japanese. Synthesizing // an event with the given flags instead lets us access `characters`, which always // returns a valid string. 
- let event = CGEvent::new_keyboard_event( - CGEventSource::new(CGEventSourceStateID::Private).unwrap(), - code, - true, - ) - .unwrap(); + let source = unsafe { core_graphics::event_source::CGEventSource::from_ptr(EVENT_SOURCE) }; + let event = CGEvent::new_keyboard_event(source.clone(), code, true).unwrap(); + mem::forget(source); + let mut flags = CGEventFlags::empty(); if cmd { flags |= CGEventFlags::CGEventFlagCommand; @@ -327,10 +337,11 @@ fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a st } event.set_flags(flags); - let event: id = unsafe { msg_send![class!(NSEvent), eventWithCGEvent: event] }; unsafe { + let event: id = msg_send![class!(NSEvent), eventWithCGEvent: &*event]; CStr::from_ptr(event.characters().UTF8String()) .to_str() .unwrap() + .to_string() } } From 56b416202386c80fa91f3fa159ed32cd7bc3fa32 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 2 Oct 2022 18:02:25 -0700 Subject: [PATCH 100/140] Fix stray merge failure --- crates/collab/src/rpc.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 15748a52cf..beba653fc6 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -206,11 +206,8 @@ impl Server { .add_message_handler(Server::unfollow) .add_message_handler(Server::update_followers) .add_request_handler(Server::get_channel_messages) -<<<<<<< HEAD - .add_message_handler(Server::update_head_text); -======= + .add_message_handler(Server::update_head_text) .add_request_handler(Server::get_private_user_info); ->>>>>>> 5d09083a (Identify users in amplitude via a separate 'metrics_id' UUID) Arc::new(server) } From c2370751020e52c957c93fa7ddc2cc7506dfd816 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 2 Oct 2022 18:35:19 -0700 Subject: [PATCH 101/140] Touched up settings text --- assets/settings/default.json | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 4ebc1e702f..66cc36c38a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -74,8 +74,16 @@ "hard_tabs": false, // How many columns a tab should occupy. "tab_size": 4, - // Git gutter behavior configuration. Remove this item to disable git gutters entirely. + // Git gutter behavior configuration. "git_gutter": { + // Which files to show the git gutter on. This setting can take + // three values: + // 1. All files: + // "files_included": "all", + // 2. Only files tracked in git: + // "files_included": "only_tracked", + // 3. Disable git gutters: + // "files_included": "none", "files_included": "all" }, // Settings specific to the terminal From 01176e04b7f0399eb7e6f62bfbe7019cd0f39545 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 2 Oct 2022 18:42:03 -0700 Subject: [PATCH 102/140] Added clarification for git gutter settings --- assets/settings/default.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 66cc36c38a..11a4b72a10 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -78,11 +78,12 @@ "git_gutter": { // Which files to show the git gutter on. This setting can take // three values: - // 1. All files: + // 1. All files, files not tracked in git will be diffed against + // their contents when the file was last opened in Zed: // "files_included": "all", - // 2. Only files tracked in git: + // 2. 
Only show for files tracked in git: // "files_included": "only_tracked", - // 3. Disable git gutters: + // 3. Disable git gutters entirely: // "files_included": "none", "files_included": "all" }, From 9427bb7553ea5eb7bd51cdc6a94ab570300f624e Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 3 Oct 2022 11:58:48 -0400 Subject: [PATCH 103/140] Be clearer about using GitFilesIncluded setting --- crates/project/src/worktree.rs | 39 +++++++++++++++++----------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 40efeee1d1..c650111207 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -673,28 +673,27 @@ impl LocalWorktree { cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let head_text = if matches!( - files_included, - settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked - ) { - let results = if let Some(repo) = snapshot.repo_for(&abs_path) { - cx.background() - .spawn({ - let path = path.clone(); - async move { repo.repo.lock().load_head_text(&path) } - }) - .await - } else { - None - }; + let head_text = match files_included { + settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked => { + let results = if let Some(repo) = snapshot.repo_for(&abs_path) { + cx.background() + .spawn({ + let path = path.clone(); + async move { repo.repo.lock().load_head_text(&path) } + }) + .await + } else { + None + }; - if files_included == settings::GitFilesIncluded::All { - results.or_else(|| Some(text.clone())) - } else { - results + if files_included == settings::GitFilesIncluded::All { + results.or_else(|| Some(text.clone())) + } else { + results + } } - } else { - None + + settings::GitFilesIncluded::None => None, }; // Eagerly populate the snapshot with an updated entry for the loaded file From 4477f95ee631a2fe456ca8c85f6a6d265a71184a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 3 Oct 2022 10:52:57 -0700 Subject: [PATCH 104/140] Set `staff` user property in telemetry Co-authored-by: Joseph Lyons --- crates/client/src/client.rs | 2 +- crates/client/src/telemetry.rs | 30 +++++++++++++++++++-- crates/client/src/test.rs | 1 + crates/client/src/user.rs | 9 ++++++- crates/collab/src/rpc.rs | 11 +++++++- crates/contacts_panel/src/contacts_panel.rs | 10 ------- crates/rpc/proto/zed.proto | 1 + 7 files changed, 49 insertions(+), 15 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 9ec24abae5..73ecf16084 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -351,7 +351,7 @@ impl Client { })); } Status::SignedOut | Status::UpgradeRequired => { - self.telemetry.set_metrics_id(None); + self.telemetry.set_authenticated_user_info(None, false); state._reconnect_task.take(); } _ => {} diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index c9b5665e9e..7b0b2ef324 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -9,6 +9,7 @@ use isahc::Request; use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Serialize; +use serde_json::json; use std::{ io::Write, mem, @@ -176,11 +177,32 @@ impl Telemetry { .detach(); } - pub fn set_metrics_id(&self, metrics_id: Option) { + pub fn set_authenticated_user_info( + self: &Arc, + metrics_id: Option, + is_staff: bool, + ) { + let is_signed_in = metrics_id.is_some(); self.state.lock().metrics_id = metrics_id.map(|s| s.into()); + if is_signed_in { + 
self.report_event_with_user_properties( + "$identify", + Default::default(), + json!({ "$set": { "staff": is_staff } }), + ) + } } pub fn report_event(self: &Arc, kind: &str, properties: Value) { + self.report_event_with_user_properties(kind, properties, Default::default()); + } + + fn report_event_with_user_properties( + self: &Arc, + kind: &str, + properties: Value, + user_properties: Value, + ) { if AMPLITUDE_API_KEY.is_none() { return; } @@ -198,7 +220,11 @@ impl Telemetry { } else { None }, - user_properties: None, + user_properties: if let Value::Object(user_properties) = user_properties { + Some(user_properties) + } else { + None + }, user_id: state.metrics_id.clone(), device_id: state.device_id.clone(), os_name: state.os_name, diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index 56d3d80b63..288c9a31fa 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -157,6 +157,7 @@ impl FakeServer { .receipt(), GetPrivateUserInfoResponse { metrics_id: "the-metrics-id".into(), + staff: false, }, ) .await; diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index b31cda94b3..d52d6367b0 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -148,7 +148,14 @@ impl UserStore { let fetch_metrics_id = client.request(proto::GetPrivateUserInfo {}).log_err(); let (user, info) = futures::join!(fetch_user, fetch_metrics_id); - client.telemetry.set_metrics_id(info.map(|i| i.metrics_id)); + if let Some(info) = info { + client.telemetry.set_authenticated_user_info( + Some(info.metrics_id), + info.staff, + ); + } else { + client.telemetry.set_authenticated_user_info(None, false); + } client.telemetry.report_event("sign in", Default::default()); current_user_tx.send(user).await.ok(); } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 467ec174ab..e42b0812ab 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1738,7 +1738,16 @@ impl Server { .await .user_id_for_connection(request.sender_id)?; let metrics_id = self.app_state.db.get_user_metrics_id(user_id).await?; - response.send(proto::GetPrivateUserInfoResponse { metrics_id })?; + let user = self + .app_state + .db + .get_user_by_id(user_id) + .await? 
+ .ok_or_else(|| anyhow!("user not found"))?; + response.send(proto::GetPrivateUserInfoResponse { + metrics_id, + staff: user.admin, + })?; Ok(()) } diff --git a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index 91b86aaf0e..c06b2e17a1 100644 --- a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -1221,16 +1221,6 @@ mod tests { let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake())); let server = FakeServer::for_client(current_user_id, &client, cx).await; - let request = server.receive::().await.unwrap(); - server - .respond( - request.receipt(), - proto::GetPrivateUserInfoResponse { - metrics_id: "the-metrics-id".into(), - }, - ) - .await; - let fs = FakeFs::new(cx.background()); fs.insert_tree("/private_dir", json!({ "one.rs": "" })) .await; diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 6a48ad1b97..37434a6d4e 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -755,6 +755,7 @@ message GetPrivateUserInfo {} message GetPrivateUserInfoResponse { string metrics_id = 1; + bool staff = 2; } // Entities From 8f4b3c34938acce79e360266a914a31b64d47480 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 3 Oct 2022 14:00:58 -0400 Subject: [PATCH 105/140] Store repo content path as absolute Co-Authored-By: Mikayla Maki --- crates/project/src/worktree.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index c650111207..e04ff2b516 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1561,7 +1561,7 @@ impl LocalSnapshot { if parent_path.file_name() == Some(&DOT_GIT) { let abs_path = self.abs_path.join(&parent_path); - let content_path: Arc = parent_path.parent().unwrap().into(); + let content_path: Arc = abs_path.parent().unwrap().into(); if let Err(ix) = self .git_repositories .binary_search_by_key(&&content_path, |repo| &repo.content_path) From 06813be5c81b8b8a95d3c3b5fe439de1a0d26f6c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 3 Oct 2022 11:05:45 -0700 Subject: [PATCH 106/140] Mark platform as "Zed" for telemetry events from the app Co-authored-by: Joseph Lyons --- crates/client/src/telemetry.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 7b0b2ef324..f92bf1592b 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -74,6 +74,7 @@ struct AmplitudeEvent { os_name: &'static str, os_version: Option>, app_version: Option>, + platform: &'static str, event_id: usize, session_id: u128, time: u128, @@ -228,6 +229,7 @@ impl Telemetry { user_id: state.metrics_id.clone(), device_id: state.device_id.clone(), os_name: state.os_name, + platform: "Zed", os_version: state.os_version.clone(), app_version: state.app_version.clone(), event_id: post_inc(&mut state.next_event_id), From a5c2f22bf7339066f7e507c059febe1884a500f5 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 3 Oct 2022 14:53:33 -0400 Subject: [PATCH 107/140] Move git gutter settings out of editor settings Co-Authored-By: Mikayla Maki --- assets/settings/default.json | 24 +++++++++++----------- crates/collab/src/rpc.rs | 2 +- crates/project/src/worktree.rs | 4 ++-- crates/settings/src/settings.rs | 33 +++++++++++++++++++------------ crates/workspace/src/workspace.rs | 4 ++-- 5 files changed, 38 insertions(+), 29 deletions(-) diff --git 
a/assets/settings/default.json b/assets/settings/default.json index 11a4b72a10..fc1b1906fc 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -75,17 +75,19 @@ // How many columns a tab should occupy. "tab_size": 4, // Git gutter behavior configuration. - "git_gutter": { - // Which files to show the git gutter on. This setting can take - // three values: - // 1. All files, files not tracked in git will be diffed against - // their contents when the file was last opened in Zed: - // "files_included": "all", - // 2. Only show for files tracked in git: - // "files_included": "only_tracked", - // 3. Disable git gutters entirely: - // "files_included": "none", - "files_included": "all" + "git": { + "git_gutter": { + // Which files to show the git gutter on. This setting can take + // three values: + // 1. All files, files not tracked in git will be diffed against + // their contents when the file was last opened in Zed: + // "files_included": "all", + // 2. Only show for files tracked in git: + // "files_included": "only_tracked", + // 3. Disable git gutters entirely: + // "files_included": "none", + "files_included": "all" + } }, // Settings specific to the terminal "terminal": { diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 627eaf719e..609ae89625 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1744,7 +1744,7 @@ impl Server { Ok(()) } - async fn get_private_user_info( + async fn get_private_user_info( self: Arc, request: TypedEnvelope, response: Response, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index e04ff2b516..b914282e01 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -665,9 +665,9 @@ impl LocalWorktree { let files_included = cx .global::() - .editor_overrides + .git .git_gutter - .unwrap_or_default() + .expect("This should be Some by setting setup") .files_included; cx.spawn(|this, mut cx| async move { diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 3f4a764c79..9de4335ec8 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -32,6 +32,7 @@ pub struct Settings { pub default_dock_anchor: DockAnchor, pub editor_defaults: EditorSettings, pub editor_overrides: EditorSettings, + pub git: GitSettings, pub terminal_defaults: TerminalSettings, pub terminal_overrides: TerminalSettings, pub language_defaults: HashMap, EditorSettings>, @@ -52,20 +53,13 @@ impl FeatureFlags { } } -#[derive(Clone, Debug, Default, Deserialize, JsonSchema)] -pub struct EditorSettings { - pub tab_size: Option, - pub hard_tabs: Option, - pub soft_wrap: Option, - pub preferred_line_length: Option, - pub format_on_save: Option, - pub formatter: Option, - pub enable_language_server: Option, - pub git_gutter: Option, +#[derive(Copy, Clone, Debug, Default, Deserialize, JsonSchema)] +pub struct GitSettings { + pub git_gutter: Option, } #[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] -pub struct GitGutterConfig { +pub struct GitGutterSettings { pub files_included: GitFilesIncluded, pub debounce_delay_millis: Option, } @@ -79,6 +73,17 @@ pub enum GitFilesIncluded { None, } +#[derive(Clone, Debug, Default, Deserialize, JsonSchema)] +pub struct EditorSettings { + pub tab_size: Option, + pub hard_tabs: Option, + pub soft_wrap: Option, + pub preferred_line_length: Option, + pub format_on_save: Option, + pub formatter: Option, + pub enable_language_server: Option, +} + #[derive(Copy, Clone, Debug, 
Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum SoftWrap { @@ -212,6 +217,8 @@ pub struct SettingsFileContent { #[serde(default)] pub terminal: TerminalSettings, #[serde(default)] + pub git: Option, + #[serde(default)] #[serde(alias = "language_overrides")] pub languages: HashMap, EditorSettings>, #[serde(default)] @@ -266,9 +273,9 @@ impl Settings { format_on_save: required(defaults.editor.format_on_save), formatter: required(defaults.editor.formatter), enable_language_server: required(defaults.editor.enable_language_server), - git_gutter: defaults.editor.git_gutter, }, editor_overrides: Default::default(), + git: defaults.git.unwrap(), terminal_defaults: Default::default(), terminal_overrides: Default::default(), language_defaults: defaults.languages, @@ -395,11 +402,11 @@ impl Settings { format_on_save: Some(FormatOnSave::On), formatter: Some(Formatter::LanguageServer), enable_language_server: Some(true), - git_gutter: Default::default(), }, editor_overrides: Default::default(), terminal_defaults: Default::default(), terminal_overrides: Default::default(), + git: Default::default(), language_defaults: Default::default(), language_overrides: Default::default(), lsp: Default::default(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 921fb2de20..fc1f6432a1 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -736,9 +736,9 @@ impl ItemHandle for ViewHandle { let debounce_delay = cx .global::() - .editor_overrides + .git .git_gutter - .unwrap_or_default() + .expect("This should be Some by setting setup") .debounce_delay_millis; let item = item.clone(); From e6487de0691ffcafa1e727727bddfec4dc2d6377 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 3 Oct 2022 15:11:06 -0400 Subject: [PATCH 108/140] Rename head text to indicate that it's not always going to be from head Co-Authored-By: Mikayla Maki --- crates/collab/src/integration_tests.rs | 30 +++++++++++++------------- crates/collab/src/rpc.rs | 6 +++--- crates/git/src/diff.rs | 24 ++++++++++----------- crates/git/src/repository.rs | 9 ++++---- crates/language/src/buffer.rs | 28 ++++++++++++------------ crates/project/src/fs.rs | 6 +----- crates/project/src/project.rs | 20 ++++++++--------- crates/project/src/worktree.rs | 10 ++++----- crates/rpc/proto/zed.proto | 8 +++---- crates/rpc/src/proto.rs | 4 ++-- crates/settings/src/settings.rs | 11 +++++++--- 11 files changed, 78 insertions(+), 78 deletions(-) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index a13a013e7a..58a8efc411 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -948,7 +948,7 @@ async fn test_propagate_saves_and_fs_changes( } #[gpui::test(iterations = 10)] -async fn test_git_head_text( +async fn test_git_diff_base_change( executor: Arc, cx_a: &mut TestAppContext, cx_b: &mut TestAppContext, @@ -977,13 +977,13 @@ async fn test_git_head_text( ) .await; - let head_text = " + let diff_base = " one three " .unindent(); - let new_head_text = " + let new_diff_base = " one two " @@ -992,9 +992,9 @@ async fn test_git_head_text( client_a .fs .as_fake() - .set_head_state_for_git_repository( + .set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("a.txt"), head_text.clone())], + &[(Path::new("a.txt"), diff_base.clone())], ) .await; @@ -1012,11 +1012,11 @@ async fn test_git_head_text( // Smoke test diffing buffer_a.read_with(cx_a, |buffer, _| { - assert_eq!(buffer.head_text(), 
Some(head_text.as_ref())); + assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, - &head_text, + &diff_base, &[(1..2, "", "two\n")], ); }); @@ -1032,11 +1032,11 @@ async fn test_git_head_text( // Smoke test diffing buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(buffer.head_text(), Some(head_text.as_ref())); + assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, - &head_text, + &diff_base, &[(1..2, "", "two\n")], ); }); @@ -1044,9 +1044,9 @@ async fn test_git_head_text( client_a .fs .as_fake() - .set_head_state_for_git_repository( + .set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("a.txt"), new_head_text.clone())], + &[(Path::new("a.txt"), new_diff_base.clone())], ) .await; @@ -1055,23 +1055,23 @@ async fn test_git_head_text( // Smoke test new diffing buffer_a.read_with(cx_a, |buffer, _| { - assert_eq!(buffer.head_text(), Some(new_head_text.as_ref())); + assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, - &head_text, + &diff_base, &[(2..3, "", "three\n")], ); }); // Smoke test B buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(buffer.head_text(), Some(new_head_text.as_ref())); + assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), &buffer, - &head_text, + &diff_base, &[(2..3, "", "three\n")], ); }); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 609ae89625..9f3c01ac83 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -206,7 +206,7 @@ impl Server { .add_message_handler(Server::unfollow) .add_message_handler(Server::update_followers) .add_request_handler(Server::get_channel_messages) - .add_message_handler(Server::update_head_text) + .add_message_handler(Server::update_diff_base) .add_request_handler(Server::get_private_user_info); Arc::new(server) @@ -1729,9 +1729,9 @@ impl Server { Ok(()) } - async fn update_head_text( + async fn update_diff_base( self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> Result<()> { let receiver_ids = self.store().await.project_connection_ids( ProjectId::from_proto(request.payload.project_id), diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 48630fc91c..abf874e2bb 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -111,11 +111,11 @@ impl BufferDiff { } } - pub async fn update(&mut self, head_text: &str, buffer: &text::BufferSnapshot) { + pub async fn update(&mut self, diff_base: &str, buffer: &text::BufferSnapshot) { let mut tree = SumTree::new(); let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(&head_text, &buffer_text); + let patch = Self::diff(&diff_base, &buffer_text); if let Some(patch) = patch { let mut divergence = 0; @@ -228,7 +228,7 @@ impl BufferDiff { pub fn assert_hunks( diff_hunks: Iter, buffer: &BufferSnapshot, - head_text: &str, + diff_base: &str, expected_hunks: &[(Range, &str, &str)], ) where Iter: Iterator>, @@ -237,7 +237,7 @@ pub fn assert_hunks( .map(|hunk| { ( hunk.buffer_range.clone(), - &head_text[hunk.head_byte_range], + &diff_base[hunk.head_byte_range], buffer .text_for_range( Point::new(hunk.buffer_range.start, 0) @@ -264,7 +264,7 @@ mod tests { #[test] fn test_buffer_diff_simple() { - let head_text = " + let diff_base = " one two three @@ -280,27 
+280,27 @@ mod tests { let mut buffer = Buffer::new(0, 0, buffer_text); let mut diff = BufferDiff::new(); - smol::block_on(diff.update(&head_text, &buffer)); + smol::block_on(diff.update(&diff_base, &buffer)); assert_hunks( diff.hunks(&buffer), &buffer, - &head_text, + &diff_base, &[(1..2, "two\n", "HELLO\n")], ); buffer.edit([(0..0, "point five\n")]); - smol::block_on(diff.update(&head_text, &buffer)); + smol::block_on(diff.update(&diff_base, &buffer)); assert_hunks( diff.hunks(&buffer), &buffer, - &head_text, + &diff_base, &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")], ); } #[test] fn test_buffer_diff_range() { - let head_text = " + let diff_base = " one two three @@ -337,13 +337,13 @@ mod tests { let buffer = Buffer::new(0, 0, buffer_text); let mut diff = BufferDiff::new(); - smol::block_on(diff.update(&head_text, &buffer)); + smol::block_on(diff.update(&diff_base, &buffer)); assert_eq!(diff.hunks(&buffer).count(), 8); assert_hunks( diff.hunks_in_range(7..12, &buffer), &buffer, - &head_text, + &diff_base, &[ (6..7, "", "HELLO\n"), (9..10, "six\n", "SIXTEEN\n"), diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index a49a1e0b60..67e93416ae 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -10,12 +10,12 @@ pub use git2::Repository as LibGitRepository; #[async_trait::async_trait] pub trait GitRepository: Send { - fn load_head_text(&self, relative_file_path: &Path) -> Option; + fn load_index(&self, relative_file_path: &Path) -> Option; } #[async_trait::async_trait] impl GitRepository for LibGitRepository { - fn load_head_text(&self, relative_file_path: &Path) -> Option { + fn load_index(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; @@ -25,8 +25,7 @@ impl GitRepository for LibGitRepository { }; let content = repo.find_blob(oid)?.content().to_owned(); - let head_text = String::from_utf8(content)?; - Ok(Some(head_text)) + Ok(Some(String::from_utf8(content)?)) } match logic(&self, relative_file_path) { @@ -55,7 +54,7 @@ impl FakeGitRepository { #[async_trait::async_trait] impl GitRepository for FakeGitRepository { - fn load_head_text(&self, path: &Path) -> Option { + fn load_index(&self, path: &Path) -> Option { let state = self.state.lock(); state.index_contents.get(path).cloned() } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 22706ab1b5..11ca4fa52a 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -53,7 +53,7 @@ struct GitDiffStatus { pub struct Buffer { text: TextBuffer, - head_text: Option, + diff_base: Option, git_diff_status: GitDiffStatus, file: Option>, saved_version: clock::Global, @@ -346,13 +346,13 @@ impl Buffer { pub fn from_file>( replica_id: ReplicaId, base_text: T, - head_text: Option, + diff_base: Option, file: Arc, cx: &mut ModelContext, ) -> Self { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), - head_text.map(|h| h.into().into_boxed_str().into()), + diff_base.map(|h| h.into().into_boxed_str().into()), Some(file), ) } @@ -365,7 +365,7 @@ impl Buffer { let buffer = TextBuffer::new(replica_id, message.id, message.base_text); let mut this = Self::build( buffer, - message.head_text.map(|text| text.into_boxed_str().into()), + message.diff_base.map(|text| text.into_boxed_str().into()), file, ); this.text.set_line_ending(proto::deserialize_line_ending( @@ -380,7 +380,7 @@ impl Buffer { id: 
self.remote_id(), file: self.file.as_ref().map(|f| f.to_proto()), base_text: self.base_text().to_string(), - head_text: self.head_text.as_ref().map(|h| h.to_string()), + diff_base: self.diff_base.as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, } } @@ -423,7 +423,7 @@ impl Buffer { self } - fn build(buffer: TextBuffer, head_text: Option, file: Option>) -> Self { + fn build(buffer: TextBuffer, diff_base: Option, file: Option>) -> Self { let saved_mtime = if let Some(file) = file.as_ref() { file.mtime() } else { @@ -437,7 +437,7 @@ impl Buffer { transaction_depth: 0, was_dirty_before_starting_transaction: None, text: buffer, - head_text, + diff_base, git_diff_status: GitDiffStatus { diff: git::diff::BufferDiff::new(), update_in_progress: false, @@ -663,12 +663,12 @@ impl Buffer { } #[cfg(any(test, feature = "test-support"))] - pub fn head_text(&self) -> Option<&str> { - self.head_text.as_deref() + pub fn diff_base(&self) -> Option<&str> { + self.diff_base.as_deref() } - pub fn update_head_text(&mut self, head_text: Option, cx: &mut ModelContext) { - self.head_text = head_text; + pub fn update_diff_base(&mut self, diff_base: Option, cx: &mut ModelContext) { + self.diff_base = diff_base; self.git_diff_recalc(cx); } @@ -682,13 +682,13 @@ impl Buffer { return; } - if let Some(head_text) = &self.head_text { + if let Some(diff_base) = &self.diff_base { let snapshot = self.snapshot(); - let head_text = head_text.clone(); + let diff_base = diff_base.clone(); let mut diff = self.git_diff_status.diff.clone(); let diff = cx.background().spawn(async move { - diff.update(&head_text, &snapshot).await; + diff.update(&diff_base, &snapshot).await; diff }); diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 2b7aca642d..a43f18ca64 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -490,11 +490,7 @@ impl FakeFs { .boxed() } - pub async fn set_head_state_for_git_repository( - &self, - dot_git: &Path, - head_state: &[(&Path, String)], - ) { + pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) { let content_path = dot_git.parent().unwrap(); let mut state = self.state.lock().await; let entry = state.read_path(dot_git).await.unwrap(); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 1064d05fe9..7ce9b46085 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -424,7 +424,7 @@ impl Project { client.add_model_request_handler(Self::handle_open_buffer_by_id); client.add_model_request_handler(Self::handle_open_buffer_by_path); client.add_model_request_handler(Self::handle_save_buffer); - client.add_model_message_handler(Self::handle_update_head_text); + client.add_model_message_handler(Self::handle_update_diff_base); } pub fn local( @@ -4675,22 +4675,22 @@ impl Project { let client = self.client.clone(); cx.spawn(|_, mut cx| async move { - let head_text = cx + let diff_base = cx .background() - .spawn(async move { repo.repo.lock().load_head_text(&path) }) + .spawn(async move { repo.repo.lock().load_index(&path) }) .await; let buffer_id = buffer.update(&mut cx, |buffer, cx| { - buffer.update_head_text(head_text.clone(), cx); + buffer.update_diff_base(diff_base.clone(), cx); buffer.remote_id() }); if let Some(project_id) = shared_remote_id { client - .send(proto::UpdateHeadText { + .send(proto::UpdateDiffBase { project_id, buffer_id: buffer_id as u64, - head_text, + diff_base, }) .log_err(); } @@ -5272,22 +5272,22 @@ impl Project { }) } 
- async fn handle_update_head_text( + async fn handle_update_diff_base( this: ModelHandle, - envelope: TypedEnvelope, + envelope: TypedEnvelope, _: Arc, mut cx: AsyncAppContext, ) -> Result<()> { this.update(&mut cx, |this, cx| { let buffer_id = envelope.payload.buffer_id; - let head_text = envelope.payload.head_text; + let diff_base = envelope.payload.diff_base; let buffer = this .opened_buffers .get_mut(&buffer_id) .and_then(|b| b.upgrade(cx)) .ok_or_else(|| anyhow!("No such buffer {}", buffer_id))?; - buffer.update(cx, |buffer, cx| buffer.update_head_text(head_text, cx)); + buffer.update(cx, |buffer, cx| buffer.update_diff_base(diff_base, cx)); Ok(()) }) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index b914282e01..ea02431ab9 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -481,11 +481,11 @@ impl LocalWorktree { ) -> Task>> { let path = Arc::from(path); cx.spawn(move |this, mut cx| async move { - let (file, contents, head_text) = this + let (file, contents, diff_base) = this .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .await?; Ok(cx.add_model(|cx| { - let mut buffer = Buffer::from_file(0, contents, head_text, Arc::new(file), cx); + let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx); buffer.git_diff_recalc(cx); buffer })) @@ -673,13 +673,13 @@ impl LocalWorktree { cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let head_text = match files_included { + let diff_base = match files_included { settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked => { let results = if let Some(repo) = snapshot.repo_for(&abs_path) { cx.background() .spawn({ let path = path.clone(); - async move { repo.repo.lock().load_head_text(&path) } + async move { repo.repo.lock().load_index(&path) } }) .await } else { @@ -714,7 +714,7 @@ impl LocalWorktree { is_local: true, }, text, - head_text, + diff_base, )) }) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 832c5bb6bd..3c7fa2ad40 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -110,7 +110,7 @@ message Envelope { Unfollow unfollow = 95; GetPrivateUserInfo get_private_user_info = 96; GetPrivateUserInfoResponse get_private_user_info_response = 97; - UpdateHeadText update_head_text = 98; + UpdateDiffBase update_diff_base = 98; } } @@ -830,7 +830,7 @@ message BufferState { uint64 id = 1; optional File file = 2; string base_text = 3; - optional string head_text = 4; + optional string diff_base = 4; LineEnding line_ending = 5; } @@ -1002,8 +1002,8 @@ message WorktreeMetadata { bool visible = 3; } -message UpdateHeadText { +message UpdateDiffBase { uint64 project_id = 1; uint64 buffer_id = 2; - optional string head_text = 3; + optional string diff_base = 3; } diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 8c5832c15f..8d9d715b6c 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -167,7 +167,7 @@ messages!( (UpdateProject, Foreground), (UpdateWorktree, Foreground), (UpdateWorktreeExtensions, Background), - (UpdateHeadText, Background), + (UpdateDiffBase, Background), (GetPrivateUserInfo, Foreground), (GetPrivateUserInfoResponse, Foreground), ); @@ -267,7 +267,7 @@ entity_messages!( UpdateProject, UpdateWorktree, UpdateWorktreeExtensions, - UpdateHeadText + UpdateDiffBase ); entity_messages!(channel_id, ChannelMessageSent); diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 
9de4335ec8..9655529744 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -55,11 +55,11 @@ impl FeatureFlags { #[derive(Copy, Clone, Debug, Default, Deserialize, JsonSchema)] pub struct GitSettings { - pub git_gutter: Option, + pub git_gutter: Option, } #[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] -pub struct GitGutterSettings { +pub struct GitGutter { pub files_included: GitFilesIncluded, pub debounce_delay_millis: Option, } @@ -406,7 +406,12 @@ impl Settings { editor_overrides: Default::default(), terminal_defaults: Default::default(), terminal_overrides: Default::default(), - git: Default::default(), + git: GitSettings { + git_gutter: Some(GitGutter { + files_included: GitFilesIncluded::All, + debounce_delay_millis: None, + }), + }, language_defaults: Default::default(), language_overrides: Default::default(), lsp: Default::default(), From f3d83631efe8a685505b4dd1b62fc9efdc351ff8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 3 Oct 2022 12:13:27 -0700 Subject: [PATCH 109/140] Remove unnecessary min_id_length option from amplitude requests --- crates/client/src/telemetry.rs | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index f92bf1592b..0c162580d4 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -53,12 +53,6 @@ lazy_static! { struct AmplitudeEventBatch { api_key: &'static str, events: Vec, - options: AmplitudeEventBatchOptions, -} - -#[derive(Serialize)] -struct AmplitudeEventBatchOptions { - min_id_length: usize, } #[derive(Serialize)] @@ -273,11 +267,7 @@ impl Telemetry { } } - let batch = AmplitudeEventBatch { - api_key, - events, - options: AmplitudeEventBatchOptions { min_id_length: 1 }, - }; + let batch = AmplitudeEventBatch { api_key, events }; json_bytes.clear(); serde_json::to_writer(&mut json_bytes, &batch)?; let request = From 6f6d72890a3981140e2258cad67ec9113d358f35 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 3 Oct 2022 15:42:30 -0400 Subject: [PATCH 110/140] Once again respect user settings for git gutter Co-Authored-By: Mikayla Maki --- crates/project/src/worktree.rs | 8 ++------ crates/settings/src/settings.rs | 27 +++++++++++++++++++++++++-- crates/workspace/src/workspace.rs | 14 ++++++++++---- 3 files changed, 37 insertions(+), 12 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index ea02431ab9..1016e58b73 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -663,12 +663,8 @@ impl LocalWorktree { let fs = self.fs.clone(); let snapshot = self.snapshot(); - let files_included = cx - .global::() - .git - .git_gutter - .expect("This should be Some by setting setup") - .files_included; + let settings = cx.global::(); + let files_included = settings.git_gutter().files_included(settings); cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 9655529744..3bf09436ed 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -33,6 +33,7 @@ pub struct Settings { pub editor_defaults: EditorSettings, pub editor_overrides: EditorSettings, pub git: GitSettings, + pub git_overrides: GitSettings, pub terminal_defaults: TerminalSettings, pub terminal_overrides: TerminalSettings, pub language_defaults: HashMap, EditorSettings>, @@ -60,10 +61,21 @@ pub struct GitSettings { #[derive(Clone, 
Copy, Debug, Default, Deserialize, JsonSchema)] pub struct GitGutter { - pub files_included: GitFilesIncluded, + pub files_included: Option, pub debounce_delay_millis: Option, } +impl GitGutter { + pub fn files_included(&self, settings: &Settings) -> GitFilesIncluded { + self.files_included.unwrap_or_else(|| { + settings + .git.git_gutter.expect("git_gutter must be some in defaults.json") + .files_included + .expect("Should be some in defaults.json") + }) + } +} + #[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema, PartialEq, Eq)] #[serde(rename_all = "snake_case")] pub enum GitFilesIncluded { @@ -276,6 +288,7 @@ impl Settings { }, editor_overrides: Default::default(), git: defaults.git.unwrap(), + git_overrides: Default::default(), terminal_defaults: Default::default(), terminal_overrides: Default::default(), language_defaults: defaults.languages, @@ -327,6 +340,7 @@ impl Settings { } self.editor_overrides = data.editor; + self.git_overrides = data.git.unwrap_or_default(); self.terminal_defaults.font_size = data.terminal.font_size; self.terminal_overrides = data.terminal; self.language_overrides = data.languages; @@ -382,6 +396,14 @@ impl Settings { .expect("missing default") } + pub fn git_gutter(&self) -> GitGutter { + self.git_overrides.git_gutter.unwrap_or_else(|| { + self.git + .git_gutter + .expect("git_gutter should be some by setting setup") + }) + } + #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &gpui::AppContext) -> Settings { Settings { @@ -408,10 +430,11 @@ impl Settings { terminal_overrides: Default::default(), git: GitSettings { git_gutter: Some(GitGutter { - files_included: GitFilesIncluded::All, + files_included: Some(GitFilesIncluded::All), debounce_delay_millis: None, }), }, + git_overrides: Default::default(), language_defaults: Default::default(), language_overrides: Default::default(), lsp: Default::default(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index fc1f6432a1..44c9b19f1b 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -734,12 +734,18 @@ impl ItemHandle for ViewHandle { ); } - let debounce_delay = cx - .global::() - .git + let settings = cx.global::(); + let debounce_delay = settings + .git_overrides .git_gutter - .expect("This should be Some by setting setup") + .unwrap_or_else(|| { + settings + .git + .git_gutter + .expect("This should be Some by setting setup") + }) .debounce_delay_millis; + let item = item.clone(); if let Some(delay) = debounce_delay { From c354b9b9596a41c40dfb4f573d5b66fe9c78e210 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 3 Oct 2022 13:24:37 -0700 Subject: [PATCH 111/140] Add assertions to test for autoclose with embedded languages --- crates/editor/src/editor.rs | 204 +++++++++++++++++++++++++++----- crates/language/src/language.rs | 2 +- 2 files changed, 173 insertions(+), 33 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 699b442a5d..769c03d6ff 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6001,6 +6001,10 @@ impl Editor { } impl EditorSnapshot { + pub fn language_at(&self, position: T) -> Option<&Arc> { + self.display_snapshot.buffer_snapshot.language_at(position) + } + pub fn is_focused(&self) -> bool { self.is_focused } @@ -9788,13 +9792,24 @@ mod tests { Language::new( LanguageConfig { name: "HTML".into(), - brackets: vec![BracketPair { - start: "<".to_string(), - end: ">".to_string(), - close: true, - newline: true, - }], - 
autoclose_before: "})]".to_string(), + brackets: vec![ + BracketPair { + start: "<".into(), + end: ">".into(), + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + ..Default::default() + }, + ], + autoclose_before: "})]>".into(), ..Default::default() }, Some(tree_sitter_html::language()), @@ -9812,13 +9827,24 @@ mod tests { let javascript_language = Arc::new(Language::new( LanguageConfig { name: "JavaScript".into(), - brackets: vec![BracketPair { - start: "/*".to_string(), - end: "*/".to_string(), - close: true, - newline: true, - }], - autoclose_before: "})]".to_string(), + brackets: vec![ + BracketPair { + start: "/*".into(), + end: " */".into(), + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + ..Default::default() + }, + ], + autoclose_before: "})]>".into(), ..Default::default() }, Some(tree_sitter_javascript::language()), @@ -9839,31 +9865,145 @@ mod tests { - + ˇ "# .unindent(), ); - let cursors = cx.update_editor(|editor, cx| editor.selections.ranges::(cx)); - cx.update_buffer(|buffer, _| { - let snapshot = buffer.snapshot(); + // Precondition: different languages are active at different locations. + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let cursors = editor.selections.ranges::(cx); + let languages = cursors + .iter() + .map(|c| snapshot.language_at(c.start).unwrap().name()) + .collect::>(); assert_eq!( - snapshot - .language_at(cursors[0].start) - .unwrap() - .name() - .as_ref(), - "HTML" - ); - assert_eq!( - snapshot - .language_at(cursors[1].start) - .unwrap() - .name() - .as_ref(), - "JavaScript" + languages, + &["HTML".into(), "JavaScript".into(), "HTML".into()] ); }); + + // Angle brackets autoclose in HTML, but not JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + editor.handle_input("a", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + + // Curly braces and parens autoclose in both HTML and JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input(" b=", cx); + editor.handle_input("{", cx); + editor.handle_input("c", cx); + editor.handle_input("(", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + + // Brackets that were already autoclosed are skipped. + cx.update_editor(|editor, cx| { + editor.handle_input(")", cx); + editor.handle_input("d", cx); + editor.handle_input("}", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + editor.handle_input(">", cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Reset + cx.set_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + }); + cx.assert_editor_state( + &r#" + <ˇ> + + <ˇ> + "# + .unindent(), + ); + + // When backspacing, the closing angle brackets are removed. + cx.update_editor(|editor, cx| { + editor.backspace(&Backspace, cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Block comments autoclose in JavaScript, but not HTML. 
+ cx.update_editor(|editor, cx| { + editor.handle_input("/", cx); + editor.handle_input("*", cx); + }); + cx.assert_editor_state( + &r#" + /*ˇ + + /*ˇ + "# + .unindent(), + ); } #[gpui::test] diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index b8d4ca309f..59da0909c6 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -271,7 +271,7 @@ pub struct FakeLspAdapter { pub disk_based_diagnostics_sources: Vec, } -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Debug, Default, Deserialize)] pub struct BracketPair { pub start: String, pub end: String, From 6f7547d28f131cf1af5ef59b593d9f813c0a6786 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 3 Oct 2022 17:18:38 -0700 Subject: [PATCH 112/140] Fixed a couple bugs in tests and worktree path handling --- crates/git/src/repository.rs | 1 + crates/project/src/fs.rs | 3 +-- crates/project/src/project.rs | 18 +++++++++------- crates/project/src/worktree.rs | 39 ++++++++++++++++------------------ 4 files changed, 30 insertions(+), 31 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 67e93416ae..38393dc8a8 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -19,6 +19,7 @@ impl GitRepository for LibGitRepository { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; + dbg!(relative_file_path); let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { Some(entry) => entry.id, None => return Ok(None), diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index a43f18ca64..812842a354 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -888,8 +888,7 @@ impl Fs for FakeFs { } fn open_repo(&self, abs_dot_git: &Path) -> Option>> { - let executor = self.executor.upgrade().unwrap(); - executor.block(async move { + smol::block_on(async move { let state = self.state.lock().await; let entry = state.read_path(abs_dot_git).await.unwrap(); let mut entry = entry.lock().await; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7ce9b46085..dc783f1818 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4541,7 +4541,7 @@ impl Project { cx.subscribe(worktree, |this, worktree, event, cx| match event { worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx), worktree::Event::UpdatedGitRepositories(updated_repos) => { - this.update_local_worktree_buffers_git_repos(updated_repos, cx) + this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx) } }) .detach(); @@ -4652,21 +4652,23 @@ impl Project { fn update_local_worktree_buffers_git_repos( &mut self, + worktree: ModelHandle, repos: &[GitRepositoryEntry], cx: &mut ModelContext, ) { - //TODO: Produce protos - for (_, buffer) in &self.opened_buffers { if let Some(buffer) = buffer.upgrade(cx) { - let file = match buffer.read(cx).file().and_then(|file| file.as_local()) { + let file = match File::from_dyn(buffer.read(cx).file()) { Some(file) => file, - None => return, + None => continue, }; - let path = file.path().clone(); - let abs_path = file.abs_path(cx); + if file.worktree != worktree { + continue; + } - let repo = match repos.iter().find(|repo| repo.manages(&abs_path)) { + let path = file.path().clone(); + + let repo = match repos.iter().find(|repo| repo.manages(&path)) { Some(repo) => repo.clone(), None => return, }; diff --git a/crates/project/src/worktree.rs 
b/crates/project/src/worktree.rs index 1016e58b73..fb07bd837f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -41,6 +41,7 @@ use std::{ ffi::{OsStr, OsString}, fmt, future::Future, + mem, ops::{Deref, DerefMut}, os::unix::prelude::{OsStrExt, OsStringExt}, path::{Path, PathBuf}, @@ -664,6 +665,13 @@ impl LocalWorktree { let snapshot = self.snapshot(); let settings = cx.global::(); + + // Cut files included because we want to ship! + // TODO: + // - Rename / etc. setting to be show/hide git gutters + // - Unconditionally load index text for all files, + // - then choose at rendering time based on settings + let files_included = settings.git_gutter().files_included(settings); cx.spawn(|this, mut cx| async move { @@ -1379,6 +1387,7 @@ impl LocalSnapshot { // Gives the most specific git repository for a given path pub(crate) fn repo_for(&self, path: &Path) -> Option { + dbg!(&self.git_repositories); self.git_repositories .iter() .rev() //git_repository is ordered lexicographically @@ -1557,7 +1566,7 @@ impl LocalSnapshot { if parent_path.file_name() == Some(&DOT_GIT) { let abs_path = self.abs_path.join(&parent_path); - let content_path: Arc = abs_path.parent().unwrap().into(); + let content_path: Arc = parent_path.parent().unwrap().into(); if let Err(ix) = self .git_repositories .binary_search_by_key(&&content_path, |repo| &repo.content_path) @@ -1716,6 +1725,7 @@ impl LocalSnapshot { impl GitRepositoryEntry { // Note that these paths should be relative to the worktree root. pub(crate) fn manages(&self, path: &Path) -> bool { + dbg!(path, &self.content_path); path.starts_with(self.content_path.as_ref()) } @@ -2566,7 +2576,7 @@ impl BackgroundScanner { self.snapshot.lock().removed_entry_ids.clear(); self.update_ignore_statuses().await; - self.update_git_repositories().await; + self.update_git_repositories(); true } @@ -2632,25 +2642,11 @@ impl BackgroundScanner { .await; } - // TODO: Clarify what is going on here because re-loading every git repository - // on every file system event seems wrong - async fn update_git_repositories(&self) { + fn update_git_repositories(&self) { let mut snapshot = self.snapshot.lock(); - - let new_repos = snapshot - .git_repositories - .iter() - .cloned() - .filter_map(|mut repo_entry| { - let repo = self - .fs - .open_repo(&snapshot.abs_path.join(&repo_entry.git_dir_path))?; - repo_entry.repo = repo; - Some(repo_entry) - }) - .collect(); - - snapshot.git_repositories = new_repos; + let mut git_repositories = mem::take(&mut snapshot.git_repositories); + git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some()); + snapshot.git_repositories = git_repositories; } async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) { @@ -3245,7 +3241,8 @@ mod tests { "b.txt": "" } }, - "c.txt": "" + "c.txt": "", + })); let http_client = FakeHttpClient::with_404_response(); From 499e95d16a1213ab89052a98d0b3bd5e9b297f3e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 3 Oct 2022 17:43:05 -0700 Subject: [PATCH 113/140] Removed debugs, simplified settings --- assets/settings/default.json | 18 +++----- crates/editor/src/element.rs | 71 ++++++++++++++++--------------- crates/git/src/repository.rs | 1 - crates/project/src/worktree.rs | 44 +++++-------------- crates/settings/src/settings.rs | 34 +++------------ crates/workspace/src/workspace.rs | 11 +---- 6 files changed, 61 insertions(+), 118 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 
fc1b1906fc..fddac662a5 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -76,18 +76,12 @@ "tab_size": 4, // Git gutter behavior configuration. "git": { - "git_gutter": { - // Which files to show the git gutter on. This setting can take - // three values: - // 1. All files, files not tracked in git will be diffed against - // their contents when the file was last opened in Zed: - // "files_included": "all", - // 2. Only show for files tracked in git: - // "files_included": "only_tracked", - // 3. Disable git gutters entirely: - // "files_included": "none", - "files_included": "all" - } + // Control whether the git gutter is shown. May take 2 values: + // 1. Show the gutter + // "git_gutter": "tracked_files" + // 2. Hide the gutter + // "git_gutter": "hide" + "git_gutter": "tracked_files" }, // Settings specific to the terminal "terminal": { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 5d83051567..56887f4b45 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -37,7 +37,7 @@ use gpui::{ use json::json; use language::{Bias, DiagnosticSeverity, OffsetUtf16, Selection}; use project::ProjectPath; -use settings::Settings; +use settings::{GitGutter, Settings}; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -607,13 +607,16 @@ impl EditorElement { }; let diff_style = &cx.global::().theme.editor.diff.clone(); - // dbg!("***************"); - // dbg!(&layout.diff_hunks); - // dbg!("***************"); + let show_gutter = matches!( + &cx.global::() + .git_overrides + .git_gutter + .unwrap_or_default(), + GitGutter::TrackedFiles + ); // line is `None` when there's a line wrap for (ix, line) in layout.line_number_layouts.iter().enumerate() { - // dbg!(ix); if let Some(line) = line { let line_origin = bounds.origin() + vec2f( @@ -624,39 +627,39 @@ impl EditorElement { line.paint(line_origin, visible_bounds, gutter_layout.line_height, cx); - //This line starts a buffer line, so let's do the diff calculation - let new_hunk = get_hunk(diff_layout.buffer_line, &layout.diff_hunks); + if show_gutter { + //This line starts a buffer line, so let's do the diff calculation + let new_hunk = get_hunk(diff_layout.buffer_line, &layout.diff_hunks); - // This + the unwraps are annoying, but at least it's legible - let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) { - (None, None) => (false, false), - (None, Some(_)) => (false, true), - (Some(_), None) => (true, false), - (Some((old_hunk, _)), Some(new_hunk)) if new_hunk == old_hunk => (false, false), - (Some(_), Some(_)) => (true, true), - }; + // This + the unwraps are annoying, but at least it's legible + let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) { + (None, None) => (false, false), + (None, Some(_)) => (false, true), + (Some(_), None) => (true, false), + (Some((old_hunk, _)), Some(new_hunk)) if new_hunk == old_hunk => { + (false, false) + } + (Some(_), Some(_)) => (true, true), + }; - // dbg!(diff_layout.buffer_line, is_starting); + if is_ending { + let (last_hunk, start_line) = diff_layout.last_diff.take().unwrap(); + cx.scene.push_quad(diff_quad( + last_hunk.status(), + start_line..ix, + &gutter_layout, + diff_style, + )); + } - if is_ending { - let (last_hunk, start_line) = diff_layout.last_diff.take().unwrap(); - // dbg!("ending"); - // dbg!(start_line..ix); - cx.scene.push_quad(diff_quad( - last_hunk.status(), - start_line..ix, - &gutter_layout, - diff_style, - )); + if is_starting { + let new_hunk = 
new_hunk.unwrap(); + + diff_layout.last_diff = Some((new_hunk, ix)); + }; + + diff_layout.buffer_line += 1; } - - if is_starting { - let new_hunk = new_hunk.unwrap(); - - diff_layout.last_diff = Some((new_hunk, ix)); - }; - - diff_layout.buffer_line += 1; } } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 38393dc8a8..67e93416ae 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -19,7 +19,6 @@ impl GitRepository for LibGitRepository { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; - dbg!(relative_file_path); let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { Some(entry) => entry.id, None => return Ok(None), diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index fb07bd837f..6880ec4ff1 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -32,7 +32,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; -use settings::Settings; + use smol::channel::{self, Sender}; use std::{ any::Any, @@ -664,40 +664,18 @@ impl LocalWorktree { let fs = self.fs.clone(); let snapshot = self.snapshot(); - let settings = cx.global::(); - - // Cut files included because we want to ship! - // TODO: - // - Rename / etc. setting to be show/hide git gutters - // - Unconditionally load index text for all files, - // - then choose at rendering time based on settings - - let files_included = settings.git_gutter().files_included(settings); - cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let diff_base = match files_included { - settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked => { - let results = if let Some(repo) = snapshot.repo_for(&abs_path) { - cx.background() - .spawn({ - let path = path.clone(); - async move { repo.repo.lock().load_index(&path) } - }) - .await - } else { - None - }; - - if files_included == settings::GitFilesIncluded::All { - results.or_else(|| Some(text.clone())) - } else { - results - } - } - - settings::GitFilesIncluded::None => None, + let diff_base = if let Some(repo) = snapshot.repo_for(&abs_path) { + cx.background() + .spawn({ + let path = path.clone(); + async move { repo.repo.lock().load_index(&path) } + }) + .await + } else { + None }; // Eagerly populate the snapshot with an updated entry for the loaded file @@ -1387,7 +1365,6 @@ impl LocalSnapshot { // Gives the most specific git repository for a given path pub(crate) fn repo_for(&self, path: &Path) -> Option { - dbg!(&self.git_repositories); self.git_repositories .iter() .rev() //git_repository is ordered lexicographically @@ -1725,7 +1702,6 @@ impl LocalSnapshot { impl GitRepositoryEntry { // Note that these paths should be relative to the worktree root. 
pub(crate) fn manages(&self, path: &Path) -> bool { - dbg!(path, &self.content_path); path.starts_with(self.content_path.as_ref()) } diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 3bf09436ed..fd04fc0aa6 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -57,34 +57,19 @@ impl FeatureFlags { #[derive(Copy, Clone, Debug, Default, Deserialize, JsonSchema)] pub struct GitSettings { pub git_gutter: Option, + pub gutter_debounce: Option, } #[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)] -pub struct GitGutter { - pub files_included: Option, - pub debounce_delay_millis: Option, -} - -impl GitGutter { - pub fn files_included(&self, settings: &Settings) -> GitFilesIncluded { - self.files_included.unwrap_or_else(|| { - settings - .git.git_gutter.expect("git_gutter must be some in defaults.json") - .files_included - .expect("Should be some in defaults.json") - }) - } -} - -#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema, PartialEq, Eq)] #[serde(rename_all = "snake_case")] -pub enum GitFilesIncluded { +pub enum GitGutter { #[default] - All, - OnlyTracked, - None, + TrackedFiles, + Hide, } +pub struct GitGutterConfig {} + #[derive(Clone, Debug, Default, Deserialize, JsonSchema)] pub struct EditorSettings { pub tab_size: Option, @@ -428,12 +413,7 @@ impl Settings { editor_overrides: Default::default(), terminal_defaults: Default::default(), terminal_overrides: Default::default(), - git: GitSettings { - git_gutter: Some(GitGutter { - files_included: Some(GitFilesIncluded::All), - debounce_delay_millis: None, - }), - }, + git: Default::default(), git_overrides: Default::default(), language_defaults: Default::default(), language_overrides: Default::default(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 44c9b19f1b..2ae498d701 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -735,16 +735,7 @@ impl ItemHandle for ViewHandle { } let settings = cx.global::(); - let debounce_delay = settings - .git_overrides - .git_gutter - .unwrap_or_else(|| { - settings - .git - .git_gutter - .expect("This should be Some by setting setup") - }) - .debounce_delay_millis; + let debounce_delay = settings.git_overrides.gutter_debounce; let item = item.clone(); From 218ba810133067a9fdd7c9230ebf5f9dbceb04e9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 3 Oct 2022 17:44:18 -0700 Subject: [PATCH 114/140] Fix autoclose error when cursor was at column 0 --- crates/editor/src/editor.rs | 263 ++++++++++++++---------------------- 1 file changed, 101 insertions(+), 162 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 769c03d6ff..93a47cf621 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -569,6 +569,7 @@ struct SelectNextState { done: bool, } +#[derive(Debug)] struct AutocloseRegion { selection_id: usize, range: Range, @@ -1883,19 +1884,20 @@ impl Editor { // If the inserted text is a suffix of an opening bracket and the // selection is preceded by the rest of the opening bracket, then // insert the closing bracket. 
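// Illustrative example of the prefix check introduced below: with the bracket pair
// "/*" .. " */", typing "*" while the buffer contains "/ˇ" gives prefix_len == 1;
// the "/" just before the cursor matches &"/*"[..1] and the character after the
// cursor (end of line) satisfies should_autoclose_before, so " */" is inserted.
// When prefix_len == 0 the whole opening bracket was typed at once, which also
// covers a cursor sitting at column 0.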
- let should_autoclose = selection.start.column > (prefix_len as u32) - && snapshot.contains_str_at( - Point::new( - selection.start.row, - selection.start.column - (prefix_len as u32), - ), - &bracket_pair.start[..prefix_len], - ) - && snapshot - .chars_at(selection.start) - .next() - .map_or(true, |c| language.should_autoclose_before(c)); - if should_autoclose { + let following_text_allows_autoclose = snapshot + .chars_at(selection.start) + .next() + .map_or(true, |c| language.should_autoclose_before(c)); + let preceding_text_matches_prefix = prefix_len == 0 + || (selection.start.column >= (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &bracket_pair.start[..prefix_len], + )); + if following_text_allows_autoclose && preceding_text_matches_prefix { let anchor = snapshot.anchor_before(selection.end); new_selections .push((selection.map(|_| anchor.clone()), text.len())); @@ -2210,14 +2212,14 @@ impl Editor { buffer: &'a MultiBufferSnapshot, ) -> impl Iterator, Option<&'a AutocloseRegion>)> { let mut i = 0; - let mut pair_states = self.autoclose_regions.as_slice(); + let mut regions = self.autoclose_regions.as_slice(); selections.into_iter().map(move |selection| { let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); let mut enclosing = None; - while let Some(pair_state) = pair_states.get(i) { + while let Some(pair_state) = regions.get(i) { if pair_state.range.end.to_offset(buffer) < range.start { - pair_states = &pair_states[i + 1..]; + regions = ®ions[i + 1..]; i = 0; } else if pair_state.range.start.to_offset(buffer) > range.end { break; @@ -9594,7 +9596,8 @@ mod tests { #[gpui::test] async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); + let mut cx = EditorTestContext::new(cx); + let language = Arc::new(Language::new( LanguageConfig { brackets: vec![ @@ -9623,165 +9626,101 @@ mod tests { Some(tree_sitter_rust::language()), )); - let text = r#" - a + let registry = Arc::new(LanguageRegistry::test()); + registry.add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(language), cx); + }); - / - - "# - .unindent(); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); - view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) - .await; - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - ]) - }); + cx.set_state( + &r#" + 🏀ˇ + εˇ + ❤️ˇ + "# + .unindent(), + ); + // autoclose multiple nested brackets at multiple cursors + cx.update_editor(|view, cx| { view.handle_input("{", cx); view.handle_input("{", cx); view.handle_input("{", cx); - assert_eq!( - view.text(cx), - " - {{{}}} - {{{}}} - / - - " - .unindent() - ); + }); + cx.assert_editor_state( + &" + 🏀{{{ˇ}}} + ε{{{ˇ}}} + ❤️{{{ˇ}}} + " + .unindent(), + ); + // skip over the auto-closed brackets when typing a closing bracket + cx.update_editor(|view, cx| { view.move_right(&MoveRight, cx); view.handle_input("}", cx); view.handle_input("}", cx); view.handle_input("}", cx); - assert_eq!( - view.text(cx), - " - {{{}}}} - {{{}}}} - / + }); + cx.assert_editor_state( + 
&" + 🏀{{{}}}}ˇ + ε{{{}}}}ˇ + ❤️{{{}}}}ˇ + " + .unindent(), + ); - " - .unindent() - ); - - view.undo(&Undo, cx); + // autoclose multi-character pairs + cx.set_state( + &" + ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| { view.handle_input("/", cx); view.handle_input("*", cx); - assert_eq!( - view.text(cx), - " - /* */ - /* */ - / - - " - .unindent() - ); - - view.undo(&Undo, cx); - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), - DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), - ]) - }); - view.handle_input("*", cx); - assert_eq!( - view.text(cx), - " - a - - /* - * - " - .unindent() - ); - - // Don't autoclose if the next character isn't whitespace and isn't - // listed in the language's "autoclose_before" section. - view.finalize_last_transaction(cx); - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)]) - }); - view.handle_input("{", cx); - assert_eq!( - view.text(cx), - " - {a - - /* - * - " - .unindent() - ); - - view.undo(&Undo, cx); - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1)]) - }); - view.handle_input("{", cx); - assert_eq!( - view.text(cx), - " - {a} - - /* - * - " - .unindent() - ); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(0, 1)..DisplayPoint::new(0, 2)] - ); - - view.undo(&Undo, cx); - view.handle_input("[", cx); - assert_eq!( - view.text(cx), - " - [a] - - /* - * - " - .unindent() - ); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(0, 1)..DisplayPoint::new(0, 2)] - ); - - view.undo(&Undo, cx); - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)]) - }); - view.handle_input("[", cx); - assert_eq!( - view.text(cx), - " - a[ - - /* - * - " - .unindent() - ); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2)] - ); }); + cx.assert_editor_state( + &" + /*ˇ */ + /*ˇ */ + " + .unindent(), + ); + + // one cursor autocloses a multi-character pair, one cursor + // does not autoclose. + cx.set_state( + &" + /ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| view.handle_input("*", cx)); + cx.assert_editor_state( + &" + /*ˇ */ + *ˇ + " + .unindent(), + ); + + // Don't autoclose if the next character isn't whitespace and isn't + // listed in the language's "autoclose_before" section. 
+ cx.set_state("ˇa b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{ˇa b"); + + // Surround with brackets if text is selected + cx.set_state("«aˇ» b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{«aˇ»} b"); } #[gpui::test] From b5d941b10cf7f1e129aebc9607a4330d902c9253 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 4 Oct 2022 11:43:52 -0700 Subject: [PATCH 115/140] 0.58.0 --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e99fa91008..26675a0596 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7151,7 +7151,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.57.0" +version = "0.58.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 48a84a5831..491937cd59 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.57.0" +version = "0.58.0" [lib] name = "zed" From 2bd947d4d07c7b5679c986301e727174d4d35491 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 4 Oct 2022 14:58:44 -0400 Subject: [PATCH 116/140] Use correct start row for hunk retrieval & correct paint offset Co-Authored-By: Joseph Lyons --- crates/editor/src/element.rs | 63 +++++++++++++----------------------- 1 file changed, 23 insertions(+), 40 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 56887f4b45..2b93255972 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -534,23 +534,22 @@ impl EditorElement { } struct DiffLayout<'a> { - buffer_line: usize, - last_diff: Option<(&'a DiffHunk, usize)>, + buffer_row: u32, + last_diff: Option<&'a DiffHunk>, } fn diff_quad( - status: DiffHunkStatus, - layout_range: Range, + hunk: &DiffHunk, gutter_layout: &GutterLayout, diff_style: &DiffStyle, ) -> Quad { - let color = match status { + let color = match hunk.status() { DiffHunkStatus::Added => diff_style.inserted, DiffHunkStatus::Modified => diff_style.modified, //TODO: This rendering is entirely a horrible hack DiffHunkStatus::Removed => { - let row = layout_range.start; + let row = hunk.buffer_range.start; let offset = gutter_layout.line_height / 2.; let start_y = @@ -571,8 +570,8 @@ impl EditorElement { } }; - let start_row = layout_range.start; - let end_row = layout_range.end; + let start_row = hunk.buffer_range.start; + let end_row = hunk.buffer_range.end; let start_y = start_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top; let end_y = end_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top; @@ -590,19 +589,18 @@ impl EditorElement { } } + let scroll_position = layout.position_map.snapshot.scroll_position(); let gutter_layout = { - let scroll_position = layout.position_map.snapshot.scroll_position(); let line_height = layout.position_map.line_height; GutterLayout { scroll_top: scroll_position.y() * line_height, - // scroll_position, line_height, bounds, } }; let mut diff_layout = DiffLayout { - buffer_line: 0, + buffer_row: scroll_position.y() as u32, last_diff: None, }; @@ -629,49 +627,35 @@ impl EditorElement { if show_gutter { //This line starts a buffer line, so let's do the diff calculation - let new_hunk = get_hunk(diff_layout.buffer_line, &layout.diff_hunks); + let new_hunk = get_hunk(diff_layout.buffer_row, &layout.diff_hunks); - // This + 
the unwraps are annoying, but at least it's legible let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) { - (None, None) => (false, false), - (None, Some(_)) => (false, true), - (Some(_), None) => (true, false), - (Some((old_hunk, _)), Some(new_hunk)) if new_hunk == old_hunk => { + (Some(old_hunk), Some(new_hunk)) if new_hunk == old_hunk => { (false, false) } - (Some(_), Some(_)) => (true, true), + (a, b) => (a.is_some(), b.is_some()), }; if is_ending { - let (last_hunk, start_line) = diff_layout.last_diff.take().unwrap(); - cx.scene.push_quad(diff_quad( - last_hunk.status(), - start_line..ix, - &gutter_layout, - diff_style, - )); + let last_hunk = diff_layout.last_diff.take().unwrap(); + cx.scene + .push_quad(diff_quad(last_hunk, &gutter_layout, diff_style)); } if is_starting { let new_hunk = new_hunk.unwrap(); - - diff_layout.last_diff = Some((new_hunk, ix)); + diff_layout.last_diff = Some(new_hunk); }; - diff_layout.buffer_line += 1; + diff_layout.buffer_row += 1; } } } - // If we ran out with a diff hunk still being prepped, paint it now - if let Some((last_hunk, start_line)) = diff_layout.last_diff { - let end_line = layout.line_number_layouts.len(); - cx.scene.push_quad(diff_quad( - last_hunk.status(), - start_line..end_line, - &gutter_layout, - diff_style, - )) + // If we ran out with a diff hunk still being prepped, paint it now + if let Some(last_hunk) = diff_layout.last_diff { + cx.scene + .push_quad(diff_quad(last_hunk, &gutter_layout, diff_style)) } if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() { @@ -1385,14 +1369,13 @@ impl EditorElement { /// Get the hunk that contains buffer_line, starting from start_idx /// Returns none if there is none found, and -fn get_hunk(buffer_line: usize, hunks: &[DiffHunk]) -> Option<&DiffHunk> { +fn get_hunk(buffer_line: u32, hunks: &[DiffHunk]) -> Option<&DiffHunk> { for i in 0..hunks.len() { // Safety: Index out of bounds is handled by the check above let hunk = hunks.get(i).unwrap(); if hunk.buffer_range.contains(&(buffer_line as u32)) { return Some(hunk); - } else if hunk.status() == DiffHunkStatus::Removed - && buffer_line == hunk.buffer_range.start as usize + } else if hunk.status() == DiffHunkStatus::Removed && buffer_line == hunk.buffer_range.start { return Some(hunk); } else if hunk.buffer_range.start > buffer_line as u32 { From d9fb8c90d8e3cec81d7915a5db37c00b5d5a6cdb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 4 Oct 2022 17:27:03 -0700 Subject: [PATCH 117/140] Start work on toggling block comments for HTML --- crates/editor/src/editor.rs | 308 ++++++++++++++++------ crates/language/src/language.rs | 17 +- crates/zed/src/languages/html/config.toml | 2 + 3 files changed, 240 insertions(+), 87 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 93a47cf621..b2420c1c44 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4487,105 +4487,184 @@ impl Editor { pub fn toggle_comments(&mut self, _: &ToggleComments, cx: &mut ViewContext) { self.transact(cx, |this, cx| { let mut selections = this.selections.all::(cx); - let mut all_selection_lines_are_comments = true; - let mut edit_ranges = Vec::new(); + let mut edits = Vec::new(); + let mut selection_edit_ranges = Vec::new(); let mut last_toggled_row = None; - this.buffer.update(cx, |buffer, cx| { - // TODO: Handle selections that cross excerpts - for selection in &mut selections { - // Get the line comment prefix. 
Split its trailing whitespace into a separate string, - // as that portion won't be used for detecting if a line is a comment. - let full_comment_prefix: Arc = if let Some(prefix) = buffer - .language_at(selection.start, cx) - .and_then(|l| l.line_comment_prefix().map(|p| p.into())) - { - prefix + let snapshot = this.buffer.read(cx).read(cx); + let empty_str: Arc = "".into(); + + fn comment_prefix_range( + snapshot: &MultiBufferSnapshot, + row: u32, + comment_prefix: &str, + comment_prefix_whitespace: &str, + ) -> Range { + let start = Point::new(row, snapshot.indent_size_for_line(row).len); + + let mut line_bytes = snapshot + .bytes_in_range(start..snapshot.max_point()) + .flatten() + .copied(); + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. + if line_bytes + .by_ref() + .take(comment_prefix.len()) + .eq(comment_prefix.bytes()) + { + // Include any whitespace that matches the comment prefix. + let matching_whitespace_len = line_bytes + .zip(comment_prefix_whitespace.bytes()) + .take_while(|(a, b)| a == b) + .count() as u32; + let end = Point::new( + start.row, + start.column + comment_prefix.len() as u32 + matching_whitespace_len, + ); + start..end + } else { + start..start + } + } + + fn comment_suffix_range( + snapshot: &MultiBufferSnapshot, + row: u32, + comment_suffix: &str, + comment_suffix_has_leading_space: bool, + ) -> Range { + let end = Point::new(row, snapshot.line_len(row)); + let suffix_start_column = end.column.saturating_sub(comment_suffix.len() as u32); + + let mut line_end_bytes = snapshot + .bytes_in_range(Point::new(end.row, suffix_start_column.saturating_sub(1))..end) + .flatten() + .copied(); + + let leading_space_len = if suffix_start_column > 0 + && line_end_bytes.next() == Some(b' ') + && comment_suffix_has_leading_space + { + 1 + } else { + 0 + }; + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. + if line_end_bytes.by_ref().eq(comment_suffix.bytes()) { + let start = Point::new(end.row, suffix_start_column - leading_space_len); + start..end + } else { + end..end + } + } + + // TODO: Handle selections that cross excerpts + for selection in &mut selections { + let language = if let Some(language) = snapshot.language_at(selection.start) { + language + } else { + continue; + }; + + let mut all_selection_lines_are_comments = true; + selection_edit_ranges.clear(); + + // If multiple selections contain a given row, avoid processing that + // row more than once. + let mut start_row = selection.start.row; + if last_toggled_row == Some(start_row) { + start_row += 1; + } + let end_row = + if selection.end.row > selection.start.row && selection.end.column == 0 { + selection.end.row - 1 } else { - return; + selection.end.row }; + last_toggled_row = Some(end_row); + + // If the language has line comments, toggle those. + if let Some(full_comment_prefix) = language.line_comment_prefix() { + // Split the comment prefix's trailing whitespace into a separate string, + // as that portion won't be used for detecting if a line is a comment. 
let comment_prefix = full_comment_prefix.trim_end_matches(' '); let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; - edit_ranges.clear(); - let snapshot = buffer.snapshot(cx); - - let end_row = - if selection.end.row > selection.start.row && selection.end.column == 0 { - selection.end.row - } else { - selection.end.row + 1 - }; - - for row in selection.start.row..end_row { - // If multiple selections contain a given row, avoid processing that - // row more than once. - if last_toggled_row == Some(row) { - continue; - } else { - last_toggled_row = Some(row); - } + for row in start_row..=end_row { if snapshot.is_line_blank(row) { continue; } - let start = Point::new(row, snapshot.indent_size_for_line(row).len); - let mut line_bytes = snapshot - .bytes_in_range(start..snapshot.max_point()) - .flatten() - .copied(); - - // If this line currently begins with the line comment prefix, then record - // the range containing the prefix. - if all_selection_lines_are_comments - && line_bytes - .by_ref() - .take(comment_prefix.len()) - .eq(comment_prefix.bytes()) - { - // Include any whitespace that matches the comment prefix. - let matching_whitespace_len = line_bytes - .zip(comment_prefix_whitespace.bytes()) - .take_while(|(a, b)| a == b) - .count() - as u32; - let end = Point::new( - row, - start.column - + comment_prefix.len() as u32 - + matching_whitespace_len, - ); - edit_ranges.push(start..end); - } - // If this line does not begin with the line comment prefix, then record - // the position where the prefix should be inserted. - else { + let prefix_range = comment_prefix_range( + snapshot.deref(), + row, + comment_prefix, + comment_prefix_whitespace, + ); + if prefix_range.is_empty() { all_selection_lines_are_comments = false; - edit_ranges.push(start..start); } + selection_edit_ranges.push(prefix_range); } - if !edit_ranges.is_empty() { - if all_selection_lines_are_comments { - let empty_str: Arc = "".into(); - buffer.edit( - edit_ranges - .iter() - .cloned() - .map(|range| (range, empty_str.clone())), - None, - cx, - ); - } else { - let min_column = - edit_ranges.iter().map(|r| r.start.column).min().unwrap(); - let edits = edit_ranges.iter().map(|range| { - let position = Point::new(range.start.row, min_column); - (position..position, full_comment_prefix.clone()) - }); - buffer.edit(edits, None, cx); - } + if all_selection_lines_are_comments { + edits.extend( + selection_edit_ranges + .iter() + .cloned() + .map(|range| (range, empty_str.clone())), + ); + } else { + let min_column = selection_edit_ranges + .iter() + .map(|r| r.start.column) + .min() + .unwrap_or(0); + edits.extend(selection_edit_ranges.iter().map(|range| { + let position = Point::new(range.start.row, min_column); + (position..position, full_comment_prefix.clone()) + })); } + } else if let Some((full_comment_prefix, comment_suffix)) = + language.block_comment_delimiters() + { + let comment_prefix = full_comment_prefix.trim_end_matches(' '); + let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; + + let prefix_range = comment_prefix_range( + snapshot.deref(), + start_row, + comment_prefix, + comment_prefix_whitespace, + ); + let suffix_range = comment_suffix_range( + snapshot.deref(), + end_row, + comment_suffix.trim_start_matches(' '), + comment_suffix.starts_with(' '), + ); + + if prefix_range.is_empty() || suffix_range.is_empty() { + edits.push(( + prefix_range.start..prefix_range.start, + full_comment_prefix.clone(), + )); + edits.push((suffix_range.end..suffix_range.end, 
comment_suffix.clone())); + } else { + edits.push((prefix_range, empty_str.clone())); + edits.push((suffix_range, empty_str.clone())); + } + } else { + continue; } + } + + drop(snapshot); + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); }); let selections = this.selections.all::(cx); @@ -10777,7 +10856,7 @@ mod tests { cx.update(|cx| cx.set_global(Settings::test(cx))); let language = Arc::new(Language::new( LanguageConfig { - line_comment: Some("// ".to_string()), + line_comment: Some("// ".into()), ..Default::default() }, Some(tree_sitter_rust::language()), @@ -10855,6 +10934,67 @@ mod tests { }); } + #[gpui::test] + async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + block_comment: Some(("".into())), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + line_comment: Some("// ".into()), + ..Default::default() + }, + Some(tree_sitter_javascript::language()), + )); + + let registry = Arc::new(LanguageRegistry::test()); + registry.add(html_language.clone()); + registry.add(javascript_language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(html_language), cx); + }); + + cx.set_state( + &r#" +

+                <p>A</p>ˇ
+                <p>B</p>ˇ
+                <p>C</p>
ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + } + #[gpui::test] fn test_editing_disjoint_excerpts(cx: &mut gpui::MutableAppContext) { cx.set_global(Settings::test(cx)); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 59da0909c6..c7c5def833 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -231,7 +231,10 @@ pub struct LanguageConfig { pub decrease_indent_pattern: Option, #[serde(default)] pub autoclose_before: String, - pub line_comment: Option, + #[serde(default)] + pub line_comment: Option>, + #[serde(default)] + pub block_comment: Option<(Arc, Arc)>, } impl Default for LanguageConfig { @@ -245,6 +248,7 @@ impl Default for LanguageConfig { decrease_indent_pattern: Default::default(), autoclose_before: Default::default(), line_comment: Default::default(), + block_comment: Default::default(), } } } @@ -768,8 +772,15 @@ impl Language { self.config.name.clone() } - pub fn line_comment_prefix(&self) -> Option<&str> { - self.config.line_comment.as_deref() + pub fn line_comment_prefix(&self) -> Option<&Arc> { + self.config.line_comment.as_ref() + } + + pub fn block_comment_delimiters(&self) -> Option<(&Arc, &Arc)> { + self.config + .block_comment + .as_ref() + .map(|(start, end)| (start, end)) } pub async fn disk_based_diagnostic_sources(&self) -> &[String] { diff --git a/crates/zed/src/languages/html/config.toml b/crates/zed/src/languages/html/config.toml index 80b33b1243..3e618da25e 100644 --- a/crates/zed/src/languages/html/config.toml +++ b/crates/zed/src/languages/html/config.toml @@ -8,3 +8,5 @@ brackets = [ { start = "\"", end = "\"", close = true, newline = false }, { start = "!--", end = " --", close = true, newline = false }, ] + +block_comment = [""] \ No newline at end of file From aa8680640863d6f4cfb4c82588b92e684af1af03 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Oct 2022 12:25:32 -0700 Subject: [PATCH 118/140] Finish generalizing ToggleComments to support block comments --- crates/editor/src/editor.rs | 102 ++++++++++++++++++++++++++++-- crates/language/src/buffer.rs | 1 + crates/language/src/syntax_map.rs | 47 ++++++++------ 3 files changed, 126 insertions(+), 24 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b2420c1c44..f7c9f81c0c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4492,6 +4492,7 @@ impl Editor { let mut last_toggled_row = None; let snapshot = this.buffer.read(cx).read(cx); let empty_str: Arc = "".into(); + let mut suffixes_inserted = Vec::new(); fn comment_prefix_range( snapshot: &MultiBufferSnapshot, @@ -4569,7 +4570,6 @@ impl Editor { continue; }; - let mut all_selection_lines_are_comments = true; selection_edit_ranges.clear(); // If multiple selections contain a given row, avoid processing that @@ -4586,12 +4586,17 @@ impl Editor { }; last_toggled_row = Some(end_row); + if start_row > end_row { + continue; + } + // If the language has line comments, toggle those. if let Some(full_comment_prefix) = language.line_comment_prefix() { // Split the comment prefix's trailing whitespace into a separate string, // as that portion won't be used for detecting if a line is a comment. 
let comment_prefix = full_comment_prefix.trim_end_matches(' '); let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; + let mut all_selection_lines_are_comments = true; for row in start_row..=end_row { if snapshot.is_line_blank(row) { @@ -4633,7 +4638,6 @@ impl Editor { { let comment_prefix = full_comment_prefix.trim_end_matches(' '); let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; - let prefix_range = comment_prefix_range( snapshot.deref(), start_row, @@ -4653,6 +4657,7 @@ impl Editor { full_comment_prefix.clone(), )); edits.push((suffix_range.end..suffix_range.end, comment_suffix.clone())); + suffixes_inserted.push((end_row, comment_suffix.len())); } else { edits.push((prefix_range, empty_str.clone())); edits.push((suffix_range, empty_str.clone())); @@ -4667,7 +4672,33 @@ impl Editor { buffer.edit(edits, None, cx); }); - let selections = this.selections.all::(cx); + // Adjust selections so that they end before any comment suffixes that + // were inserted. + let mut suffixes_inserted = suffixes_inserted.into_iter().peekable(); + let mut selections = this.selections.all::(cx); + let snapshot = this.buffer.read(cx).read(cx); + for selection in &mut selections { + while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() { + match row.cmp(&selection.end.row) { + Ordering::Less => { + suffixes_inserted.next(); + continue; + } + Ordering::Greater => break, + Ordering::Equal => { + if selection.end.column == snapshot.line_len(row) { + if selection.is_empty() { + selection.start.column -= suffix_len as u32; + } + selection.end.column -= suffix_len as u32; + } + break; + } + } + } + } + + drop(snapshot); this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections)); }); } @@ -10975,6 +11006,7 @@ mod tests { buffer.set_language(Some(html_language), cx); }); + // Toggle comments for empty selections cx.set_state( &r#"

                <p>A</p>ˇ
                <p>B</p>ˇ
                <p>C</p>
ˇ +

C

ˇ + "# + .unindent(), + ); + + // Toggle comments for mixture of empty and non-empty selections, where + // multiple selections occupy a given line. + cx.set_state( + &r#" +

+

ˇ»B

ˇ +

+

ˇ»D

ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" +

+

ˇ»B

ˇ +

+

ˇ»D

ˇ + "# + .unindent(), + ); + + // Toggle comments when different languages are active for different + // selections. + cx.set_state( + &r#" + ˇ + "# + .unindent(), + ); + cx.foreground().run_until_parked(); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + // ˇvar x = new Y(); + + "# + .unindent(), + ); } #[gpui::test] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 4ff1b002b0..b53a9e5573 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1840,6 +1840,7 @@ impl BufferSnapshot { let offset = position.to_offset(self); self.syntax .layers_for_range(offset..offset, &self.text) + .filter(|l| l.node.end_byte() > offset) .last() .map(|info| info.language) .or(self.language.as_ref()) diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 8983406690..64145e535b 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -525,19 +525,19 @@ impl SyntaxSnapshot { } #[cfg(test)] - pub fn layers(&self, buffer: &BufferSnapshot) -> Vec { - self.layers_for_range(0..buffer.len(), buffer) + pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec { + self.layers_for_range(0..buffer.len(), buffer).collect() } pub fn layers_for_range<'a, T: ToOffset>( - &self, + &'a self, range: Range, - buffer: &BufferSnapshot, - ) -> Vec { + buffer: &'a BufferSnapshot, + ) -> impl 'a + Iterator { let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - let mut cursor = self.layers.filter::<_, ()>(|summary| { + let mut cursor = self.layers.filter::<_, ()>(move |summary| { if summary.max_depth > summary.min_depth { true } else { @@ -547,21 +547,26 @@ impl SyntaxSnapshot { } }); - let mut result = Vec::new(); + // let mut result = Vec::new(); cursor.next(buffer); - while let Some(layer) = cursor.item() { - result.push(SyntaxLayerInfo { - language: &layer.language, - depth: layer.depth, - node: layer.tree.root_node_with_offset( - layer.range.start.to_offset(buffer), - layer.range.start.to_point(buffer).to_ts_point(), - ), - }); - cursor.next(buffer) - } + std::iter::from_fn(move || { + if let Some(layer) = cursor.item() { + let info = SyntaxLayerInfo { + language: &layer.language, + depth: layer.depth, + node: layer.tree.root_node_with_offset( + layer.range.start.to_offset(buffer), + layer.range.start.to_point(buffer).to_ts_point(), + ), + }; + cursor.next(buffer); + Some(info) + } else { + None + } + }) - result + // result } } @@ -1848,7 +1853,9 @@ mod tests { range: Range, expected_layers: &[&str], ) { - let layers = syntax_map.layers_for_range(range, &buffer); + let layers = syntax_map + .layers_for_range(range, &buffer) + .collect::>(); assert_eq!( layers.len(), expected_layers.len(), From 3f4be5521c19ea51ff1f76fceb55af43fb58af7d Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 4 Oct 2022 20:42:01 -0400 Subject: [PATCH 119/140] Load diff base from correct relative path --- crates/project/src/project.rs | 7 ++++++- crates/project/src/worktree.rs | 16 +++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dc783f1818..54fdb269be 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4673,13 +4673,18 @@ impl Project { None => return, }; + let relative_repo = match path.strip_prefix(repo.content_path) { + Ok(relative_repo) => 
relative_repo.to_owned(), + Err(_) => return, + }; + let shared_remote_id = self.shared_remote_id(); let client = self.client.clone(); cx.spawn(|_, mut cx| async move { let diff_base = cx .background() - .spawn(async move { repo.repo.lock().load_index(&path) }) + .spawn(async move { repo.repo.lock().load_index(&relative_repo) }) .await; let buffer_id = buffer.update(&mut cx, |buffer, cx| { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6880ec4ff1..5348f9785f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -667,13 +667,15 @@ impl LocalWorktree { cx.spawn(|this, mut cx| async move { let text = fs.load(&abs_path).await?; - let diff_base = if let Some(repo) = snapshot.repo_for(&abs_path) { - cx.background() - .spawn({ - let path = path.clone(); - async move { repo.repo.lock().load_index(&path) } - }) - .await + let diff_base = if let Some(repo) = snapshot.repo_for(&path) { + if let Ok(repo_relative) = path.strip_prefix(repo.content_path) { + let repo_relative = repo_relative.to_owned(); + cx.background() + .spawn(async move { repo.repo.lock().load_index(&repo_relative) }) + .await + } else { + None + } } else { None }; From 8b86781ad13d3e5eee604fb93c15cf943bee8bf5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Oct 2022 14:44:34 -0700 Subject: [PATCH 120/140] Remove last usages of MultiBufferSnapshot::language --- crates/editor/src/editor.rs | 6 ++++-- crates/editor/src/multi_buffer.rs | 7 ------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f7c9f81c0c..4e18d04889 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2021,7 +2021,7 @@ impl Editor { let end = selection.end; let mut insert_extra_newline = false; - if let Some(language) = buffer.language() { + if let Some(language) = buffer.language_at(start) { let leading_whitespace_len = buffer .reversed_chars_at(start) .take_while(|c| c.is_whitespace() && *c != '\n') @@ -2927,7 +2927,9 @@ impl Editor { { let indent_size = buffer.indent_size_for_line(line_buffer_range.start.row); - let language_name = buffer.language().map(|language| language.name()); + let language_name = buffer + .language_at(line_buffer_range.start) + .map(|language| language.name()); let indent_len = match indent_size.kind { IndentKind::Space => { cx.global::().tab_size(language_name.as_deref()) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 3b43f99ca0..cf9f29d73e 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2501,13 +2501,6 @@ impl MultiBufferSnapshot { self.trailing_excerpt_update_count } - pub fn language(&self) -> Option<&Arc> { - self.excerpts - .iter() - .next() - .and_then(|excerpt| excerpt.buffer.language()) - } - pub fn language_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc> { self.point_to_buffer_offset(point) .and_then(|(buffer, offset)| buffer.language_at(offset)) From 7fb5fe036a70e0018739ec1a251f8c1602c74724 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Oct 2022 17:07:35 -0700 Subject: [PATCH 121/140] Derive indent size from the language at the cursor when auto-indenting --- crates/editor/src/multi_buffer.rs | 16 ++++++++-- crates/language/src/buffer.rs | 52 ++++++++++++++++++++++--------- 2 files changed, 51 insertions(+), 17 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index cf9f29d73e..71cbefb78f 100644 --- 
a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1967,8 +1967,10 @@ impl MultiBufferSnapshot { let mut rows_for_excerpt = Vec::new(); let mut cursor = self.excerpts.cursor::(); - let mut rows = rows.into_iter().peekable(); + let mut prev_row = u32::MAX; + let mut prev_language_indent_size = IndentSize::default(); + while let Some(row) = rows.next() { cursor.seek(&Point::new(row, 0), Bias::Right, &()); let excerpt = match cursor.item() { @@ -1976,7 +1978,17 @@ impl MultiBufferSnapshot { _ => continue, }; - let single_indent_size = excerpt.buffer.single_indent_size(cx); + // Retrieve the language and indent size once for each disjoint region being indented. + let single_indent_size = if row.saturating_sub(1) == prev_row { + prev_language_indent_size + } else { + excerpt + .buffer + .language_indent_size_at(Point::new(row, 0), cx) + }; + prev_language_indent_size = single_indent_size; + prev_row = row; + let start_buffer_row = excerpt.range.context.start.to_point(&excerpt.buffer).row; let start_multibuffer_row = cursor.start().row; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b53a9e5573..9a1c292319 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -84,14 +84,15 @@ pub struct BufferSnapshot { parse_count: usize, } -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] pub struct IndentSize { pub len: u32, pub kind: IndentKind, } -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] pub enum IndentKind { + #[default] Space, Tab, } @@ -236,7 +237,6 @@ pub enum AutoindentMode { struct AutoindentRequest { before_edit: BufferSnapshot, entries: Vec, - indent_size: IndentSize, is_block_mode: bool, } @@ -249,6 +249,7 @@ struct AutoindentRequestEntry { /// only be adjusted if the suggested indentation level has *changed* /// since the edit was made. first_line_is_new: bool, + indent_size: IndentSize, original_indent_column: Option, } @@ -794,10 +795,13 @@ impl Buffer { // buffer before this batch of edits. let mut row_ranges = Vec::new(); let mut old_to_new_rows = BTreeMap::new(); + let mut language_indent_sizes_by_new_row = Vec::new(); for entry in &request.entries { let position = entry.range.start; let new_row = position.to_point(&snapshot).row; let new_end_row = entry.range.end.to_point(&snapshot).row + 1; + language_indent_sizes_by_new_row.push((new_row, entry.indent_size)); + if !entry.first_line_is_new { let old_row = position.to_point(&request.before_edit).row; old_to_new_rows.insert(old_row, new_row); @@ -811,6 +815,8 @@ impl Buffer { let mut old_suggestions = BTreeMap::::default(); let old_edited_ranges = contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); + let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable(); + let mut language_indent_size = IndentSize::default(); for old_edited_range in old_edited_ranges { let suggestions = request .before_edit @@ -819,6 +825,17 @@ impl Buffer { .flatten(); for (old_row, suggestion) in old_edited_range.zip(suggestions) { if let Some(suggestion) = suggestion { + let new_row = *old_to_new_rows.get(&old_row).unwrap(); + + // Find the indent size based on the language for this row. 
+ while let Some((row, size)) = language_indent_sizes.peek() { + if *row > new_row { + break; + } + language_indent_size = *size; + language_indent_sizes.next(); + } + let suggested_indent = old_to_new_rows .get(&suggestion.basis_row) .and_then(|from_row| old_suggestions.get(from_row).copied()) @@ -827,9 +844,8 @@ impl Buffer { .before_edit .indent_size_for_line(suggestion.basis_row) }) - .with_delta(suggestion.delta, request.indent_size); - old_suggestions - .insert(*old_to_new_rows.get(&old_row).unwrap(), suggested_indent); + .with_delta(suggestion.delta, language_indent_size); + old_suggestions.insert(new_row, suggested_indent); } } yield_now().await; @@ -850,6 +866,8 @@ impl Buffer { // Compute new suggestions for each line, but only include them in the result // if they differ from the old suggestion for that line. + let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable(); + let mut language_indent_size = IndentSize::default(); for new_edited_row_range in new_edited_row_ranges { let suggestions = snapshot .suggest_autoindents(new_edited_row_range.clone()) @@ -857,13 +875,22 @@ impl Buffer { .flatten(); for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { if let Some(suggestion) = suggestion { + // Find the indent size based on the language for this row. + while let Some((row, size)) = language_indent_sizes.peek() { + if *row > new_row { + break; + } + language_indent_size = *size; + language_indent_sizes.next(); + } + let suggested_indent = indent_sizes .get(&suggestion.basis_row) .copied() .unwrap_or_else(|| { snapshot.indent_size_for_line(suggestion.basis_row) }) - .with_delta(suggestion.delta, request.indent_size); + .with_delta(suggestion.delta, language_indent_size); if old_suggestions .get(&new_row) .map_or(true, |old_indentation| { @@ -1194,7 +1221,6 @@ impl Buffer { let edit_id = edit_operation.local_timestamp(); if let Some((before_edit, mode)) = autoindent_request { - let indent_size = before_edit.single_indent_size(cx); let (start_columns, is_block_mode) = match mode { AutoindentMode::Block { original_indent_columns: start_columns, @@ -1243,6 +1269,7 @@ impl Buffer { AutoindentRequestEntry { first_line_is_new, original_indent_column: start_column, + indent_size: before_edit.language_indent_size_at(range.start, cx), range: self.anchor_before(new_start + range_of_insertion_to_indent.start) ..self.anchor_after(new_start + range_of_insertion_to_indent.end), } @@ -1252,7 +1279,6 @@ impl Buffer { self.autoindent_requests.push(Arc::new(AutoindentRequest { before_edit, entries, - indent_size, is_block_mode, })); } @@ -1570,8 +1596,8 @@ impl BufferSnapshot { indent_size_for_line(self, row) } - pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize { - let language_name = self.language().map(|language| language.name()); + pub fn language_indent_size_at(&self, position: T, cx: &AppContext) -> IndentSize { + let language_name = self.language_at(position).map(|language| language.name()); let settings = cx.global::(); if settings.hard_tabs(language_name.as_deref()) { IndentSize::tab() @@ -1832,10 +1858,6 @@ impl BufferSnapshot { } } - pub fn language(&self) -> Option<&Arc> { - self.language.as_ref() - } - pub fn language_at(&self, position: D) -> Option<&Arc> { let offset = position.to_offset(self); self.syntax From b7e115a6a1baacd08a31c6d52586e0a7de1d64aa Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Oct 2022 17:58:10 -0700 Subject: [PATCH 122/140] Add a test for multi-language auto-indent --- Cargo.lock | 2 + 
crates/language/Cargo.toml | 2 + crates/language/src/tests.rs | 116 ++++++++++++++++++++++++++++++++++- 3 files changed, 119 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 79eae80258..f7a354ed28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2861,6 +2861,8 @@ dependencies = [ "text", "theme", "tree-sitter", + "tree-sitter-html", + "tree-sitter-javascript", "tree-sitter-json 0.19.0", "tree-sitter-python", "tree-sitter-rust", diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 6e9f368e77..6d80eee779 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -63,6 +63,8 @@ util = { path = "../util", features = ["test-support"] } ctor = "0.1" env_logger = "0.9" rand = "0.8.3" +tree-sitter-html = "*" +tree-sitter-javascript = "*" tree-sitter-json = "*" tree-sitter-rust = "*" tree-sitter-python = "*" diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 8f56f3287e..3cfddce71f 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -14,7 +14,7 @@ use std::{ }; use text::network::Network; use unindent::Unindent as _; -use util::post_inc; +use util::{post_inc, test::marked_text_ranges}; #[cfg(test)] #[ctor::ctor] @@ -1035,6 +1035,120 @@ fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) { }); } +#[gpui::test] +fn test_autoindent_with_injected_languages(cx: &mut MutableAppContext) { + cx.set_global({ + let mut settings = Settings::test(cx); + settings.language_overrides.extend([ + ( + "HTML".into(), + settings::EditorSettings { + tab_size: Some(2.try_into().unwrap()), + ..Default::default() + }, + ), + ( + "JavaScript".into(), + settings::EditorSettings { + tab_size: Some(8.try_into().unwrap()), + ..Default::default() + }, + ), + ]); + settings + }); + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_indents_query( + " + (element + (start_tag) @start + (end_tag)? @end) @indent + ", + ) + .unwrap() + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new( + Language::new( + LanguageConfig { + name: "JavaScript".into(), + ..Default::default() + }, + Some(tree_sitter_javascript::language()), + ) + .with_indents_query( + r#" + (object "}" @end) @indent + "#, + ) + .unwrap(), + ); + + let language_registry = Arc::new(LanguageRegistry::test()); + language_registry.add(html_language.clone()); + language_registry.add(javascript_language.clone()); + + cx.add_model(|cx| { + let (text, ranges) = marked_text_ranges( + &" +
ˇ +
+ + ˇ + + " + .unindent(), + false, + ); + + let mut buffer = Buffer::new(0, text, cx); + buffer.set_language_registry(language_registry); + buffer.set_language(Some(html_language), cx); + buffer.edit( + ranges.into_iter().map(|range| (range, "\na")), + Some(AutoindentMode::EachLine), + cx, + ); + assert_eq!( + buffer.text(), + " +
+ a +
+ + + a + + " + .unindent() + ); + buffer + }); +} + #[gpui::test] fn test_serialization(cx: &mut gpui::MutableAppContext) { let mut now = Instant::now(); From edf4c3ec00b91029334eff26b02be5927e0eab13 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 5 Oct 2022 21:22:53 -0400 Subject: [PATCH 123/140] Add Discord webhook for published releases (#1684) --- .github/workflows/discord_webhook.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/discord_webhook.yml diff --git a/.github/workflows/discord_webhook.yml b/.github/workflows/discord_webhook.yml new file mode 100644 index 0000000000..b71d451f5b --- /dev/null +++ b/.github/workflows/discord_webhook.yml @@ -0,0 +1,22 @@ +on: + release: + types: [published] + +jobs: + message: + runs-on: ubuntu-latest + steps: + - name: Discord Webhook Action + uses: tsickert/discord-webhook@v5.3.0 + with: + webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} + content: | + 📣 Zed ${{ github.event.release.name }} was just released! + + Restart your Zed or head to https://zed.dev/releases to grab it. + + ```md + ### Changelog + + ${{ github.event.release.body }} + ``` \ No newline at end of file From 771215d254c7461fa2cc5d7fc0b6b6e674106814 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 5 Oct 2022 16:28:01 -0400 Subject: [PATCH 124/140] Reload git index on file events to catch new contents --- crates/editor/src/element.rs | 4 +--- crates/git/src/repository.rs | 16 +++++++++++++--- crates/project/src/project.rs | 2 +- crates/project/src/worktree.rs | 3 ++- 4 files changed, 17 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 2b93255972..acf2e5887c 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -630,9 +630,7 @@ impl EditorElement { let new_hunk = get_hunk(diff_layout.buffer_row, &layout.diff_hunks); let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) { - (Some(old_hunk), Some(new_hunk)) if new_hunk == old_hunk => { - (false, false) - } + (Some(old_hunk), Some(new_hunk)) if new_hunk == old_hunk => (false, false), (a, b) => (a.is_some(), b.is_some()), }; diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 67e93416ae..ce881d2b0f 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -10,12 +10,20 @@ pub use git2::Repository as LibGitRepository; #[async_trait::async_trait] pub trait GitRepository: Send { - fn load_index(&self, relative_file_path: &Path) -> Option; + fn reload_index(&self); + + fn load_index_text(&self, relative_file_path: &Path) -> Option; } #[async_trait::async_trait] impl GitRepository for LibGitRepository { - fn load_index(&self, relative_file_path: &Path) -> Option { + fn reload_index(&self) { + if let Ok(mut index) = self.index() { + _ = index.read(false); + } + } + + fn load_index_text(&self, relative_file_path: &Path) -> Option { fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; @@ -54,7 +62,9 @@ impl FakeGitRepository { #[async_trait::async_trait] impl GitRepository for FakeGitRepository { - fn load_index(&self, path: &Path) -> Option { + fn reload_index(&self) {} + + fn load_index_text(&self, path: &Path) -> Option { let state = self.state.lock(); state.index_contents.get(path).cloned() } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 54fdb269be..99d74f67db 100644 --- a/crates/project/src/project.rs +++ 
b/crates/project/src/project.rs @@ -4684,7 +4684,7 @@ impl Project { cx.spawn(|_, mut cx| async move { let diff_base = cx .background() - .spawn(async move { repo.repo.lock().load_index(&relative_repo) }) + .spawn(async move { repo.repo.lock().load_index_text(&relative_repo) }) .await; let buffer_id = buffer.update(&mut cx, |buffer, cx| { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5348f9785f..968c2d4bc7 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -671,7 +671,7 @@ impl LocalWorktree { if let Ok(repo_relative) = path.strip_prefix(repo.content_path) { let repo_relative = repo_relative.to_owned(); cx.background() - .spawn(async move { repo.repo.lock().load_index(&repo_relative) }) + .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) }) .await } else { None @@ -2505,6 +2505,7 @@ impl BackgroundScanner { let scan_id = snapshot.scan_id; if let Some(repo) = snapshot.in_dot_git(&path) { + repo.repo.lock().reload_index(); repo.scan_id = scan_id; } From fe7a39ba5c36a3cf6e4bf5f207dd4b1307df15b9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 11:54:28 -0700 Subject: [PATCH 125/140] Apply buffer diff edits as a single batch --- crates/language/src/buffer.rs | 55 +++++++++++++++-------------------- 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 294ecd5cd2..db9aa029f2 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -300,10 +300,8 @@ pub struct Chunk<'a> { pub struct Diff { base_version: clock::Global, - new_text: Arc, - changes: Vec<(ChangeTag, usize)>, line_ending: LineEnding, - start_offset: usize, + edits: Vec<(Range, Arc)>, } #[derive(Clone, Copy)] @@ -1084,16 +1082,30 @@ impl Buffer { let old_text = old_text.to_string(); let line_ending = LineEnding::detect(&new_text); LineEnding::normalize(&mut new_text); - let changes = TextDiff::from_chars(old_text.as_str(), new_text.as_str()) - .iter_all_changes() - .map(|c| (c.tag(), c.value().len())) - .collect::>(); + let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str()); + let mut edits = Vec::new(); + let mut offset = 0; + let empty: Arc = "".into(); + for change in diff.iter_all_changes() { + let value = change.value(); + let end_offset = offset + value.len(); + match change.tag() { + ChangeTag::Equal => { + offset = end_offset; + } + ChangeTag::Delete => { + edits.push((offset..end_offset, empty.clone())); + offset = end_offset; + } + ChangeTag::Insert => { + edits.push((offset..offset, value.into())); + } + } + } Diff { base_version, - new_text: new_text.into(), - changes, line_ending, - start_offset: 0, + edits, } }) } @@ -1103,28 +1115,7 @@ impl Buffer { self.finalize_last_transaction(); self.start_transaction(); self.text.set_line_ending(diff.line_ending); - let mut offset = diff.start_offset; - for (tag, len) in diff.changes { - let range = offset..(offset + len); - match tag { - ChangeTag::Equal => offset += len, - ChangeTag::Delete => { - self.edit([(range, "")], None, cx); - } - ChangeTag::Insert => { - self.edit( - [( - offset..offset, - &diff.new_text[range.start - diff.start_offset - ..range.end - diff.start_offset], - )], - None, - cx, - ); - offset += len; - } - } - } + self.edit(diff.edits, None, cx); if self.end_transaction(cx).is_some() { self.finalize_last_transaction() } else { From b6525e916461d6d1a0c45f2f4282f342de97dbd4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 
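
The buffer.rs change in [PATCH 125/140] above replaces the old streaming application of diff changes with a precomputed batch of (range, replacement) edits expressed against the original text. Below is a rough standalone sketch of that conversion, assuming the TextDiff/ChangeTag types come from the `similar` crate (which is what the buffer code appears to use); the function name, return type, and `main` are illustrative only and not part of the patch.

    use similar::{ChangeTag, TextDiff};
    use std::ops::Range;

    // Convert a character-level diff of `old` -> `new` into a batch of edits,
    // each expressed as a range in `old` plus its replacement text.
    fn char_diff_edits(old: &str, new: &str) -> Vec<(Range<usize>, String)> {
        let diff = TextDiff::from_chars(old, new);
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut offset = 0; // current position in `old`
        for change in diff.iter_all_changes() {
            let len = change.value().len();
            match change.tag() {
                // Unchanged text only advances the cursor into `old`.
                ChangeTag::Equal => offset += len,
                // Deleted text becomes a replacement with the empty string.
                ChangeTag::Delete => {
                    edits.push((offset..offset + len, String::new()));
                    offset += len;
                }
                // Inserted text is anchored at the current `old` offset, which
                // does not advance because the inserted text is not in `old`.
                ChangeTag::Insert => {
                    edits.push((offset..offset, change.value().to_string()));
                }
            }
        }
        edits
    }

    fn main() {
        for (range, text) in char_diff_edits("abcdef", "abXdeZ") {
            println!("{:?} -> {:?}", range, text);
        }
    }

Because every range is stated against the pre-edit text, the whole batch can be handed to a single edit call, rather than applying one edit per hunk while tracking how earlier hunks shift later offsets, which is what the removed apply_diff code was doing.
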
13:32:34 -0700 Subject: [PATCH 126/140] Extract editor tests to their own file --- crates/editor/src/editor.rs | 4891 +---------------------------- crates/editor/src/editor_tests.rs | 4881 ++++++++++++++++++++++++++++ 2 files changed, 4883 insertions(+), 4889 deletions(-) create mode 100644 crates/editor/src/editor_tests.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4e18d04889..1abe65d482 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9,6 +9,8 @@ pub mod movement; mod multi_buffer; pub mod selections_collection; +#[cfg(test)] +mod editor_tests; #[cfg(any(test, feature = "test-support"))] pub mod test; @@ -6805,4895 +6807,6 @@ pub fn styled_runs_for_code_label<'a>( }) } -#[cfg(test)] -mod tests { - use crate::test::{ - assert_text_with_selections, build_editor, select_ranges, EditorLspTestContext, - EditorTestContext, - }; - - use super::*; - use futures::StreamExt; - use gpui::{ - geometry::rect::RectF, - platform::{WindowBounds, WindowOptions}, - }; - use indoc::indoc; - use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry}; - use project::FakeFs; - use settings::EditorSettings; - use std::{cell::RefCell, rc::Rc, time::Instant}; - use text::Point; - use unindent::Unindent; - use util::{ - assert_set_eq, - test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker}, - }; - use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane}; - - #[gpui::test] - fn test_edit_events(cx: &mut MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); - - let events = Rc::new(RefCell::new(Vec::new())); - let (_, editor1) = cx.add_window(Default::default(), { - let events = events.clone(); - |cx| { - cx.subscribe(&cx.handle(), move |_, _, event, _| { - if matches!( - event, - Event::Edited | Event::BufferEdited | Event::DirtyChanged - ) { - events.borrow_mut().push(("editor1", *event)); - } - }) - .detach(); - Editor::for_buffer(buffer.clone(), None, cx) - } - }); - let (_, editor2) = cx.add_window(Default::default(), { - let events = events.clone(); - |cx| { - cx.subscribe(&cx.handle(), move |_, _, event, _| { - if matches!( - event, - Event::Edited | Event::BufferEdited | Event::DirtyChanged - ) { - events.borrow_mut().push(("editor2", *event)); - } - }) - .detach(); - Editor::for_buffer(buffer.clone(), None, cx) - } - }); - assert_eq!(mem::take(&mut *events.borrow_mut()), []); - - // Mutating editor 1 will emit an `Edited` event only for that editor. - editor1.update(cx, |editor, cx| editor.insert("X", cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor1", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ("editor1", Event::DirtyChanged), - ("editor2", Event::DirtyChanged) - ] - ); - - // Mutating editor 2 will emit an `Edited` event only for that editor. - editor2.update(cx, |editor, cx| editor.delete(&Delete, cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor2", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ] - ); - - // Undoing on editor 1 will emit an `Edited` event only for that editor. 
- editor1.update(cx, |editor, cx| editor.undo(&Undo, cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor1", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ("editor1", Event::DirtyChanged), - ("editor2", Event::DirtyChanged), - ] - ); - - // Redoing on editor 1 will emit an `Edited` event only for that editor. - editor1.update(cx, |editor, cx| editor.redo(&Redo, cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor1", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ("editor1", Event::DirtyChanged), - ("editor2", Event::DirtyChanged), - ] - ); - - // Undoing on editor 2 will emit an `Edited` event only for that editor. - editor2.update(cx, |editor, cx| editor.undo(&Undo, cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor2", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ("editor1", Event::DirtyChanged), - ("editor2", Event::DirtyChanged), - ] - ); - - // Redoing on editor 2 will emit an `Edited` event only for that editor. - editor2.update(cx, |editor, cx| editor.redo(&Redo, cx)); - assert_eq!( - mem::take(&mut *events.borrow_mut()), - [ - ("editor2", Event::Edited), - ("editor1", Event::BufferEdited), - ("editor2", Event::BufferEdited), - ("editor1", Event::DirtyChanged), - ("editor2", Event::DirtyChanged), - ] - ); - - // No event is emitted when the mutation is a no-op. - editor2.update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| s.select_ranges([0..0])); - - editor.backspace(&Backspace, cx); - }); - assert_eq!(mem::take(&mut *events.borrow_mut()), []); - } - - #[gpui::test] - fn test_undo_redo_with_selection_restoration(cx: &mut MutableAppContext) { - cx.set_global(Settings::test(cx)); - let mut now = Instant::now(); - let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); - let group_interval = buffer.read(cx).transaction_group_interval(); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - editor.update(cx, |editor, cx| { - editor.start_transaction_at(now, cx); - editor.change_selections(None, cx, |s| s.select_ranges([2..4])); - - editor.insert("cd", cx); - editor.end_transaction_at(now, cx); - assert_eq!(editor.text(cx), "12cd56"); - assert_eq!(editor.selections.ranges(cx), vec![4..4]); - - editor.start_transaction_at(now, cx); - editor.change_selections(None, cx, |s| s.select_ranges([4..5])); - editor.insert("e", cx); - editor.end_transaction_at(now, cx); - assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![5..5]); - - now += group_interval + Duration::from_millis(1); - editor.change_selections(None, cx, |s| s.select_ranges([2..2])); - - // Simulate an edit in another editor - buffer.update(cx, |buffer, cx| { - buffer.start_transaction_at(now, cx); - buffer.edit([(0..1, "a")], None, cx); - buffer.edit([(1..1, "b")], None, cx); - buffer.end_transaction_at(now, cx); - }); - - assert_eq!(editor.text(cx), "ab2cde6"); - assert_eq!(editor.selections.ranges(cx), vec![3..3]); - - // Last transaction happened past the group interval in a different editor. - // Undo it individually and don't restore selections. - editor.undo(&Undo, cx); - assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![2..2]); - - // First two transactions happened within the group interval in this editor. 
- // Undo them together and restore selections. - editor.undo(&Undo, cx); - editor.undo(&Undo, cx); // Undo stack is empty here, so this is a no-op. - assert_eq!(editor.text(cx), "123456"); - assert_eq!(editor.selections.ranges(cx), vec![0..0]); - - // Redo the first two transactions together. - editor.redo(&Redo, cx); - assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![5..5]); - - // Redo the last transaction on its own. - editor.redo(&Redo, cx); - assert_eq!(editor.text(cx), "ab2cde6"); - assert_eq!(editor.selections.ranges(cx), vec![6..6]); - - // Test empty transactions. - editor.start_transaction_at(now, cx); - editor.end_transaction_at(now, cx); - editor.undo(&Undo, cx); - assert_eq!(editor.text(cx), "12cde6"); - }); - } - - #[gpui::test] - fn test_ime_composition(cx: &mut MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = cx.add_model(|cx| { - let mut buffer = language::Buffer::new(0, "abcde", cx); - // Ensure automatic grouping doesn't occur. - buffer.set_group_interval(Duration::ZERO); - buffer - }); - - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(buffer.clone(), cx); - - // Start a new IME composition. - editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); - editor.replace_and_mark_text_in_range(Some(0..1), "á", None, cx); - editor.replace_and_mark_text_in_range(Some(0..1), "ä", None, cx); - assert_eq!(editor.text(cx), "äbcde"); - assert_eq!( - editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) - ); - - // Finalize IME composition. - editor.replace_text_in_range(None, "ā", cx); - assert_eq!(editor.text(cx), "ābcde"); - assert_eq!(editor.marked_text_ranges(cx), None); - - // IME composition edits are grouped and are undone/redone at once. - editor.undo(&Default::default(), cx); - assert_eq!(editor.text(cx), "abcde"); - assert_eq!(editor.marked_text_ranges(cx), None); - editor.redo(&Default::default(), cx); - assert_eq!(editor.text(cx), "ābcde"); - assert_eq!(editor.marked_text_ranges(cx), None); - - // Start a new IME composition. - editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); - assert_eq!( - editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) - ); - - // Undoing during an IME composition cancels it. - editor.undo(&Default::default(), cx); - assert_eq!(editor.text(cx), "ābcde"); - assert_eq!(editor.marked_text_ranges(cx), None); - - // Start a new IME composition with an invalid marked range, ensuring it gets clipped. - editor.replace_and_mark_text_in_range(Some(4..999), "è", None, cx); - assert_eq!(editor.text(cx), "ābcdè"); - assert_eq!( - editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(4)..OffsetUtf16(5)]) - ); - - // Finalize IME composition with an invalid replacement range, ensuring it gets clipped. - editor.replace_text_in_range(Some(4..999), "ę", cx); - assert_eq!(editor.text(cx), "ābcdę"); - assert_eq!(editor.marked_text_ranges(cx), None); - - // Start a new IME composition with multiple cursors. 
- editor.change_selections(None, cx, |s| { - s.select_ranges([ - OffsetUtf16(1)..OffsetUtf16(1), - OffsetUtf16(3)..OffsetUtf16(3), - OffsetUtf16(5)..OffsetUtf16(5), - ]) - }); - editor.replace_and_mark_text_in_range(Some(4..5), "XYZ", None, cx); - assert_eq!(editor.text(cx), "XYZbXYZdXYZ"); - assert_eq!( - editor.marked_text_ranges(cx), - Some(vec![ - OffsetUtf16(0)..OffsetUtf16(3), - OffsetUtf16(4)..OffsetUtf16(7), - OffsetUtf16(8)..OffsetUtf16(11) - ]) - ); - - // Ensure the newly-marked range gets treated as relative to the previously-marked ranges. - editor.replace_and_mark_text_in_range(Some(1..2), "1", None, cx); - assert_eq!(editor.text(cx), "X1ZbX1ZdX1Z"); - assert_eq!( - editor.marked_text_ranges(cx), - Some(vec![ - OffsetUtf16(1)..OffsetUtf16(2), - OffsetUtf16(5)..OffsetUtf16(6), - OffsetUtf16(9)..OffsetUtf16(10) - ]) - ); - - // Finalize IME composition with multiple cursors. - editor.replace_text_in_range(Some(9..10), "2", cx); - assert_eq!(editor.text(cx), "X2ZbX2ZdX2Z"); - assert_eq!(editor.marked_text_ranges(cx), None); - - editor - }); - } - - #[gpui::test] - fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - - let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\nddddddd\n", cx); - let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - editor.update(cx, |view, cx| { - view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx); - }); - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] - ); - - editor.update(cx, |view, cx| { - view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); - }); - - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] - ); - - editor.update(cx, |view, cx| { - view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); - }); - - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] - ); - - editor.update(cx, |view, cx| { - view.end_selection(cx); - view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); - }); - - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] - ); - - editor.update(cx, |view, cx| { - view.begin_selection(DisplayPoint::new(3, 3), true, 1, cx); - view.update_selection(DisplayPoint::new(0, 0), 0, Vector2F::zero(), cx); - }); - - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [ - DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1), - DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0) - ] - ); - - editor.update(cx, |view, cx| { - view.end_selection(cx); - }); - - assert_eq!( - editor.update(cx, |view, cx| view.selections.display_ranges(cx)), - [DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0)] - ); - } - - #[gpui::test] - fn test_canceling_pending_selection(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - - view.update(cx, |view, cx| { - view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] - ); - }); - - view.update(cx, |view, cx| { - 
view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] - ); - }); - - view.update(cx, |view, cx| { - view.cancel(&Cancel, cx); - view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] - ); - }); - } - - #[gpui::test] - fn test_clone(cx: &mut gpui::MutableAppContext) { - let (text, selection_ranges) = marked_text_ranges( - indoc! {" - one - two - threeˇ - four - fiveˇ - "}, - true, - ); - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&text, cx); - - let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - - editor.update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone())); - editor.fold_ranges( - [ - Point::new(1, 0)..Point::new(2, 0), - Point::new(3, 0)..Point::new(4, 0), - ], - cx, - ); - }); - - let (_, cloned_editor) = editor.update(cx, |editor, cx| { - cx.add_window(Default::default(), |cx| editor.clone(cx)) - }); - - let snapshot = editor.update(cx, |e, cx| e.snapshot(cx)); - let cloned_snapshot = cloned_editor.update(cx, |e, cx| e.snapshot(cx)); - - assert_eq!( - cloned_editor.update(cx, |e, cx| e.display_text(cx)), - editor.update(cx, |e, cx| e.display_text(cx)) - ); - assert_eq!( - cloned_snapshot - .folds_in_range(0..text.len()) - .collect::>(), - snapshot.folds_in_range(0..text.len()).collect::>(), - ); - assert_set_eq!( - cloned_editor.read(cx).selections.ranges::(cx), - editor.read(cx).selections.ranges(cx) - ); - assert_set_eq!( - cloned_editor.update(cx, |e, cx| e.selections.display_ranges(cx)), - editor.update(cx, |e, cx| e.selections.display_ranges(cx)) - ); - } - - #[gpui::test] - fn test_navigation_history(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - use workspace::Item; - let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx)); - let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx); - - cx.add_view(&pane, |cx| { - let mut editor = build_editor(buffer.clone(), cx); - let handle = cx.handle(); - editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle))); - - fn pop_history( - editor: &mut Editor, - cx: &mut MutableAppContext, - ) -> Option { - editor.nav_history.as_mut().unwrap().pop_backward(cx) - } - - // Move the cursor a small distance. - // Nothing is added to the navigation history. - editor.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)]) - }); - editor.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)]) - }); - assert!(pop_history(&mut editor, cx).is_none()); - - // Move the cursor a large distance. - // The history can jump back to the previous position. - editor.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 3)]) - }); - let nav_entry = pop_history(&mut editor, cx).unwrap(); - editor.navigate(nav_entry.data.unwrap(), cx); - assert_eq!(nav_entry.item.id(), cx.view_id()); - assert_eq!( - editor.selections.display_ranges(cx), - &[DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)] - ); - assert!(pop_history(&mut editor, cx).is_none()); - - // Move the cursor a small distance via the mouse. - // Nothing is added to the navigation history. 
- editor.begin_selection(DisplayPoint::new(5, 0), false, 1, cx); - editor.end_selection(cx); - assert_eq!( - editor.selections.display_ranges(cx), - &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)] - ); - assert!(pop_history(&mut editor, cx).is_none()); - - // Move the cursor a large distance via the mouse. - // The history can jump back to the previous position. - editor.begin_selection(DisplayPoint::new(15, 0), false, 1, cx); - editor.end_selection(cx); - assert_eq!( - editor.selections.display_ranges(cx), - &[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)] - ); - let nav_entry = pop_history(&mut editor, cx).unwrap(); - editor.navigate(nav_entry.data.unwrap(), cx); - assert_eq!(nav_entry.item.id(), cx.view_id()); - assert_eq!( - editor.selections.display_ranges(cx), - &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)] - ); - assert!(pop_history(&mut editor, cx).is_none()); - - // Set scroll position to check later - editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx); - let original_scroll_position = editor.scroll_position; - let original_scroll_top_anchor = editor.scroll_top_anchor.clone(); - - // Jump to the end of the document and adjust scroll - editor.move_to_end(&MoveToEnd, cx); - editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx); - assert_ne!(editor.scroll_position, original_scroll_position); - assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor); - - let nav_entry = pop_history(&mut editor, cx).unwrap(); - editor.navigate(nav_entry.data.unwrap(), cx); - assert_eq!(editor.scroll_position, original_scroll_position); - assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor); - - // Ensure we don't panic when navigation data contains invalid anchors *and* points. - let mut invalid_anchor = editor.scroll_top_anchor.clone(); - invalid_anchor.text_anchor.buffer_id = Some(999); - let invalid_point = Point::new(9999, 0); - editor.navigate( - Box::new(NavigationData { - cursor_anchor: invalid_anchor.clone(), - cursor_position: invalid_point, - scroll_top_anchor: invalid_anchor, - scroll_top_row: invalid_point.row, - scroll_position: Default::default(), - }), - cx, - ); - assert_eq!( - editor.selections.display_ranges(cx), - &[editor.max_point(cx)..editor.max_point(cx)] - ); - assert_eq!( - editor.scroll_position(cx), - vec2f(0., editor.max_point(cx).row() as f32) - ); - - editor - }); - } - - #[gpui::test] - fn test_cancel(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - - view.update(cx, |view, cx| { - view.begin_selection(DisplayPoint::new(3, 4), false, 1, cx); - view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); - view.end_selection(cx); - - view.begin_selection(DisplayPoint::new(0, 1), true, 1, cx); - view.update_selection(DisplayPoint::new(0, 3), 0, Vector2F::zero(), cx); - view.end_selection(cx); - assert_eq!( - view.selections.display_ranges(cx), - [ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), - DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1), - ] - ); - }); - - view.update(cx, |view, cx| { - view.cancel(&Cancel, cx); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1)] - ); - }); - - view.update(cx, |view, cx| { - view.cancel(&Cancel, cx); - assert_eq!( - view.selections.display_ranges(cx), - [DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1)] - ); - }); - } - - 
#[gpui::test] - fn test_fold(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple( - &" - impl Foo { - // Hello! - - fn a() { - 1 - } - - fn b() { - 2 - } - - fn c() { - 3 - } - } - " - .unindent(), - cx, - ); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(8, 0)..DisplayPoint::new(12, 0)]); - }); - view.fold(&Fold, cx); - assert_eq!( - view.display_text(cx), - " - impl Foo { - // Hello! - - fn a() { - 1 - } - - fn b() {… - } - - fn c() {… - } - } - " - .unindent(), - ); - - view.fold(&Fold, cx); - assert_eq!( - view.display_text(cx), - " - impl Foo {… - } - " - .unindent(), - ); - - view.unfold_lines(&UnfoldLines, cx); - assert_eq!( - view.display_text(cx), - " - impl Foo { - // Hello! - - fn a() { - 1 - } - - fn b() {… - } - - fn c() {… - } - } - " - .unindent(), - ); - - view.unfold_lines(&UnfoldLines, cx); - assert_eq!(view.display_text(cx), buffer.read(cx).read(cx).text()); - }); - } - - #[gpui::test] - fn test_move_cursor(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - buffer.update(cx, |buffer, cx| { - buffer.edit( - vec![ - (Point::new(1, 0)..Point::new(1, 0), "\t"), - (Point::new(1, 1)..Point::new(1, 1), "\t"), - ], - None, - cx, - ); - }); - - view.update(cx, |view, cx| { - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] - ); - - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4)] - ); - - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] - ); - - view.move_up(&MoveUp, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] - ); - - view.move_to_end(&MoveToEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(5, 6)..DisplayPoint::new(5, 6)] - ); - - view.move_to_beginning(&MoveToBeginning, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] - ); - - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(0, 2)]); - }); - view.select_to_beginning(&SelectToBeginning, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 1)..DisplayPoint::new(0, 0)] - ); - - view.select_to_end(&SelectToEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 1)..DisplayPoint::new(5, 6)] - ); - }); - } - - #[gpui::test] - fn test_move_cursor_multibyte(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcde\nαβγδε\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - assert_eq!('ⓐ'.len_utf8(), 3); - assert_eq!('α'.len_utf8(), 2); - - view.update(cx, |view, cx| { - view.fold_ranges( - vec![ - Point::new(0, 6)..Point::new(0, 12), - Point::new(1, 
2)..Point::new(1, 4), - Point::new(2, 4)..Point::new(2, 8), - ], - cx, - ); - assert_eq!(view.display_text(cx), "ⓐⓑ…ⓔ\nab…e\nαβ…ε\n"); - - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐ".len())] - ); - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐⓑ".len())] - ); - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐⓑ…".len())] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(1, "ab…".len())] - ); - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(1, "ab".len())] - ); - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(1, "a".len())] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "α".len())] - ); - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "αβ".len())] - ); - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "αβ…".len())] - ); - view.move_right(&MoveRight, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "αβ…ε".len())] - ); - - view.move_up(&MoveUp, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(1, "ab…e".len())] - ); - view.move_up(&MoveUp, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐⓑ…ⓔ".len())] - ); - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐⓑ…".len())] - ); - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐⓑ".len())] - ); - view.move_left(&MoveLeft, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(0, "ⓐ".len())] - ); - }); - } - - #[gpui::test] - fn test_move_cursor_different_line_lengths(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]); - }); - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(1, "abcd".len())] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "αβγ".len())] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(3, "abcd".len())] - ); - - view.move_down(&MoveDown, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())] - ); - - view.move_up(&MoveUp, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(3, "abcd".len())] - ); - - view.move_up(&MoveUp, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[empty_range(2, "αβγ".len())] - ); - }); - } - - #[gpui::test] - fn test_beginning_end_of_line(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\n def", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - 
s.select_display_ranges([ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4), - ]); - }); - }); - - view.update(cx, |view, cx| { - view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_to_end_of_line(&MoveToEndOfLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), - ] - ); - }); - - // Moving to the end of line again is a no-op. - view.update(cx, |view, cx| { - view.move_to_end_of_line(&MoveToEndOfLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_left(&MoveLeft, cx); - view.select_to_beginning_of_line( - &SelectToBeginningOfLine { - stop_at_soft_wraps: true, - }, - cx, - ); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.select_to_beginning_of_line( - &SelectToBeginningOfLine { - stop_at_soft_wraps: true, - }, - cx, - ); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 0), - ] - ); - }); - - view.update(cx, |view, cx| { - view.select_to_beginning_of_line( - &SelectToBeginningOfLine { - stop_at_soft_wraps: true, - }, - cx, - ); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.select_to_end_of_line( - &SelectToEndOfLine { - stop_at_soft_wraps: true, - }, - cx, - ); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 5), - ] - ); - }); - - view.update(cx, |view, cx| { - view.delete_to_end_of_line(&DeleteToEndOfLine, cx); - assert_eq!(view.display_text(cx), "ab\n de"); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4), - ] - ); - }); - - view.update(cx, |view, cx| { - view.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); - assert_eq!(view.display_text(cx), "\n"); - assert_eq!( - view.selections.display_ranges(cx), - &[ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - ] - ); - }); - } - - #[gpui::test] - fn test_prev_next_word_boundary(cx: &mut gpui::MutableAppContext) { - 
cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("use std::str::{foo, bar}\n\n {baz.qux()}", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 11)..DisplayPoint::new(0, 11), - DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4), - ]) - }); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_selection_ranges("use stdˇ::str::{foo, bar}\n\n ˇ{baz.qux()}", view, cx); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_selection_ranges("use ˇstd::str::{foo, bar}\n\nˇ {baz.qux()}", view, cx); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_selection_ranges("ˇuse std::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_selection_ranges("ˇuse std::str::{foo, barˇ}\n\n {baz.qux()}", view, cx); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_selection_ranges("useˇ std::str::{foo, bar}ˇ\n\n {baz.qux()}", view, cx); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); - - view.move_right(&MoveRight, cx); - view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); - assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); - - view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); - assert_selection_ranges("use std«ˇ::s»tr::{foo, bar}\n\n «ˇ{b»az.qux()}", view, cx); - - view.select_to_next_word_end(&SelectToNextWordEnd, cx); - assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); - }); - } - - #[gpui::test] - fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - - view.update(cx, |view, cx| { - view.set_wrap_width(Some(140.), cx); - assert_eq!( - view.display_text(cx), - "use one::{\n two::three::\n four::five\n};" - ); - - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(1, 7)..DisplayPoint::new(1, 7)]); - }); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)] - ); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)] - ); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] - ); - - view.move_to_next_word_end(&MoveToNextWordEnd, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)] - ); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] 
- ); - - view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)] - ); - }); - } - - #[gpui::test] - async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - cx.set_state("one «two threeˇ» four"); - cx.update_editor(|editor, cx| { - editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); - assert_eq!(editor.text(cx), " four"); - }); - } - - #[gpui::test] - fn test_delete_to_word_boundary(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("one two three four", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - // an empty selection - the preceding word fragment is deleted - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - // characters selected - they are deleted - DisplayPoint::new(0, 9)..DisplayPoint::new(0, 12), - ]) - }); - view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx); - }); - - assert_eq!(buffer.read(cx).read(cx).text(), "e two te four"); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - // an empty selection - the following word fragment is deleted - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - // characters selected - they are deleted - DisplayPoint::new(0, 9)..DisplayPoint::new(0, 10), - ]) - }); - view.delete_to_next_word_end(&DeleteToNextWordEnd, cx); - }); - - assert_eq!(buffer.read(cx).read(cx).text(), "e t te our"); - } - - #[gpui::test] - fn test_newline(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), - DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6), - ]) - }); - - view.newline(&Newline, cx); - assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n"); - }); - } - - #[gpui::test] - fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple( - " - a - b( - X - ) - c( - X - ) - " - .unindent() - .as_str(), - cx, - ); - - let (_, editor) = cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(buffer.clone(), cx); - editor.change_selections(None, cx, |s| { - s.select_ranges([ - Point::new(2, 4)..Point::new(2, 5), - Point::new(5, 4)..Point::new(5, 5), - ]) - }); - editor - }); - - // Edit the buffer directly, deleting ranges surrounding the editor's selections - buffer.update(cx, |buffer, cx| { - buffer.edit( - [ - (Point::new(1, 2)..Point::new(3, 0), ""), - (Point::new(4, 2)..Point::new(6, 0), ""), - ], - None, - cx, - ); - assert_eq!( - buffer.read(cx).text(), - " - a - b() - c() - " - .unindent() - ); - }); - - editor.update(cx, |editor, cx| { - assert_eq!( - editor.selections.ranges(cx), - &[ - Point::new(1, 2)..Point::new(1, 2), - Point::new(2, 2)..Point::new(2, 2), - ], - ); - - editor.newline(&Newline, cx); - assert_eq!( - editor.text(cx), - " - a - b( - ) - c( - ) - " - .unindent() - ); - - // The selections are 
moved after the inserted newlines - assert_eq!( - editor.selections.ranges(cx), - &[ - Point::new(2, 0)..Point::new(2, 0), - Point::new(4, 0)..Point::new(4, 0), - ], - ); - }); - } - - #[gpui::test] - async fn test_newline_below(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.editor_overrides.tab_size = Some(NonZeroU32::new(4).unwrap()); - }); - }); - - let language = Arc::new( - Language::new( - LanguageConfig::default(), - Some(tree_sitter_rust::language()), - ) - .with_indents_query(r#"(_ "(" ")" @end) @indent"#) - .unwrap(), - ); - cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); - - cx.set_state(indoc! {" - const a: ˇA = ( - (ˇ - «const_functionˇ»(ˇ), - so«mˇ»et«hˇ»ing_ˇelse,ˇ - )ˇ - ˇ);ˇ - "}); - cx.update_editor(|e, cx| e.newline_below(&NewlineBelow, cx)); - cx.assert_editor_state(indoc! {" - const a: A = ( - ˇ - ( - ˇ - const_function(), - ˇ - ˇ - something_else, - ˇ - ˇ - ˇ - ˇ - ) - ˇ - ); - ˇ - ˇ - "}); - } - - #[gpui::test] - fn test_insert_with_old_selections(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx); - let (_, editor) = cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(buffer.clone(), cx); - editor.change_selections(None, cx, |s| s.select_ranges([3..4, 11..12, 19..20])); - editor - }); - - // Edit the buffer directly, deleting ranges surrounding the editor's selections - buffer.update(cx, |buffer, cx| { - buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx); - assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent()); - }); - - editor.update(cx, |editor, cx| { - assert_eq!(editor.selections.ranges(cx), &[2..2, 7..7, 12..12],); - - editor.insert("Z", cx); - assert_eq!(editor.text(cx), "a(Z), b(Z), c(Z)"); - - // The selections are moved after the inserted characters - assert_eq!(editor.selections.ranges(cx), &[3..3, 9..9, 15..15],); - }); - } - - #[gpui::test] - async fn test_tab(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.editor_overrides.tab_size = Some(NonZeroU32::new(3).unwrap()); - }); - }); - cx.set_state(indoc! {" - ˇabˇc - ˇ🏀ˇ🏀ˇefg - dˇ - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - ˇab ˇc - ˇ🏀 ˇ🏀 ˇefg - d ˇ - "}); - - cx.set_state(indoc! {" - a - «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - a - «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» - "}); - } - - #[gpui::test] - async fn test_tab_on_blank_line_auto_indents(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - let language = Arc::new( - Language::new( - LanguageConfig::default(), - Some(tree_sitter_rust::language()), - ) - .with_indents_query(r#"(_ "(" ")" @end) @indent"#) - .unwrap(), - ); - cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); - - // cursors that are already at the suggested indent level insert - // a soft tab. cursors that are to the left of the suggested indent - // auto-indent their line. - cx.set_state(indoc! {" - ˇ - const a: B = ( - c( - d( - ˇ - ) - ˇ - ˇ ) - ); - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - ˇ - const a: B = ( - c( - d( - ˇ - ) - ˇ - ˇ) - ); - "}); - - // handle auto-indent when there are multiple cursors on the same line - cx.set_state(indoc! 
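The tab tests assume soft tabs pad each cursor out to the next multiple of the configured tab_size, with the padding computed from the column the cursor occupied before any of the edits were applied. A sketch of that arithmetic (assumed behavior, not the editor's routine):

// With tab_size = 3: a cursor at column 0 gets three spaces and a cursor at
// column 2 gets one, matching the "ˇab ˇc" expectation above.
fn soft_tab(column: usize, tab_size: usize) -> String {
    let pad = tab_size - column % tab_size;
    " ".repeat(pad)
}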
{" - const a: B = ( - c( - ˇ ˇ - ˇ ) - ); - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - const a: B = ( - c( - ˇ - ˇ) - ); - "}); - } - - #[gpui::test] - async fn test_indent_outdent(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - cx.set_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); - - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); - - // select across line ending - cx.set_state(indoc! {" - one two - t«hree - ˇ» four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ» four - "}); - - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ» four - "}); - - // Ensure that indenting/outdenting works when the cursor is at column 0. - cx.set_state(indoc! {" - one two - ˇthree - four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); - - cx.set_state(indoc! {" - one two - ˇ three - four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); - } - - #[gpui::test] - async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.editor_overrides.hard_tabs = Some(true); - }); - }); - - // select two ranges on one line - cx.set_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - \t«oneˇ» «twoˇ» - three - four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - \t\t«oneˇ» «twoˇ» - three - four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - \t«oneˇ» «twoˇ» - three - four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); - - // select across a line ending - cx.set_state(indoc! {" - one two - t«hree - ˇ»four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - one two - \tt«hree - ˇ»four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - one two - \t\tt«hree - ˇ»four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - one two - \tt«hree - ˇ»four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ»four - "}); - - // Ensure that indenting/outdenting works when the cursor is at column 0. - cx.set_state(indoc! {" - one two - ˇthree - four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); - cx.update_editor(|e, cx| e.tab(&Tab, cx)); - cx.assert_editor_state(indoc! {" - one two - \tˇthree - four - "}); - cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); - cx.assert_editor_state(indoc! 
{" - one two - ˇthree - four - "}); - } - - #[gpui::test] - fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) { - cx.set_global( - Settings::test(cx) - .with_language_defaults( - "TOML", - EditorSettings { - tab_size: Some(2.try_into().unwrap()), - ..Default::default() - }, - ) - .with_language_defaults( - "Rust", - EditorSettings { - tab_size: Some(4.try_into().unwrap()), - ..Default::default() - }, - ), - ); - let toml_language = Arc::new(Language::new( - LanguageConfig { - name: "TOML".into(), - ..Default::default() - }, - None, - )); - let rust_language = Arc::new(Language::new( - LanguageConfig { - name: "Rust".into(), - ..Default::default() - }, - None, - )); - - let toml_buffer = cx - .add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx)); - let rust_buffer = cx.add_model(|cx| { - Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx) - }); - let multibuffer = cx.add_model(|cx| { - let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpts( - toml_buffer.clone(), - [ExcerptRange { - context: Point::new(0, 0)..Point::new(2, 0), - primary: None, - }], - cx, - ); - multibuffer.push_excerpts( - rust_buffer.clone(), - [ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 0), - primary: None, - }], - cx, - ); - multibuffer - }); - - cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(multibuffer, cx); - - assert_eq!( - editor.text(cx), - indoc! {" - a = 1 - b = 2 - - const c: usize = 3; - "} - ); - - select_ranges( - &mut editor, - indoc! {" - «aˇ» = 1 - b = 2 - - «const c:ˇ» usize = 3; - "}, - cx, - ); - - editor.tab(&Tab, cx); - assert_text_with_selections( - &mut editor, - indoc! {" - «aˇ» = 1 - b = 2 - - «const c:ˇ» usize = 3; - "}, - cx, - ); - editor.tab_prev(&TabPrev, cx); - assert_text_with_selections( - &mut editor, - indoc! {" - «aˇ» = 1 - b = 2 - - «const c:ˇ» usize = 3; - "}, - cx, - ); - - editor - }); - } - - #[gpui::test] - async fn test_backspace(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - // Basic backspace - cx.set_state(indoc! {" - onˇe two three - fou«rˇ» five six - seven «ˇeight nine - »ten - "}); - cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); - cx.assert_editor_state(indoc! {" - oˇe two three - fouˇ five six - seven ˇten - "}); - - // Test backspace inside and around indents - cx.set_state(indoc! {" - zero - ˇone - ˇtwo - ˇ ˇ ˇ three - ˇ ˇ four - "}); - cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); - cx.assert_editor_state(indoc! {" - zero - ˇone - ˇtwo - ˇ threeˇ four - "}); - - // Test backspace with line_mode set to true - cx.update_editor(|e, _| e.selections.line_mode = true); - cx.set_state(indoc! {" - The ˇquick ˇbrown - fox jumps over - the lazy dog - ˇThe qu«ick bˇ»rown"}); - cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); - cx.assert_editor_state(indoc! {" - ˇfox jumps over - the lazy dogˇ"}); - } - - #[gpui::test] - async fn test_delete(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - cx.set_state(indoc! {" - onˇe two three - fou«rˇ» five six - seven «ˇeight nine - »ten - "}); - cx.update_editor(|e, cx| e.delete(&Delete, cx)); - cx.assert_editor_state(indoc! {" - onˇ two three - fouˇ five six - seven ˇten - "}); - - // Test backspace with line_mode set to true - cx.update_editor(|e, _| e.selections.line_mode = true); - cx.set_state(indoc! 
{" - The ˇquick ˇbrown - fox «ˇjum»ps over - the lazy dog - ˇThe qu«ick bˇ»rown"}); - cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); - cx.assert_editor_state("ˇthe lazy dogˇ"); - } - - #[gpui::test] - fn test_delete_line(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), - DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), - ]) - }); - view.delete_line(&DeleteLine, cx); - assert_eq!(view.display_text(cx), "ghi"); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1) - ] - ); - }); - - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(0, 1)]) - }); - view.delete_line(&DeleteLine, cx); - assert_eq!(view.display_text(cx), "ghi\n"); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)] - ); - }); - } - - #[gpui::test] - fn test_duplicate_line(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), - ]) - }); - view.duplicate_line(&DuplicateLine, cx); - assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n"); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), - DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), - DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), - DisplayPoint::new(6, 0)..DisplayPoint::new(6, 0), - ] - ); - }); - - let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 1)..DisplayPoint::new(1, 1), - DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1), - ]) - }); - view.duplicate_line(&DuplicateLine, cx); - assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n"); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(3, 1)..DisplayPoint::new(4, 1), - DisplayPoint::new(4, 2)..DisplayPoint::new(5, 1), - ] - ); - }); - } - - #[gpui::test] - fn test_move_line_up_down(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.fold_ranges( - vec![ - Point::new(0, 2)..Point::new(1, 2), - 
Point::new(2, 3)..Point::new(4, 1), - Point::new(7, 0)..Point::new(8, 4), - ], - cx, - ); - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), - DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), - DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2), - ]) - }); - assert_eq!( - view.display_text(cx), - "aa…bbb\nccc…eeee\nfffff\nggggg\n…i\njjjjj" - ); - - view.move_line_up(&MoveLineUp, cx); - assert_eq!( - view.display_text(cx), - "aa…bbb\nccc…eeee\nggggg\n…i\njjjjj\nfffff" - ); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), - DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3), - DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2) - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_line_down(&MoveLineDown, cx); - assert_eq!( - view.display_text(cx), - "ccc…eeee\naa…bbb\nfffff\nggggg\n…i\njjjjj" - ); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), - DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), - DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2) - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_line_down(&MoveLineDown, cx); - assert_eq!( - view.display_text(cx), - "ccc…eeee\nfffff\naa…bbb\nggggg\n…i\njjjjj" - ); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), - DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), - DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2) - ] - ); - }); - - view.update(cx, |view, cx| { - view.move_line_up(&MoveLineUp, cx); - assert_eq!( - view.display_text(cx), - "ccc…eeee\naa…bbb\nggggg\n…i\njjjjj\nfffff" - ); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), - DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3), - DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2) - ] - ); - }); - } - - #[gpui::test] - fn test_move_line_up_down_with_blocks(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); - let snapshot = buffer.read(cx).snapshot(cx); - let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - editor.update(cx, |editor, cx| { - editor.insert_blocks( - [BlockProperties { - style: BlockStyle::Fixed, - position: snapshot.anchor_after(Point::new(2, 0)), - disposition: BlockDisposition::Below, - height: 1, - render: Arc::new(|_| Empty::new().boxed()), - }], - cx, - ); - editor.change_selections(None, cx, |s| { - s.select_ranges([Point::new(2, 0)..Point::new(2, 0)]) - }); - editor.move_line_down(&MoveLineDown, cx); - }); - } - - #[gpui::test] - fn test_transpose(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - - _ = cx - .add_window(Default::default(), |cx| { - let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx); - - editor.change_selections(None, cx, |s| s.select_ranges([1..1])); - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bac"); - assert_eq!(editor.selections.ranges(cx), [2..2]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bca"); - 
assert_eq!(editor.selections.ranges(cx), [3..3]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bac"); - assert_eq!(editor.selections.ranges(cx), [3..3]); - - editor - }) - .1; - - _ = cx - .add_window(Default::default(), |cx| { - let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); - - editor.change_selections(None, cx, |s| s.select_ranges([3..3])); - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "acb\nde"); - assert_eq!(editor.selections.ranges(cx), [3..3]); - - editor.change_selections(None, cx, |s| s.select_ranges([4..4])); - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "acbd\ne"); - assert_eq!(editor.selections.ranges(cx), [5..5]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "acbde\n"); - assert_eq!(editor.selections.ranges(cx), [6..6]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "acbd\ne"); - assert_eq!(editor.selections.ranges(cx), [6..6]); - - editor - }) - .1; - - _ = cx - .add_window(Default::default(), |cx| { - let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); - - editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4])); - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bacd\ne"); - assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bcade\n"); - assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bcda\ne"); - assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bcade\n"); - assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "bcaed\n"); - assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]); - - editor - }) - .1; - - _ = cx - .add_window(Default::default(), |cx| { - let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx); - - editor.change_selections(None, cx, |s| s.select_ranges([4..4])); - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "🏀🍐✋"); - assert_eq!(editor.selections.ranges(cx), [8..8]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "🏀✋🍐"); - assert_eq!(editor.selections.ranges(cx), [11..11]); - - editor.transpose(&Default::default(), cx); - assert_eq!(editor.text(cx), "🏀🍐✋"); - assert_eq!(editor.selections.ranges(cx), [11..11]); - - editor - }) - .1; - } - - #[gpui::test] - async fn test_clipboard(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - cx.set_state("«one✅ ˇ»two «three ˇ»four «five ˇ»six "); - cx.update_editor(|e, cx| e.cut(&Cut, cx)); - cx.assert_editor_state("ˇtwo ˇfour ˇsix "); - - // Paste with three cursors. Each cursor pastes one slice of the clipboard text. - cx.set_state("two ˇfour ˇsix ˇ"); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state("two one✅ ˇfour three ˇsix five ˇ"); - - // Paste again but with only two cursors. Since the number of cursors doesn't - // match the number of slices in the clipboard, the entire clipboard text - // is pasted at each cursor. 
- cx.set_state("ˇtwo one✅ four three six five ˇ"); - cx.update_editor(|e, cx| { - e.handle_input("( ", cx); - e.paste(&Paste, cx); - e.handle_input(") ", cx); - }); - cx.assert_editor_state(indoc! {" - ( one✅ - three - five ) ˇtwo one✅ four three six five ( one✅ - three - five ) ˇ"}); - - // Cut with three selections, one of which is full-line. - cx.set_state(indoc! {" - 1«2ˇ»3 - 4ˇ567 - «8ˇ»9"}); - cx.update_editor(|e, cx| e.cut(&Cut, cx)); - cx.assert_editor_state(indoc! {" - 1ˇ3 - ˇ9"}); - - // Paste with three selections, noticing how the copied selection that was full-line - // gets inserted before the second cursor. - cx.set_state(indoc! {" - 1ˇ3 - 9ˇ - «oˇ»ne"}); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" - 12ˇ3 - 4567 - 9ˇ - 8ˇne"}); - - // Copy with a single cursor only, which writes the whole line into the clipboard. - cx.set_state(indoc! {" - The quick brown - fox juˇmps over - the lazy dog"}); - cx.update_editor(|e, cx| e.copy(&Copy, cx)); - cx.cx.assert_clipboard_content(Some("fox jumps over\n")); - - // Paste with three selections, noticing how the copied full-line selection is inserted - // before the empty selections but replaces the selection that is non-empty. - cx.set_state(indoc! {" - Tˇhe quick brown - «foˇ»x jumps over - tˇhe lazy dog"}); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" - fox jumps over - Tˇhe quick brown - fox jumps over - ˇx jumps over - fox jumps over - tˇhe lazy dog"}); - } - - #[gpui::test] - async fn test_paste_multiline(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - let language = Arc::new(Language::new( - LanguageConfig::default(), - Some(tree_sitter_rust::language()), - )); - cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); - - // Cut an indented block, without the leading whitespace. - cx.set_state(indoc! {" - const a: B = ( - c(), - «d( - e, - f - )ˇ» - ); - "}); - cx.update_editor(|e, cx| e.cut(&Cut, cx)); - cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - ˇ - ); - "}); - - // Paste it at the same position. - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - d( - e, - f - )ˇ - ); - "}); - - // Paste it at a line with a lower indent level. - cx.set_state(indoc! {" - ˇ - const a: B = ( - c(), - ); - "}); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" - d( - e, - f - )ˇ - const a: B = ( - c(), - ); - "}); - - // Cut an indented block, with the leading whitespace. - cx.set_state(indoc! {" - const a: B = ( - c(), - « d( - e, - f - ) - ˇ»); - "}); - cx.update_editor(|e, cx| e.cut(&Cut, cx)); - cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - ˇ); - "}); - - // Paste it at the same position. - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - d( - e, - f - ) - ˇ); - "}); - - // Paste it at a line with a higher indent level. - cx.set_state(indoc! {" - const a: B = ( - c(), - d( - e, - fˇ - ) - ); - "}); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! 
{" - const a: B = ( - c(), - d( - e, - f d( - e, - f - ) - ˇ - ) - ); - "}); - } - - #[gpui::test] - fn test_select_all(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\nde\nfgh", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.select_all(&SelectAll, cx); - assert_eq!( - view.selections.display_ranges(cx), - &[DisplayPoint::new(0, 0)..DisplayPoint::new(2, 3)] - ); - }); - } - - #[gpui::test] - fn test_select_line(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&sample_text(6, 5, 'a'), cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - DisplayPoint::new(4, 2)..DisplayPoint::new(4, 2), - ]) - }); - view.select_line(&SelectLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 0)..DisplayPoint::new(2, 0), - DisplayPoint::new(4, 0)..DisplayPoint::new(5, 0), - ] - ); - }); - - view.update(cx, |view, cx| { - view.select_line(&SelectLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 0)..DisplayPoint::new(3, 0), - DisplayPoint::new(4, 0)..DisplayPoint::new(5, 5), - ] - ); - }); - - view.update(cx, |view, cx| { - view.select_line(&SelectLine, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(0, 0)..DisplayPoint::new(5, 5)] - ); - }); - } - - #[gpui::test] - fn test_split_selection_into_lines(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple(&sample_text(9, 5, 'a'), cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - view.update(cx, |view, cx| { - view.fold_ranges( - vec![ - Point::new(0, 2)..Point::new(1, 2), - Point::new(2, 3)..Point::new(4, 1), - Point::new(7, 0)..Point::new(8, 4), - ], - cx, - ); - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), - DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4), - ]) - }); - assert_eq!(view.display_text(cx), "aa…bbb\nccc…eeee\nfffff\nggggg\n…i"); - }); - - view.update(cx, |view, cx| { - view.split_selection_into_lines(&SplitSelectionIntoLines, cx); - assert_eq!( - view.display_text(cx), - "aaaaa\nbbbbb\nccc…eeee\nfffff\nggggg\n…i" - ); - assert_eq!( - view.selections.display_ranges(cx), - [ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), - DisplayPoint::new(2, 0)..DisplayPoint::new(2, 0), - DisplayPoint::new(5, 4)..DisplayPoint::new(5, 4) - ] - ); - }); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(5, 0)..DisplayPoint::new(0, 1)]) - }); - view.split_selection_into_lines(&SplitSelectionIntoLines, cx); - assert_eq!( - view.display_text(cx), - "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii" - ); - assert_eq!( - view.selections.display_ranges(cx), - [ - DisplayPoint::new(0, 5)..DisplayPoint::new(0, 5), - DisplayPoint::new(1, 
5)..DisplayPoint::new(1, 5), - DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), - DisplayPoint::new(3, 5)..DisplayPoint::new(3, 5), - DisplayPoint::new(4, 5)..DisplayPoint::new(4, 5), - DisplayPoint::new(5, 5)..DisplayPoint::new(5, 5), - DisplayPoint::new(6, 5)..DisplayPoint::new(6, 5), - DisplayPoint::new(7, 0)..DisplayPoint::new(7, 0) - ] - ); - }); - } - - #[gpui::test] - fn test_add_selection_above_below(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = MultiBuffer::build_simple("abc\ndefghi\n\njk\nlmno\n", cx); - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)]) - }); - }); - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)] - ); - - view.undo_selection(&UndoSelection, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) - ] - ); - - view.redo_selection(&RedoSelection, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), - DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), - DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)]) - }); - }); - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), - DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), - DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] - ); - }); - - 
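add_selection_below and add_selection_above, as tested here, keep the original column and skip rows that are too short to host a cursor at that column (the blank line and "jk" are skipped on the way from row 1 to row 4). A rough model of the row-picking step; the real implementation additionally preserves selection direction and works in display columns:

// Illustrative rule only: find the next row below that is long enough to
// place a cursor at `column`.
fn add_cursor_below(lines: &[&str], row: usize, column: usize) -> Option<(usize, usize)> {
    lines
        .iter()
        .enumerate()
        .skip(row + 1)
        .find(|(_, line)| line.chars().count() >= column)
        .map(|(found_row, _)| (found_row, column))
}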
view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(1, 4)]) - }); - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), - DisplayPoint::new(4, 1)..DisplayPoint::new(4, 4), - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), - DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), - ] - ); - }); - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(4, 3)..DisplayPoint::new(1, 1)]) - }); - }); - view.update(cx, |view, cx| { - view.add_selection_above(&AddSelectionAbove, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), - DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1), - DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1), - ] - ); - }); - - view.update(cx, |view, cx| { - view.add_selection_below(&AddSelectionBelow, cx); - assert_eq!( - view.selections.display_ranges(cx), - vec![ - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), - DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1), - DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1), - ] - ); - }); - } - - #[gpui::test] - async fn test_select_next(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - cx.set_state("abc\nˇabc abc\ndefabc\nabc"); - - cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); - cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); - - cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); - cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); - - cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx)); - cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); - - cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx)); - cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); - - cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); - cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); - - cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); - cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); - } - - #[gpui::test] - async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new(Language::new( - LanguageConfig::default(), - Some(tree_sitter_rust::language()), - )); - - let text = r#" - use mod1::mod2::{mod3, mod4}; - - fn fn_1(param1: bool, param2: &str) { - let var1 = "text"; - } - "# - .unindent(); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, view) = cx.add_window(|cx| 
build_editor(buffer, cx)); - view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) - .await; - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), - DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), - DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), - ]); - }); - view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| { view.selections.display_ranges(cx) }), - &[ - DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), - DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), - DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), - ] - ); - - view.update(cx, |view, cx| { - view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), - DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), - ] - ); - - view.update(cx, |view, cx| { - view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] - ); - - // Trying to expand the selected syntax node one more time has no effect. - view.update(cx, |view, cx| { - view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] - ); - - view.update(cx, |view, cx| { - view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), - DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), - ] - ); - - view.update(cx, |view, cx| { - view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), - DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), - DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), - ] - ); - - view.update(cx, |view, cx| { - view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), - DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), - DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), - ] - ); - - // Trying to shrink the selected syntax node one more time has no effect. - view.update(cx, |view, cx| { - view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), - DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), - DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), - ] - ); - - // Ensure that we keep expanding the selection if the larger selection starts or ends within - // a fold. 
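select_larger_syntax_node, as the assertions above show, grows each selection to the smallest syntax node that strictly contains it and becomes a no-op once the whole file is selected. A conceptual sketch of that choice over a precomputed list of enclosing node ranges (the editor derives them from the tree-sitter tree):

use std::ops::Range;

// Sketch only: pick the smallest range that strictly contains the selection,
// or keep the selection unchanged when nothing larger exists.
fn select_larger(selection: Range<usize>, enclosing: &[Range<usize>]) -> Range<usize> {
    enclosing
        .iter()
        .filter(|node| node.start <= selection.start && node.end >= selection.end)
        .filter(|node| node.start < selection.start || node.end > selection.end)
        .min_by_key(|node| node.end - node.start)
        .cloned()
        .unwrap_or(selection)
}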
- view.update(cx, |view, cx| { - view.fold_ranges( - vec![ - Point::new(0, 21)..Point::new(0, 24), - Point::new(3, 20)..Point::new(3, 22), - ], - cx, - ); - view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); - }); - assert_eq!( - view.update(cx, |view, cx| view.selections.display_ranges(cx)), - &[ - DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), - DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), - DisplayPoint::new(3, 4)..DisplayPoint::new(3, 23), - ] - ); - } - - #[gpui::test] - async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new( - Language::new( - LanguageConfig { - brackets: vec![ - BracketPair { - start: "{".to_string(), - end: "}".to_string(), - close: false, - newline: true, - }, - BracketPair { - start: "(".to_string(), - end: ")".to_string(), - close: false, - newline: true, - }, - ], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ) - .with_indents_query( - r#" - (_ "(" ")" @end) @indent - (_ "{" "}" @end) @indent - "#, - ) - .unwrap(), - ); - - let text = "fn a() {}"; - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - editor - .condition(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) - .await; - - editor.update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| s.select_ranges([5..5, 8..8, 9..9])); - editor.newline(&Newline, cx); - assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); - assert_eq!( - editor.selections.ranges(cx), - &[ - Point::new(1, 4)..Point::new(1, 4), - Point::new(3, 4)..Point::new(3, 4), - Point::new(5, 0)..Point::new(5, 0) - ] - ); - }); - } - - #[gpui::test] - async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - let language = Arc::new(Language::new( - LanguageConfig { - brackets: vec![ - BracketPair { - start: "{".to_string(), - end: "}".to_string(), - close: true, - newline: true, - }, - BracketPair { - start: "/*".to_string(), - end: " */".to_string(), - close: true, - newline: true, - }, - BracketPair { - start: "[".to_string(), - end: "]".to_string(), - close: false, - newline: true, - }, - ], - autoclose_before: "})]".to_string(), - ..Default::default() - }, - Some(tree_sitter_rust::language()), - )); - - let registry = Arc::new(LanguageRegistry::test()); - registry.add(language.clone()); - cx.update_buffer(|buffer, cx| { - buffer.set_language_registry(registry); - buffer.set_language(Some(language), cx); - }); - - cx.set_state( - &r#" - 🏀ˇ - εˇ - ❤️ˇ - "# - .unindent(), - ); - - // autoclose multiple nested brackets at multiple cursors - cx.update_editor(|view, cx| { - view.handle_input("{", cx); - view.handle_input("{", cx); - view.handle_input("{", cx); - }); - cx.assert_editor_state( - &" - 🏀{{{ˇ}}} - ε{{{ˇ}}} - ❤️{{{ˇ}}} - " - .unindent(), - ); - - // skip over the auto-closed brackets when typing a closing bracket - cx.update_editor(|view, cx| { - view.move_right(&MoveRight, cx); - view.handle_input("}", cx); - view.handle_input("}", cx); - view.handle_input("}", cx); - }); - cx.assert_editor_state( - &" - 🏀{{{}}}}ˇ - ε{{{}}}}ˇ - ❤️{{{}}}}ˇ - " - .unindent(), - ); - - // autoclose multi-character pairs - cx.set_state( - &" - ˇ - ˇ - " - .unindent(), - ); - cx.update_editor(|view, cx| { - view.handle_input("/", cx); - view.handle_input("*", cx); - }); - 
cx.assert_editor_state( - &" - /*ˇ */ - /*ˇ */ - " - .unindent(), - ); - - // one cursor autocloses a multi-character pair, one cursor - // does not autoclose. - cx.set_state( - &" - /ˇ - ˇ - " - .unindent(), - ); - cx.update_editor(|view, cx| view.handle_input("*", cx)); - cx.assert_editor_state( - &" - /*ˇ */ - *ˇ - " - .unindent(), - ); - - // Don't autoclose if the next character isn't whitespace and isn't - // listed in the language's "autoclose_before" section. - cx.set_state("ˇa b"); - cx.update_editor(|view, cx| view.handle_input("{", cx)); - cx.assert_editor_state("{ˇa b"); - - // Surround with brackets if text is selected - cx.set_state("«aˇ» b"); - cx.update_editor(|view, cx| view.handle_input("{", cx)); - cx.assert_editor_state("{«aˇ»} b"); - } - - #[gpui::test] - async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - let html_language = Arc::new( - Language::new( - LanguageConfig { - name: "HTML".into(), - brackets: vec![ - BracketPair { - start: "<".into(), - end: ">".into(), - ..Default::default() - }, - BracketPair { - start: "{".into(), - end: "}".into(), - ..Default::default() - }, - BracketPair { - start: "(".into(), - end: ")".into(), - ..Default::default() - }, - ], - autoclose_before: "})]>".into(), - ..Default::default() - }, - Some(tree_sitter_html::language()), - ) - .with_injection_query( - r#" - (script_element - (raw_text) @content - (#set! "language" "javascript")) - "#, - ) - .unwrap(), - ); - - let javascript_language = Arc::new(Language::new( - LanguageConfig { - name: "JavaScript".into(), - brackets: vec![ - BracketPair { - start: "/*".into(), - end: " */".into(), - ..Default::default() - }, - BracketPair { - start: "{".into(), - end: "}".into(), - ..Default::default() - }, - BracketPair { - start: "(".into(), - end: ")".into(), - ..Default::default() - }, - ], - autoclose_before: "})]>".into(), - ..Default::default() - }, - Some(tree_sitter_javascript::language()), - )); - - let registry = Arc::new(LanguageRegistry::test()); - registry.add(html_language.clone()); - registry.add(javascript_language.clone()); - - cx.update_buffer(|buffer, cx| { - buffer.set_language_registry(registry); - buffer.set_language(Some(html_language), cx); - }); - - cx.set_state( - &r#" - ˇ - - ˇ - "# - .unindent(), - ); - - // Precondition: different languages are active at different locations. - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let cursors = editor.selections.ranges::(cx); - let languages = cursors - .iter() - .map(|c| snapshot.language_at(c.start).unwrap().name()) - .collect::>(); - assert_eq!( - languages, - &["HTML".into(), "JavaScript".into(), "HTML".into()] - ); - }); - - // Angle brackets autoclose in HTML, but not JavaScript. - cx.update_editor(|editor, cx| { - editor.handle_input("<", cx); - editor.handle_input("a", cx); - }); - cx.assert_editor_state( - &r#" - - - - "# - .unindent(), - ); - - // Curly braces and parens autoclose in both HTML and JavaScript. - cx.update_editor(|editor, cx| { - editor.handle_input(" b=", cx); - editor.handle_input("{", cx); - editor.handle_input("c", cx); - editor.handle_input("(", cx); - }); - cx.assert_editor_state( - &r#" -
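The autoclose tests spell the rule out in their comments: an opening bracket is only auto-closed when the pair allows closing and the character after the cursor is whitespace, end of line, or listed in the language's autoclose_before set. A small sketch of that predicate (not the real implementation):

// should_autoclose(true, Some('a'), "})]") == false, which is why typing "{"
// before "a b" above inserts only the opening brace.
fn should_autoclose(pair_closes: bool, next_char: Option<char>, autoclose_before: &str) -> bool {
    pair_closes
        && match next_char {
            None => true,
            Some(c) => c.is_whitespace() || autoclose_before.contains(c),
        }
}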
- - - "# - .unindent(), - ); - - // Brackets that were already autoclosed are skipped. - cx.update_editor(|editor, cx| { - editor.handle_input(")", cx); - editor.handle_input("d", cx); - editor.handle_input("}", cx); - }); - cx.assert_editor_state( - &r#" - - - - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - editor.handle_input(">", cx); - }); - cx.assert_editor_state( - &r#" - ˇ - - ˇ - "# - .unindent(), - ); - - // Reset - cx.set_state( - &r#" - ˇ - - ˇ - "# - .unindent(), - ); - - cx.update_editor(|editor, cx| { - editor.handle_input("<", cx); - }); - cx.assert_editor_state( - &r#" - <ˇ> - - <ˇ> - "# - .unindent(), - ); - - // When backspacing, the closing angle brackets are removed. - cx.update_editor(|editor, cx| { - editor.backspace(&Backspace, cx); - }); - cx.assert_editor_state( - &r#" - ˇ - - ˇ - "# - .unindent(), - ); - - // Block comments autoclose in JavaScript, but not HTML. - cx.update_editor(|editor, cx| { - editor.handle_input("/", cx); - editor.handle_input("*", cx); - }); - cx.assert_editor_state( - &r#" - /*ˇ - - /*ˇ - "# - .unindent(), - ); - } - - #[gpui::test] - async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new(Language::new( - LanguageConfig { - brackets: vec![BracketPair { - start: "{".to_string(), - end: "}".to_string(), - close: true, - newline: true, - }], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - )); - - let text = r#" - a - b - c - "# - .unindent(); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); - view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) - .await; - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), - DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1), - ]) - }); - - view.handle_input("{", cx); - view.handle_input("{", cx); - view.handle_input("{", cx); - assert_eq!( - view.text(cx), - " - {{{a}}} - {{{b}}} - {{{c}}} - " - .unindent() - ); - assert_eq!( - view.selections.display_ranges(cx), - [ - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 4), - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 4), - DisplayPoint::new(2, 3)..DisplayPoint::new(2, 4) - ] - ); - - view.undo(&Undo, cx); - assert_eq!( - view.text(cx), - " - a - b - c - " - .unindent() - ); - assert_eq!( - view.selections.display_ranges(cx), - [ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), - DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1) - ] - ); - }); - } - - #[gpui::test] - async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new(Language::new( - LanguageConfig { - brackets: vec![BracketPair { - start: "{".to_string(), - end: "}".to_string(), - close: true, - newline: true, - }], - autoclose_before: "}".to_string(), - ..Default::default() - }, - Some(tree_sitter_rust::language()), - )); - - let text = r#" - a - b - c - "# - .unindent(); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - editor - 
.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) - .await; - - editor.update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| { - s.select_ranges([ - Point::new(0, 1)..Point::new(0, 1), - Point::new(1, 1)..Point::new(1, 1), - Point::new(2, 1)..Point::new(2, 1), - ]) - }); - - editor.handle_input("{", cx); - editor.handle_input("{", cx); - editor.handle_input("_", cx); - assert_eq!( - editor.text(cx), - " - a{{_}} - b{{_}} - c{{_}} - " - .unindent() - ); - assert_eq!( - editor.selections.ranges::(cx), - [ - Point::new(0, 4)..Point::new(0, 4), - Point::new(1, 4)..Point::new(1, 4), - Point::new(2, 4)..Point::new(2, 4) - ] - ); - - editor.backspace(&Default::default(), cx); - editor.backspace(&Default::default(), cx); - assert_eq!( - editor.text(cx), - " - a{} - b{} - c{} - " - .unindent() - ); - assert_eq!( - editor.selections.ranges::(cx), - [ - Point::new(0, 2)..Point::new(0, 2), - Point::new(1, 2)..Point::new(1, 2), - Point::new(2, 2)..Point::new(2, 2) - ] - ); - - editor.delete_to_previous_word_start(&Default::default(), cx); - assert_eq!( - editor.text(cx), - " - a - b - c - " - .unindent() - ); - assert_eq!( - editor.selections.ranges::(cx), - [ - Point::new(0, 1)..Point::new(0, 1), - Point::new(1, 1)..Point::new(1, 1), - Point::new(2, 1)..Point::new(2, 1) - ] - ); - }); - } - - #[gpui::test] - async fn test_snippets(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - - let (text, insertion_ranges) = marked_text_ranges( - indoc! {" - a.ˇ b - a.ˇ b - a.ˇ b - "}, - false, - ); - - let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - - editor.update(cx, |editor, cx| { - let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap(); - - editor - .insert_snippet(&insertion_ranges, snippet, cx) - .unwrap(); - - fn assert(editor: &mut Editor, cx: &mut ViewContext, marked_text: &str) { - let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); - assert_eq!(editor.text(cx), expected_text); - assert_eq!(editor.selections.ranges::(cx), selection_ranges); - } - - assert( - editor, - cx, - indoc! {" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, - ); - - // Can't move earlier than the first tab stop - assert!(!editor.move_to_prev_snippet_tabstop(cx)); - assert( - editor, - cx, - indoc! {" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, - ); - - assert!(editor.move_to_next_snippet_tabstop(cx)); - assert( - editor, - cx, - indoc! {" - a.f(one, «two», three) b - a.f(one, «two», three) b - a.f(one, «two», three) b - "}, - ); - - editor.move_to_prev_snippet_tabstop(cx); - assert( - editor, - cx, - indoc! {" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, - ); - - assert!(editor.move_to_next_snippet_tabstop(cx)); - assert( - editor, - cx, - indoc! {" - a.f(one, «two», three) b - a.f(one, «two», three) b - a.f(one, «two», three) b - "}, - ); - assert!(editor.move_to_next_snippet_tabstop(cx)); - assert( - editor, - cx, - indoc! {" - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - "}, - ); - - // As soon as the last tab stop is reached, snippet state is gone - editor.move_to_prev_snippet_tabstop(cx); - assert( - editor, - cx, - indoc! 
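The snippet test relies on tabstops being grouped by index: in "f(${1:one}, ${2:two}, ${1:three})$0", tabstop 1 owns two placeholder ranges, so both "one" and "three" get a cursor at the same time, and $0 marks the final resting position. A hypothetical shape for that parsed data (not the real Snippet type), with the ranges hand-computed for this exact example:

use std::ops::Range;

// Hypothetical structure for illustration: the expanded text plus the ranges
// selected while each tabstop is active (the last entry is $0).
struct ParsedSnippet {
    text: String,
    tabstops: Vec<Vec<Range<usize>>>,
}

fn example() -> ParsedSnippet {
    // Expansion of "f(${1:one}, ${2:two}, ${1:three})$0"
    ParsedSnippet {
        text: "f(one, two, three)".to_string(),
        tabstops: vec![
            vec![2..5, 12..17], // $1: "one" and "three" selected together
            vec![7..10],        // $2: "two"
            vec![18..18],       // $0: final cursor just past ")"
        ],
    }
}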
{" - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - "}, - ); - }); - } - - #[gpui::test] - async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { - cx.foreground().forbid_parking(); - - let mut language = Language::new( - LanguageConfig { - name: "Rust".into(), - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ); - let mut fake_servers = language - .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - document_formatting_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - })) - .await; - - let fs = FakeFs::new(cx.background()); - fs.insert_file("/file.rs", Default::default()).await; - - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; - project.update(cx, |project, _| project.languages().add(Arc::new(language))); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) - .await - .unwrap(); - - cx.foreground().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); - - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - assert!(cx.read(|cx| editor.is_dirty(cx))); - - let save = cx.update(|cx| editor.save(project.clone(), cx)); - fake_server - .handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - assert_eq!(params.options.tab_size, 4); - Ok(Some(vec![lsp::TextEdit::new( - lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), - ", ".to_string(), - )])) - }) - .next() - .await; - cx.foreground().start_waiting(); - save.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one, two\nthree\n" - ); - assert!(!cx.read(|cx| editor.is_dirty(cx))); - - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - assert!(cx.read(|cx| editor.is_dirty(cx))); - - // Ensure we can still save even if formatting hangs. 
- fake_server.handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - futures::future::pending::<()>().await; - unreachable!() - }); - let save = cx.update(|cx| editor.save(project.clone(), cx)); - cx.foreground().advance_clock(super::FORMAT_TIMEOUT); - cx.foreground().start_waiting(); - save.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one\ntwo\nthree\n" - ); - assert!(!cx.read(|cx| editor.is_dirty(cx))); - - // Set rust language override and assert overriden tabsize is sent to language server - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.language_overrides.insert( - "Rust".into(), - EditorSettings { - tab_size: Some(8.try_into().unwrap()), - ..Default::default() - }, - ); - }) - }); - - let save = cx.update(|cx| editor.save(project.clone(), cx)); - fake_server - .handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - assert_eq!(params.options.tab_size, 8); - Ok(Some(vec![])) - }) - .next() - .await; - cx.foreground().start_waiting(); - save.await.unwrap(); - } - - #[gpui::test] - async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { - cx.foreground().forbid_parking(); - - let mut language = Language::new( - LanguageConfig { - name: "Rust".into(), - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ); - let mut fake_servers = language - .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - document_range_formatting_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - })) - .await; - - let fs = FakeFs::new(cx.background()); - fs.insert_file("/file.rs", Default::default()).await; - - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; - project.update(cx, |project, _| project.languages().add(Arc::new(language))); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) - .await - .unwrap(); - - cx.foreground().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); - - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - assert!(cx.read(|cx| editor.is_dirty(cx))); - - let save = cx.update(|cx| editor.save(project.clone(), cx)); - fake_server - .handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - assert_eq!(params.options.tab_size, 4); - Ok(Some(vec![lsp::TextEdit::new( - lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), - ", ".to_string(), - )])) - }) - .next() - .await; - cx.foreground().start_waiting(); - save.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one, two\nthree\n" - ); - assert!(!cx.read(|cx| editor.is_dirty(cx))); - - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - assert!(cx.read(|cx| editor.is_dirty(cx))); - - // Ensure we can still save even if formatting hangs. 
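Both format-on-save tests above guard against a hung language server: after FORMAT_TIMEOUT elapses, the save proceeds with the unformatted text. A generic illustration of that shape, assuming a tokio runtime purely for the timeout primitive (the editor uses its own executor and timeout constant):

use std::time::Duration;

// Sketch only: cap how long formatting may run, then save regardless.
async fn format_then_save<F, S>(format: F, save: S, format_timeout: Duration)
where
    F: std::future::Future<Output = ()>,
    S: std::future::Future<Output = ()>,
{
    // If the formatter never responds, give up after the timeout instead of
    // blocking the save forever.
    let _ = tokio::time::timeout(format_timeout, format).await;
    save.await;
}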
- fake_server.handle_request::( - move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - futures::future::pending::<()>().await; - unreachable!() - }, - ); - let save = cx.update(|cx| editor.save(project.clone(), cx)); - cx.foreground().advance_clock(super::FORMAT_TIMEOUT); - cx.foreground().start_waiting(); - save.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one\ntwo\nthree\n" - ); - assert!(!cx.read(|cx| editor.is_dirty(cx))); - - // Set rust language override and assert overriden tabsize is sent to language server - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.language_overrides.insert( - "Rust".into(), - EditorSettings { - tab_size: Some(8.try_into().unwrap()), - ..Default::default() - }, - ); - }) - }); - - let save = cx.update(|cx| editor.save(project.clone(), cx)); - fake_server - .handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - assert_eq!(params.options.tab_size, 8); - Ok(Some(vec![])) - }) - .next() - .await; - cx.foreground().start_waiting(); - save.await.unwrap(); - } - - #[gpui::test] - async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { - cx.foreground().forbid_parking(); - - let mut language = Language::new( - LanguageConfig { - name: "Rust".into(), - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ); - let mut fake_servers = language - .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - document_formatting_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - })) - .await; - - let fs = FakeFs::new(cx.background()); - fs.insert_file("/file.rs", Default::default()).await; - - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; - project.update(cx, |project, _| project.languages().add(Arc::new(language))); - let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) - .await - .unwrap(); - - cx.foreground().start_waiting(); - let fake_server = fake_servers.next().await.unwrap(); - - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - - let format = editor.update(cx, |editor, cx| editor.perform_format(project.clone(), cx)); - fake_server - .handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - assert_eq!(params.options.tab_size, 4); - Ok(Some(vec![lsp::TextEdit::new( - lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), - ", ".to_string(), - )])) - }) - .next() - .await; - cx.foreground().start_waiting(); - format.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one, two\nthree\n" - ); - - editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); - // Ensure we don't lock if formatting hangs. 
- fake_server.handle_request::(move |params, _| async move { - assert_eq!( - params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() - ); - futures::future::pending::<()>().await; - unreachable!() - }); - let format = editor.update(cx, |editor, cx| editor.perform_format(project, cx)); - cx.foreground().advance_clock(super::FORMAT_TIMEOUT); - cx.foreground().start_waiting(); - format.await.unwrap(); - assert_eq!( - editor.read_with(cx, |editor, cx| editor.text(cx)), - "one\ntwo\nthree\n" - ); - } - - #[gpui::test] - async fn test_completion(cx: &mut gpui::TestAppContext) { - let mut cx = EditorLspTestContext::new_rust( - lsp::ServerCapabilities { - completion_provider: Some(lsp::CompletionOptions { - trigger_characters: Some(vec![".".to_string(), ":".to_string()]), - ..Default::default() - }), - ..Default::default() - }, - cx, - ) - .await; - - cx.set_state(indoc! {" - oneˇ - two - three - "}); - cx.simulate_keystroke("."); - handle_completion_request( - &mut cx, - indoc! {" - one.|<> - two - three - "}, - vec!["first_completion", "second_completion"], - ) - .await; - cx.condition(|editor, _| editor.context_menu_visible()) - .await; - let apply_additional_edits = cx.update_editor(|editor, cx| { - editor.move_down(&MoveDown, cx); - editor - .confirm_completion(&ConfirmCompletion::default(), cx) - .unwrap() - }); - cx.assert_editor_state(indoc! {" - one.second_completionˇ - two - three - "}); - - handle_resolve_completion_request( - &mut cx, - Some(( - indoc! {" - one.second_completion - two - threeˇ - "}, - "\nadditional edit", - )), - ) - .await; - apply_additional_edits.await.unwrap(); - cx.assert_editor_state(indoc! {" - one.second_completionˇ - two - three - additional edit - "}); - - cx.set_state(indoc! {" - one.second_completion - twoˇ - threeˇ - additional edit - "}); - cx.simulate_keystroke(" "); - assert!(cx.editor(|e, _| e.context_menu.is_none())); - cx.simulate_keystroke("s"); - assert!(cx.editor(|e, _| e.context_menu.is_none())); - - cx.assert_editor_state(indoc! {" - one.second_completion - two sˇ - three sˇ - additional edit - "}); - // - handle_completion_request( - &mut cx, - indoc! {" - one.second_completion - two s - three - additional edit - "}, - vec!["fourth_completion", "fifth_completion", "sixth_completion"], - ) - .await; - cx.condition(|editor, _| editor.context_menu_visible()) - .await; - - cx.simulate_keystroke("i"); - - handle_completion_request( - &mut cx, - indoc! {" - one.second_completion - two si - three - additional edit - "}, - vec!["fourth_completion", "fifth_completion", "sixth_completion"], - ) - .await; - cx.condition(|editor, _| editor.context_menu_visible()) - .await; - - let apply_additional_edits = cx.update_editor(|editor, cx| { - editor - .confirm_completion(&ConfirmCompletion::default(), cx) - .unwrap() - }); - cx.assert_editor_state(indoc! 
{" - one.second_completion - two sixth_completionˇ - three sixth_completionˇ - additional edit - "}); - - handle_resolve_completion_request(&mut cx, None).await; - apply_additional_edits.await.unwrap(); - - cx.update(|cx| { - cx.update_global::(|settings, _| { - settings.show_completions_on_input = false; - }) - }); - cx.set_state("editorˇ"); - cx.simulate_keystroke("."); - assert!(cx.editor(|e, _| e.context_menu.is_none())); - cx.simulate_keystroke("c"); - cx.simulate_keystroke("l"); - cx.simulate_keystroke("o"); - cx.assert_editor_state("editor.cloˇ"); - assert!(cx.editor(|e, _| e.context_menu.is_none())); - cx.update_editor(|editor, cx| { - editor.show_completions(&ShowCompletions, cx); - }); - handle_completion_request(&mut cx, "editor.", vec!["close", "clobber"]).await; - cx.condition(|editor, _| editor.context_menu_visible()) - .await; - let apply_additional_edits = cx.update_editor(|editor, cx| { - editor - .confirm_completion(&ConfirmCompletion::default(), cx) - .unwrap() - }); - cx.assert_editor_state("editor.closeˇ"); - handle_resolve_completion_request(&mut cx, None).await; - apply_additional_edits.await.unwrap(); - - // Handle completion request passing a marked string specifying where the completion - // should be triggered from using '|' character, what range should be replaced, and what completions - // should be returned using '<' and '>' to delimit the range - async fn handle_completion_request<'a>( - cx: &mut EditorLspTestContext<'a>, - marked_string: &str, - completions: Vec<&'static str>, - ) { - let complete_from_marker: TextRangeMarker = '|'.into(); - let replace_range_marker: TextRangeMarker = ('<', '>').into(); - let (_, mut marked_ranges) = marked_text_ranges_by( - marked_string, - vec![complete_from_marker.clone(), replace_range_marker.clone()], - ); - - let complete_from_position = - cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start); - let replace_range = - cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); - - cx.handle_request::(move |url, params, _| { - let completions = completions.clone(); - async move { - assert_eq!(params.text_document_position.text_document.uri, url.clone()); - assert_eq!( - params.text_document_position.position, - complete_from_position - ); - Ok(Some(lsp::CompletionResponse::Array( - completions - .iter() - .map(|completion_text| lsp::CompletionItem { - label: completion_text.to_string(), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - range: replace_range, - new_text: completion_text.to_string(), - })), - ..Default::default() - }) - .collect(), - ))) - } - }) - .next() - .await; - } - - async fn handle_resolve_completion_request<'a>( - cx: &mut EditorLspTestContext<'a>, - edit: Option<(&'static str, &'static str)>, - ) { - let edit = edit.map(|(marked_string, new_text)| { - let (_, marked_ranges) = marked_text_ranges(marked_string, false); - let replace_range = cx.to_lsp_range(marked_ranges[0].clone()); - vec![lsp::TextEdit::new(replace_range, new_text.to_string())] - }); - - cx.handle_request::(move |_, _, _| { - let edit = edit.clone(); - async move { - Ok(lsp::CompletionItem { - additional_text_edits: edit, - ..Default::default() - }) - } - }) - .next() - .await; - } - } - - #[gpui::test] - async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new(Language::new( - LanguageConfig { - line_comment: Some("// ".into()), - ..Default::default() - }, - 
Some(tree_sitter_rust::language()), - )); - - let text = " - fn a() { - //b(); - // c(); - // d(); - } - " - .unindent(); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); - - view.update(cx, |editor, cx| { - // If multiple selections intersect a line, the line is only - // toggled once. - editor.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(1, 3)..DisplayPoint::new(2, 3), - DisplayPoint::new(3, 5)..DisplayPoint::new(3, 6), - ]) - }); - editor.toggle_comments(&ToggleComments, cx); - assert_eq!( - editor.text(cx), - " - fn a() { - b(); - c(); - d(); - } - " - .unindent() - ); - - // The comment prefix is inserted at the same column for every line - // in a selection. - editor.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(3, 6)]) - }); - editor.toggle_comments(&ToggleComments, cx); - assert_eq!( - editor.text(cx), - " - fn a() { - // b(); - // c(); - // d(); - } - " - .unindent() - ); - - // If a selection ends at the beginning of a line, that line is not toggled. - editor.change_selections(None, cx, |s| { - s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(3, 0)]) - }); - editor.toggle_comments(&ToggleComments, cx); - assert_eq!( - editor.text(cx), - " - fn a() { - // b(); - c(); - // d(); - } - " - .unindent() - ); - }); - } - - #[gpui::test] - async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { - let mut cx = EditorTestContext::new(cx); - - let html_language = Arc::new( - Language::new( - LanguageConfig { - name: "HTML".into(), - block_comment: Some(("".into())), - ..Default::default() - }, - Some(tree_sitter_html::language()), - ) - .with_injection_query( - r#" - (script_element - (raw_text) @content - (#set! "language" "javascript")) - "#, - ) - .unwrap(), - ); - - let javascript_language = Arc::new(Language::new( - LanguageConfig { - name: "JavaScript".into(), - line_comment: Some("// ".into()), - ..Default::default() - }, - Some(tree_sitter_javascript::language()), - )); - - let registry = Arc::new(LanguageRegistry::test()); - registry.add(html_language.clone()); - registry.add(javascript_language.clone()); - - cx.update_buffer(|buffer, cx| { - buffer.set_language_registry(registry); - buffer.set_language(Some(html_language), cx); - }); - - // Toggle comments for empty selections - cx.set_state( - &r#" -
-            <p>A</p>ˇ
-            <p>B</p>ˇ
-            <p>C</p>ˇ
-            "#
-            .unindent(),
-        );
-        cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
-        cx.assert_editor_state(
-            &r#"
-            <!-- <p>A</p>ˇ -->
-            <!-- <p>B</p>ˇ -->
-            <!-- <p>C</p>ˇ -->
-            "#
-            .unindent(),
-        );
-        cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
-        cx.assert_editor_state(
-            &r#"
-            <p>A</p>ˇ
-            <p>B</p>ˇ
-            <p>C</p>ˇ
-            "#
-            .unindent(),
-        );
-
-        // Toggle comments for mixture of empty and non-empty selections, where
-        // multiple selections occupy a given line.
-        cx.set_state(
-            &r#"
-            <p>A«</p>
-            <p>ˇ»B</p>ˇ
-            <p>C«</p>
-            <p>ˇ»D</p>ˇ
-            "#
-            .unindent(),
-        );
-
-        cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
-        cx.assert_editor_state(
-            &r#"
-            <!-- <p>A«</p>
-            <p>ˇ»B</p>ˇ -->
-            <!-- <p>C«</p>
-            <p>ˇ»D</p>ˇ -->
-            "#
-            .unindent(),
-        );
-        cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
-        cx.assert_editor_state(
-            &r#"
-            <p>A«</p>
-            <p>ˇ»B</p>ˇ
-            <p>C«</p>
-            <p>ˇ»D</p>
ˇ - "# - .unindent(), - ); - - // Toggle comments when different languages are active for different - // selections. - cx.set_state( - &r#" - ˇ - "# - .unindent(), - ); - cx.foreground().run_until_parked(); - cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); - cx.assert_editor_state( - &r#" - - // ˇvar x = new Y(); - - "# - .unindent(), - ); - } - - #[gpui::test] - fn test_editing_disjoint_excerpts(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); - let multibuffer = cx.add_model(|cx| { - let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpts( - buffer.clone(), - [ - ExcerptRange { - context: Point::new(0, 0)..Point::new(0, 4), - primary: None, - }, - ExcerptRange { - context: Point::new(1, 0)..Point::new(1, 4), - primary: None, - }, - ], - cx, - ); - multibuffer - }); - - assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb"); - - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx)); - view.update(cx, |view, cx| { - assert_eq!(view.text(cx), "aaaa\nbbbb"); - view.change_selections(None, cx, |s| { - s.select_ranges([ - Point::new(0, 0)..Point::new(0, 0), - Point::new(1, 0)..Point::new(1, 0), - ]) - }); - - view.handle_input("X", cx); - assert_eq!(view.text(cx), "Xaaaa\nXbbbb"); - assert_eq!( - view.selections.ranges(cx), - [ - Point::new(0, 1)..Point::new(0, 1), - Point::new(1, 1)..Point::new(1, 1), - ] - ) - }); - } - - #[gpui::test] - fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let markers = vec![('[', ']').into(), ('(', ')').into()]; - let (initial_text, mut excerpt_ranges) = marked_text_ranges_by( - indoc! {" - [aaaa - (bbbb] - cccc)", - }, - markers.clone(), - ); - let excerpt_ranges = markers.into_iter().map(|marker| { - let context = excerpt_ranges.remove(&marker).unwrap()[0].clone(); - ExcerptRange { - context, - primary: None, - } - }); - let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx)); - let multibuffer = cx.add_model(|cx| { - let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpts(buffer, excerpt_ranges, cx); - multibuffer - }); - - let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx)); - view.update(cx, |view, cx| { - let (expected_text, selection_ranges) = marked_text_ranges( - indoc! {" - aaaa - bˇbbb - bˇbbˇb - cccc" - }, - true, - ); - assert_eq!(view.text(cx), expected_text); - view.change_selections(None, cx, |s| s.select_ranges(selection_ranges)); - - view.handle_input("X", cx); - - let (expected_text, expected_selections) = marked_text_ranges( - indoc! {" - aaaa - bXˇbbXb - bXˇbbXˇb - cccc" - }, - false, - ); - assert_eq!(view.text(cx), expected_text); - assert_eq!(view.selections.ranges(cx), expected_selections); - - view.newline(&Newline, cx); - let (expected_text, expected_selections) = marked_text_ranges( - indoc! 
{" - aaaa - bX - ˇbbX - b - bX - ˇbbX - ˇb - cccc" - }, - false, - ); - assert_eq!(view.text(cx), expected_text); - assert_eq!(view.selections.ranges(cx), expected_selections); - }); - } - - #[gpui::test] - fn test_refresh_selections(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); - let mut excerpt1_id = None; - let multibuffer = cx.add_model(|cx| { - let mut multibuffer = MultiBuffer::new(0); - excerpt1_id = multibuffer - .push_excerpts( - buffer.clone(), - [ - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 4), - primary: None, - }, - ExcerptRange { - context: Point::new(1, 0)..Point::new(2, 4), - primary: None, - }, - ], - cx, - ) - .into_iter() - .next(); - multibuffer - }); - assert_eq!( - multibuffer.read(cx).read(cx).text(), - "aaaa\nbbbb\nbbbb\ncccc" - ); - let (_, editor) = cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(multibuffer.clone(), cx); - let snapshot = editor.snapshot(cx); - editor.change_selections(None, cx, |s| { - s.select_ranges([Point::new(1, 3)..Point::new(1, 3)]) - }); - editor.begin_selection(Point::new(2, 1).to_display_point(&snapshot), true, 1, cx); - assert_eq!( - editor.selections.ranges(cx), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - editor - }); - - // Refreshing selections is a no-op when excerpts haven't changed. - editor.update(cx, |editor, cx| { - editor.change_selections(None, cx, |s| { - s.refresh(); - }); - assert_eq!( - editor.selections.ranges(cx), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx); - }); - editor.update(cx, |editor, cx| { - // Removing an excerpt causes the first selection to become degenerate. - assert_eq!( - editor.selections.ranges(cx), - [ - Point::new(0, 0)..Point::new(0, 0), - Point::new(0, 1)..Point::new(0, 1) - ] - ); - - // Refreshing selections will relocate the first selection to the original buffer - // location. 
- editor.change_selections(None, cx, |s| { - s.refresh(); - }); - assert_eq!( - editor.selections.ranges(cx), - [ - Point::new(0, 1)..Point::new(0, 1), - Point::new(0, 3)..Point::new(0, 3) - ] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); - } - - #[gpui::test] - fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppContext) { - cx.set_global(Settings::test(cx)); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); - let mut excerpt1_id = None; - let multibuffer = cx.add_model(|cx| { - let mut multibuffer = MultiBuffer::new(0); - excerpt1_id = multibuffer - .push_excerpts( - buffer.clone(), - [ - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 4), - primary: None, - }, - ExcerptRange { - context: Point::new(1, 0)..Point::new(2, 4), - primary: None, - }, - ], - cx, - ) - .into_iter() - .next(); - multibuffer - }); - assert_eq!( - multibuffer.read(cx).read(cx).text(), - "aaaa\nbbbb\nbbbb\ncccc" - ); - let (_, editor) = cx.add_window(Default::default(), |cx| { - let mut editor = build_editor(multibuffer.clone(), cx); - let snapshot = editor.snapshot(cx); - editor.begin_selection(Point::new(1, 3).to_display_point(&snapshot), false, 1, cx); - assert_eq!( - editor.selections.ranges(cx), - [Point::new(1, 3)..Point::new(1, 3)] - ); - editor - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx); - }); - editor.update(cx, |editor, cx| { - assert_eq!( - editor.selections.ranges(cx), - [Point::new(0, 0)..Point::new(0, 0)] - ); - - // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. - editor.change_selections(None, cx, |s| { - s.refresh(); - }); - assert_eq!( - editor.selections.ranges(cx), - [Point::new(0, 3)..Point::new(0, 3)] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); - } - - #[gpui::test] - async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) { - cx.update(|cx| cx.set_global(Settings::test(cx))); - let language = Arc::new( - Language::new( - LanguageConfig { - brackets: vec![ - BracketPair { - start: "{".to_string(), - end: "}".to_string(), - close: true, - newline: true, - }, - BracketPair { - start: "/* ".to_string(), - end: " */".to_string(), - close: true, - newline: true, - }, - ], - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ) - .with_indents_query("") - .unwrap(), - ); - - let text = concat!( - "{ }\n", // Suppress rustfmt - " x\n", // - " /* */\n", // - "x\n", // - "{{} }\n", // - ); - - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); - let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); - view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) - .await; - - view.update(cx, |view, cx| { - view.change_selections(None, cx, |s| { - s.select_display_ranges([ - DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), - DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), - DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4), - ]) - }); - view.newline(&Newline, cx); - - assert_eq!( - view.buffer().read(cx).read(cx).text(), - concat!( - "{ \n", // Suppress rustfmt - "\n", // - "}\n", // - " x\n", // - " /* \n", // - " \n", // - " */\n", // - "x\n", // - "{{} \n", // - "}\n", // - ) - ); - }); - } - - #[gpui::test] - fn test_highlighted_ranges(cx: &mut gpui::MutableAppContext) { - let buffer = MultiBuffer::build_simple(&sample_text(16, 
8, 'a'), cx); - - cx.set_global(Settings::test(cx)); - let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - - editor.update(cx, |editor, cx| { - struct Type1; - struct Type2; - - let buffer = buffer.read(cx).snapshot(cx); - - let anchor_range = |range: Range| { - buffer.anchor_after(range.start)..buffer.anchor_after(range.end) - }; - - editor.highlight_background::( - vec![ - anchor_range(Point::new(2, 1)..Point::new(2, 3)), - anchor_range(Point::new(4, 2)..Point::new(4, 4)), - anchor_range(Point::new(6, 3)..Point::new(6, 5)), - anchor_range(Point::new(8, 4)..Point::new(8, 6)), - ], - |_| Color::red(), - cx, - ); - editor.highlight_background::( - vec![ - anchor_range(Point::new(3, 2)..Point::new(3, 5)), - anchor_range(Point::new(5, 3)..Point::new(5, 6)), - anchor_range(Point::new(7, 4)..Point::new(7, 7)), - anchor_range(Point::new(9, 5)..Point::new(9, 8)), - ], - |_| Color::green(), - cx, - ); - - let snapshot = editor.snapshot(cx); - let mut highlighted_ranges = editor.background_highlights_in_range( - anchor_range(Point::new(3, 4)..Point::new(7, 4)), - &snapshot, - cx.global::().theme.as_ref(), - ); - // Enforce a consistent ordering based on color without relying on the ordering of the - // highlight's `TypeId` which is non-deterministic. - highlighted_ranges.sort_unstable_by_key(|(_, color)| *color); - assert_eq!( - highlighted_ranges, - &[ - ( - DisplayPoint::new(3, 2)..DisplayPoint::new(3, 5), - Color::green(), - ), - ( - DisplayPoint::new(5, 3)..DisplayPoint::new(5, 6), - Color::green(), - ), - ( - DisplayPoint::new(4, 2)..DisplayPoint::new(4, 4), - Color::red(), - ), - ( - DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5), - Color::red(), - ), - ] - ); - assert_eq!( - editor.background_highlights_in_range( - anchor_range(Point::new(5, 6)..Point::new(6, 4)), - &snapshot, - cx.global::().theme.as_ref(), - ), - &[( - DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5), - Color::red(), - )] - ); - }); - } - - #[gpui::test] - fn test_following(cx: &mut gpui::MutableAppContext) { - let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx); - - cx.set_global(Settings::test(cx)); - - let (_, leader) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); - let (_, follower) = cx.add_window( - WindowOptions { - bounds: WindowBounds::Fixed(RectF::from_points(vec2f(0., 0.), vec2f(10., 80.))), - ..Default::default() - }, - |cx| build_editor(buffer.clone(), cx), - ); - - let pending_update = Rc::new(RefCell::new(None)); - follower.update(cx, { - let update = pending_update.clone(); - |_, cx| { - cx.subscribe(&leader, move |_, leader, event, cx| { - leader - .read(cx) - .add_event_to_update_proto(event, &mut *update.borrow_mut(), cx); - }) - .detach(); - } - }); - - // Update the selections only - leader.update(cx, |leader, cx| { - leader.change_selections(None, cx, |s| s.select_ranges([1..1])); - }); - follower.update(cx, |follower, cx| { - follower - .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) - .unwrap(); - }); - assert_eq!(follower.read(cx).selections.ranges(cx), vec![1..1]); - - // Update the scroll position only - leader.update(cx, |leader, cx| { - leader.set_scroll_position(vec2f(1.5, 3.5), cx); - }); - follower.update(cx, |follower, cx| { - follower - .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) - .unwrap(); - }); - assert_eq!( - follower.update(cx, |follower, cx| follower.scroll_position(cx)), - vec2f(1.5, 3.5) - ); - - // Update the selections and scroll position - 
leader.update(cx, |leader, cx| { - leader.change_selections(None, cx, |s| s.select_ranges([0..0])); - leader.request_autoscroll(Autoscroll::Newest, cx); - leader.set_scroll_position(vec2f(1.5, 3.5), cx); - }); - follower.update(cx, |follower, cx| { - let initial_scroll_position = follower.scroll_position(cx); - follower - .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) - .unwrap(); - assert_eq!(follower.scroll_position(cx), initial_scroll_position); - assert!(follower.autoscroll_request.is_some()); - }); - assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0]); - - // Creating a pending selection that precedes another selection - leader.update(cx, |leader, cx| { - leader.change_selections(None, cx, |s| s.select_ranges([1..1])); - leader.begin_selection(DisplayPoint::new(0, 0), true, 1, cx); - }); - follower.update(cx, |follower, cx| { - follower - .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) - .unwrap(); - }); - assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0, 1..1]); - - // Extend the pending selection so that it surrounds another selection - leader.update(cx, |leader, cx| { - leader.extend_selection(DisplayPoint::new(0, 2), 1, cx); - }); - follower.update(cx, |follower, cx| { - follower - .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) - .unwrap(); - }); - assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..2]); - } - - #[test] - fn test_combine_syntax_and_fuzzy_match_highlights() { - let string = "abcdefghijklmnop"; - let syntax_ranges = [ - ( - 0..3, - HighlightStyle { - color: Some(Color::red()), - ..Default::default() - }, - ), - ( - 4..8, - HighlightStyle { - color: Some(Color::green()), - ..Default::default() - }, - ), - ]; - let match_indices = [4, 6, 7, 8]; - assert_eq!( - combine_syntax_and_fuzzy_match_highlights( - string, - Default::default(), - syntax_ranges.into_iter(), - &match_indices, - ), - &[ - ( - 0..3, - HighlightStyle { - color: Some(Color::red()), - ..Default::default() - }, - ), - ( - 4..5, - HighlightStyle { - color: Some(Color::green()), - weight: Some(fonts::Weight::BOLD), - ..Default::default() - }, - ), - ( - 5..6, - HighlightStyle { - color: Some(Color::green()), - ..Default::default() - }, - ), - ( - 6..8, - HighlightStyle { - color: Some(Color::green()), - weight: Some(fonts::Weight::BOLD), - ..Default::default() - }, - ), - ( - 8..9, - HighlightStyle { - weight: Some(fonts::Weight::BOLD), - ..Default::default() - }, - ), - ] - ); - } - - fn empty_range(row: usize, column: usize) -> Range { - let point = DisplayPoint::new(row as u32, column as u32); - point..point - } - - fn assert_selection_ranges(marked_text: &str, view: &mut Editor, cx: &mut ViewContext) { - let (text, ranges) = marked_text_ranges(marked_text, true); - assert_eq!(view.text(cx), text); - assert_eq!( - view.selections.ranges(cx), - ranges, - "Assert selections are {}", - marked_text - ); - } -} - trait RangeExt { fn sorted(&self) -> Range; fn to_inclusive(&self) -> RangeInclusive; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs new file mode 100644 index 0000000000..c2840cc17b --- /dev/null +++ b/crates/editor/src/editor_tests.rs @@ -0,0 +1,4881 @@ +use crate::test::{ + assert_text_with_selections, build_editor, select_ranges, EditorLspTestContext, + EditorTestContext, +}; + +use super::*; +use futures::StreamExt; +use gpui::{ + geometry::rect::RectF, + platform::{WindowBounds, WindowOptions}, +}; +use indoc::indoc; +use language::{FakeLspAdapter, 
LanguageConfig, LanguageRegistry}; +use project::FakeFs; +use settings::EditorSettings; +use std::{cell::RefCell, rc::Rc, time::Instant}; +use text::Point; +use unindent::Unindent; +use util::{ + assert_set_eq, + test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker}, +}; +use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane}; + +#[gpui::test] +fn test_edit_events(cx: &mut MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); + + let events = Rc::new(RefCell::new(Vec::new())); + let (_, editor1) = cx.add_window(Default::default(), { + let events = events.clone(); + |cx| { + cx.subscribe(&cx.handle(), move |_, _, event, _| { + if matches!( + event, + Event::Edited | Event::BufferEdited | Event::DirtyChanged + ) { + events.borrow_mut().push(("editor1", *event)); + } + }) + .detach(); + Editor::for_buffer(buffer.clone(), None, cx) + } + }); + let (_, editor2) = cx.add_window(Default::default(), { + let events = events.clone(); + |cx| { + cx.subscribe(&cx.handle(), move |_, _, event, _| { + if matches!( + event, + Event::Edited | Event::BufferEdited | Event::DirtyChanged + ) { + events.borrow_mut().push(("editor2", *event)); + } + }) + .detach(); + Editor::for_buffer(buffer.clone(), None, cx) + } + }); + assert_eq!(mem::take(&mut *events.borrow_mut()), []); + + // Mutating editor 1 will emit an `Edited` event only for that editor. + editor1.update(cx, |editor, cx| editor.insert("X", cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ("editor1", Event::DirtyChanged), + ("editor2", Event::DirtyChanged) + ] + ); + + // Mutating editor 2 will emit an `Edited` event only for that editor. + editor2.update(cx, |editor, cx| editor.delete(&Delete, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ] + ); + + // Undoing on editor 1 will emit an `Edited` event only for that editor. + editor1.update(cx, |editor, cx| editor.undo(&Undo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ("editor1", Event::DirtyChanged), + ("editor2", Event::DirtyChanged), + ] + ); + + // Redoing on editor 1 will emit an `Edited` event only for that editor. + editor1.update(cx, |editor, cx| editor.redo(&Redo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ("editor1", Event::DirtyChanged), + ("editor2", Event::DirtyChanged), + ] + ); + + // Undoing on editor 2 will emit an `Edited` event only for that editor. + editor2.update(cx, |editor, cx| editor.undo(&Undo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ("editor1", Event::DirtyChanged), + ("editor2", Event::DirtyChanged), + ] + ); + + // Redoing on editor 2 will emit an `Edited` event only for that editor. 
+ editor2.update(cx, |editor, cx| editor.redo(&Redo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", Event::Edited), + ("editor1", Event::BufferEdited), + ("editor2", Event::BufferEdited), + ("editor1", Event::DirtyChanged), + ("editor2", Event::DirtyChanged), + ] + ); + + // No event is emitted when the mutation is a no-op. + editor2.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([0..0])); + + editor.backspace(&Backspace, cx); + }); + assert_eq!(mem::take(&mut *events.borrow_mut()), []); +} + +#[gpui::test] +fn test_undo_redo_with_selection_restoration(cx: &mut MutableAppContext) { + cx.set_global(Settings::test(cx)); + let mut now = Instant::now(); + let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); + let group_interval = buffer.read(cx).transaction_group_interval(); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + editor.update(cx, |editor, cx| { + editor.start_transaction_at(now, cx); + editor.change_selections(None, cx, |s| s.select_ranges([2..4])); + + editor.insert("cd", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cd56"); + assert_eq!(editor.selections.ranges(cx), vec![4..4]); + + editor.start_transaction_at(now, cx); + editor.change_selections(None, cx, |s| s.select_ranges([4..5])); + editor.insert("e", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![5..5]); + + now += group_interval + Duration::from_millis(1); + editor.change_selections(None, cx, |s| s.select_ranges([2..2])); + + // Simulate an edit in another editor + buffer.update(cx, |buffer, cx| { + buffer.start_transaction_at(now, cx); + buffer.edit([(0..1, "a")], None, cx); + buffer.edit([(1..1, "b")], None, cx); + buffer.end_transaction_at(now, cx); + }); + + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selections.ranges(cx), vec![3..3]); + + // Last transaction happened past the group interval in a different editor. + // Undo it individually and don't restore selections. + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![2..2]); + + // First two transactions happened within the group interval in this editor. + // Undo them together and restore selections. + editor.undo(&Undo, cx); + editor.undo(&Undo, cx); // Undo stack is empty here, so this is a no-op. + assert_eq!(editor.text(cx), "123456"); + assert_eq!(editor.selections.ranges(cx), vec![0..0]); + + // Redo the first two transactions together. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![5..5]); + + // Redo the last transaction on its own. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selections.ranges(cx), vec![6..6]); + + // Test empty transactions. + editor.start_transaction_at(now, cx); + editor.end_transaction_at(now, cx); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + }); +} + +#[gpui::test] +fn test_ime_composition(cx: &mut MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = cx.add_model(|cx| { + let mut buffer = language::Buffer::new(0, "abcde", cx); + // Ensure automatic grouping doesn't occur. 
+ buffer.set_group_interval(Duration::ZERO); + buffer + }); + + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(buffer.clone(), cx); + + // Start a new IME composition. + editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); + editor.replace_and_mark_text_in_range(Some(0..1), "á", None, cx); + editor.replace_and_mark_text_in_range(Some(0..1), "ä", None, cx); + assert_eq!(editor.text(cx), "äbcde"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + ); + + // Finalize IME composition. + editor.replace_text_in_range(None, "ā", cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // IME composition edits are grouped and are undone/redone at once. + editor.undo(&Default::default(), cx); + assert_eq!(editor.text(cx), "abcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + editor.redo(&Default::default(), cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition. + editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + ); + + // Undoing during an IME composition cancels it. + editor.undo(&Default::default(), cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition with an invalid marked range, ensuring it gets clipped. + editor.replace_and_mark_text_in_range(Some(4..999), "è", None, cx); + assert_eq!(editor.text(cx), "ābcdè"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(4)..OffsetUtf16(5)]) + ); + + // Finalize IME composition with an invalid replacement range, ensuring it gets clipped. + editor.replace_text_in_range(Some(4..999), "ę", cx); + assert_eq!(editor.text(cx), "ābcdę"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition with multiple cursors. + editor.change_selections(None, cx, |s| { + s.select_ranges([ + OffsetUtf16(1)..OffsetUtf16(1), + OffsetUtf16(3)..OffsetUtf16(3), + OffsetUtf16(5)..OffsetUtf16(5), + ]) + }); + editor.replace_and_mark_text_in_range(Some(4..5), "XYZ", None, cx); + assert_eq!(editor.text(cx), "XYZbXYZdXYZ"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![ + OffsetUtf16(0)..OffsetUtf16(3), + OffsetUtf16(4)..OffsetUtf16(7), + OffsetUtf16(8)..OffsetUtf16(11) + ]) + ); + + // Ensure the newly-marked range gets treated as relative to the previously-marked ranges. + editor.replace_and_mark_text_in_range(Some(1..2), "1", None, cx); + assert_eq!(editor.text(cx), "X1ZbX1ZdX1Z"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![ + OffsetUtf16(1)..OffsetUtf16(2), + OffsetUtf16(5)..OffsetUtf16(6), + OffsetUtf16(9)..OffsetUtf16(10) + ]) + ); + + // Finalize IME composition with multiple cursors. 
+ editor.replace_text_in_range(Some(9..10), "2", cx); + assert_eq!(editor.text(cx), "X2ZbX2ZdX2Z"); + assert_eq!(editor.marked_text_ranges(cx), None); + + editor + }); +} + +#[gpui::test] +fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\nddddddd\n", cx); + let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + editor.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx); + }); + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] + ); + + editor.update(cx, |view, cx| { + view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); + }); + + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] + ); + + editor.update(cx, |view, cx| { + view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); + }); + + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] + ); + + editor.update(cx, |view, cx| { + view.end_selection(cx); + view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); + }); + + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] + ); + + editor.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(3, 3), true, 1, cx); + view.update_selection(DisplayPoint::new(0, 0), 0, Vector2F::zero(), cx); + }); + + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [ + DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1), + DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0) + ] + ); + + editor.update(cx, |view, cx| { + view.end_selection(cx); + }); + + assert_eq!( + editor.update(cx, |view, cx| view.selections.display_ranges(cx)), + [DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0)] + ); +} + +#[gpui::test] +fn test_canceling_pending_selection(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + + view.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] + ); + }); + + view.update(cx, |view, cx| { + view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] + ); + }); + + view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] + ); + }); +} + +#[gpui::test] +fn test_clone(cx: &mut gpui::MutableAppContext) { + let (text, selection_ranges) = marked_text_ranges( + indoc! 
{" + one + two + threeˇ + four + fiveˇ + "}, + true, + ); + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&text, cx); + + let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + + editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone())); + editor.fold_ranges( + [ + Point::new(1, 0)..Point::new(2, 0), + Point::new(3, 0)..Point::new(4, 0), + ], + cx, + ); + }); + + let (_, cloned_editor) = editor.update(cx, |editor, cx| { + cx.add_window(Default::default(), |cx| editor.clone(cx)) + }); + + let snapshot = editor.update(cx, |e, cx| e.snapshot(cx)); + let cloned_snapshot = cloned_editor.update(cx, |e, cx| e.snapshot(cx)); + + assert_eq!( + cloned_editor.update(cx, |e, cx| e.display_text(cx)), + editor.update(cx, |e, cx| e.display_text(cx)) + ); + assert_eq!( + cloned_snapshot + .folds_in_range(0..text.len()) + .collect::>(), + snapshot.folds_in_range(0..text.len()).collect::>(), + ); + assert_set_eq!( + cloned_editor.read(cx).selections.ranges::(cx), + editor.read(cx).selections.ranges(cx) + ); + assert_set_eq!( + cloned_editor.update(cx, |e, cx| e.selections.display_ranges(cx)), + editor.update(cx, |e, cx| e.selections.display_ranges(cx)) + ); +} + +#[gpui::test] +fn test_navigation_history(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + use workspace::Item; + let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx)); + let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx); + + cx.add_view(&pane, |cx| { + let mut editor = build_editor(buffer.clone(), cx); + let handle = cx.handle(); + editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle))); + + fn pop_history(editor: &mut Editor, cx: &mut MutableAppContext) -> Option { + editor.nav_history.as_mut().unwrap().pop_backward(cx) + } + + // Move the cursor a small distance. + // Nothing is added to the navigation history. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)]) + }); + editor.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)]) + }); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a large distance. + // The history can jump back to the previous position. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 3)]) + }); + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(nav_entry.item.id(), cx.view_id()); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a small distance via the mouse. + // Nothing is added to the navigation history. + editor.begin_selection(DisplayPoint::new(5, 0), false, 1, cx); + editor.end_selection(cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a large distance via the mouse. + // The history can jump back to the previous position. 
+ editor.begin_selection(DisplayPoint::new(15, 0), false, 1, cx); + editor.end_selection(cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)] + ); + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(nav_entry.item.id(), cx.view_id()); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Set scroll position to check later + editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx); + let original_scroll_position = editor.scroll_position; + let original_scroll_top_anchor = editor.scroll_top_anchor.clone(); + + // Jump to the end of the document and adjust scroll + editor.move_to_end(&MoveToEnd, cx); + editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx); + assert_ne!(editor.scroll_position, original_scroll_position); + assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor); + + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(editor.scroll_position, original_scroll_position); + assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor); + + // Ensure we don't panic when navigation data contains invalid anchors *and* points. + let mut invalid_anchor = editor.scroll_top_anchor.clone(); + invalid_anchor.text_anchor.buffer_id = Some(999); + let invalid_point = Point::new(9999, 0); + editor.navigate( + Box::new(NavigationData { + cursor_anchor: invalid_anchor.clone(), + cursor_position: invalid_point, + scroll_top_anchor: invalid_anchor, + scroll_top_row: invalid_point.row, + scroll_position: Default::default(), + }), + cx, + ); + assert_eq!( + editor.selections.display_ranges(cx), + &[editor.max_point(cx)..editor.max_point(cx)] + ); + assert_eq!( + editor.scroll_position(cx), + vec2f(0., editor.max_point(cx).row() as f32) + ); + + editor + }); +} + +#[gpui::test] +fn test_cancel(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + + view.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(3, 4), false, 1, cx); + view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); + view.end_selection(cx); + + view.begin_selection(DisplayPoint::new(0, 1), true, 1, cx); + view.update_selection(DisplayPoint::new(0, 3), 0, Vector2F::zero(), cx); + view.end_selection(cx); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), + DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1), + ] + ); + }); + + view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1)] + ); + }); + + view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1)] + ); + }); +} + +#[gpui::test] +fn test_fold(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple( + &" + impl Foo { + // Hello! 
+ + fn a() { + 1 + } + + fn b() { + 2 + } + + fn c() { + 3 + } + } + " + .unindent(), + cx, + ); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(8, 0)..DisplayPoint::new(12, 0)]); + }); + view.fold(&Fold, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo { + // Hello! + + fn a() { + 1 + } + + fn b() {… + } + + fn c() {… + } + } + " + .unindent(), + ); + + view.fold(&Fold, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo {… + } + " + .unindent(), + ); + + view.unfold_lines(&UnfoldLines, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo { + // Hello! + + fn a() { + 1 + } + + fn b() {… + } + + fn c() {… + } + } + " + .unindent(), + ); + + view.unfold_lines(&UnfoldLines, cx); + assert_eq!(view.display_text(cx), buffer.read(cx).read(cx).text()); + }); +} + +#[gpui::test] +fn test_move_cursor(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![ + (Point::new(1, 0)..Point::new(1, 0), "\t"), + (Point::new(1, 1)..Point::new(1, 1), "\t"), + ], + None, + cx, + ); + }); + + view.update(cx, |view, cx| { + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] + ); + + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4)] + ); + + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] + ); + + view.move_to_end(&MoveToEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(5, 6)..DisplayPoint::new(5, 6)] + ); + + view.move_to_beginning(&MoveToBeginning, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] + ); + + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(0, 2)]); + }); + view.select_to_beginning(&SelectToBeginning, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 1)..DisplayPoint::new(0, 0)] + ); + + view.select_to_end(&SelectToEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 1)..DisplayPoint::new(5, 6)] + ); + }); +} + +#[gpui::test] +fn test_move_cursor_multibyte(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcde\nαβγδε\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + assert_eq!('ⓐ'.len_utf8(), 3); + assert_eq!('α'.len_utf8(), 2); + + view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 6)..Point::new(0, 12), + Point::new(1, 2)..Point::new(1, 4), + Point::new(2, 4)..Point::new(2, 8), + ], + cx, + ); + assert_eq!(view.display_text(cx), "ⓐⓑ…ⓔ\nab…e\nαβ…ε\n"); + + view.move_right(&MoveRight, cx); + assert_eq!( + 
view.selections.display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ…".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab…".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "a".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "α".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ…".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ…ε".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab…e".len())] + ); + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ…ⓔ".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ…".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); + }); +} + +#[gpui::test] +fn test_move_cursor_different_line_lengths(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]); + }); + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "abcd".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); + }); +} + +#[gpui::test] +fn test_beginning_end_of_line(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\n def", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4), + ]); + }); + }); + + view.update(cx, |view, cx| { + 
view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_to_end_of_line(&MoveToEndOfLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), + ] + ); + }); + + // Moving to the end of line again is a no-op. + view.update(cx, |view, cx| { + view.move_to_end_of_line(&MoveToEndOfLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_left(&MoveLeft, cx); + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 0), + ] + ); + }); + + view.update(cx, |view, cx| { + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 5), + ] + ); + }); + + view.update(cx, |view, cx| { + view.delete_to_end_of_line(&DeleteToEndOfLine, cx); + assert_eq!(view.display_text(cx), "ab\n de"); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4), + ] + ); + }); + + view.update(cx, |view, cx| { + view.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); + assert_eq!(view.display_text(cx), "\n"); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + ] + ); + }); +} + +#[gpui::test] +fn test_prev_next_word_boundary(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("use std::str::{foo, bar}\n\n {baz.qux()}", cx); + let (_, view) = cx.add_window(Default::default(), |cx| 
build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 11)..DisplayPoint::new(0, 11), + DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4), + ]) + }); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use stdˇ::str::{foo, bar}\n\n ˇ{baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use ˇstd::str::{foo, bar}\n\nˇ {baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("ˇuse std::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("ˇuse std::str::{foo, barˇ}\n\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("useˇ std::str::{foo, bar}ˇ\n\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); + + view.move_right(&MoveRight, cx); + view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); + assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); + + view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); + assert_selection_ranges("use std«ˇ::s»tr::{foo, bar}\n\n «ˇ{b»az.qux()}", view, cx); + + view.select_to_next_word_end(&SelectToNextWordEnd, cx); + assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); + }); +} + +#[gpui::test] +fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + + view.update(cx, |view, cx| { + view.set_wrap_width(Some(140.), cx); + assert_eq!( + view.display_text(cx), + "use one::{\n two::three::\n four::five\n};" + ); + + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(1, 7)..DisplayPoint::new(1, 7)]); + }); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)] + ); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] + ); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)] + ); 
+ }); +} + +#[gpui::test] +async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + cx.set_state("one «two threeˇ» four"); + cx.update_editor(|editor, cx| { + editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); + assert_eq!(editor.text(cx), " four"); + }); +} + +#[gpui::test] +fn test_delete_to_word_boundary(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("one two three four", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + // an empty selection - the preceding word fragment is deleted + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + // characters selected - they are deleted + DisplayPoint::new(0, 9)..DisplayPoint::new(0, 12), + ]) + }); + view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx); + }); + + assert_eq!(buffer.read(cx).read(cx).text(), "e two te four"); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + // an empty selection - the following word fragment is deleted + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + // characters selected - they are deleted + DisplayPoint::new(0, 9)..DisplayPoint::new(0, 10), + ]) + }); + view.delete_to_next_word_end(&DeleteToNextWordEnd, cx); + }); + + assert_eq!(buffer.read(cx).read(cx).text(), "e t te our"); +} + +#[gpui::test] +fn test_newline(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6), + ]) + }); + + view.newline(&Newline, cx); + assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n"); + }); +} + +#[gpui::test] +fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple( + " + a + b( + X + ) + c( + X + ) + " + .unindent() + .as_str(), + cx, + ); + + let (_, editor) = cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(buffer.clone(), cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(2, 4)..Point::new(2, 5), + Point::new(5, 4)..Point::new(5, 5), + ]) + }); + editor + }); + + // Edit the buffer directly, deleting ranges surrounding the editor's selections + buffer.update(cx, |buffer, cx| { + buffer.edit( + [ + (Point::new(1, 2)..Point::new(3, 0), ""), + (Point::new(4, 2)..Point::new(6, 0), ""), + ], + None, + cx, + ); + assert_eq!( + buffer.read(cx).text(), + " + a + b() + c() + " + .unindent() + ); + }); + + editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(1, 2)..Point::new(1, 2), + Point::new(2, 2)..Point::new(2, 2), + ], + ); + + editor.newline(&Newline, cx); + assert_eq!( + editor.text(cx), + " + a + b( + ) + c( + ) + " + .unindent() + ); + + // The selections are moved after the inserted newlines + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(2, 0)..Point::new(2, 0), + Point::new(4, 0)..Point::new(4, 0), + ], + ); + }); +} + +#[gpui::test] 
+async fn test_newline_below(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.editor_overrides.tab_size = Some(NonZeroU32::new(4).unwrap()); + }); + }); + + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + cx.set_state(indoc! {" + const a: ˇA = ( + (ˇ + «const_functionˇ»(ˇ), + so«mˇ»et«hˇ»ing_ˇelse,ˇ + )ˇ + ˇ);ˇ + "}); + cx.update_editor(|e, cx| e.newline_below(&NewlineBelow, cx)); + cx.assert_editor_state(indoc! {" + const a: A = ( + ˇ + ( + ˇ + const_function(), + ˇ + ˇ + something_else, + ˇ + ˇ + ˇ + ˇ + ) + ˇ + ); + ˇ + ˇ + "}); +} + +#[gpui::test] +fn test_insert_with_old_selections(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx); + let (_, editor) = cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(buffer.clone(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([3..4, 11..12, 19..20])); + editor + }); + + // Edit the buffer directly, deleting ranges surrounding the editor's selections + buffer.update(cx, |buffer, cx| { + buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx); + assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent()); + }); + + editor.update(cx, |editor, cx| { + assert_eq!(editor.selections.ranges(cx), &[2..2, 7..7, 12..12],); + + editor.insert("Z", cx); + assert_eq!(editor.text(cx), "a(Z), b(Z), c(Z)"); + + // The selections are moved after the inserted characters + assert_eq!(editor.selections.ranges(cx), &[3..3, 9..9, 15..15],); + }); +} + +#[gpui::test] +async fn test_tab(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.editor_overrides.tab_size = Some(NonZeroU32::new(3).unwrap()); + }); + }); + cx.set_state(indoc! {" + ˇabˇc + ˇ🏀ˇ🏀ˇefg + dˇ + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + ˇab ˇc + ˇ🏀 ˇ🏀 ˇefg + d ˇ + "}); + + cx.set_state(indoc! {" + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); +} + +#[gpui::test] +async fn test_tab_on_blank_line_auto_indents(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // cursors that are already at the suggested indent level insert + // a soft tab. cursors that are to the left of the suggested indent + // auto-indent their line. + cx.set_state(indoc! {" + ˇ + const a: B = ( + c( + d( + ˇ + ) + ˇ + ˇ ) + ); + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + ˇ + const a: B = ( + c( + d( + ˇ + ) + ˇ + ˇ) + ); + "}); + + // handle auto-indent when there are multiple cursors on the same line + cx.set_state(indoc! {" + const a: B = ( + c( + ˇ ˇ + ˇ ) + ); + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! 
{" + const a: B = ( + c( + ˇ + ˇ) + ); + "}); +} + +#[gpui::test] +async fn test_indent_outdent(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + cx.set_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + // select across line ending + cx.set_state(indoc! {" + one two + t«hree + ˇ» four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ» four + "}); + + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ» four + "}); + + // Ensure that indenting/outdenting works when the cursor is at column 0. + cx.set_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); + + cx.set_state(indoc! {" + one two + ˇ three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); +} + +#[gpui::test] +async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.editor_overrides.hard_tabs = Some(true); + }); + }); + + // select two ranges on one line + cx.set_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + \t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + \t\t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + \t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + // select across a line ending + cx.set_state(indoc! {" + one two + t«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + \tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + \t\tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + \tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ»four + "}); + + // Ensure that indenting/outdenting works when the cursor is at column 0. + cx.set_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + \tˇthree + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! 
{" + one two + ˇthree + four + "}); +} + +#[gpui::test] +fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) { + cx.set_global( + Settings::test(cx) + .with_language_defaults( + "TOML", + EditorSettings { + tab_size: Some(2.try_into().unwrap()), + ..Default::default() + }, + ) + .with_language_defaults( + "Rust", + EditorSettings { + tab_size: Some(4.try_into().unwrap()), + ..Default::default() + }, + ), + ); + let toml_language = Arc::new(Language::new( + LanguageConfig { + name: "TOML".into(), + ..Default::default() + }, + None, + )); + let rust_language = Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + ..Default::default() + }, + None, + )); + + let toml_buffer = + cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx)); + let rust_buffer = cx.add_model(|cx| { + Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx) + }); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpts( + toml_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + rust_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 0), + primary: None, + }], + cx, + ); + multibuffer + }); + + cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(multibuffer, cx); + + assert_eq!( + editor.text(cx), + indoc! {" + a = 1 + b = 2 + + const c: usize = 3; + "} + ); + + select_ranges( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + + editor.tab(&Tab, cx); + assert_text_with_selections( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + editor.tab_prev(&TabPrev, cx); + assert_text_with_selections( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + + editor + }); +} + +#[gpui::test] +async fn test_backspace(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + // Basic backspace + cx.set_state(indoc! {" + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + oˇe two three + fouˇ five six + seven ˇten + "}); + + // Test backspace inside and around indents + cx.set_state(indoc! {" + zero + ˇone + ˇtwo + ˇ ˇ ˇ three + ˇ ˇ four + "}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + zero + ˇone + ˇtwo + ˇ threeˇ four + "}); + + // Test backspace with line_mode set to true + cx.update_editor(|e, _| e.selections.line_mode = true); + cx.set_state(indoc! {" + The ˇquick ˇbrown + fox jumps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + ˇfox jumps over + the lazy dogˇ"}); +} + +#[gpui::test] +async fn test_delete(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + cx.set_state(indoc! {" + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); + cx.update_editor(|e, cx| e.delete(&Delete, cx)); + cx.assert_editor_state(indoc! {" + onˇ two three + fouˇ five six + seven ˇten + "}); + + // Test backspace with line_mode set to true + cx.update_editor(|e, _| e.selections.line_mode = true); + cx.set_state(indoc! 
{" + The ˇquick ˇbrown + fox «ˇjum»ps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state("ˇthe lazy dogˇ"); +} + +#[gpui::test] +fn test_delete_line(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), + DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), + ]) + }); + view.delete_line(&DeleteLine, cx); + assert_eq!(view.display_text(cx), "ghi"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1) + ] + ); + }); + + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(0, 1)]) + }); + view.delete_line(&DeleteLine, cx); + assert_eq!(view.display_text(cx), "ghi\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)] + ); + }); +} + +#[gpui::test] +fn test_duplicate_line(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), + ]) + }); + view.duplicate_line(&DuplicateLine, cx); + assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), + DisplayPoint::new(6, 0)..DisplayPoint::new(6, 0), + ] + ); + }); + + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 1)..DisplayPoint::new(1, 1), + DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1), + ]) + }); + view.duplicate_line(&DuplicateLine, cx); + assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(3, 1)..DisplayPoint::new(4, 1), + DisplayPoint::new(4, 2)..DisplayPoint::new(5, 1), + ] + ); + }); +} + +#[gpui::test] +fn test_move_line_up_down(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 2)..Point::new(1, 2), + Point::new(2, 
3)..Point::new(4, 1), + Point::new(7, 0)..Point::new(8, 4), + ], + cx, + ); + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), + DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), + DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2), + ]) + }); + assert_eq!( + view.display_text(cx), + "aa…bbb\nccc…eeee\nfffff\nggggg\n…i\njjjjj" + ); + + view.move_line_up(&MoveLineUp, cx); + assert_eq!( + view.display_text(cx), + "aa…bbb\nccc…eeee\nggggg\n…i\njjjjj\nfffff" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), + DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3), + DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2) + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_line_down(&MoveLineDown, cx); + assert_eq!( + view.display_text(cx), + "ccc…eeee\naa…bbb\nfffff\nggggg\n…i\njjjjj" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), + DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), + DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2) + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_line_down(&MoveLineDown, cx); + assert_eq!( + view.display_text(cx), + "ccc…eeee\nfffff\naa…bbb\nggggg\n…i\njjjjj" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), + DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3), + DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2) + ] + ); + }); + + view.update(cx, |view, cx| { + view.move_line_up(&MoveLineUp, cx); + assert_eq!( + view.display_text(cx), + "ccc…eeee\naa…bbb\nggggg\n…i\njjjjj\nfffff" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), + DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), + DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3), + DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2) + ] + ); + }); +} + +#[gpui::test] +fn test_move_line_up_down_with_blocks(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); + let snapshot = buffer.read(cx).snapshot(cx); + let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| { + editor.insert_blocks( + [BlockProperties { + style: BlockStyle::Fixed, + position: snapshot.anchor_after(Point::new(2, 0)), + disposition: BlockDisposition::Below, + height: 1, + render: Arc::new(|_| Empty::new().boxed()), + }], + cx, + ); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(2, 0)..Point::new(2, 0)]) + }); + editor.move_line_down(&MoveLineDown, cx); + }); +} + +#[gpui::test] +fn test_transpose(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + + _ = cx + .add_window(Default::default(), |cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx); + + editor.change_selections(None, cx, |s| s.select_ranges([1..1])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bac"); + assert_eq!(editor.selections.ranges(cx), [2..2]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bca"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + 
+ editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bac"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + + editor + }) + .1; + + _ = cx + .add_window(Default::default(), |cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); + + editor.change_selections(None, cx, |s| s.select_ranges([3..3])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acb\nde"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + + editor.change_selections(None, cx, |s| s.select_ranges([4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbd\ne"); + assert_eq!(editor.selections.ranges(cx), [5..5]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbde\n"); + assert_eq!(editor.selections.ranges(cx), [6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbd\ne"); + assert_eq!(editor.selections.ranges(cx), [6..6]); + + editor + }) + .1; + + _ = cx + .add_window(Default::default(), |cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); + + editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bacd\ne"); + assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcade\n"); + assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcda\ne"); + assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcade\n"); + assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcaed\n"); + assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]); + + editor + }) + .1; + + _ = cx + .add_window(Default::default(), |cx| { + let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx); + + editor.change_selections(None, cx, |s| s.select_ranges([4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀🍐✋"); + assert_eq!(editor.selections.ranges(cx), [8..8]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀✋🍐"); + assert_eq!(editor.selections.ranges(cx), [11..11]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀🍐✋"); + assert_eq!(editor.selections.ranges(cx), [11..11]); + + editor + }) + .1; +} + +#[gpui::test] +async fn test_clipboard(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + cx.set_state("«one✅ ˇ»two «three ˇ»four «five ˇ»six "); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state("ˇtwo ˇfour ˇsix "); + + // Paste with three cursors. Each cursor pastes one slice of the clipboard text. + cx.set_state("two ˇfour ˇsix ˇ"); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state("two one✅ ˇfour three ˇsix five ˇ"); + + // Paste again but with only two cursors. Since the number of cursors doesn't + // match the number of slices in the clipboard, the entire clipboard text + // is pasted at each cursor. + cx.set_state("ˇtwo one✅ four three six five ˇ"); + cx.update_editor(|e, cx| { + e.handle_input("( ", cx); + e.paste(&Paste, cx); + e.handle_input(") ", cx); + }); + cx.assert_editor_state(indoc! 
{" + ( one✅ + three + five ) ˇtwo one✅ four three six five ( one✅ + three + five ) ˇ"}); + + // Cut with three selections, one of which is full-line. + cx.set_state(indoc! {" + 1«2ˇ»3 + 4ˇ567 + «8ˇ»9"}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + 1ˇ3 + ˇ9"}); + + // Paste with three selections, noticing how the copied selection that was full-line + // gets inserted before the second cursor. + cx.set_state(indoc! {" + 1ˇ3 + 9ˇ + «oˇ»ne"}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + 12ˇ3 + 4567 + 9ˇ + 8ˇne"}); + + // Copy with a single cursor only, which writes the whole line into the clipboard. + cx.set_state(indoc! {" + The quick brown + fox juˇmps over + the lazy dog"}); + cx.update_editor(|e, cx| e.copy(&Copy, cx)); + cx.cx.assert_clipboard_content(Some("fox jumps over\n")); + + // Paste with three selections, noticing how the copied full-line selection is inserted + // before the empty selections but replaces the selection that is non-empty. + cx.set_state(indoc! {" + Tˇhe quick brown + «foˇ»x jumps over + tˇhe lazy dog"}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + fox jumps over + Tˇhe quick brown + fox jumps over + ˇx jumps over + fox jumps over + tˇhe lazy dog"}); +} + +#[gpui::test] +async fn test_paste_multiline(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + let language = Arc::new(Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // Cut an indented block, without the leading whitespace. + cx.set_state(indoc! {" + const a: B = ( + c(), + «d( + e, + f + )ˇ» + ); + "}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + ˇ + ); + "}); + + // Paste it at the same position. + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + d( + e, + f + )ˇ + ); + "}); + + // Paste it at a line with a lower indent level. + cx.set_state(indoc! {" + ˇ + const a: B = ( + c(), + ); + "}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + d( + e, + f + )ˇ + const a: B = ( + c(), + ); + "}); + + // Cut an indented block, with the leading whitespace. + cx.set_state(indoc! {" + const a: B = ( + c(), + « d( + e, + f + ) + ˇ»); + "}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + ˇ); + "}); + + // Paste it at the same position. + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + d( + e, + f + ) + ˇ); + "}); + + // Paste it at a line with a higher indent level. + cx.set_state(indoc! {" + const a: B = ( + c(), + d( + e, + fˇ + ) + ); + "}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! 
{" + const a: B = ( + c(), + d( + e, + f d( + e, + f + ) + ˇ + ) + ); + "}); +} + +#[gpui::test] +fn test_select_all(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\nde\nfgh", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.select_all(&SelectAll, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(2, 3)] + ); + }); +} + +#[gpui::test] +fn test_select_line(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&sample_text(6, 5, 'a'), cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + DisplayPoint::new(4, 2)..DisplayPoint::new(4, 2), + ]) + }); + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 0)..DisplayPoint::new(2, 0), + DisplayPoint::new(4, 0)..DisplayPoint::new(5, 0), + ] + ); + }); + + view.update(cx, |view, cx| { + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 0)..DisplayPoint::new(3, 0), + DisplayPoint::new(4, 0)..DisplayPoint::new(5, 5), + ] + ); + }); + + view.update(cx, |view, cx| { + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(0, 0)..DisplayPoint::new(5, 5)] + ); + }); +} + +#[gpui::test] +fn test_split_selection_into_lines(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple(&sample_text(9, 5, 'a'), cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 2)..Point::new(1, 2), + Point::new(2, 3)..Point::new(4, 1), + Point::new(7, 0)..Point::new(8, 4), + ], + cx, + ); + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4), + ]) + }); + assert_eq!(view.display_text(cx), "aa…bbb\nccc…eeee\nfffff\nggggg\n…i"); + }); + + view.update(cx, |view, cx| { + view.split_selection_into_lines(&SplitSelectionIntoLines, cx); + assert_eq!( + view.display_text(cx), + "aaaaa\nbbbbb\nccc…eeee\nfffff\nggggg\n…i" + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(2, 0)..DisplayPoint::new(2, 0), + DisplayPoint::new(5, 4)..DisplayPoint::new(5, 4) + ] + ); + }); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(5, 0)..DisplayPoint::new(0, 1)]) + }); + view.split_selection_into_lines(&SplitSelectionIntoLines, cx); + assert_eq!( + view.display_text(cx), + "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii" + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(0, 5)..DisplayPoint::new(0, 5), + DisplayPoint::new(1, 
5)..DisplayPoint::new(1, 5), + DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), + DisplayPoint::new(3, 5)..DisplayPoint::new(3, 5), + DisplayPoint::new(4, 5)..DisplayPoint::new(4, 5), + DisplayPoint::new(5, 5)..DisplayPoint::new(5, 5), + DisplayPoint::new(6, 5)..DisplayPoint::new(6, 5), + DisplayPoint::new(7, 0)..DisplayPoint::new(7, 0) + ] + ); + }); +} + +#[gpui::test] +fn test_add_selection_above_below(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = MultiBuffer::build_simple("abc\ndefghi\n\njk\nlmno\n", cx); + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx)); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)]) + }); + }); + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)] + ); + + view.undo_selection(&UndoSelection, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) + ] + ); + + view.redo_selection(&RedoSelection, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), + DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), + DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)]) + }); + }); + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), + DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), + DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] + ); + }); + + view.update(cx, 
|view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(1, 4)]) + }); + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), + DisplayPoint::new(4, 1)..DisplayPoint::new(4, 4), + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), + DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2), + ] + ); + }); + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(4, 3)..DisplayPoint::new(1, 1)]) + }); + }); + view.update(cx, |view, cx| { + view.add_selection_above(&AddSelectionAbove, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), + DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1), + DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1), + ] + ); + }); + + view.update(cx, |view, cx| { + view.add_selection_below(&AddSelectionBelow, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), + DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1), + DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1), + ] + ); + }); +} + +#[gpui::test] +async fn test_select_next(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + cx.set_state("abc\nˇabc abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx)); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx)); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)); + cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); +} + +#[gpui::test] +async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new(Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + )); + + let text = r#" + use mod1::mod2::{mod3, mod4}; + + fn fn_1(param1: bool, param2: &str) { + let var1 = "text"; + } + "# + .unindent(); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, view) = cx.add_window(|cx| build_editor(buffer, 
cx)); + view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ]); + }); + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| { view.selections.display_ranges(cx) }), + &[ + DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), + ] + ); + + view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), + ] + ); + + view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] + ); + + // Trying to expand the selected syntax node one more time has no effect. + view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] + ); + + view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), + ] + ); + + view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), + ] + ); + + view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ] + ); + + // Trying to shrink the selected syntax node one more time has no effect. + view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ] + ); + + // Ensure that we keep expanding the selection if the larger selection starts or ends within + // a fold. 
+ view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 21)..Point::new(0, 24), + Point::new(3, 20)..Point::new(3, 22), + ], + cx, + ); + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 4)..DisplayPoint::new(3, 23), + ] + ); +} + +#[gpui::test] +async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: false, + newline: true, + }, + ], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_indents_query( + r#" + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap(), + ); + + let text = "fn a() {}"; + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); + editor + .condition(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) + .await; + + editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([5..5, 8..8, 9..9])); + editor.newline(&Newline, cx); + assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(1, 4)..Point::new(1, 4), + Point::new(3, 4)..Point::new(3, 4), + Point::new(5, 0)..Point::new(5, 0) + ] + ); + }); +} + +#[gpui::test] +async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/*".to_string(), + end: " */".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "[".to_string(), + end: "]".to_string(), + close: false, + newline: true, + }, + ], + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let registry = Arc::new(LanguageRegistry::test()); + registry.add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(language), cx); + }); + + cx.set_state( + &r#" + 🏀ˇ + εˇ + ❤️ˇ + "# + .unindent(), + ); + + // autoclose multiple nested brackets at multiple cursors + cx.update_editor(|view, cx| { + view.handle_input("{", cx); + view.handle_input("{", cx); + view.handle_input("{", cx); + }); + cx.assert_editor_state( + &" + 🏀{{{ˇ}}} + ε{{{ˇ}}} + ❤️{{{ˇ}}} + " + .unindent(), + ); + + // skip over the auto-closed brackets when typing a closing bracket + cx.update_editor(|view, cx| { + view.move_right(&MoveRight, cx); + view.handle_input("}", cx); + view.handle_input("}", cx); + view.handle_input("}", cx); + }); + cx.assert_editor_state( + &" + 🏀{{{}}}}ˇ + ε{{{}}}}ˇ + ❤️{{{}}}}ˇ + " + .unindent(), + ); + + // autoclose multi-character pairs + cx.set_state( + &" + ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| { + view.handle_input("/", cx); + view.handle_input("*", cx); + }); + 
cx.assert_editor_state( + &" + /*ˇ */ + /*ˇ */ + " + .unindent(), + ); + + // one cursor autocloses a multi-character pair, one cursor + // does not autoclose. + cx.set_state( + &" + /ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| view.handle_input("*", cx)); + cx.assert_editor_state( + &" + /*ˇ */ + *ˇ + " + .unindent(), + ); + + // Don't autoclose if the next character isn't whitespace and isn't + // listed in the language's "autoclose_before" section. + cx.set_state("ˇa b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{ˇa b"); + + // Surround with brackets if text is selected + cx.set_state("«aˇ» b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{«aˇ»} b"); +} + +#[gpui::test] +async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + brackets: vec![ + BracketPair { + start: "<".into(), + end: ">".into(), + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + ..Default::default() + }, + ], + autoclose_before: "})]>".into(), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + brackets: vec![ + BracketPair { + start: "/*".into(), + end: " */".into(), + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + ..Default::default() + }, + ], + autoclose_before: "})]>".into(), + ..Default::default() + }, + Some(tree_sitter_javascript::language()), + )); + + let registry = Arc::new(LanguageRegistry::test()); + registry.add(html_language.clone()); + registry.add(javascript_language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(html_language), cx); + }); + + cx.set_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Precondition: different languages are active at different locations. + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let cursors = editor.selections.ranges::(cx); + let languages = cursors + .iter() + .map(|c| snapshot.language_at(c.start).unwrap().name()) + .collect::>(); + assert_eq!( + languages, + &["HTML".into(), "JavaScript".into(), "HTML".into()] + ); + }); + + // Angle brackets autoclose in HTML, but not JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + editor.handle_input("a", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + + // Curly braces and parens autoclose in both HTML and JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input(" b=", cx); + editor.handle_input("{", cx); + editor.handle_input("c", cx); + editor.handle_input("(", cx); + }); + cx.assert_editor_state( + &r#" +
+ + + "# + .unindent(), + ); + + // Brackets that were already autoclosed are skipped. + cx.update_editor(|editor, cx| { + editor.handle_input(")", cx); + editor.handle_input("d", cx); + editor.handle_input("}", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + editor.handle_input(">", cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Reset + cx.set_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + }); + cx.assert_editor_state( + &r#" + <ˇ> + + <ˇ> + "# + .unindent(), + ); + + // When backspacing, the closing angle brackets are removed. + cx.update_editor(|editor, cx| { + editor.backspace(&Backspace, cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Block comments autoclose in JavaScript, but not HTML. + cx.update_editor(|editor, cx| { + editor.handle_input("/", cx); + editor.handle_input("*", cx); + }); + cx.assert_editor_state( + &r#" + /*ˇ + + /*ˇ + "# + .unindent(), + ); +} + +#[gpui::test] +async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new(Language::new( + LanguageConfig { + brackets: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let text = r#" + a + b + c + "# + .unindent(); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); + view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), + DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1), + ]) + }); + + view.handle_input("{", cx); + view.handle_input("{", cx); + view.handle_input("{", cx); + assert_eq!( + view.text(cx), + " + {{{a}}} + {{{b}}} + {{{c}}} + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(0, 3)..DisplayPoint::new(0, 4), + DisplayPoint::new(1, 3)..DisplayPoint::new(1, 4), + DisplayPoint::new(2, 3)..DisplayPoint::new(2, 4) + ] + ); + + view.undo(&Undo, cx); + assert_eq!( + view.text(cx), + " + a + b + c + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), + DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1) + ] + ); + }); +} + +#[gpui::test] +async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new(Language::new( + LanguageConfig { + brackets: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }], + autoclose_before: "}".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let text = r#" + a + b + c + "# + .unindent(); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); + editor + 
+        .condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+        .await;
+
+    editor.update(cx, |editor, cx| {
+        editor.change_selections(None, cx, |s| {
+            s.select_ranges([
+                Point::new(0, 1)..Point::new(0, 1),
+                Point::new(1, 1)..Point::new(1, 1),
+                Point::new(2, 1)..Point::new(2, 1),
+            ])
+        });
+
+        editor.handle_input("{", cx);
+        editor.handle_input("{", cx);
+        editor.handle_input("_", cx);
+        assert_eq!(
+            editor.text(cx),
+            "
+            a{{_}}
+            b{{_}}
+            c{{_}}
+            "
+            .unindent()
+        );
+        assert_eq!(
+            editor.selections.ranges::<Point>(cx),
+            [
+                Point::new(0, 4)..Point::new(0, 4),
+                Point::new(1, 4)..Point::new(1, 4),
+                Point::new(2, 4)..Point::new(2, 4)
+            ]
+        );
+
+        editor.backspace(&Default::default(), cx);
+        editor.backspace(&Default::default(), cx);
+        assert_eq!(
+            editor.text(cx),
+            "
+            a{}
+            b{}
+            c{}
+            "
+            .unindent()
+        );
+        assert_eq!(
+            editor.selections.ranges::<Point>(cx),
+            [
+                Point::new(0, 2)..Point::new(0, 2),
+                Point::new(1, 2)..Point::new(1, 2),
+                Point::new(2, 2)..Point::new(2, 2)
+            ]
+        );
+
+        editor.delete_to_previous_word_start(&Default::default(), cx);
+        assert_eq!(
+            editor.text(cx),
+            "
+            a
+            b
+            c
+            "
+            .unindent()
+        );
+        assert_eq!(
+            editor.selections.ranges::<Point>(cx),
+            [
+                Point::new(0, 1)..Point::new(0, 1),
+                Point::new(1, 1)..Point::new(1, 1),
+                Point::new(2, 1)..Point::new(2, 1)
+            ]
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_snippets(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| cx.set_global(Settings::test(cx)));
+
+    let (text, insertion_ranges) = marked_text_ranges(
+        indoc! {"
+            a.ˇ b
+            a.ˇ b
+            a.ˇ b
+        "},
+        false,
+    );
+
+    let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
+    let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+
+    editor.update(cx, |editor, cx| {
+        let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();
+
+        editor
+            .insert_snippet(&insertion_ranges, snippet, cx)
+            .unwrap();
+
+        fn assert(editor: &mut Editor, cx: &mut ViewContext<Editor>, marked_text: &str) {
+            let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false);
+            assert_eq!(editor.text(cx), expected_text);
+            assert_eq!(editor.selections.ranges::<usize>(cx), selection_ranges);
+        }
+
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+            "},
+        );
+
+        // Can't move earlier than the first tab stop
+        assert!(!editor.move_to_prev_snippet_tabstop(cx));
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+            "},
+        );
+
+        assert!(editor.move_to_next_snippet_tabstop(cx));
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(one, «two», three) b
+                a.f(one, «two», three) b
+                a.f(one, «two», three) b
+            "},
+        );
+
+        editor.move_to_prev_snippet_tabstop(cx);
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+                a.f(«one», two, «three») b
+            "},
+        );
+
+        assert!(editor.move_to_next_snippet_tabstop(cx));
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(one, «two», three) b
+                a.f(one, «two», three) b
+                a.f(one, «two», three) b
+            "},
+        );
+        assert!(editor.move_to_next_snippet_tabstop(cx));
+        assert(
+            editor,
+            cx,
+            indoc! {"
+                a.f(one, two, three)ˇ b
+                a.f(one, two, three)ˇ b
+                a.f(one, two, three)ˇ b
+            "},
+        );
+
+        // As soon as the last tab stop is reached, snippet state is gone
+        editor.move_to_prev_snippet_tabstop(cx);
+        assert(
+            editor,
+            cx,
+            indoc!
{" + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + "}, + ); + }); +} + +#[gpui::test] +async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { + cx.foreground().forbid_parking(); + + let mut language = Language::new( + LanguageConfig { + name: "Rust".into(), + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ); + let mut fake_servers = language + .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + })) + .await; + + let fs = FakeFs::new(cx.background()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + project.update(cx, |project, _| project.languages().add(Arc::new(language))); + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.foreground().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + let save = cx.update(|cx| editor.save(project.clone(), cx)); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.foreground().start_waiting(); + save.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + // Ensure we can still save even if formatting hangs. 
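    // [Editor's note] The Formatting handler registered below intentionally never
    // resolves. The save is expected to stop waiting for the formatter once the
    // editor's formatting timeout elapses; the test simulates that by advancing the
    // deterministic executor's clock by `FORMAT_TIMEOUT` before awaiting the save.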
+ fake_server.handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }); + let save = cx.update(|cx| editor.save(project.clone(), cx)); + cx.foreground().advance_clock(super::FORMAT_TIMEOUT); + cx.foreground().start_waiting(); + save.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + // Set rust language override and assert overriden tabsize is sent to language server + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.language_overrides.insert( + "Rust".into(), + EditorSettings { + tab_size: Some(8.try_into().unwrap()), + ..Default::default() + }, + ); + }) + }); + + let save = cx.update(|cx| editor.save(project.clone(), cx)); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 8); + Ok(Some(vec![])) + }) + .next() + .await; + cx.foreground().start_waiting(); + save.await.unwrap(); +} + +#[gpui::test] +async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { + cx.foreground().forbid_parking(); + + let mut language = Language::new( + LanguageConfig { + name: "Rust".into(), + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ); + let mut fake_servers = language + .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_range_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + })) + .await; + + let fs = FakeFs::new(cx.background()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + project.update(cx, |project, _| project.languages().add(Arc::new(language))); + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.foreground().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + let save = cx.update(|cx| editor.save(project.clone(), cx)); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.foreground().start_waiting(); + save.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + // Ensure we can still save even if formatting hangs. 
+ fake_server.handle_request::( + move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }, + ); + let save = cx.update(|cx| editor.save(project.clone(), cx)); + cx.foreground().advance_clock(super::FORMAT_TIMEOUT); + cx.foreground().start_waiting(); + save.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + // Set rust language override and assert overriden tabsize is sent to language server + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.language_overrides.insert( + "Rust".into(), + EditorSettings { + tab_size: Some(8.try_into().unwrap()), + ..Default::default() + }, + ); + }) + }); + + let save = cx.update(|cx| editor.save(project.clone(), cx)); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 8); + Ok(Some(vec![])) + }) + .next() + .await; + cx.foreground().start_waiting(); + save.await.unwrap(); +} + +#[gpui::test] +async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { + cx.foreground().forbid_parking(); + + let mut language = Language::new( + LanguageConfig { + name: "Rust".into(), + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ); + let mut fake_servers = language + .set_fake_lsp_adapter(Arc::new(FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + })) + .await; + + let fs = FakeFs::new(cx.background()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + project.update(cx, |project, _| project.languages().add(Arc::new(language))); + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.foreground().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + + let format = editor.update(cx, |editor, cx| editor.perform_format(project.clone(), cx)); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.foreground().start_waiting(); + format.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + // Ensure we don't lock if formatting hangs. 
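    // [Editor's note] Same pattern as the save tests above, but exercising the
    // explicitly requested format action: `perform_format` is likewise expected to
    // give up on the hung language server once the clock is advanced by `FORMAT_TIMEOUT`.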
+ fake_server.handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }); + let format = editor.update(cx, |editor, cx| editor.perform_format(project, cx)); + cx.foreground().advance_clock(super::FORMAT_TIMEOUT); + cx.foreground().start_waiting(); + format.await.unwrap(); + assert_eq!( + editor.read_with(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); +} + +#[gpui::test] +async fn test_completion(cx: &mut gpui::TestAppContext) { + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec!["first_completion", "second_completion"], + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor.move_down(&MoveDown, cx); + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state(indoc! {" + one.second_completionˇ + two + three + "}); + + handle_resolve_completion_request( + &mut cx, + Some(( + indoc! {" + one.second_completion + two + threeˇ + "}, + "\nadditional edit", + )), + ) + .await; + apply_additional_edits.await.unwrap(); + cx.assert_editor_state(indoc! {" + one.second_completionˇ + two + three + additional edit + "}); + + cx.set_state(indoc! {" + one.second_completion + twoˇ + threeˇ + additional edit + "}); + cx.simulate_keystroke(" "); + assert!(cx.editor(|e, _| e.context_menu.is_none())); + cx.simulate_keystroke("s"); + assert!(cx.editor(|e, _| e.context_menu.is_none())); + + cx.assert_editor_state(indoc! {" + one.second_completion + two sˇ + three sˇ + additional edit + "}); + // + handle_completion_request( + &mut cx, + indoc! {" + one.second_completion + two s + three + additional edit + "}, + vec!["fourth_completion", "fifth_completion", "sixth_completion"], + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + + cx.simulate_keystroke("i"); + + handle_completion_request( + &mut cx, + indoc! {" + one.second_completion + two si + three + additional edit + "}, + vec!["fourth_completion", "fifth_completion", "sixth_completion"], + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state(indoc! 
{" + one.second_completion + two sixth_completionˇ + three sixth_completionˇ + additional edit + "}); + + handle_resolve_completion_request(&mut cx, None).await; + apply_additional_edits.await.unwrap(); + + cx.update(|cx| { + cx.update_global::(|settings, _| { + settings.show_completions_on_input = false; + }) + }); + cx.set_state("editorˇ"); + cx.simulate_keystroke("."); + assert!(cx.editor(|e, _| e.context_menu.is_none())); + cx.simulate_keystroke("c"); + cx.simulate_keystroke("l"); + cx.simulate_keystroke("o"); + cx.assert_editor_state("editor.cloˇ"); + assert!(cx.editor(|e, _| e.context_menu.is_none())); + cx.update_editor(|editor, cx| { + editor.show_completions(&ShowCompletions, cx); + }); + handle_completion_request(&mut cx, "editor.", vec!["close", "clobber"]).await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state("editor.closeˇ"); + handle_resolve_completion_request(&mut cx, None).await; + apply_additional_edits.await.unwrap(); + + // Handle completion request passing a marked string specifying where the completion + // should be triggered from using '|' character, what range should be replaced, and what completions + // should be returned using '<' and '>' to delimit the range + async fn handle_completion_request<'a>( + cx: &mut EditorLspTestContext<'a>, + marked_string: &str, + completions: Vec<&'static str>, + ) { + let complete_from_marker: TextRangeMarker = '|'.into(); + let replace_range_marker: TextRangeMarker = ('<', '>').into(); + let (_, mut marked_ranges) = marked_text_ranges_by( + marked_string, + vec![complete_from_marker.clone(), replace_range_marker.clone()], + ); + + let complete_from_position = + cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start); + let replace_range = + cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); + + cx.handle_request::(move |url, params, _| { + let completions = completions.clone(); + async move { + assert_eq!(params.text_document_position.text_document.uri, url.clone()); + assert_eq!( + params.text_document_position.position, + complete_from_position + ); + Ok(Some(lsp::CompletionResponse::Array( + completions + .iter() + .map(|completion_text| lsp::CompletionItem { + label: completion_text.to_string(), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: replace_range, + new_text: completion_text.to_string(), + })), + ..Default::default() + }) + .collect(), + ))) + } + }) + .next() + .await; + } + + async fn handle_resolve_completion_request<'a>( + cx: &mut EditorLspTestContext<'a>, + edit: Option<(&'static str, &'static str)>, + ) { + let edit = edit.map(|(marked_string, new_text)| { + let (_, marked_ranges) = marked_text_ranges(marked_string, false); + let replace_range = cx.to_lsp_range(marked_ranges[0].clone()); + vec![lsp::TextEdit::new(replace_range, new_text.to_string())] + }); + + cx.handle_request::(move |_, _, _| { + let edit = edit.clone(); + async move { + Ok(lsp::CompletionItem { + additional_text_edits: edit, + ..Default::default() + }) + } + }) + .next() + .await; + } +} + +#[gpui::test] +async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new(Language::new( + LanguageConfig { + line_comment: Some("// ".into()), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + 
)); + + let text = " + fn a() { + //b(); + // c(); + // d(); + } + " + .unindent(); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); + + view.update(cx, |editor, cx| { + // If multiple selections intersect a line, the line is only + // toggled once. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(1, 3)..DisplayPoint::new(2, 3), + DisplayPoint::new(3, 5)..DisplayPoint::new(3, 6), + ]) + }); + editor.toggle_comments(&ToggleComments, cx); + assert_eq!( + editor.text(cx), + " + fn a() { + b(); + c(); + d(); + } + " + .unindent() + ); + + // The comment prefix is inserted at the same column for every line + // in a selection. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(3, 6)]) + }); + editor.toggle_comments(&ToggleComments, cx); + assert_eq!( + editor.text(cx), + " + fn a() { + // b(); + // c(); + // d(); + } + " + .unindent() + ); + + // If a selection ends at the beginning of a line, that line is not toggled. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(3, 0)]) + }); + editor.toggle_comments(&ToggleComments, cx); + assert_eq!( + editor.text(cx), + " + fn a() { + // b(); + c(); + // d(); + } + " + .unindent() + ); + }); +} + +#[gpui::test] +async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { + let mut cx = EditorTestContext::new(cx); + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + block_comment: Some(("".into())), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + line_comment: Some("// ".into()), + ..Default::default() + }, + Some(tree_sitter_javascript::language()), + )); + + let registry = Arc::new(LanguageRegistry::test()); + registry.add(html_language.clone()); + registry.add(javascript_language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(registry); + buffer.set_language(Some(html_language), cx); + }); + + // Toggle comments for empty selections + cx.set_state( + &r#" +
            <p>A</p>ˇ
            <p>B</p>ˇ
            <p>C</p>
ˇ + "# + .unindent(), + ); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" +
            <p>A</p>ˇ
            <p>B</p>ˇ
            <p>C</p>
ˇ + "# + .unindent(), + ); + + // Toggle comments for mixture of empty and non-empty selections, where + // multiple selections occupy a given line. + cx.set_state( + &r#" +
            <p>A«</p>
            <p>ˇ»B</p>ˇ
            <p>C«</p>
            <p>ˇ»D</p>
ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" +
            <p>A«</p>
            <p>ˇ»B</p>ˇ
            <p>C«</p>
            <p>ˇ»D</p>
ˇ + "# + .unindent(), + ); + + // Toggle comments when different languages are active for different + // selections. + cx.set_state( + &r#" + ˇ + "# + .unindent(), + ); + cx.foreground().run_until_parked(); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); + cx.assert_editor_state( + &r#" + + // ˇvar x = new Y(); + + "# + .unindent(), + ); +} + +#[gpui::test] +fn test_editing_disjoint_excerpts(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(0, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(1, 4), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb"); + + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx)); + view.update(cx, |view, cx| { + assert_eq!(view.text(cx), "aaaa\nbbbb"); + view.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(0, 0)..Point::new(0, 0), + Point::new(1, 0)..Point::new(1, 0), + ]) + }); + + view.handle_input("X", cx); + assert_eq!(view.text(cx), "Xaaaa\nXbbbb"); + assert_eq!( + view.selections.ranges(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), + ] + ) + }); +} + +#[gpui::test] +fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let markers = vec![('[', ']').into(), ('(', ')').into()]; + let (initial_text, mut excerpt_ranges) = marked_text_ranges_by( + indoc! {" + [aaaa + (bbbb] + cccc)", + }, + markers.clone(), + ); + let excerpt_ranges = markers.into_iter().map(|marker| { + let context = excerpt_ranges.remove(&marker).unwrap()[0].clone(); + ExcerptRange { + context, + primary: None, + } + }); + let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpts(buffer, excerpt_ranges, cx); + multibuffer + }); + + let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx)); + view.update(cx, |view, cx| { + let (expected_text, selection_ranges) = marked_text_ranges( + indoc! {" + aaaa + bˇbbb + bˇbbˇb + cccc" + }, + true, + ); + assert_eq!(view.text(cx), expected_text); + view.change_selections(None, cx, |s| s.select_ranges(selection_ranges)); + + view.handle_input("X", cx); + + let (expected_text, expected_selections) = marked_text_ranges( + indoc! {" + aaaa + bXˇbbXb + bXˇbbXˇb + cccc" + }, + false, + ); + assert_eq!(view.text(cx), expected_text); + assert_eq!(view.selections.ranges(cx), expected_selections); + + view.newline(&Newline, cx); + let (expected_text, expected_selections) = marked_text_ranges( + indoc! 
{" + aaaa + bX + ˇbbX + b + bX + ˇbbX + ˇb + cccc" + }, + false, + ); + assert_eq!(view.text(cx), expected_text); + assert_eq!(view.selections.ranges(cx), expected_selections); + }); +} + +#[gpui::test] +fn test_refresh_selections(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); + let mut excerpt1_id = None; + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + excerpt1_id = multibuffer + .push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 4), + primary: None, + }, + ], + cx, + ) + .into_iter() + .next(); + multibuffer + }); + assert_eq!( + multibuffer.read(cx).read(cx).text(), + "aaaa\nbbbb\nbbbb\ncccc" + ); + let (_, editor) = cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(multibuffer.clone(), cx); + let snapshot = editor.snapshot(cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 3)..Point::new(1, 3)]) + }); + editor.begin_selection(Point::new(2, 1).to_display_point(&snapshot), true, 1, cx); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), + ] + ); + editor + }); + + // Refreshing selections is a no-op when excerpts haven't changed. + editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.refresh(); + }); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), + ] + ); + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx); + }); + editor.update(cx, |editor, cx| { + // Removing an excerpt causes the first selection to become degenerate. + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(0, 0)..Point::new(0, 0), + Point::new(0, 1)..Point::new(0, 1) + ] + ); + + // Refreshing selections will relocate the first selection to the original buffer + // location. 
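    // [Editor's note] `refresh` re-resolves the selection anchors against the current
    // multibuffer contents, so selections that pointed into the removed excerpt are
    // expected to end up at the corresponding positions in the remaining excerpt,
    // which is what the assertions below verify.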
+ editor.change_selections(None, cx, |s| { + s.refresh(); + }); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(0, 3)..Point::new(0, 3) + ] + ); + assert!(editor.selections.pending_anchor().is_some()); + }); +} + +#[gpui::test] +fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppContext) { + cx.set_global(Settings::test(cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); + let mut excerpt1_id = None; + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + excerpt1_id = multibuffer + .push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 4), + primary: None, + }, + ], + cx, + ) + .into_iter() + .next(); + multibuffer + }); + assert_eq!( + multibuffer.read(cx).read(cx).text(), + "aaaa\nbbbb\nbbbb\ncccc" + ); + let (_, editor) = cx.add_window(Default::default(), |cx| { + let mut editor = build_editor(multibuffer.clone(), cx); + let snapshot = editor.snapshot(cx); + editor.begin_selection(Point::new(1, 3).to_display_point(&snapshot), false, 1, cx); + assert_eq!( + editor.selections.ranges(cx), + [Point::new(1, 3)..Point::new(1, 3)] + ); + editor + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx); + }); + editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.ranges(cx), + [Point::new(0, 0)..Point::new(0, 0)] + ); + + // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. + editor.change_selections(None, cx, |s| { + s.refresh(); + }); + assert_eq!( + editor.selections.ranges(cx), + [Point::new(0, 3)..Point::new(0, 3)] + ); + assert!(editor.selections.pending_anchor().is_some()); + }); +} + +#[gpui::test] +async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) { + cx.update(|cx| cx.set_global(Settings::test(cx))); + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/* ".to_string(), + end: " */".to_string(), + close: true, + newline: true, + }, + ], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_indents_query("") + .unwrap(), + ); + + let text = concat!( + "{ }\n", // Suppress rustfmt + " x\n", // + " /* */\n", // + "x\n", // + "{{} }\n", // + ); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (_, view) = cx.add_window(|cx| build_editor(buffer, cx)); + view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), + DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), + DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4), + ]) + }); + view.newline(&Newline, cx); + + assert_eq!( + view.buffer().read(cx).read(cx).text(), + concat!( + "{ \n", // Suppress rustfmt + "\n", // + "}\n", // + " x\n", // + " /* \n", // + " \n", // + " */\n", // + "x\n", // + "{{} \n", // + "}\n", // + ) + ); + }); +} + +#[gpui::test] +fn test_highlighted_ranges(cx: &mut gpui::MutableAppContext) { + let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), 
cx); + + cx.set_global(Settings::test(cx)); + let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + + editor.update(cx, |editor, cx| { + struct Type1; + struct Type2; + + let buffer = buffer.read(cx).snapshot(cx); + + let anchor_range = + |range: Range| buffer.anchor_after(range.start)..buffer.anchor_after(range.end); + + editor.highlight_background::( + vec![ + anchor_range(Point::new(2, 1)..Point::new(2, 3)), + anchor_range(Point::new(4, 2)..Point::new(4, 4)), + anchor_range(Point::new(6, 3)..Point::new(6, 5)), + anchor_range(Point::new(8, 4)..Point::new(8, 6)), + ], + |_| Color::red(), + cx, + ); + editor.highlight_background::( + vec![ + anchor_range(Point::new(3, 2)..Point::new(3, 5)), + anchor_range(Point::new(5, 3)..Point::new(5, 6)), + anchor_range(Point::new(7, 4)..Point::new(7, 7)), + anchor_range(Point::new(9, 5)..Point::new(9, 8)), + ], + |_| Color::green(), + cx, + ); + + let snapshot = editor.snapshot(cx); + let mut highlighted_ranges = editor.background_highlights_in_range( + anchor_range(Point::new(3, 4)..Point::new(7, 4)), + &snapshot, + cx.global::().theme.as_ref(), + ); + // Enforce a consistent ordering based on color without relying on the ordering of the + // highlight's `TypeId` which is non-deterministic. + highlighted_ranges.sort_unstable_by_key(|(_, color)| *color); + assert_eq!( + highlighted_ranges, + &[ + ( + DisplayPoint::new(3, 2)..DisplayPoint::new(3, 5), + Color::green(), + ), + ( + DisplayPoint::new(5, 3)..DisplayPoint::new(5, 6), + Color::green(), + ), + ( + DisplayPoint::new(4, 2)..DisplayPoint::new(4, 4), + Color::red(), + ), + ( + DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5), + Color::red(), + ), + ] + ); + assert_eq!( + editor.background_highlights_in_range( + anchor_range(Point::new(5, 6)..Point::new(6, 4)), + &snapshot, + cx.global::().theme.as_ref(), + ), + &[( + DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5), + Color::red(), + )] + ); + }); +} + +#[gpui::test] +fn test_following(cx: &mut gpui::MutableAppContext) { + let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx); + + cx.set_global(Settings::test(cx)); + + let (_, leader) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx)); + let (_, follower) = cx.add_window( + WindowOptions { + bounds: WindowBounds::Fixed(RectF::from_points(vec2f(0., 0.), vec2f(10., 80.))), + ..Default::default() + }, + |cx| build_editor(buffer.clone(), cx), + ); + + let pending_update = Rc::new(RefCell::new(None)); + follower.update(cx, { + let update = pending_update.clone(); + |_, cx| { + cx.subscribe(&leader, move |_, leader, event, cx| { + leader + .read(cx) + .add_event_to_update_proto(event, &mut *update.borrow_mut(), cx); + }) + .detach(); + } + }); + + // Update the selections only + leader.update(cx, |leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([1..1])); + }); + follower.update(cx, |follower, cx| { + follower + .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) + .unwrap(); + }); + assert_eq!(follower.read(cx).selections.ranges(cx), vec![1..1]); + + // Update the scroll position only + leader.update(cx, |leader, cx| { + leader.set_scroll_position(vec2f(1.5, 3.5), cx); + }); + follower.update(cx, |follower, cx| { + follower + .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) + .unwrap(); + }); + assert_eq!( + follower.update(cx, |follower, cx| follower.scroll_position(cx)), + vec2f(1.5, 3.5) + ); + + // Update the selections and scroll position + leader.update(cx, 
|leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([0..0])); + leader.request_autoscroll(Autoscroll::Newest, cx); + leader.set_scroll_position(vec2f(1.5, 3.5), cx); + }); + follower.update(cx, |follower, cx| { + let initial_scroll_position = follower.scroll_position(cx); + follower + .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) + .unwrap(); + assert_eq!(follower.scroll_position(cx), initial_scroll_position); + assert!(follower.autoscroll_request.is_some()); + }); + assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0]); + + // Creating a pending selection that precedes another selection + leader.update(cx, |leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([1..1])); + leader.begin_selection(DisplayPoint::new(0, 0), true, 1, cx); + }); + follower.update(cx, |follower, cx| { + follower + .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) + .unwrap(); + }); + assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0, 1..1]); + + // Extend the pending selection so that it surrounds another selection + leader.update(cx, |leader, cx| { + leader.extend_selection(DisplayPoint::new(0, 2), 1, cx); + }); + follower.update(cx, |follower, cx| { + follower + .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx) + .unwrap(); + }); + assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..2]); +} + +#[test] +fn test_combine_syntax_and_fuzzy_match_highlights() { + let string = "abcdefghijklmnop"; + let syntax_ranges = [ + ( + 0..3, + HighlightStyle { + color: Some(Color::red()), + ..Default::default() + }, + ), + ( + 4..8, + HighlightStyle { + color: Some(Color::green()), + ..Default::default() + }, + ), + ]; + let match_indices = [4, 6, 7, 8]; + assert_eq!( + combine_syntax_and_fuzzy_match_highlights( + string, + Default::default(), + syntax_ranges.into_iter(), + &match_indices, + ), + &[ + ( + 0..3, + HighlightStyle { + color: Some(Color::red()), + ..Default::default() + }, + ), + ( + 4..5, + HighlightStyle { + color: Some(Color::green()), + weight: Some(fonts::Weight::BOLD), + ..Default::default() + }, + ), + ( + 5..6, + HighlightStyle { + color: Some(Color::green()), + ..Default::default() + }, + ), + ( + 6..8, + HighlightStyle { + color: Some(Color::green()), + weight: Some(fonts::Weight::BOLD), + ..Default::default() + }, + ), + ( + 8..9, + HighlightStyle { + weight: Some(fonts::Weight::BOLD), + ..Default::default() + }, + ), + ] + ); +} + +fn empty_range(row: usize, column: usize) -> Range { + let point = DisplayPoint::new(row as u32, column as u32); + point..point +} + +fn assert_selection_ranges(marked_text: &str, view: &mut Editor, cx: &mut ViewContext) { + let (text, ranges) = marked_text_ranges(marked_text, true); + assert_eq!(view.text(cx), text); + assert_eq!( + view.selections.ranges(cx), + ranges, + "Assert selections are {}", + marked_text + ); +} From 63e1c839fefec43b0f797c15286ebef477d13815 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 13:32:49 -0700 Subject: [PATCH 127/140] Rename language::tests -> language::buffer_tests --- crates/language/src/{tests.rs => buffer_tests.rs} | 0 crates/language/src/language.rs | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) rename crates/language/src/{tests.rs => buffer_tests.rs} (100%) diff --git a/crates/language/src/tests.rs b/crates/language/src/buffer_tests.rs similarity index 100% rename from crates/language/src/tests.rs rename to crates/language/src/buffer_tests.rs diff --git 
a/crates/language/src/language.rs b/crates/language/src/language.rs index c7c5def833..bb75edbc32 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -4,8 +4,9 @@ mod highlight_map; mod outline; pub mod proto; mod syntax_map; + #[cfg(test)] -mod tests; +mod buffer_tests; use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; From 8411d886aca5d09bb6e0b913a6689a99adea84a2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 15:13:29 -0700 Subject: [PATCH 128/140] Fix multi-line string formatting in editor_test.rs --- crates/editor/src/editor_tests.rs | 1300 ++++++++++++++--------------- 1 file changed, 649 insertions(+), 651 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index c2840cc17b..ac84c0ef1a 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1,9 +1,8 @@ +use super::*; use crate::test::{ assert_text_with_selections, build_editor, select_ranges, EditorLspTestContext, EditorTestContext, }; - -use super::*; use futures::StreamExt; use gpui::{ geometry::rect::RectF, @@ -418,12 +417,12 @@ fn test_canceling_pending_selection(cx: &mut gpui::MutableAppContext) { fn test_clone(cx: &mut gpui::MutableAppContext) { let (text, selection_ranges) = marked_text_ranges( indoc! {" - one - two - threeˇ - four - fiveˇ - "}, + one + two + threeˇ + four + fiveˇ + "}, true, ); cx.set_global(Settings::test(cx)); @@ -624,22 +623,22 @@ fn test_fold(cx: &mut gpui::MutableAppContext) { cx.set_global(Settings::test(cx)); let buffer = MultiBuffer::build_simple( &" - impl Foo { - // Hello! + impl Foo { + // Hello! - fn a() { - 1 - } - - fn b() { - 2 - } - - fn c() { - 3 - } + fn a() { + 1 } - " + + fn b() { + 2 + } + + fn c() { + 3 + } + } + " .unindent(), cx, ); @@ -653,20 +652,20 @@ fn test_fold(cx: &mut gpui::MutableAppContext) { assert_eq!( view.display_text(cx), " - impl Foo { - // Hello! + impl Foo { + // Hello! - fn a() { - 1 - } - - fn b() {… - } - - fn c() {… - } + fn a() { + 1 } - " + + fn b() {… + } + + fn c() {… + } + } + " .unindent(), ); @@ -674,9 +673,9 @@ fn test_fold(cx: &mut gpui::MutableAppContext) { assert_eq!( view.display_text(cx), " - impl Foo {… - } - " + impl Foo {… + } + " .unindent(), ); @@ -684,20 +683,20 @@ fn test_fold(cx: &mut gpui::MutableAppContext) { assert_eq!( view.display_text(cx), " - impl Foo { - // Hello! + impl Foo { + // Hello! - fn a() { - 1 - } - - fn b() {… - } - - fn c() {… - } + fn a() { + 1 } - " + + fn b() {… + } + + fn c() {… + } + } + " .unindent(), ); @@ -1264,14 +1263,14 @@ fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) { cx.set_global(Settings::test(cx)); let buffer = MultiBuffer::build_simple( " - a - b( - X - ) - c( - X - ) - " + a + b( + X + ) + c( + X + ) + " .unindent() .as_str(), cx, @@ -1301,10 +1300,10 @@ fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) { assert_eq!( buffer.read(cx).text(), " - a - b() - c() - " + a + b() + c() + " .unindent() ); }); @@ -1322,12 +1321,12 @@ fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) { assert_eq!( editor.text(cx), " - a - b( - ) - c( - ) - " + a + b( + ) + c( + ) + " .unindent() ); @@ -1362,33 +1361,33 @@ async fn test_newline_below(cx: &mut gpui::TestAppContext) { cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); cx.set_state(indoc! 
{" - const a: ˇA = ( - (ˇ - «const_functionˇ»(ˇ), - so«mˇ»et«hˇ»ing_ˇelse,ˇ - )ˇ - ˇ);ˇ - "}); + const a: ˇA = ( + (ˇ + «const_functionˇ»(ˇ), + so«mˇ»et«hˇ»ing_ˇelse,ˇ + )ˇ + ˇ);ˇ + "}); cx.update_editor(|e, cx| e.newline_below(&NewlineBelow, cx)); cx.assert_editor_state(indoc! {" - const a: A = ( - ˇ - ( - ˇ - const_function(), - ˇ - ˇ - something_else, - ˇ - ˇ - ˇ - ˇ - ) - ˇ - ); + const a: A = ( ˇ + ( + ˇ + const_function(), + ˇ + ˇ + something_else, + ˇ + ˇ + ˇ + ˇ + ) ˇ - "}); + ); + ˇ + ˇ + "}); } #[gpui::test] @@ -1427,26 +1426,26 @@ async fn test_tab(cx: &mut gpui::TestAppContext) { }); }); cx.set_state(indoc! {" - ˇabˇc - ˇ🏀ˇ🏀ˇefg - dˇ - "}); + ˇabˇc + ˇ🏀ˇ🏀ˇefg + dˇ + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - ˇab ˇc - ˇ🏀 ˇ🏀 ˇefg - d ˇ - "}); + ˇab ˇc + ˇ🏀 ˇ🏀 ˇefg + d ˇ + "}); cx.set_state(indoc! {" - a - «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» - "}); + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - a - «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» - "}); + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); } #[gpui::test] @@ -1466,45 +1465,45 @@ async fn test_tab_on_blank_line_auto_indents(cx: &mut gpui::TestAppContext) { // a soft tab. cursors that are to the left of the suggested indent // auto-indent their line. cx.set_state(indoc! {" - ˇ - const a: B = ( - c( - d( - ˇ - ) - ˇ - ˇ ) - ); - "}); + ˇ + const a: B = ( + c( + d( + ˇ + ) + ˇ + ˇ ) + ); + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - ˇ - const a: B = ( - c( - d( - ˇ - ) + ˇ + const a: B = ( + c( + d( ˇ - ˇ) - ); - "}); + ) + ˇ + ˇ) + ); + "}); // handle auto-indent when there are multiple cursors on the same line cx.set_state(indoc! {" - const a: B = ( - c( - ˇ ˇ - ˇ ) - ); - "}); + const a: B = ( + c( + ˇ ˇ + ˇ ) + ); + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - const a: B = ( - c( - ˇ - ˇ) - ); - "}); + const a: B = ( + c( + ˇ + ˇ) + ); + "}); } #[gpui::test] @@ -1512,68 +1511,68 @@ async fn test_indent_outdent(cx: &mut gpui::TestAppContext) { let mut cx = EditorTestContext::new(cx); cx.set_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); + «oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); + «oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); + «oneˇ» «twoˇ» + three + four + "}); // select across line ending cx.set_state(indoc! {" - one two - t«hree - ˇ» four - "}); + one two + t«hree + ˇ» four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ» four - "}); + one two + t«hree + ˇ» four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ» four - "}); + one two + t«hree + ˇ» four + "}); // Ensure that indenting/outdenting works when the cursor is at column 0. cx.set_state(indoc! {" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); cx.set_state(indoc! {" - one two - ˇ three - four - "}); + one two + ˇ three + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! 
{" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); } #[gpui::test] @@ -1587,90 +1586,90 @@ async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) { // select two ranges on one line cx.set_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); + «oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - \t«oneˇ» «twoˇ» - three - four - "}); + \t«oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - \t\t«oneˇ» «twoˇ» - three - four - "}); + \t\t«oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - \t«oneˇ» «twoˇ» - three - four - "}); + \t«oneˇ» «twoˇ» + three + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - «oneˇ» «twoˇ» - three - four - "}); + «oneˇ» «twoˇ» + three + four + "}); // select across a line ending cx.set_state(indoc! {" - one two - t«hree - ˇ»four - "}); + one two + t«hree + ˇ»four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - one two - \tt«hree - ˇ»four - "}); + one two + \tt«hree + ˇ»four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - one two - \t\tt«hree - ˇ»four - "}); + one two + \t\tt«hree + ˇ»four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - one two - \tt«hree - ˇ»four - "}); + one two + \tt«hree + ˇ»four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - one two - t«hree - ˇ»four - "}); + one two + t«hree + ˇ»four + "}); // Ensure that indenting/outdenting works when the cursor is at column 0. cx.set_state(indoc! {" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); cx.update_editor(|e, cx| e.tab(&Tab, cx)); cx.assert_editor_state(indoc! {" - one two - \tˇthree - four - "}); + one two + \tˇthree + four + "}); cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); cx.assert_editor_state(indoc! {" - one two - ˇthree - four - "}); + one two + ˇthree + four + "}); } #[gpui::test] @@ -1739,21 +1738,21 @@ fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) { assert_eq!( editor.text(cx), indoc! {" - a = 1 - b = 2 + a = 1 + b = 2 - const c: usize = 3; - "} + const c: usize = 3; + "} ); select_ranges( &mut editor, indoc! {" - «aˇ» = 1 - b = 2 + «aˇ» = 1 + b = 2 - «const c:ˇ» usize = 3; - "}, + «const c:ˇ» usize = 3; + "}, cx, ); @@ -1761,22 +1760,22 @@ fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) { assert_text_with_selections( &mut editor, indoc! {" - «aˇ» = 1 - b = 2 + «aˇ» = 1 + b = 2 - «const c:ˇ» usize = 3; - "}, + «const c:ˇ» usize = 3; + "}, cx, ); editor.tab_prev(&TabPrev, cx); assert_text_with_selections( &mut editor, indoc! {" - «aˇ» = 1 - b = 2 + «aˇ» = 1 + b = 2 - «const c:ˇ» usize = 3; - "}, + «const c:ˇ» usize = 3; + "}, cx, ); @@ -1790,45 +1789,45 @@ async fn test_backspace(cx: &mut gpui::TestAppContext) { // Basic backspace cx.set_state(indoc! {" - onˇe two three - fou«rˇ» five six - seven «ˇeight nine - »ten - "}); + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); cx.assert_editor_state(indoc! 
{" - oˇe two three - fouˇ five six - seven ˇten - "}); + oˇe two three + fouˇ five six + seven ˇten + "}); // Test backspace inside and around indents cx.set_state(indoc! {" - zero - ˇone - ˇtwo - ˇ ˇ ˇ three - ˇ ˇ four - "}); - cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); - cx.assert_editor_state(indoc! {" - zero + zero ˇone ˇtwo - ˇ threeˇ four - "}); + ˇ ˇ ˇ three + ˇ ˇ four + "}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + zero + ˇone + ˇtwo + ˇ threeˇ four + "}); // Test backspace with line_mode set to true cx.update_editor(|e, _| e.selections.line_mode = true); cx.set_state(indoc! {" - The ˇquick ˇbrown - fox jumps over - the lazy dog - ˇThe qu«ick bˇ»rown"}); + The ˇquick ˇbrown + fox jumps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); cx.assert_editor_state(indoc! {" - ˇfox jumps over - the lazy dogˇ"}); + ˇfox jumps over + the lazy dogˇ"}); } #[gpui::test] @@ -1836,25 +1835,25 @@ async fn test_delete(cx: &mut gpui::TestAppContext) { let mut cx = EditorTestContext::new(cx); cx.set_state(indoc! {" - onˇe two three - fou«rˇ» five six - seven «ˇeight nine - »ten - "}); + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); cx.update_editor(|e, cx| e.delete(&Delete, cx)); cx.assert_editor_state(indoc! {" - onˇ two three - fouˇ five six - seven ˇten - "}); + onˇ two three + fouˇ five six + seven ˇten + "}); // Test backspace with line_mode set to true cx.update_editor(|e, _| e.selections.line_mode = true); cx.set_state(indoc! {" - The ˇquick ˇbrown - fox «ˇjum»ps over - the lazy dog - ˇThe qu«ick bˇ»rown"}); + The ˇquick ˇbrown + fox «ˇjum»ps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); cx.assert_editor_state("ˇthe lazy dogˇ"); } @@ -2191,57 +2190,57 @@ async fn test_clipboard(cx: &mut gpui::TestAppContext) { e.handle_input(") ", cx); }); cx.assert_editor_state(indoc! {" - ( one✅ - three - five ) ˇtwo one✅ four three six five ( one✅ - three - five ) ˇ"}); + ( one✅ + three + five ) ˇtwo one✅ four three six five ( one✅ + three + five ) ˇ"}); // Cut with three selections, one of which is full-line. cx.set_state(indoc! {" - 1«2ˇ»3 - 4ˇ567 - «8ˇ»9"}); + 1«2ˇ»3 + 4ˇ567 + «8ˇ»9"}); cx.update_editor(|e, cx| e.cut(&Cut, cx)); cx.assert_editor_state(indoc! {" - 1ˇ3 - ˇ9"}); + 1ˇ3 + ˇ9"}); // Paste with three selections, noticing how the copied selection that was full-line // gets inserted before the second cursor. cx.set_state(indoc! {" - 1ˇ3 - 9ˇ - «oˇ»ne"}); + 1ˇ3 + 9ˇ + «oˇ»ne"}); cx.update_editor(|e, cx| e.paste(&Paste, cx)); cx.assert_editor_state(indoc! {" - 12ˇ3 - 4567 - 9ˇ - 8ˇne"}); + 12ˇ3 + 4567 + 9ˇ + 8ˇne"}); // Copy with a single cursor only, which writes the whole line into the clipboard. cx.set_state(indoc! {" - The quick brown - fox juˇmps over - the lazy dog"}); + The quick brown + fox juˇmps over + the lazy dog"}); cx.update_editor(|e, cx| e.copy(&Copy, cx)); cx.cx.assert_clipboard_content(Some("fox jumps over\n")); // Paste with three selections, noticing how the copied full-line selection is inserted // before the empty selections but replaces the selection that is non-empty. cx.set_state(indoc! {" - Tˇhe quick brown - «foˇ»x jumps over - tˇhe lazy dog"}); + Tˇhe quick brown + «foˇ»x jumps over + tˇhe lazy dog"}); cx.update_editor(|e, cx| e.paste(&Paste, cx)); cx.assert_editor_state(indoc! 
{" - fox jumps over - Tˇhe quick brown - fox jumps over - ˇx jumps over - fox jumps over - tˇhe lazy dog"}); + fox jumps over + Tˇhe quick brown + fox jumps over + ˇx jumps over + fox jumps over + tˇhe lazy dog"}); } #[gpui::test] @@ -2255,105 +2254,105 @@ async fn test_paste_multiline(cx: &mut gpui::TestAppContext) { // Cut an indented block, without the leading whitespace. cx.set_state(indoc! {" - const a: B = ( - c(), - «d( - e, - f - )ˇ» - ); - "}); + const a: B = ( + c(), + «d( + e, + f + )ˇ» + ); + "}); cx.update_editor(|e, cx| e.cut(&Cut, cx)); cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - ˇ - ); - "}); + const a: B = ( + c(), + ˇ + ); + "}); // Paste it at the same position. cx.update_editor(|e, cx| e.paste(&Paste, cx)); cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - d( - e, - f - )ˇ - ); - "}); - - // Paste it at a line with a lower indent level. - cx.set_state(indoc! {" - ˇ - const a: B = ( - c(), - ); - "}); - cx.update_editor(|e, cx| e.paste(&Paste, cx)); - cx.assert_editor_state(indoc! {" + const a: B = ( + c(), d( e, f )ˇ - const a: B = ( - c(), - ); - "}); + ); + "}); + + // Paste it at a line with a lower indent level. + cx.set_state(indoc! {" + ˇ + const a: B = ( + c(), + ); + "}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + d( + e, + f + )ˇ + const a: B = ( + c(), + ); + "}); // Cut an indented block, with the leading whitespace. cx.set_state(indoc! {" - const a: B = ( - c(), - « d( - e, - f - ) - ˇ»); - "}); + const a: B = ( + c(), + « d( + e, + f + ) + ˇ»); + "}); cx.update_editor(|e, cx| e.cut(&Cut, cx)); cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - ˇ); - "}); + const a: B = ( + c(), + ˇ); + "}); // Paste it at the same position. cx.update_editor(|e, cx| e.paste(&Paste, cx)); cx.assert_editor_state(indoc! {" - const a: B = ( - c(), - d( - e, - f - ) - ˇ); - "}); + const a: B = ( + c(), + d( + e, + f + ) + ˇ); + "}); // Paste it at a line with a higher indent level. cx.set_state(indoc! {" - const a: B = ( - c(), - d( - e, - fˇ - ) - ); - "}); + const a: B = ( + c(), + d( + e, + fˇ + ) + ); + "}); cx.update_editor(|e, cx| e.paste(&Paste, cx)); cx.assert_editor_state(indoc! 
{" - const a: B = ( - c(), - d( + const a: B = ( + c(), + d( + e, + f d( e, - f d( - e, - f - ) - ˇ + f ) - ); - "}); + ˇ + ) + ); + "}); } #[gpui::test] @@ -2706,12 +2705,12 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) { )); let text = r#" - use mod1::mod2::{mod3, mod4}; + use mod1::mod2::{mod3, mod4}; - fn fn_1(param1: bool, param2: &str) { - let var1 = "text"; - } - "# + fn fn_1(param1: bool, param2: &str) { + let var1 = "text"; + } + "# .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); @@ -2865,7 +2864,7 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) { r#" (_ "(" ")" @end) @indent (_ "{" "}" @end) @indent - "#, + "#, ) .unwrap(), ); @@ -2935,10 +2934,10 @@ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { cx.set_state( &r#" - 🏀ˇ - εˇ - ❤️ˇ - "# + 🏀ˇ + εˇ + ❤️ˇ + "# .unindent(), ); @@ -2950,10 +2949,10 @@ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &" - 🏀{{{ˇ}}} - ε{{{ˇ}}} - ❤️{{{ˇ}}} - " + 🏀{{{ˇ}}} + ε{{{ˇ}}} + ❤️{{{ˇ}}} + " .unindent(), ); @@ -2966,19 +2965,19 @@ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &" - 🏀{{{}}}}ˇ - ε{{{}}}}ˇ - ❤️{{{}}}}ˇ - " + 🏀{{{}}}}ˇ + ε{{{}}}}ˇ + ❤️{{{}}}}ˇ + " .unindent(), ); // autoclose multi-character pairs cx.set_state( &" - ˇ - ˇ - " + ˇ + ˇ + " .unindent(), ); cx.update_editor(|view, cx| { @@ -2987,9 +2986,9 @@ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &" - /*ˇ */ - /*ˇ */ - " + /*ˇ */ + /*ˇ */ + " .unindent(), ); @@ -2997,17 +2996,17 @@ async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { // does not autoclose. cx.set_state( &" - /ˇ - ˇ - " + /ˇ + ˇ + " .unindent(), ); cx.update_editor(|view, cx| view.handle_input("*", cx)); cx.assert_editor_state( &" - /*ˇ */ - *ˇ - " + /*ˇ */ + *ˇ + " .unindent(), ); @@ -3055,10 +3054,10 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { ) .with_injection_query( r#" - (script_element - (raw_text) @content - (#set! "language" "javascript")) - "#, + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, ) .unwrap(), ); @@ -3100,12 +3099,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { cx.set_state( &r#" - ˇ - - ˇ - "# + ˇ + + ˇ + "# .unindent(), ); @@ -3130,12 +3129,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - - - - "# + + + + "# .unindent(), ); @@ -3148,12 +3147,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" -
- - - "# + + + + "# .unindent(), ); @@ -3165,12 +3164,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - - - - "# + + + + "# .unindent(), ); cx.update_editor(|editor, cx| { @@ -3178,24 +3177,24 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - ˇ - - ˇ - "# + ˇ + + ˇ + "# .unindent(), ); // Reset cx.set_state( &r#" - ˇ - - ˇ - "# + ˇ + + ˇ + "# .unindent(), ); @@ -3204,12 +3203,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - <ˇ> - - <ˇ> - "# + <ˇ> + + <ˇ> + "# .unindent(), ); @@ -3219,12 +3218,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - ˇ - - ˇ - "# + ˇ + + ˇ + "# .unindent(), ); @@ -3235,12 +3234,12 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { }); cx.assert_editor_state( &r#" - /*ˇ - - /*ˇ - "# + /*ˇ + + /*ˇ + "# .unindent(), ); } @@ -3262,10 +3261,10 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { )); let text = r#" - a - b - c - "# + a + b + c + "# .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); @@ -3292,7 +3291,7 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { {{{a}}} {{{b}}} {{{c}}} - " + " .unindent() ); assert_eq!( @@ -3311,7 +3310,7 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { a b c - " + " .unindent() ); assert_eq!( @@ -3343,10 +3342,10 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { )); let text = r#" - a - b - c - "# + a + b + c + "# .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); @@ -3374,7 +3373,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { a{{_}} b{{_}} c{{_}} - " + " .unindent() ); assert_eq!( @@ -3394,7 +3393,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { a{} b{} c{} - " + " .unindent() ); assert_eq!( @@ -3413,7 +3412,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { a b c - " + " .unindent() ); assert_eq!( @@ -3433,10 +3432,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { let (text, insertion_ranges) = marked_text_ranges( indoc! {" - a.ˇ b - a.ˇ b - a.ˇ b - "}, + a.ˇ b + a.ˇ b + a.ˇ b + "}, false, ); @@ -3460,10 +3459,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! {" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, ); // Can't move earlier than the first tab stop @@ -3472,10 +3471,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! {" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, ); assert!(editor.move_to_next_snippet_tabstop(cx)); @@ -3483,10 +3482,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! {" - a.f(one, «two», three) b - a.f(one, «two», three) b - a.f(one, «two», three) b - "}, + a.f(one, «two», three) b + a.f(one, «two», three) b + a.f(one, «two», three) b + "}, ); editor.move_to_prev_snippet_tabstop(cx); @@ -3494,10 +3493,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! 
{" - a.f(«one», two, «three») b - a.f(«one», two, «three») b - a.f(«one», two, «three») b - "}, + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, ); assert!(editor.move_to_next_snippet_tabstop(cx)); @@ -3505,20 +3504,20 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! {" - a.f(one, «two», three) b - a.f(one, «two», three) b - a.f(one, «two», three) b - "}, + a.f(one, «two», three) b + a.f(one, «two», three) b + a.f(one, «two», three) b + "}, ); assert!(editor.move_to_next_snippet_tabstop(cx)); assert( editor, cx, indoc! {" - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - "}, + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + "}, ); // As soon as the last tab stop is reached, snippet state is gone @@ -3527,10 +3526,10 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) { editor, cx, indoc! {" - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - a.f(one, two, three)ˇ b - "}, + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + "}, ); }); } @@ -3861,18 +3860,18 @@ async fn test_completion(cx: &mut gpui::TestAppContext) { .await; cx.set_state(indoc! {" - oneˇ - two - three - "}); + oneˇ + two + three + "}); cx.simulate_keystroke("."); handle_completion_request( &mut cx, indoc! {" - one.|<> - two - three - "}, + one.|<> + two + three + "}, vec!["first_completion", "second_completion"], ) .await; @@ -3885,57 +3884,56 @@ async fn test_completion(cx: &mut gpui::TestAppContext) { .unwrap() }); cx.assert_editor_state(indoc! {" - one.second_completionˇ - two - three - "}); + one.second_completionˇ + two + three + "}); handle_resolve_completion_request( &mut cx, Some(( indoc! {" - one.second_completion - two - threeˇ - "}, + one.second_completion + two + threeˇ + "}, "\nadditional edit", )), ) .await; apply_additional_edits.await.unwrap(); cx.assert_editor_state(indoc! {" - one.second_completionˇ - two - three - additional edit - "}); + one.second_completionˇ + two + three + additional edit + "}); cx.set_state(indoc! {" - one.second_completion - twoˇ - threeˇ - additional edit - "}); + one.second_completion + twoˇ + threeˇ + additional edit + "}); cx.simulate_keystroke(" "); assert!(cx.editor(|e, _| e.context_menu.is_none())); cx.simulate_keystroke("s"); assert!(cx.editor(|e, _| e.context_menu.is_none())); cx.assert_editor_state(indoc! {" - one.second_completion - two sˇ - three sˇ - additional edit - "}); - // + one.second_completion + two sˇ + three sˇ + additional edit + "}); handle_completion_request( &mut cx, indoc! {" - one.second_completion - two s - three - additional edit - "}, + one.second_completion + two s + three + additional edit + "}, vec!["fourth_completion", "fifth_completion", "sixth_completion"], ) .await; @@ -3947,11 +3945,11 @@ async fn test_completion(cx: &mut gpui::TestAppContext) { handle_completion_request( &mut cx, indoc! {" - one.second_completion - two si - three - additional edit - "}, + one.second_completion + two si + three + additional edit + "}, vec!["fourth_completion", "fifth_completion", "sixth_completion"], ) .await; @@ -3964,11 +3962,11 @@ async fn test_completion(cx: &mut gpui::TestAppContext) { .unwrap() }); cx.assert_editor_state(indoc! 
{" - one.second_completion - two sixth_completionˇ - three sixth_completionˇ - additional edit - "}); + one.second_completion + two sixth_completionˇ + three sixth_completionˇ + additional edit + "}); handle_resolve_completion_request(&mut cx, None).await; apply_additional_edits.await.unwrap(); @@ -4084,12 +4082,12 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { )); let text = " - fn a() { - //b(); - // c(); - // d(); - } - " + fn a() { + //b(); + // c(); + // d(); + } + " .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); @@ -4109,12 +4107,12 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { assert_eq!( editor.text(cx), " - fn a() { - b(); - c(); - d(); - } - " + fn a() { + b(); + c(); + d(); + } + " .unindent() ); @@ -4127,12 +4125,12 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { assert_eq!( editor.text(cx), " - fn a() { - // b(); - // c(); - // d(); - } - " + fn a() { + // b(); + // c(); + // d(); + } + " .unindent() ); @@ -4144,12 +4142,12 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { assert_eq!( editor.text(cx), " - fn a() { - // b(); - c(); - // d(); - } - " + fn a() { + // b(); + c(); + // d(); + } + " .unindent() ); }); @@ -4170,10 +4168,10 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { ) .with_injection_query( r#" - (script_element - (raw_text) @content - (#set! "language" "javascript")) - "#, + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, ) .unwrap(), ); @@ -4199,28 +4197,28 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { // Toggle comments for empty selections cx.set_state( &r#" -

A

ˇ -

B

ˇ -

C

ˇ - "# +

A

ˇ +

B

ˇ +

C

ˇ + "# .unindent(), ); cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); cx.assert_editor_state( &r#" - - - - "# + + + + "# .unindent(), ); cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); cx.assert_editor_state( &r#" -

A

ˇ -

B

ˇ -

C

ˇ - "# +

A

ˇ +

B

ˇ +

C

ˇ + "# .unindent(), ); @@ -4228,32 +4226,32 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { // multiple selections occupy a given line. cx.set_state( &r#" -

-

ˇ»B

ˇ -

-

ˇ»D

ˇ - "# +

+

ˇ»B

ˇ +

+

ˇ»D

ˇ + "# .unindent(), ); cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); cx.assert_editor_state( &r#" - - - "# + + + "# .unindent(), ); cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); cx.assert_editor_state( &r#" -

-

ˇ»B

ˇ -

-

ˇ»D

ˇ - "# +

+

ˇ»B

ˇ +

+

ˇ»D

ˇ + "# .unindent(), ); @@ -4261,20 +4259,20 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { // selections. cx.set_state( &r#" - ˇ - "# + ˇ + "# .unindent(), ); cx.foreground().run_until_parked(); cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx)); cx.assert_editor_state( &r#" - - // ˇvar x = new Y(); - - "# + + // ˇvar x = new Y(); + + "# .unindent(), ); } @@ -4332,9 +4330,9 @@ fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { let markers = vec![('[', ']').into(), ('(', ')').into()]; let (initial_text, mut excerpt_ranges) = marked_text_ranges_by( indoc! {" - [aaaa - (bbbb] - cccc)", + [aaaa + (bbbb] + cccc)", }, markers.clone(), ); @@ -4356,10 +4354,10 @@ fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { view.update(cx, |view, cx| { let (expected_text, selection_ranges) = marked_text_ranges( indoc! {" - aaaa - bˇbbb - bˇbbˇb - cccc" + aaaa + bˇbbb + bˇbbˇb + cccc" }, true, ); @@ -4370,10 +4368,10 @@ fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { let (expected_text, expected_selections) = marked_text_ranges( indoc! {" - aaaa - bXˇbbXb - bXˇbbXˇb - cccc" + aaaa + bXˇbbXb + bXˇbbXˇb + cccc" }, false, ); @@ -4383,14 +4381,14 @@ fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { view.newline(&Newline, cx); let (expected_text, expected_selections) = marked_text_ranges( indoc! {" - aaaa - bX - ˇbbX - b - bX - ˇbbX - ˇb - cccc" + aaaa + bX + ˇbbX + b + bX + ˇbbX + ˇb + cccc" }, false, ); @@ -4580,7 +4578,7 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) { ); let text = concat!( - "{ }\n", // Suppress rustfmt + "{ }\n", // " x\n", // " /* */\n", // "x\n", // From 4508d94a3ef5d3d873d65e130e1fbfa9dd1bf74b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 17:03:23 -0700 Subject: [PATCH 129/140] In deterministic executor, ensure fake timers are ordered by wake time Previously, advancing the clock would fail to wake a timer that was set *after* another time whose wake time had not yet arrived. 
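
As a rough illustration of the invariant this fix establishes (a minimal,
standalone sketch, not the gpui executor API; the function names below are
hypothetical): keeping `pending_timers` sorted by wake time lets the
clock-advancing code drain a prefix of the list, so a timer that was
registered later but is due sooner can no longer be skipped.

    use std::time::Duration;

    // Hypothetical stand-in for the executor's pending-timer list: (id, wake_at).
    fn insert_timer(pending: &mut Vec<(usize, Duration)>, id: usize, wake_at: Duration) {
        // Binary-search insertion keeps the list ordered by wake time,
        // mirroring the binary_search_by_key call in this patch.
        let ix = match pending.binary_search_by_key(&wake_at, |&(_, t)| t) {
            Ok(ix) | Err(ix) => ix,
        };
        pending.insert(ix, (id, wake_at));
    }

    fn advance_clock(pending: &mut Vec<(usize, Duration)>, now: Duration) -> Vec<usize> {
        // Because the list stays sorted, every due timer sits at the front.
        let mut woken = Vec::new();
        while pending.first().map_or(false, |&(_, t)| t <= now) {
            woken.push(pending.remove(0).0);
        }
        woken
    }

    fn main() {
        let mut pending = Vec::new();
        insert_timer(&mut pending, 1, Duration::from_millis(500));
        insert_timer(&mut pending, 2, Duration::from_millis(100)); // set later, due sooner
        // With sorted insertion, timer 2 is woken even though it was created second.
        assert_eq!(advance_clock(&mut pending, Duration::from_millis(200)), vec![2]);
    }
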
--- crates/gpui/src/executor.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 980da91167..0639445b0d 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -325,7 +325,12 @@ impl Deterministic { let mut state = self.state.lock(); let wakeup_at = state.now + duration; let id = util::post_inc(&mut state.next_timer_id); - state.pending_timers.push((id, wakeup_at, tx)); + match state + .pending_timers + .binary_search_by_key(&wakeup_at, |e| e.1) + { + Ok(ix) | Err(ix) => state.pending_timers.insert(ix, (id, wakeup_at, tx)), + } let state = self.state.clone(); Timer::Deterministic(DeterministicTimer { rx, id, state }) } From 47a8e4222ac0ee8f2d57f1b83fb1cb17d3c2461d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 6 Oct 2022 17:03:38 -0700 Subject: [PATCH 130/140] Don't allow multiple concurrent formatting requests for the same buffer Co-authored-by: Nathan Sobo --- crates/editor/src/editor_tests.rs | 57 +++++++++++++++++++++++++++++++ crates/project/src/project.rs | 33 ++++++++++++++---- 2 files changed, 83 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index ac84c0ef1a..430b958407 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -3845,6 +3845,63 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_concurrent_format_requests(cx: &mut gpui::TestAppContext) { + cx.foreground().forbid_parking(); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + one.twoˇ + "}); + + // The format request takes a long time. When it completes, it inserts + // a newline and an indent before the `.` + cx.lsp + .handle_request::(move |_, cx| { + let executor = cx.background(); + async move { + executor.timer(Duration::from_millis(100)).await; + Ok(Some(vec![lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 3)), + new_text: "\n ".into(), + }])) + } + }); + + // Submit a format request. + let format_1 = cx + .update_editor(|editor, cx| editor.format(&Format, cx)) + .unwrap(); + cx.foreground().run_until_parked(); + + // Submit a second format request. + let format_2 = cx + .update_editor(|editor, cx| editor.format(&Format, cx)) + .unwrap(); + cx.foreground().run_until_parked(); + + // Wait for both format requests to complete + cx.foreground().advance_clock(Duration::from_millis(200)); + cx.foreground().start_waiting(); + format_1.await.unwrap(); + cx.foreground().start_waiting(); + format_2.await.unwrap(); + + // The formatting edits only happens once. + cx.assert_editor_state(indoc! 
{" + one + .twoˇ + "}); +} + #[gpui::test] async fn test_completion(cx: &mut gpui::TestAppContext) { let mut cx = EditorLspTestContext::new_rust( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dc783f1818..3aa2f45264 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8,10 +8,7 @@ pub mod worktree; mod project_tests; use anyhow::{anyhow, Context, Result}; -use client::{ - proto::{self}, - Client, PeerId, TypedEnvelope, User, UserStore, -}; +use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt}; @@ -66,7 +63,7 @@ use std::{ time::Instant, }; use thiserror::Error; -use util::{post_inc, ResultExt, TryFutureExt as _}; +use util::{defer, post_inc, ResultExt, TryFutureExt as _}; pub use db::Db; pub use fs::*; @@ -128,6 +125,7 @@ pub struct Project { opened_buffers: HashMap, incomplete_buffers: HashMap>, buffer_snapshots: HashMap>, + buffers_being_formatted: HashSet, nonce: u128, initialized_persistent_state: bool, _maintain_buffer_languages: Task<()>, @@ -512,6 +510,7 @@ impl Project { language_server_statuses: Default::default(), last_workspace_edits_by_language_server: Default::default(), language_server_settings: Default::default(), + buffers_being_formatted: Default::default(), next_language_server_id: 0, nonce: StdRng::from_entropy().gen(), initialized_persistent_state: false, @@ -627,6 +626,7 @@ impl Project { last_workspace_edits_by_language_server: Default::default(), next_language_server_id: 0, opened_buffers: Default::default(), + buffers_being_formatted: Default::default(), buffer_snapshots: Default::default(), nonce: StdRng::from_entropy().gen(), initialized_persistent_state: false, @@ -3113,7 +3113,26 @@ impl Project { .await?; } - for (buffer, buffer_abs_path, language_server) in local_buffers { + // Do not allow multiple concurrent formatting requests for the + // same buffer. 
+ this.update(&mut cx, |this, _| { + local_buffers + .retain(|(buffer, _, _)| this.buffers_being_formatted.insert(buffer.id())); + }); + let _cleanup = defer({ + let this = this.clone(); + let mut cx = cx.clone(); + let local_buffers = &local_buffers; + move || { + this.update(&mut cx, |this, _| { + for (buffer, _, _) in local_buffers { + this.buffers_being_formatted.remove(&buffer.id()); + } + }); + } + }); + + for (buffer, buffer_abs_path, language_server) in &local_buffers { let (format_on_save, formatter, tab_size) = buffer.read_with(&cx, |buffer, cx| { let settings = cx.global::(); let language_name = buffer.language().map(|language| language.name()); @@ -3165,7 +3184,7 @@ impl Project { buffer.forget_transaction(transaction.id) }); } - project_transaction.0.insert(buffer, transaction); + project_transaction.0.insert(buffer.clone(), transaction); } } From d67fad8dca3b81966ec63cb6e3ef85eff5bbc010 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 6 Oct 2022 22:20:10 -0400 Subject: [PATCH 131/140] Extend a test to cover repos not at worktree root --- crates/collab/src/integration_tests.rs | 142 ++++++++++++++++++++++--- crates/project/src/fs.rs | 3 +- 2 files changed, 130 insertions(+), 15 deletions(-) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 58a8efc411..7e84c70601 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -966,7 +966,14 @@ async fn test_git_diff_base_change( .insert_tree( "/dir", json!({ - ".git": { + ".git": {}, + "sub": { + ".git": {}, + "b.txt": " + one + two + three + ".unindent(), }, "a.txt": " one @@ -977,6 +984,11 @@ async fn test_git_diff_base_change( ) .await; + let (project_local, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_remote = client_b + .build_remote_project(&project_local, cx_a, cx_b) + .await; + let diff_base = " one three @@ -998,12 +1010,9 @@ async fn test_git_diff_base_change( ) .await; - let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; - let project_b = client_b.build_remote_project(&project_a, cx_a, cx_b).await; - // Create the buffer - let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "/dir/a.txt"), cx)) + let buffer_local_a = project_local + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); @@ -1011,7 +1020,7 @@ async fn test_git_diff_base_change( executor.run_until_parked(); // Smoke test diffing - buffer_a.read_with(cx_a, |buffer, _| { + buffer_local_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), @@ -1022,8 +1031,8 @@ async fn test_git_diff_base_change( }); // Create remote buffer - let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "/dir/a.txt"), cx)) + let buffer_remote_a = project_remote + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); @@ -1031,7 +1040,7 @@ async fn test_git_diff_base_change( executor.run_until_parked(); // Smoke test diffing - buffer_b.read_with(cx_b, |buffer, _| { + buffer_remote_a.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), @@ -1050,11 +1059,11 @@ async fn test_git_diff_base_change( ) .await; - // Wait for buffer_a to receive it + // Wait for buffer_local_a to receive it executor.run_until_parked(); // Smoke 
test new diffing - buffer_a.read_with(cx_a, |buffer, _| { + buffer_local_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( @@ -1066,7 +1075,114 @@ async fn test_git_diff_base_change( }); // Smoke test B - buffer_b.read_with(cx_b, |buffer, _| { + buffer_remote_a.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); + + //Nested git dir + + let diff_base = " + one + three + " + .unindent(); + + let new_diff_base = " + one + two + " + .unindent(); + + client_a + .fs + .as_fake() + .set_index_for_repo( + Path::new("/dir/sub/.git"), + &[(Path::new("b.txt"), diff_base.clone())], + ) + .await; + + // Create the buffer + let buffer_local_b = project_local + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .await + .unwrap(); + + // Wait for it to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + buffer_local_b.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", "two\n")], + ); + }); + + // Create remote buffer + let buffer_remote_b = project_remote + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .await + .unwrap(); + + // Wait remote buffer to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + buffer_remote_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", "two\n")], + ); + }); + + client_a + .fs + .as_fake() + .set_index_for_repo( + Path::new("/dir/sub/.git"), + &[(Path::new("b.txt"), new_diff_base.clone())], + ) + .await; + + // Wait for buffer_local_b to receive it + executor.run_until_parked(); + + // Smoke test new diffing + buffer_local_b.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); + println!("{:?}", buffer.as_rope().to_string()); + println!("{:?}", buffer.diff_base()); + println!( + "{:?}", + buffer + .snapshot() + .git_diff_hunks_in_range(0..4) + .collect::>() + ); + + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); + + // Smoke test B + buffer_remote_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( buffer.snapshot().git_diff_hunks_in_range(0..4), diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 812842a354..a9a0a1707f 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -491,7 +491,6 @@ impl FakeFs { } pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) { - let content_path = dot_git.parent().unwrap(); let mut state = self.state.lock().await; let entry = state.read_path(dot_git).await.unwrap(); let mut entry = entry.lock().await; @@ -504,7 +503,7 @@ impl FakeFs { repo_state.index_contents.extend( head_state .iter() - .map(|(path, content)| (content_path.join(path), content.clone())), + .map(|(path, content)| (path.to_path_buf(), content.clone())), ); state.emit_event([dot_git]); From 188b775fa6a21ec3c91f6e524b1e0990f74b3b56 Mon Sep 17 
00:00:00 2001 From: Mikayla Maki Date: Fri, 7 Oct 2022 10:03:09 -0700 Subject: [PATCH 132/140] Fixed non-block terminal cursors being displayed incorrectly --- crates/terminal/src/terminal_element.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/terminal/src/terminal_element.rs b/crates/terminal/src/terminal_element.rs index e7fd69fe49..0f037863af 100644 --- a/crates/terminal/src/terminal_element.rs +++ b/crates/terminal/src/terminal_element.rs @@ -680,12 +680,12 @@ impl Element for TerminalElement { let focused = self.focused; TerminalElement::shape_cursor(cursor_point, dimensions, &cursor_text).map( move |(cursor_position, block_width)| { - let shape = match cursor.shape { - AlacCursorShape::Block if !focused => CursorShape::Hollow, - AlacCursorShape::Block => CursorShape::Block, - AlacCursorShape::Underline => CursorShape::Underscore, - AlacCursorShape::Beam => CursorShape::Bar, - AlacCursorShape::HollowBlock => CursorShape::Hollow, + let (shape, text) = match cursor.shape { + AlacCursorShape::Block if !focused => (CursorShape::Hollow, None), + AlacCursorShape::Block => (CursorShape::Block, Some(cursor_text)), + AlacCursorShape::Underline => (CursorShape::Underscore, None), + AlacCursorShape::Beam => (CursorShape::Bar, None), + AlacCursorShape::HollowBlock => (CursorShape::Hollow, None), //This case is handled in the if wrapping the whole cursor layout AlacCursorShape::Hidden => unreachable!(), }; @@ -696,7 +696,7 @@ impl Element for TerminalElement { dimensions.line_height, terminal_theme.colors.cursor, shape, - Some(cursor_text), + text, ) }, ) From bf50a8ad8e6d0e01c713e32908036c3404b46ba4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 7 Oct 2022 11:37:39 -0700 Subject: [PATCH 133/140] Implemented a simplistic version of correct cmd-k behavior --- crates/terminal/src/terminal.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 473bbd4f52..b86043b122 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -618,8 +618,11 @@ impl Terminal { term.resize(new_size); } InternalEvent::Clear => { - self.write_to_pty("\x0c".to_string()); term.clear_screen(ClearMode::Saved); + + term.clear_screen(ClearMode::All); + + term.grid_mut().cursor.point = Point::new(Line(0), Column(0)); } InternalEvent::Scroll(scroll) => { term.scroll_display(*scroll); From 15595a67faafd1ca68bd01631d9164c1fc6fba5e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 7 Oct 2022 12:04:26 -0700 Subject: [PATCH 134/140] Added a horrible hacky way of doing cmd-k correctly. 
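
In outline (see the diff below), this replaces the simpler ClearMode::All
approach from the previous commit: clear the saved scrollback, reset the
region above the cursor, copy the in-progress prompt line up to row zero,
move the cursor to row zero in its current column, and finally reset
everything below the new cursor line.
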
--- crates/terminal/src/terminal.rs | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index b86043b122..004815a510 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -618,11 +618,34 @@ impl Terminal { term.resize(new_size); } InternalEvent::Clear => { + // Clear back buffer term.clear_screen(ClearMode::Saved); - term.clear_screen(ClearMode::All); + let cursor = term.grid().cursor.point; - term.grid_mut().cursor.point = Point::new(Line(0), Column(0)); + // Clear the lines above + term.grid_mut().reset_region(..cursor.line); + + // Copy the current line up + let line = term.grid()[cursor.line][..cursor.column] + .iter() + .cloned() + .enumerate() + .collect::>(); + + for (i, cell) in line { + term.grid_mut()[Line(0)][Column(i)] = cell; + } + + // Reset the cursor + term.grid_mut().cursor.point = + Point::new(Line(0), term.grid_mut().cursor.point.column); + let new_cursor = term.grid().cursor.point; + + // Clear the lines below the new cursor + if (new_cursor.line.0 as usize) < term.screen_lines() - 1 { + term.grid_mut().reset_region((new_cursor.line + 1)..); + } } InternalEvent::Scroll(scroll) => { term.scroll_display(*scroll); From e15f27106d713a3a519cc44b02e0c41eccd0a31a Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 7 Oct 2022 12:20:54 -0400 Subject: [PATCH 135/140] Reset buffer git diff when setting diff base to None Co-Authored-By: Joseph Lyons --- crates/git/src/diff.rs | 8 ++++++++ crates/language/src/buffer.rs | 5 +++++ 2 files changed, 13 insertions(+) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index abf874e2bb..4191e5d260 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -104,6 +104,11 @@ impl BufferDiff { }) } + pub fn clear(&mut self, buffer: &text::BufferSnapshot) { + self.last_buffer_version = Some(buffer.version().clone()); + self.tree = SumTree::new(); + } + pub fn needs_update(&self, buffer: &text::BufferSnapshot) -> bool { match &self.last_buffer_version { Some(last) => buffer.version().changed_since(last), @@ -296,6 +301,9 @@ mod tests { &diff_base, &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")], ); + + diff.clear(&buffer); + assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]); } #[test] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index db9aa029f2..a3c0c54d01 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -707,6 +707,11 @@ impl Buffer { } }) .detach() + } else { + let snapshot = self.snapshot(); + self.git_diff_status.diff.clear(&snapshot); + self.git_diff_update_count += 1; + cx.notify(); } } From 070c4bc503b752b2632f34f8313bf2e71439eb64 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 7 Oct 2022 12:44:20 -0700 Subject: [PATCH 136/140] Add color for 'variable.special' and use it in highlight queries --- crates/zed/src/languages/cpp/highlights.scm | 2 +- crates/zed/src/languages/css/highlights.scm | 11 +++++++---- crates/zed/src/languages/javascript/highlights.scm | 4 ++-- crates/zed/src/languages/rust/highlights.scm | 2 +- crates/zed/src/languages/typescript/highlights.scm | 4 ++-- styles/src/themes/common/base16.ts | 4 ++++ 6 files changed, 17 insertions(+), 10 deletions(-) diff --git a/crates/zed/src/languages/cpp/highlights.scm b/crates/zed/src/languages/cpp/highlights.scm index 2dd9188308..b832fb4e2d 100644 --- a/crates/zed/src/languages/cpp/highlights.scm +++ 
b/crates/zed/src/languages/cpp/highlights.scm @@ -41,7 +41,7 @@ (field_identifier) @property (statement_identifier) @label -(this) @variable.builtin +(this) @variable.special [ "break" diff --git a/crates/zed/src/languages/css/highlights.scm b/crates/zed/src/languages/css/highlights.scm index 3638837af7..aba156633a 100644 --- a/crates/zed/src/languages/css/highlights.scm +++ b/crates/zed/src/languages/css/highlights.scm @@ -41,10 +41,13 @@ (function_name) @function -((property_name) @variable - (#match? @variable "^--")) -((plain_value) @variable - (#match? @variable "^--")) +( + [ + (property_name) + (plain_value) + ] @variable.special + (#match? @variable.special "^--") +) [ "@media" diff --git a/crates/zed/src/languages/javascript/highlights.scm b/crates/zed/src/languages/javascript/highlights.scm index d3921cdbc8..773780a8e0 100644 --- a/crates/zed/src/languages/javascript/highlights.scm +++ b/crates/zed/src/languages/javascript/highlights.scm @@ -55,8 +55,8 @@ ; Literals -(this) @variable.builtin -(super) @variable.builtin +(this) @variable.special +(super) @variable.special [ (true) diff --git a/crates/zed/src/languages/rust/highlights.scm b/crates/zed/src/languages/rust/highlights.scm index 72482b4073..f4a451529e 100644 --- a/crates/zed/src/languages/rust/highlights.scm +++ b/crates/zed/src/languages/rust/highlights.scm @@ -1,6 +1,6 @@ (type_identifier) @type (primitive_type) @type.builtin -(self) @variable.builtin +(self) @variable.special (field_identifier) @property (call_expression diff --git a/crates/zed/src/languages/typescript/highlights.scm b/crates/zed/src/languages/typescript/highlights.scm index d3921cdbc8..773780a8e0 100644 --- a/crates/zed/src/languages/typescript/highlights.scm +++ b/crates/zed/src/languages/typescript/highlights.scm @@ -55,8 +55,8 @@ ; Literals -(this) @variable.builtin -(super) @variable.builtin +(this) @variable.special +(super) @variable.special [ (true) diff --git a/styles/src/themes/common/base16.ts b/styles/src/themes/common/base16.ts index 326928252e..1811167719 100644 --- a/styles/src/themes/common/base16.ts +++ b/styles/src/themes/common/base16.ts @@ -185,6 +185,10 @@ export function createTheme( color: sample(ramps.neutral, 7), weight: fontWeights.normal, }, + "variable.special": { + color: sample(ramps.blue, 0.80), + weight: fontWeights.normal, + }, comment: { color: sample(ramps.neutral, 5), weight: fontWeights.normal, From fcf13b44fb0be7e41323fadeb8e9626ce0c26767 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 7 Oct 2022 12:44:39 -0700 Subject: [PATCH 137/140] CSS: color '#' the same as the rest of the color --- crates/zed/src/languages/css/highlights.scm | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/zed/src/languages/css/highlights.scm b/crates/zed/src/languages/css/highlights.scm index aba156633a..e271d8583c 100644 --- a/crates/zed/src/languages/css/highlights.scm +++ b/crates/zed/src/languages/css/highlights.scm @@ -73,7 +73,6 @@ (unit) @type [ - "#" "," ":" ] @punctuation.delimiter From 95cb9ceac9d7e90f0aa2138edff711c0ced17c45 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 7 Oct 2022 12:44:55 -0700 Subject: [PATCH 138/140] Collapse variant and type into the same color --- crates/zed/src/languages/rust/highlights.scm | 13 ++----------- styles/src/themes/common/base16.ts | 8 ++------ styles/src/themes/common/theme.ts | 2 +- 3 files changed, 5 insertions(+), 18 deletions(-) diff --git a/crates/zed/src/languages/rust/highlights.scm b/crates/zed/src/languages/rust/highlights.scm index f4a451529e..d717c5d459 
100644 --- a/crates/zed/src/languages/rust/highlights.scm +++ b/crates/zed/src/languages/rust/highlights.scm @@ -27,17 +27,8 @@ ; Identifier conventions -; Assume uppercase names are enum constructors -((identifier) @variant - (#match? @variant "^[A-Z]")) - -; Assume that uppercase names in paths are types -((scoped_identifier - path: (identifier) @type) - (#match? @type "^[A-Z]")) -((scoped_identifier - path: (scoped_identifier - name: (identifier) @type)) +; Assume uppercase names are types/enum-constructors +((identifier) @type (#match? @type "^[A-Z]")) ; Assume all-caps names are constants diff --git a/styles/src/themes/common/base16.ts b/styles/src/themes/common/base16.ts index 1811167719..cd6d46a771 100644 --- a/styles/src/themes/common/base16.ts +++ b/styles/src/themes/common/base16.ts @@ -214,15 +214,11 @@ export function createTheme( weight: fontWeights.normal, }, constructor: { - color: sample(ramps.blue, 0.5), - weight: fontWeights.normal, - }, - variant: { - color: sample(ramps.blue, 0.5), + color: sample(ramps.cyan, 0.5), weight: fontWeights.normal, }, property: { - color: sample(ramps.blue, 0.5), + color: sample(ramps.blue, 0.6), weight: fontWeights.normal, }, enum: { diff --git a/styles/src/themes/common/theme.ts b/styles/src/themes/common/theme.ts index b93148ae2c..a787443f31 100644 --- a/styles/src/themes/common/theme.ts +++ b/styles/src/themes/common/theme.ts @@ -43,7 +43,7 @@ export interface Syntax { keyword: SyntaxHighlightStyle; function: SyntaxHighlightStyle; type: SyntaxHighlightStyle; - variant: SyntaxHighlightStyle; + constructor: SyntaxHighlightStyle; property: SyntaxHighlightStyle; enum: SyntaxHighlightStyle; operator: SyntaxHighlightStyle; From 6ecf870c665514ba7c812dad52d6cd629f1c5c84 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 7 Oct 2022 12:46:49 -0700 Subject: [PATCH 139/140] Tweak SCREAMING_SNAKE_CASE regexes in highlight queries --- crates/zed/src/languages/c/highlights.scm | 2 +- crates/zed/src/languages/cpp/highlights.scm | 2 +- crates/zed/src/languages/javascript/highlights.scm | 2 +- crates/zed/src/languages/python/highlights.scm | 2 +- crates/zed/src/languages/rust/highlights.scm | 2 +- crates/zed/src/languages/typescript/highlights.scm | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/zed/src/languages/c/highlights.scm b/crates/zed/src/languages/c/highlights.scm index 007c871ffa..064ec61a37 100644 --- a/crates/zed/src/languages/c/highlights.scm +++ b/crates/zed/src/languages/c/highlights.scm @@ -86,7 +86,7 @@ (identifier) @variable ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]*$")) + (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) (call_expression function: (identifier) @function) diff --git a/crates/zed/src/languages/cpp/highlights.scm b/crates/zed/src/languages/cpp/highlights.scm index b832fb4e2d..bcfa01ca5c 100644 --- a/crates/zed/src/languages/cpp/highlights.scm +++ b/crates/zed/src/languages/cpp/highlights.scm @@ -37,7 +37,7 @@ (type_identifier) @type ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]*$")) + (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) (field_identifier) @property (statement_identifier) @label diff --git a/crates/zed/src/languages/javascript/highlights.scm b/crates/zed/src/languages/javascript/highlights.scm index 773780a8e0..bd1986b6b3 100644 --- a/crates/zed/src/languages/javascript/highlights.scm +++ b/crates/zed/src/languages/javascript/highlights.scm @@ -51,7 +51,7 @@ (shorthand_property_identifier) (shorthand_property_identifier_pattern) ] @constant - (#match? 
@constant "^[A-Z_][A-Z\\d_]+$")) + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals diff --git a/crates/zed/src/languages/python/highlights.scm b/crates/zed/src/languages/python/highlights.scm index 118af92aaa..71ab963d82 100644 --- a/crates/zed/src/languages/python/highlights.scm +++ b/crates/zed/src/languages/python/highlights.scm @@ -21,7 +21,7 @@ (#match? @type "^[A-Z]")) ((identifier) @constant - (#match? @constant "^[A-Z][A-Z_]*$")) + (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) ; Builtin functions diff --git a/crates/zed/src/languages/rust/highlights.scm b/crates/zed/src/languages/rust/highlights.scm index d717c5d459..98ea1ee40e 100644 --- a/crates/zed/src/languages/rust/highlights.scm +++ b/crates/zed/src/languages/rust/highlights.scm @@ -33,7 +33,7 @@ ; Assume all-caps names are constants ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]+$")) + (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) [ "(" diff --git a/crates/zed/src/languages/typescript/highlights.scm b/crates/zed/src/languages/typescript/highlights.scm index 773780a8e0..bd1986b6b3 100644 --- a/crates/zed/src/languages/typescript/highlights.scm +++ b/crates/zed/src/languages/typescript/highlights.scm @@ -51,7 +51,7 @@ (shorthand_property_identifier) (shorthand_property_identifier_pattern) ] @constant - (#match? @constant "^[A-Z_][A-Z\\d_]+$")) + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals From e96abf1429174b80807baa85de97b68f59782288 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 7 Oct 2022 14:51:01 -0700 Subject: [PATCH 140/140] 0.59.0 --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 246dfbbbef..da2362670d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7236,7 +7236,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.58.0" +version = "0.59.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index cdf0e36eba..9a65fd0816 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.58.0" +version = "0.59.0" [lib] name = "zed"