Merge pull request #1935 from zed-industries/reconnections-2

Move in-memory server state to the database

commit f1b35981c2
45 changed files with 5643 additions and 4324 deletions

Cargo.lock (generated, 344 lines changed)
@@ -2,6 +2,12 @@
 # It is not intended for manual editing.
 version = 3
 
+[[package]]
+name = "Inflector"
+version = "0.11.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
+
 [[package]]
 name = "activity_indicator"
 version = "0.1.0"

@@ -107,6 +113,12 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "aliasable"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
+
 [[package]]
 name = "ambient-authority"
 version = "0.0.1"

@@ -562,6 +574,19 @@ dependencies = [
  "rustc-demangle",
 ]
 
+[[package]]
+name = "bae"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33b8de67cc41132507eeece2584804efcb15f85ba516e34c944b7667f480397a"
+dependencies = [
+ "heck 0.3.3",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "base64"
 version = "0.13.1"

@@ -650,6 +675,51 @@ dependencies = [
  "futures-lite",
 ]
 
+[[package]]
+name = "borsh"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa"
+dependencies = [
+ "borsh-derive",
+ "hashbrown 0.11.2",
+]
+
+[[package]]
+name = "borsh-derive"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775"
+dependencies = [
+ "borsh-derive-internal",
+ "borsh-schema-derive-internal",
+ "proc-macro-crate",
+ "proc-macro2",
+ "syn",
+]
+
+[[package]]
+name = "borsh-derive-internal"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "borsh-schema-derive-internal"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "breadcrumbs"
 version = "0.1.0"

@@ -693,6 +763,27 @@ version = "3.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
 
+[[package]]
+name = "bytecheck"
+version = "0.6.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d11cac2c12b5adc6570dad2ee1b87eff4955dac476fe12d81e5fdd352e52406f"
+dependencies = [
+ "bytecheck_derive",
+ "ptr_meta",
+]
+
+[[package]]
+name = "bytecheck_derive"
+version = "0.6.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13e576ebe98e605500b3c8041bb888e966653577172df6dd97398714eb30b9bf"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "bytemuck"
 version = "1.12.3"

@@ -850,6 +941,7 @@ dependencies = [
  "js-sys",
  "num-integer",
  "num-traits",
+ "serde",
  "time 0.1.45",
  "wasm-bindgen",
  "winapi 0.3.9",
@ -1041,7 +1133,6 @@ name = "collab"
|
|||
version = "0.2.5"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"async-tungstenite",
|
||||
"axum",
|
||||
"axum-extra",
|
||||
|
@ -1051,6 +1142,7 @@ dependencies = [
|
|||
"client",
|
||||
"collections",
|
||||
"ctor",
|
||||
"dashmap",
|
||||
"editor",
|
||||
"env_logger",
|
||||
"envy",
|
||||
|
@ -1074,6 +1166,8 @@ dependencies = [
|
|||
"reqwest",
|
||||
"rpc",
|
||||
"scrypt",
|
||||
"sea-orm",
|
||||
"sea-query",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
|
@@ -1546,6 +1640,19 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "dashmap"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+dependencies = [
+ "cfg-if 1.0.0",
+ "hashbrown 0.12.3",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core 0.9.5",
+]
+
 [[package]]
 name = "data-url"
 version = "0.1.1"

@@ -3107,9 +3214,9 @@ dependencies = [
 
 [[package]]
 name = "libsqlite3-sys"
-version = "0.25.2"
+version = "0.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa"
+checksum = "898745e570c7d0453cc1fbc4a701eb6c662ed54e8fec8b7d14be137ebeeb9d14"
 dependencies = [
  "cc",
  "pkg-config",

@@ -3858,6 +3965,29 @@ version = "6.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
 
+[[package]]
+name = "ouroboros"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbb50b356159620db6ac971c6d5c9ab788c9cc38a6f49619fca2a27acb062ca"
+dependencies = [
+ "aliasable",
+ "ouroboros_macro",
+]
+
+[[package]]
+name = "ouroboros_macro"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a0d9d1a6191c4f391f87219d1ea42b23f09ee84d64763cd05ee6ea88d9f384d"
+dependencies = [
+ "Inflector",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "outline"
 version = "0.1.0"

@@ -4216,6 +4346,15 @@ version = "0.2.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
+[[package]]
+name = "proc-macro-crate"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785"
+dependencies = [
+ "toml",
+]
+
 [[package]]
 name = "proc-macro-error"
 version = "1.0.4"

@@ -4461,6 +4600,26 @@ dependencies = [
  "cc",
 ]
 
+[[package]]
+name = "ptr_meta"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
+dependencies = [
+ "ptr_meta_derive",
+]
+
+[[package]]
+name = "ptr_meta_derive"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "pulldown-cmark"
 version = "0.9.2"

@@ -4697,6 +4856,15 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "rend"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79af64b4b6362ffba04eef3a4e10829718a4896dac19daa741851c86781edf95"
+dependencies = [
+ "bytecheck",
+]
+
 [[package]]
 name = "reqwest"
 version = "0.11.13"

@@ -4774,6 +4942,31 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "rkyv"
+version = "0.7.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cec2b3485b07d96ddfd3134767b8a447b45ea4eb91448d0a35180ec0ffd5ed15"
+dependencies = [
+ "bytecheck",
+ "hashbrown 0.12.3",
+ "ptr_meta",
+ "rend",
+ "rkyv_derive",
+ "seahash",
+]
+
+[[package]]
+name = "rkyv_derive"
+version = "0.7.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6eaedadc88b53e36dd32d940ed21ae4d850d5916f2581526921f553a72ac34c4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "rmp"
 version = "0.8.11"

@@ -4901,6 +5094,24 @@ dependencies = [
  "walkdir",
 ]
 
+[[package]]
+name = "rust_decimal"
+version = "1.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33c321ee4e17d2b7abe12b5d20c1231db708dd36185c8a21e9de5fed6da4dbe9"
+dependencies = [
+ "arrayvec 0.7.2",
+ "borsh",
+ "bytecheck",
+ "byteorder",
+ "bytes 1.3.0",
+ "num-traits",
+ "rand 0.8.5",
+ "rkyv",
+ "serde",
+ "serde_json",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.21"

@@ -4972,6 +5183,12 @@ dependencies = [
  "base64",
 ]
 
+[[package]]
+name = "rustversion"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8"
+
 [[package]]
 name = "rustybuzz"
 version = "0.3.0"

@@ -5113,6 +5330,109 @@ dependencies = [
  "untrusted",
 ]
 
+[[package]]
+name = "sea-orm"
+version = "0.10.5"
+source = "git+https://github.com/zed-industries/sea-orm?rev=18f4c691085712ad014a51792af75a9044bacee6#18f4c691085712ad014a51792af75a9044bacee6"
+dependencies = [
+ "async-stream",
+ "async-trait",
+ "chrono",
+ "futures 0.3.25",
+ "futures-util",
+ "log",
+ "ouroboros",
+ "rust_decimal",
+ "sea-orm-macros",
+ "sea-query",
+ "sea-query-binder",
+ "sea-strum",
+ "serde",
+ "serde_json",
+ "sqlx",
+ "thiserror",
+ "time 0.3.17",
+ "tracing",
+ "url",
+ "uuid 1.2.2",
+]
+
+[[package]]
+name = "sea-orm-macros"
+version = "0.10.5"
+source = "git+https://github.com/zed-industries/sea-orm?rev=18f4c691085712ad014a51792af75a9044bacee6#18f4c691085712ad014a51792af75a9044bacee6"
+dependencies = [
+ "bae",
+ "heck 0.3.3",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sea-query"
+version = "0.27.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4f0fc4d8e44e1d51c739a68d336252a18bc59553778075d5e32649be6ec92ed"
+dependencies = [
+ "chrono",
+ "rust_decimal",
+ "sea-query-derive",
+ "serde_json",
+ "time 0.3.17",
+ "uuid 1.2.2",
+]
+
+[[package]]
+name = "sea-query-binder"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c2585b89c985cfacfe0ec9fc9e7bb055b776c1a2581c4e3c6185af2b8bf8865"
+dependencies = [
+ "chrono",
+ "rust_decimal",
+ "sea-query",
+ "serde_json",
+ "sqlx",
+ "time 0.3.17",
+ "uuid 1.2.2",
+]
+
+[[package]]
+name = "sea-query-derive"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34cdc022b4f606353fe5dc85b09713a04e433323b70163e81513b141c6ae6eb5"
+dependencies = [
+ "heck 0.3.3",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "thiserror",
+]
+
+[[package]]
+name = "sea-strum"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "391d06a6007842cfe79ac6f7f53911b76dfd69fc9a6769f1cf6569d12ce20e1b"
+dependencies = [
+ "sea-strum_macros",
+]
+
+[[package]]
+name = "sea-strum_macros"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69b4397b825df6ccf1e98bcdabef3bbcfc47ff5853983467850eeab878384f21"
+dependencies = [
+ "heck 0.3.3",
+ "proc-macro2",
+ "quote",
+ "rustversion",
+ "syn",
+]
+
 [[package]]
 name = "seahash"
 version = "4.1.0"

@@ -5626,8 +5946,9 @@ dependencies = [
 
 [[package]]
 name = "sqlx"
-version = "0.6.2"
-source = "git+https://github.com/launchbadge/sqlx?rev=4b7053807c705df312bcb9b6281e184bf7534eb3#4b7053807c705df312bcb9b6281e184bf7534eb3"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "788841def501aabde58d3666fcea11351ec3962e6ea75dbcd05c84a71d68bcd1"
 dependencies = [
  "sqlx-core",
  "sqlx-macros",

@@ -5636,7 +5957,8 @@ dependencies = [
 [[package]]
 name = "sqlx-core"
 version = "0.6.2"
-source = "git+https://github.com/launchbadge/sqlx?rev=4b7053807c705df312bcb9b6281e184bf7534eb3#4b7053807c705df312bcb9b6281e184bf7534eb3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcbc16ddba161afc99e14d1713a453747a2b07fc097d2009f4c300ec99286105"
 dependencies = [
  "ahash",
  "atoi",

@@ -5644,6 +5966,7 @@ dependencies = [
  "bitflags",
  "byteorder",
  "bytes 1.3.0",
+ "chrono",
  "crc",
  "crossbeam-queue",
  "dirs 4.0.0",

@@ -5667,10 +5990,12 @@ dependencies = [
  "log",
  "md-5",
  "memchr",
+ "num-bigint",
  "once_cell",
  "paste",
  "percent-encoding",
  "rand 0.8.5",
+ "rust_decimal",
  "rustls 0.20.7",
  "rustls-pemfile",
  "serde",

@@ -5693,7 +6018,8 @@ dependencies = [
 [[package]]
 name = "sqlx-macros"
 version = "0.6.2"
-source = "git+https://github.com/launchbadge/sqlx?rev=4b7053807c705df312bcb9b6281e184bf7534eb3#4b7053807c705df312bcb9b6281e184bf7534eb3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b850fa514dc11f2ee85be9d055c512aa866746adfacd1cb42d867d68e6a5b0d9"
 dependencies = [
  "dotenvy",
  "either",

@@ -5712,7 +6038,8 @@ dependencies = [
 [[package]]
 name = "sqlx-rt"
 version = "0.6.2"
-source = "git+https://github.com/launchbadge/sqlx?rev=4b7053807c705df312bcb9b6281e184bf7534eb3#4b7053807c705df312bcb9b6281e184bf7534eb3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24c5b2d25fa654cc5f841750b8e1cdedbe21189bf9a9382ee90bfa9dd3562396"
 dependencies = [
  "once_cell",
  "tokio",

@@ -6870,6 +7197,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
 dependencies = [
  "getrandom 0.2.8",
+ "serde",
 ]
 
 [[package]]
----------------------------------------

@@ -22,7 +22,7 @@ pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut Mu
 #[derive(Clone)]
 pub struct IncomingCall {
     pub room_id: u64,
-    pub caller: Arc<User>,
+    pub calling_user: Arc<User>,
     pub participants: Vec<Arc<User>>,
     pub initial_project: Option<proto::ParticipantProject>,
 }

@@ -78,9 +78,9 @@ impl ActiveCall {
                     user_store.get_users(envelope.payload.participant_user_ids, cx)
                 })
                 .await?,
-            caller: user_store
+            calling_user: user_store
                 .update(&mut cx, |user_store, cx| {
-                    user_store.get_user(envelope.payload.caller_user_id, cx)
+                    user_store.get_user(envelope.payload.calling_user_id, cx)
                 })
                 .await?,
             initial_project: envelope.payload.initial_project,

@@ -110,13 +110,13 @@ impl ActiveCall {
 
     pub fn invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
-        if !self.pending_invites.insert(recipient_user_id) {
+        if !self.pending_invites.insert(called_user_id) {
            return Task::ready(Err(anyhow!("user was already invited")));
        }
 

@@ -136,13 +136,13 @@ impl ActiveCall {
            };
 
            room.update(&mut cx, |room, cx| {
-               room.call(recipient_user_id, initial_project_id, cx)
+               room.call(called_user_id, initial_project_id, cx)
            })
            .await?;
        } else {
            let room = cx
                .update(|cx| {
-                   Room::create(recipient_user_id, initial_project, client, user_store, cx)
+                   Room::create(called_user_id, initial_project, client, user_store, cx)
                })
                .await?;
 

@@ -155,7 +155,7 @@ impl ActiveCall {
 
            let result = invite.await;
            this.update(&mut cx, |this, cx| {
-               this.pending_invites.remove(&recipient_user_id);
+               this.pending_invites.remove(&called_user_id);
                cx.notify();
            });
            result

@@ -164,7 +164,7 @@ impl ActiveCall {
 
     pub fn cancel_invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let room_id = if let Some(room) = self.room() {

@@ -178,7 +178,7 @@ impl ActiveCall {
            client
                .request(proto::CancelCall {
                    room_id,
-                   recipient_user_id,
+                   called_user_id,
                })
                .await?;
            anyhow::Ok(())
----------------------------------------

@@ -10,7 +10,7 @@ use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext
 use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
 use postage::stream::Stream;
 use project::Project;
-use std::{mem, os::unix::prelude::OsStrExt, sync::Arc};
+use std::{mem, sync::Arc};
 use util::{post_inc, ResultExt};
 
 #[derive(Clone, Debug, PartialEq, Eq)]

@@ -53,7 +53,7 @@ impl Entity for Room {
 
     fn release(&mut self, _: &mut MutableAppContext) {
         if self.status.is_online() {
-            self.client.send(proto::LeaveRoom { id: self.id }).log_err();
+            self.client.send(proto::LeaveRoom {}).log_err();
         }
     }
 }

@@ -149,7 +149,7 @@ impl Room {
     }
 
     pub(crate) fn create(
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,

@@ -182,7 +182,7 @@ impl Room {
            match room
                .update(&mut cx, |room, cx| {
                    room.leave_when_empty = true;
-                   room.call(recipient_user_id, initial_project_id, cx)
+                   room.call(called_user_id, initial_project_id, cx)
                })
                .await
            {

@@ -241,7 +241,7 @@ impl Room {
        self.participant_user_ids.clear();
        self.subscriptions.clear();
        self.live_kit.take();
-       self.client.send(proto::LeaveRoom { id: self.id })?;
+       self.client.send(proto::LeaveRoom {})?;
        Ok(())
     }
 

@@ -294,6 +294,11 @@ impl Room {
            .position(|participant| Some(participant.user_id) == self.client.user_id());
        let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));
 
+       let pending_participant_user_ids = room
+           .pending_participants
+           .iter()
+           .map(|p| p.user_id)
+           .collect::<Vec<_>>();
        let remote_participant_user_ids = room
            .participants
            .iter()

@@ -303,7 +308,7 @@ impl Room {
        self.user_store.update(cx, move |user_store, cx| {
            (
                user_store.get_users(remote_participant_user_ids, cx),
-               user_store.get_users(room.pending_participant_user_ids, cx),
+               user_store.get_users(pending_participant_user_ids, cx),
            )
        });
        self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {

@@ -487,7 +492,7 @@ impl Room {
 
     pub(crate) fn call(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project_id: Option<u64>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {

@@ -503,7 +508,7 @@ impl Room {
            let result = client
                .request(proto::Call {
                    room_id,
-                   recipient_user_id,
+                   called_user_id,
                    initial_project_id,
                })
                .await;

@@ -538,7 +543,7 @@ impl Room {
                        id: worktree.id().to_proto(),
                        root_name: worktree.root_name().into(),
                        visible: worktree.is_visible(),
-                       abs_path: worktree.abs_path().as_os_str().as_bytes().to_vec(),
+                       abs_path: worktree.abs_path().to_string_lossy().into(),
                    }
                })
                .collect(),
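The abs_path change in the last hunk swaps the Unix-only byte encoding of the worktree path for a lossy UTF-8 string, which is why the os::unix::prelude::OsStrExt import disappears at the top of the file. A standalone sketch of the two representations follows; this is not Zed code, just the standard-library calls involved:

use std::path::Path;

fn main() {
    let path = Path::new("/home/user/project");

    // Old approach: raw bytes of the OS string. This only compiles on Unix,
    // because OsStrExt::as_bytes is a Unix-specific extension trait.
    #[cfg(unix)]
    {
        use std::os::unix::prelude::OsStrExt;
        let bytes: Vec<u8> = path.as_os_str().as_bytes().to_vec();
        println!("{} raw bytes", bytes.len());
    }

    // New approach: portable on all platforms; any invalid UTF-8 is
    // replaced with U+FFFD instead of being preserved byte-for-byte.
    let s: String = path.to_string_lossy().into();
    println!("{s}");
}

The trade-off is that a String is storable in a plain VARCHAR column, which fits the new database-backed schema, at the cost of exact fidelity for non-UTF-8 paths.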
----------------------------------------

@@ -16,8 +16,7 @@ use gpui::{
     actions,
     serde_json::{self, Value},
     AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext,
-    AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext,
-    ViewHandle,
+    AsyncAppContext, Entity, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
 };
 use http::HttpClient;
 use lazy_static::lazy_static;

@@ -32,6 +31,7 @@ use std::{
     convert::TryFrom,
     fmt::Write as _,
     future::Future,
+    marker::PhantomData,
     path::PathBuf,
     sync::{Arc, Weak},
     time::{Duration, Instant},

@@ -171,7 +171,7 @@ struct ClientState {
     entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
     _reconnect_task: Option<Task<()>>,
     reconnect_interval: Duration,
-    entities_by_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakEntityHandle>,
+    entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
     models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
     entity_types_by_message_type: HashMap<TypeId, TypeId>,
     #[allow(clippy::type_complexity)]

@@ -181,7 +181,7 @@ struct ClientState {
         dyn Send
             + Sync
             + Fn(
-                AnyEntityHandle,
+                Subscriber,
                 Box<dyn AnyTypedEnvelope>,
                 &Arc<Client>,
                 AsyncAppContext,

@@ -190,12 +190,13 @@ struct ClientState {
     >,
 }
 
-enum AnyWeakEntityHandle {
+enum WeakSubscriber {
     Model(AnyWeakModelHandle),
     View(AnyWeakViewHandle),
+    Pending(Vec<Box<dyn AnyTypedEnvelope>>),
 }
 
-enum AnyEntityHandle {
+enum Subscriber {
     Model(AnyModelHandle),
     View(AnyViewHandle),
 }

@@ -253,6 +254,54 @@ impl Drop for Subscription {
     }
 }
 
+pub struct PendingEntitySubscription<T: Entity> {
+    client: Arc<Client>,
+    remote_id: u64,
+    _entity_type: PhantomData<T>,
+    consumed: bool,
+}
+
+impl<T: Entity> PendingEntitySubscription<T> {
+    pub fn set_model(mut self, model: &ModelHandle<T>, cx: &mut AsyncAppContext) -> Subscription {
+        self.consumed = true;
+        let mut state = self.client.state.write();
+        let id = (TypeId::of::<T>(), self.remote_id);
+        let Some(WeakSubscriber::Pending(messages)) =
+            state.entities_by_type_and_remote_id.remove(&id)
+        else {
+            unreachable!()
+        };
+
+        state
+            .entities_by_type_and_remote_id
+            .insert(id, WeakSubscriber::Model(model.downgrade().into()));
+        drop(state);
+        for message in messages {
+            self.client.handle_message(message, cx);
+        }
+        Subscription::Entity {
+            client: Arc::downgrade(&self.client),
+            id,
+        }
+    }
+}
+
+impl<T: Entity> Drop for PendingEntitySubscription<T> {
+    fn drop(&mut self) {
+        if !self.consumed {
+            let mut state = self.client.state.write();
+            if let Some(WeakSubscriber::Pending(messages)) = state
+                .entities_by_type_and_remote_id
+                .remove(&(TypeId::of::<T>(), self.remote_id))
+            {
+                for message in messages {
+                    log::info!("unhandled message {}", message.payload_type_name());
+                }
+            }
+        }
+    }
+}
+
 impl Client {
     pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
         Arc::new(Self {

@@ -348,7 +397,11 @@ impl Client {
        let this = self.clone();
        let reconnect_interval = state.reconnect_interval;
        state._reconnect_task = Some(cx.spawn(|cx| async move {
+           #[cfg(any(test, feature = "test-support"))]
+           let mut rng = StdRng::seed_from_u64(0);
+           #[cfg(not(any(test, feature = "test-support")))]
+           let mut rng = StdRng::from_entropy();
 
            let mut delay = INITIAL_RECONNECTION_DELAY;
            while let Err(error) = this.authenticate_and_connect(true, &cx).await {
                log::error!("failed to connect {}", error);

@@ -386,26 +439,28 @@ impl Client {
        self.state
            .write()
            .entities_by_type_and_remote_id
-           .insert(id, AnyWeakEntityHandle::View(cx.weak_handle().into()));
+           .insert(id, WeakSubscriber::View(cx.weak_handle().into()));
        Subscription::Entity {
            client: Arc::downgrade(self),
            id,
        }
     }
 
-    pub fn add_model_for_remote_entity<T: Entity>(
+    pub fn subscribe_to_entity<T: Entity>(
        self: &Arc<Self>,
        remote_id: u64,
-       cx: &mut ModelContext<T>,
-    ) -> Subscription {
+    ) -> PendingEntitySubscription<T> {
        let id = (TypeId::of::<T>(), remote_id);
        self.state
            .write()
            .entities_by_type_and_remote_id
-           .insert(id, AnyWeakEntityHandle::Model(cx.weak_handle().into()));
-       Subscription::Entity {
-           client: Arc::downgrade(self),
-           id,
+           .insert(id, WeakSubscriber::Pending(Default::default()));
+
+       PendingEntitySubscription {
+           client: self.clone(),
+           remote_id,
+           consumed: false,
+           _entity_type: PhantomData,
        }
     }
 

@@ -433,7 +488,7 @@ impl Client {
        let prev_handler = state.message_handlers.insert(
            message_type_id,
            Arc::new(move |handle, envelope, client, cx| {
-               let handle = if let AnyEntityHandle::Model(handle) = handle {
+               let handle = if let Subscriber::Model(handle) = handle {
                    handle
                } else {
                    unreachable!();

@@ -487,7 +542,7 @@ impl Client {
        F: 'static + Future<Output = Result<()>>,
     {
        self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-           if let AnyEntityHandle::View(handle) = handle {
+           if let Subscriber::View(handle) = handle {
                handler(handle.downcast::<E>().unwrap(), message, client, cx)
            } else {
                unreachable!();

@@ -506,7 +561,7 @@ impl Client {
        F: 'static + Future<Output = Result<()>>,
     {
        self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-           if let AnyEntityHandle::Model(handle) = handle {
+           if let Subscriber::Model(handle) = handle {
                handler(handle.downcast::<E>().unwrap(), message, client, cx)
            } else {
                unreachable!();

@@ -521,7 +576,7 @@ impl Client {
        H: 'static
            + Send
            + Sync
-           + Fn(AnyEntityHandle, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+           + Fn(Subscriber, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
        F: 'static + Future<Output = Result<()>>,
     {
        let model_type_id = TypeId::of::<E>();

@@ -783,94 +838,8 @@ impl Client {
        let cx = cx.clone();
        let this = self.clone();
        async move {
-           let mut message_id = 0_usize;
            while let Some(message) = incoming.next().await {
-               let mut state = this.state.write();
-               message_id += 1;
-               let type_name = message.payload_type_name();
-               let payload_type_id = message.payload_type_id();
-               let sender_id = message.original_sender_id().map(|id| id.0);
-
-               let model = state
-                   .models_by_message_type
-                   .get(&payload_type_id)
-                   .and_then(|model| model.upgrade(&cx))
-                   .map(AnyEntityHandle::Model)
-                   .or_else(|| {
-                       let entity_type_id =
-                           *state.entity_types_by_message_type.get(&payload_type_id)?;
-                       let entity_id = state
-                           .entity_id_extractors
-                           .get(&message.payload_type_id())
-                           .map(|extract_entity_id| {
-                               (extract_entity_id)(message.as_ref())
-                           })?;
-
-                       let entity = state
-                           .entities_by_type_and_remote_id
-                           .get(&(entity_type_id, entity_id))?;
-                       if let Some(entity) = entity.upgrade(&cx) {
-                           Some(entity)
-                       } else {
-                           state
-                               .entities_by_type_and_remote_id
-                               .remove(&(entity_type_id, entity_id));
-                           None
-                       }
-                   });
-
-               let model = if let Some(model) = model {
-                   model
-               } else {
-                   log::info!("unhandled message {}", type_name);
-                   continue;
-               };
-
-               let handler = state.message_handlers.get(&payload_type_id).cloned();
-               // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
-               // It also ensures we don't hold the lock while yielding back to the executor, as
-               // that might cause the executor thread driving this future to block indefinitely.
-               drop(state);
-
-               if let Some(handler) = handler {
-                   let future = handler(model, message, &this, cx.clone());
-                   let client_id = this.id;
-                   log::debug!(
-                       "rpc message received. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                       client_id,
-                       message_id,
-                       sender_id,
-                       type_name
-                   );
-                   cx.foreground()
-                       .spawn(async move {
-                           match future.await {
-                               Ok(()) => {
-                                   log::debug!(
-                                       "rpc message handled. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                                       client_id,
-                                       message_id,
-                                       sender_id,
-                                       type_name
-                                   );
-                               }
-                               Err(error) => {
-                                   log::error!(
-                                       "error handling message. client_id:{}, message_id:{}, sender_id:{:?}, type:{}, error:{:?}",
-                                       client_id,
-                                       message_id,
-                                       sender_id,
-                                       type_name,
-                                       error
-                                   );
-                               }
-                           }
-                       })
-                       .detach();
-               } else {
-                   log::info!("unhandled message {}", type_name);
-               }
-
+               this.handle_message(message, &cx);
                // Don't starve the main thread when receiving lots of messages at once.
                smol::future::yield_now().await;
            }

@@ -1217,6 +1186,97 @@ impl Client {
        self.peer.respond_with_error(receipt, error)
     }
 
+    fn handle_message(
+        self: &Arc<Client>,
+        message: Box<dyn AnyTypedEnvelope>,
+        cx: &AsyncAppContext,
+    ) {
+        let mut state = self.state.write();
+        let type_name = message.payload_type_name();
+        let payload_type_id = message.payload_type_id();
+        let sender_id = message.original_sender_id().map(|id| id.0);
+
+        let mut subscriber = None;
+
+        if let Some(message_model) = state
+            .models_by_message_type
+            .get(&payload_type_id)
+            .and_then(|model| model.upgrade(cx))
+        {
+            subscriber = Some(Subscriber::Model(message_model));
+        } else if let Some((extract_entity_id, entity_type_id)) =
+            state.entity_id_extractors.get(&payload_type_id).zip(
+                state
+                    .entity_types_by_message_type
+                    .get(&payload_type_id)
+                    .copied(),
+            )
+        {
+            let entity_id = (extract_entity_id)(message.as_ref());
+
+            match state
+                .entities_by_type_and_remote_id
+                .get_mut(&(entity_type_id, entity_id))
+            {
+                Some(WeakSubscriber::Pending(pending)) => {
+                    pending.push(message);
+                    return;
+                }
+                Some(weak_subscriber @ _) => subscriber = weak_subscriber.upgrade(cx),
+                _ => {}
+            }
+        }
+
+        let subscriber = if let Some(subscriber) = subscriber {
+            subscriber
+        } else {
+            log::info!("unhandled message {}", type_name);
+            return;
+        };
+
+        let handler = state.message_handlers.get(&payload_type_id).cloned();
+        // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
+        // It also ensures we don't hold the lock while yielding back to the executor, as
+        // that might cause the executor thread driving this future to block indefinitely.
+        drop(state);
+
+        if let Some(handler) = handler {
+            let future = handler(subscriber, message, &self, cx.clone());
+            let client_id = self.id;
+            log::debug!(
+                "rpc message received. client_id:{}, sender_id:{:?}, type:{}",
+                client_id,
+                sender_id,
+                type_name
+            );
+            cx.foreground()
+                .spawn(async move {
+                    match future.await {
+                        Ok(()) => {
+                            log::debug!(
+                                "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
+                                client_id,
+                                sender_id,
+                                type_name
+                            );
+                        }
+                        Err(error) => {
+                            log::error!(
+                                "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
+                                client_id,
+                                sender_id,
+                                type_name,
+                                error
+                            );
+                        }
+                    }
+                })
+                .detach();
+        } else {
+            log::info!("unhandled message {}", type_name);
+        }
+    }
+
     pub fn start_telemetry(&self) {
        self.telemetry.start();
     }

@@ -1230,11 +1290,12 @@ impl Client {
     }
 }
 
-impl AnyWeakEntityHandle {
-    fn upgrade(&self, cx: &AsyncAppContext) -> Option<AnyEntityHandle> {
+impl WeakSubscriber {
+    fn upgrade(&self, cx: &AsyncAppContext) -> Option<Subscriber> {
        match self {
-           AnyWeakEntityHandle::Model(handle) => handle.upgrade(cx).map(AnyEntityHandle::Model),
-           AnyWeakEntityHandle::View(handle) => handle.upgrade(cx).map(AnyEntityHandle::View),
+           WeakSubscriber::Model(handle) => handle.upgrade(cx).map(Subscriber::Model),
+           WeakSubscriber::View(handle) => handle.upgrade(cx).map(Subscriber::View),
+           WeakSubscriber::Pending(_) => None,
        }
     }
 }

@@ -1479,11 +1540,17 @@ mod tests {
            subscription: None,
        });
 
-       let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx));
-       let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx));
+       let _subscription1 = client
+           .subscribe_to_entity(1)
+           .set_model(&model1, &mut cx.to_async());
+       let _subscription2 = client
+           .subscribe_to_entity(2)
+           .set_model(&model2, &mut cx.to_async());
        // Ensure dropping a subscription for the same entity type still allows receiving of
        // messages for other entity IDs of the same type.
-       let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx));
+       let subscription3 = client
+           .subscribe_to_entity(3)
+           .set_model(&model3, &mut cx.to_async());
        drop(subscription3);
 
        server.send(proto::JoinProject { project_id: 1 });
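The client changes above replace add_model_for_remote_entity with a two-step flow: subscribe_to_entity registers a WeakSubscriber::Pending slot that buffers incoming messages, and PendingEntitySubscription::set_model later binds the model and replays the buffer, so messages that race with entity construction (for example during reconnection) are no longer dropped. A dependency-free sketch of that buffering pattern follows; Client, Subscriber, and the string messages are simplified stand-ins, not the gpui-based types from the diff:

use std::collections::HashMap;

// An entity slot is either bound to a handler, or pending and buffering
// any messages that arrive before the local entity exists.
enum Subscriber {
    Pending(Vec<String>),
    Bound(Box<dyn Fn(&str)>),
}

struct Client {
    subscribers: HashMap<u64, Subscriber>,
}

impl Client {
    fn subscribe_to_entity(&mut self, remote_id: u64) {
        // Step 1: start buffering before the local model is constructed.
        self.subscribers
            .insert(remote_id, Subscriber::Pending(Vec::new()));
    }

    fn set_model(&mut self, remote_id: u64, handler: Box<dyn Fn(&str)>) {
        // Step 2: bind the handler, then replay anything buffered meanwhile.
        let previous = self.subscribers.insert(remote_id, Subscriber::Bound(handler));
        if let (Some(Subscriber::Pending(buffered)), Some(Subscriber::Bound(bound))) =
            (previous, self.subscribers.get(&remote_id))
        {
            for message in buffered {
                bound(&message);
            }
        }
    }

    fn handle_message(&mut self, remote_id: u64, message: &str) {
        match self.subscribers.get_mut(&remote_id) {
            Some(Subscriber::Pending(buffered)) => buffered.push(message.to_string()),
            Some(Subscriber::Bound(handler)) => handler(message),
            None => println!("unhandled message {message}"),
        }
    }
}

fn main() {
    let mut client = Client { subscribers: HashMap::new() };
    client.subscribe_to_entity(1);
    client.handle_message(1, "early update, buffered");
    client.set_model(1, Box::new(|m| println!("handled: {m}")));
    client.handle_message(1, "late update, delivered directly");
}

The Drop impl on PendingEntitySubscription in the real diff covers the remaining case: if set_model is never called, the buffered messages are logged as unhandled instead of leaking.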
----------------------------------------

@@ -19,12 +19,12 @@ rpc = { path = "../rpc" }
 util = { path = "../util" }
 
 anyhow = "1.0.40"
-async-trait = "0.1.50"
 async-tungstenite = "0.16"
 axum = { version = "0.5", features = ["json", "headers", "ws"] }
 axum-extra = { version = "0.3", features = ["erased-json"] }
 base64 = "0.13"
 clap = { version = "3.1", features = ["derive"], optional = true }
+dashmap = "5.4"
 envy = "0.4.2"
 futures = "0.3"
 hyper = "0.14"

@@ -36,9 +36,13 @@ prometheus = "0.13"
 rand = "0.8"
 reqwest = { version = "0.11", features = ["json"], optional = true }
 scrypt = "0.7"
+# Remove fork dependency when a version with https://github.com/SeaQL/sea-orm/pull/1283 is released.
+sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls"] }
+sea-query = "0.27"
 serde = { version = "1.0", features = ["derive", "rc"] }
 serde_json = "1.0"
 sha-1 = "0.9"
+sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
 time = { version = "0.3", features = ["serde", "serde-well-known"] }
 tokio = { version = "1", features = ["full"] }
 tokio-tungstenite = "0.17"

@@ -49,11 +53,6 @@ tracing = "0.1.34"
 tracing-log = "0.1.3"
 tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
 
-[dependencies.sqlx]
-git = "https://github.com/launchbadge/sqlx"
-rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
-features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid"]
-
 [dev-dependencies]
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }

@@ -76,13 +75,10 @@ env_logger = "0.9"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 util = { path = "../util" }
 lazy_static = "1.4"
+sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-sqlite"] }
 serde_json = { version = "1.0", features = ["preserve_order"] }
+sqlx = { version = "0.6", features = ["sqlite"] }
 unindent = "0.1"
 
-[dev-dependencies.sqlx]
-git = "https://github.com/launchbadge/sqlx"
-rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
-features = ["sqlite"]
-
 [features]
 seed-support = ["clap", "lipsum", "reqwest"]
----------------------------------------

@@ -1,4 +1,4 @@
-CREATE TABLE IF NOT EXISTS "users" (
+CREATE TABLE "users" (
     "id" INTEGER PRIMARY KEY,
     "github_login" VARCHAR,
     "admin" BOOLEAN,

@@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS "users" (
     "inviter_id" INTEGER REFERENCES users (id),
     "connected_once" BOOLEAN NOT NULL DEFAULT false,
     "created_at" TIMESTAMP NOT NULL DEFAULT now,
-    "metrics_id" VARCHAR(255),
+    "metrics_id" TEXT,
     "github_user_id" INTEGER
 );
 CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");

@@ -16,14 +16,14 @@ CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
 CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
 CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
 
-CREATE TABLE IF NOT EXISTS "access_tokens" (
+CREATE TABLE "access_tokens" (
     "id" INTEGER PRIMARY KEY,
     "user_id" INTEGER REFERENCES users (id),
     "hash" VARCHAR(128)
 );
 CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
 
-CREATE TABLE IF NOT EXISTS "contacts" (
+CREATE TABLE "contacts" (
     "id" INTEGER PRIMARY KEY,
     "user_id_a" INTEGER REFERENCES users (id) NOT NULL,
     "user_id_b" INTEGER REFERENCES users (id) NOT NULL,

@@ -34,8 +34,96 @@ CREATE TABLE IF NOT EXISTS "contacts" (
 CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
 CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
 
-CREATE TABLE IF NOT EXISTS "projects" (
+CREATE TABLE "rooms" (
     "id" INTEGER PRIMARY KEY,
-    "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
-    "unregistered" BOOLEAN NOT NULL DEFAULT false
+    "live_kit_room" VARCHAR NOT NULL
 );
+
+CREATE TABLE "projects" (
+    "id" INTEGER PRIMARY KEY,
+    "room_id" INTEGER REFERENCES rooms (id) NOT NULL,
+    "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
+    "host_connection_id" INTEGER NOT NULL,
+    "host_connection_epoch" TEXT NOT NULL
+);
+CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
+
+CREATE TABLE "worktrees" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INTEGER NOT NULL,
+    "root_name" VARCHAR NOT NULL,
+    "abs_path" VARCHAR NOT NULL,
+    "visible" BOOL NOT NULL,
+    "scan_id" INTEGER NOT NULL,
+    "is_complete" BOOL NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
+
+CREATE TABLE "worktree_entries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "id" INTEGER NOT NULL,
+    "is_dir" BOOL NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "inode" INTEGER NOT NULL,
+    "mtime_seconds" INTEGER NOT NULL,
+    "mtime_nanos" INTEGER NOT NULL,
+    "is_symlink" BOOL NOT NULL,
+    "is_ignored" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
+CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+
+CREATE TABLE "worktree_diagnostic_summaries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "language_server_id" INTEGER NOT NULL,
+    "error_count" INTEGER NOT NULL,
+    "warning_count" INTEGER NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, path),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
+
+CREATE TABLE "language_servers" (
+    "id" INTEGER NOT NULL,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "name" VARCHAR NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
+
+CREATE TABLE "project_collaborators" (
+    "id" INTEGER PRIMARY KEY,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "connection_id" INTEGER NOT NULL,
+    "connection_epoch" TEXT NOT NULL,
+    "user_id" INTEGER NOT NULL,
+    "replica_id" INTEGER NOT NULL,
+    "is_host" BOOLEAN NOT NULL
+);
+CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
+CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
+
+CREATE TABLE "room_participants" (
+    "id" INTEGER PRIMARY KEY,
+    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
+    "user_id" INTEGER NOT NULL REFERENCES users (id),
+    "answering_connection_id" INTEGER,
+    "answering_connection_epoch" TEXT,
+    "location_kind" INTEGER,
+    "location_project_id" INTEGER,
+    "initial_project_id" INTEGER,
+    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
+    "calling_connection_id" INTEGER NOT NULL,
+    "calling_connection_epoch" TEXT NOT NULL
+);
+CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
+CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
+CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
----------------------------------------

@@ -0,0 +1,91 @@
CREATE TABLE IF NOT EXISTS "rooms" (
    "id" SERIAL PRIMARY KEY,
    "live_kit_room" VARCHAR NOT NULL
);

ALTER TABLE "projects"
    ADD "room_id" INTEGER REFERENCES rooms (id),
    ADD "host_connection_id" INTEGER,
    ADD "host_connection_epoch" UUID,
    DROP COLUMN "unregistered";
CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");

CREATE TABLE "worktrees" (
    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
    "id" INT8 NOT NULL,
    "root_name" VARCHAR NOT NULL,
    "abs_path" VARCHAR NOT NULL,
    "visible" BOOL NOT NULL,
    "scan_id" INT8 NOT NULL,
    "is_complete" BOOL NOT NULL,
    PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");

CREATE TABLE "worktree_entries" (
    "project_id" INTEGER NOT NULL,
    "worktree_id" INT8 NOT NULL,
    "id" INT8 NOT NULL,
    "is_dir" BOOL NOT NULL,
    "path" VARCHAR NOT NULL,
    "inode" INT8 NOT NULL,
    "mtime_seconds" INT8 NOT NULL,
    "mtime_nanos" INTEGER NOT NULL,
    "is_symlink" BOOL NOT NULL,
    "is_ignored" BOOL NOT NULL,
    PRIMARY KEY(project_id, worktree_id, id),
    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");

CREATE TABLE "worktree_diagnostic_summaries" (
    "project_id" INTEGER NOT NULL,
    "worktree_id" INT8 NOT NULL,
    "path" VARCHAR NOT NULL,
    "language_server_id" INT8 NOT NULL,
    "error_count" INTEGER NOT NULL,
    "warning_count" INTEGER NOT NULL,
    PRIMARY KEY(project_id, worktree_id, path),
    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");

CREATE TABLE "language_servers" (
    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
    "id" INT8 NOT NULL,
    "name" VARCHAR NOT NULL,
    PRIMARY KEY(project_id, id)
);
CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");

CREATE TABLE "project_collaborators" (
    "id" SERIAL PRIMARY KEY,
    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
    "connection_id" INTEGER NOT NULL,
    "connection_epoch" UUID NOT NULL,
    "user_id" INTEGER NOT NULL,
    "replica_id" INTEGER NOT NULL,
    "is_host" BOOLEAN NOT NULL
);
CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");

CREATE TABLE "room_participants" (
    "id" SERIAL PRIMARY KEY,
    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
    "user_id" INTEGER NOT NULL REFERENCES users (id),
    "answering_connection_id" INTEGER,
    "answering_connection_epoch" UUID,
    "location_kind" INTEGER,
    "location_project_id" INTEGER,
    "initial_project_id" INTEGER,
    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
    "calling_connection_id" INTEGER NOT NULL,
    "calling_connection_epoch" UUID NOT NULL
);
CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
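Both migrations tag connection-scoped rows (projects, project_collaborators, room_participants) with an epoch identifier and index it. The diff itself does not show the server code that consumes these columns, but the schema suggests a sweep that discards rows left over from a previous server epoch after a restart. A hypothetical sketch with sqlx, which is in collab's dependencies; the query, pool setup, and URL are illustrative assumptions, not code from this PR:

use sqlx::postgres::PgPoolOptions;
use uuid::Uuid;

// Hypothetical cleanup: delete project rows whose host connection belongs
// to a different (older) server epoch. Table and column names come from the
// migration above; the query itself is an illustrative guess.
async fn delete_stale_projects(pool: &sqlx::PgPool, current_epoch: Uuid) -> sqlx::Result<u64> {
    let result = sqlx::query("DELETE FROM projects WHERE host_connection_epoch != $1")
        .bind(current_epoch)
        .execute(pool)
        .await?;
    Ok(result.rows_affected())
}

#[tokio::main]
async fn main() -> sqlx::Result<()> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect("postgres://localhost/zed") // placeholder URL
        .await?;
    let epoch = Uuid::new_v4(); // this server instance's epoch
    let removed = delete_stale_projects(&pool, epoch).await?;
    println!("removed {removed} stale projects");
    Ok(())
}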
----------------------------------------

@@ -1,6 +1,6 @@
 use crate::{
     auth,
-    db::{Invite, NewUserParams, Signup, User, UserId, WaitlistSummary},
+    db::{Invite, NewSignup, NewUserParams, User, UserId, WaitlistSummary},
     rpc::{self, ResultExt},
     AppState, Error, Result,
 };

@@ -204,7 +204,7 @@ async fn create_user(
 #[derive(Deserialize)]
 struct UpdateUserParams {
     admin: Option<bool>,
-    invite_count: Option<u32>,
+    invite_count: Option<i32>,
 }
 
 async fn update_user(

@@ -335,7 +335,7 @@ async fn get_user_for_invite_code(
 }
 
 async fn create_signup(
-    Json(params): Json<Signup>,
+    Json(params): Json<NewSignup>,
     Extension(app): Extension<Arc<AppState>>,
 ) -> Result<()> {
     app.db.create_signup(&params).await?;
----------------------------------------

@@ -75,7 +75,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
 
 const MAX_ACCESS_TOKENS_TO_STORE: usize = 8;
 
-pub async fn create_access_token(db: &db::DefaultDb, user_id: UserId) -> Result<String> {
+pub async fn create_access_token(db: &db::Database, user_id: UserId) -> Result<String> {
     let access_token = rpc::auth::random_token();
     let access_token_hash =
         hash_access_token(&access_token).context("failed to hash access token")?;
----------------------------------------

@@ -1,12 +1,8 @@
-use collab::{Error, Result};
-use db::DefaultDb;
+use collab::db;
+use db::{ConnectOptions, Database};
 use serde::{de::DeserializeOwned, Deserialize};
 use std::fmt::Write;
 
-#[allow(unused)]
-#[path = "../db.rs"]
-mod db;
-
 #[derive(Debug, Deserialize)]
 struct GitHubUser {
     id: i32,

@@ -17,7 +13,7 @@ struct GitHubUser {
 #[tokio::main]
 async fn main() {
     let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
-    let db = DefaultDb::new(&database_url, 5)
+    let db = Database::new(ConnectOptions::new(database_url))
         .await
         .expect("failed to connect to postgres database");
     let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");
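The seeder now connects through the new db::Database wrapper, passing a sea-orm ConnectOptions instead of a raw URL and pool size. For reference, a minimal sketch of the stock sea-orm connection calls this presumably wraps; the URL and pool size here are placeholders:

use sea_orm::{ConnectOptions, Database, DatabaseConnection, DbErr};

#[tokio::main]
async fn main() -> Result<(), DbErr> {
    // Placeholder URL; collab reads DATABASE_URL from the environment instead.
    let mut options = ConnectOptions::new("postgres://localhost/zed".into());
    options.max_connections(5);
    let connection: DatabaseConnection = Database::connect(options).await?;
    // Connection established; collab's Database wrapper would hold on to this.
    drop(connection);
    Ok(())
}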
----------------------------------------

File diff suppressed because it is too large.
----------------------------------------

crates/collab/src/db/access_token.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
use super::{AccessTokenId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "access_tokens")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: AccessTokenId,
    pub user_id: UserId,
    pub hash: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
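Entities derived with DeriveEntityModel get Entity, Column, and ActiveModel types generated for them. A hedged example of querying the table above with sea-orm's fluent API; the collab::db::access_token module path, connection URL, and user ID are assumptions for illustration:

use sea_orm::{ColumnTrait, Database, DbErr, EntityTrait, QueryFilter};

// Illustrative module path; in the PR this module lives in crates/collab/src/db.
use collab::db::access_token;

#[tokio::main]
async fn main() -> Result<(), DbErr> {
    let db = Database::connect("postgres://localhost/zed").await?; // placeholder URL
    // The derived Entity/Column types make filtered lookups one-liners.
    let tokens = access_token::Entity::find()
        .filter(access_token::Column::UserId.eq(1))
        .all(&db)
        .await?;
    println!("user 1 has {} access tokens", tokens.len());
    Ok(())
}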
----------------------------------------

crates/collab/src/db/contact.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use super::{ContactId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "contacts")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ContactId,
    pub user_id_a: UserId,
    pub user_id_b: UserId,
    pub a_to_b: bool,
    pub should_notify: bool,
    pub accepted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdA",
        to = "super::room_participant::Column::UserId"
    )]
    UserARoomParticipant,
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdB",
        to = "super::room_participant::Column::UserId"
    )]
    UserBRoomParticipant,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Contact {
    Accepted {
        user_id: UserId,
        should_notify: bool,
        busy: bool,
    },
    Outgoing {
        user_id: UserId,
    },
    Incoming {
        user_id: UserId,
        should_notify: bool,
    },
}

impl Contact {
    pub fn user_id(&self) -> UserId {
        match self {
            Contact::Accepted { user_id, .. } => *user_id,
            Contact::Outgoing { user_id } => *user_id,
            Contact::Incoming { user_id, .. } => *user_id,
        }
    }
}
----------------------------------------

crates/collab/src/db/language_server.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "language_servers")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
----------------------------------------

crates/collab/src/db/project.rs (new file, 67 lines)
|
@ -0,0 +1,67 @@
|
|||
use super::{ProjectId, RoomId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "projects")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ProjectId,
    pub room_id: RoomId,
    pub host_user_id: UserId,
    pub host_connection_id: i32,
    pub host_connection_epoch: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::HostUserId",
        to = "super::user::Column::Id"
    )]
    HostUser,
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::RoomId",
        to = "super::room::Column::Id"
    )]
    Room,
    #[sea_orm(has_many = "super::worktree::Entity")]
    Worktrees,
    #[sea_orm(has_many = "super::project_collaborator::Entity")]
    Collaborators,
    #[sea_orm(has_many = "super::language_server::Entity")]
    LanguageServers,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::HostUser.def()
    }
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl Related<super::worktree::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Worktrees.def()
    }
}

impl Related<super::project_collaborator::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Collaborators.def()
    }
}

impl Related<super::language_server::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::LanguageServers.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
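A sketch of what the `Related` impls above enable: a loaded model can traverse its relations without hand-written joins. Module paths are assumptions based on the entities in this diff:

use sea_orm::{DatabaseConnection, DbErr, EntityTrait, ModelTrait};

// Fetch a project row, then follow the has_many relation to its collaborators.
async fn collaborators_of(
    db: &DatabaseConnection,
    project_id: ProjectId,
) -> Result<Vec<project_collaborator::Model>, DbErr> {
    let project = project::Entity::find_by_id(project_id)
        .one(db)
        .await?
        .ok_or_else(|| DbErr::RecordNotFound("no such project".into()))?;
    project
        .find_related(project_collaborator::Entity)
        .all(db)
        .await
}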
crates/collab/src/db/project_collaborator.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
use super::{ProjectCollaboratorId, ProjectId, ReplicaId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "project_collaborators")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ProjectCollaboratorId,
    pub project_id: ProjectId,
    pub connection_id: i32,
    pub connection_epoch: Uuid,
    pub user_id: UserId,
    pub replica_id: ReplicaId,
    pub is_host: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
crates/collab/src/db/room.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
use super::RoomId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "rooms")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomId,
    pub live_kit_room: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    Project,
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
crates/collab/src/db/room_participant.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use super::{ProjectId, RoomId, RoomParticipantId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "room_participants")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomParticipantId,
    pub room_id: RoomId,
    pub user_id: UserId,
    pub answering_connection_id: Option<i32>,
    pub answering_connection_epoch: Option<Uuid>,
    pub location_kind: Option<i32>,
    pub location_project_id: Option<ProjectId>,
    pub initial_project_id: Option<ProjectId>,
    pub calling_user_id: UserId,
    pub calling_connection_id: i32,
    pub calling_connection_epoch: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::RoomId",
        to = "super::room::Column::Id"
    )]
    Room,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
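The `Option`-typed answering columns distinguish participants who have merely been rung from those who actually joined. A hedged sketch of a query for a room's still-pending participants (module path assumed):

use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter};

// Participants of `room_id` that have not yet answered the call.
async fn pending_participants(
    db: &DatabaseConnection,
    room_id: RoomId,
) -> Result<Vec<room_participant::Model>, DbErr> {
    room_participant::Entity::find()
        .filter(room_participant::Column::RoomId.eq(room_id))
        .filter(room_participant::Column::AnsweringConnectionId.is_null())
        .all(db)
        .await
}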
crates/collab/src/db/signup.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
use super::{SignupId, UserId};
use sea_orm::{entity::prelude::*, FromQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: SignupId,
    pub email_address: String,
    pub email_confirmation_code: String,
    pub email_confirmation_sent: bool,
    pub created_at: DateTime,
    pub device_id: Option<String>,
    pub user_id: Option<UserId>,
    pub inviting_user_id: Option<UserId>,
    pub platform_mac: bool,
    pub platform_linux: bool,
    pub platform_windows: bool,
    pub platform_unknown: bool,
    pub editor_features: Option<Vec<String>>,
    pub programming_languages: Option<Vec<String>>,
    pub added_to_mailing_list: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
pub struct Invite {
    pub email_address: String,
    pub email_confirmation_code: String,
}

#[derive(Clone, Deserialize)]
pub struct NewSignup {
    pub email_address: String,
    pub platform_mac: bool,
    pub platform_windows: bool,
    pub platform_linux: bool,
    pub editor_features: Vec<String>,
    pub programming_languages: Vec<String>,
    pub device_id: Option<String>,
    pub added_to_mailing_list: bool,
}

#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
pub struct WaitlistSummary {
    pub count: i64,
    pub linux_count: i64,
    pub mac_count: i64,
    pub windows_count: i64,
    pub unknown_count: i64,
}
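`WaitlistSummary` derives `FromQueryResult`, so it can be populated from an ad-hoc projection instead of a full entity select. A sketch with hand-written Postgres SQL; the query text is illustrative and not taken from this PR:

use sea_orm::{DatabaseConnection, DbBackend, DbErr, FromQueryResult, Statement};

async fn waitlist_summary(db: &DatabaseConnection) -> Result<Option<WaitlistSummary>, DbErr> {
    // COUNT(*) comes back as a bigint, matching the i64 fields above.
    WaitlistSummary::find_by_statement(Statement::from_string(
        DbBackend::Postgres,
        "SELECT
            COUNT(*) AS count,
            COUNT(*) FILTER (WHERE platform_linux) AS linux_count,
            COUNT(*) FILTER (WHERE platform_mac) AS mac_count,
            COUNT(*) FILTER (WHERE platform_windows) AS windows_count,
            COUNT(*) FILTER (WHERE platform_unknown) AS unknown_count
        FROM signups"
            .into(),
    ))
    .one(db)
    .await
}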
@@ -1,4 +1,4 @@
-use super::db::*;
+use super::*;
 use gpui::executor::{Background, Deterministic};
 use std::sync::Arc;
@@ -6,14 +6,14 @@ macro_rules! test_both_dbs {
     ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
         #[gpui::test]
         async fn $postgres_test_name() {
-            let test_db = PostgresTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::postgres(Deterministic::new(0).build_background());
             let $db = test_db.db();
             $body
         }

         #[gpui::test]
         async fn $sqlite_test_name() {
-            let test_db = SqliteTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
            let $db = test_db.db();
            $body
        }
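One invocation of the macro expands into a Postgres-backed and a SQLite-backed test over the same body. A usage sketch; the `NewUserParams` field names are assumptions, while `create_user` and `get_contacts` appear elsewhere in this diff:

test_both_dbs!(test_example_postgres, test_example_sqlite, db, {
    // `db` is Postgres-backed in the first expansion, SQLite-backed in the second.
    let user = db
        .create_user(
            "someone@example.com",
            false,
            NewUserParams {
                github_login: "someone".into(),
                github_user_id: 100,
                invite_count: 0,
            },
        )
        .await
        .unwrap();
    assert!(db.get_contacts(user.user_id).await.unwrap().is_empty());
});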
@@ -26,9 +26,10 @@ test_both_dbs!(
     db,
     {
         let mut user_ids = Vec::new();
+        let mut user_metric_ids = Vec::new();
         for i in 1..=4 {
-            user_ids.push(
-                db.create_user(
+            let user = db
+                .create_user(
                     &format!("user{i}@example.com"),
                     false,
                     NewUserParams {
@@ -38,9 +39,9 @@
                     },
                 )
                 .await
-                .unwrap()
-                .user_id,
-            );
+                .unwrap();
+            user_ids.push(user.user_id);
+            user_metric_ids.push(user.metrics_id);
         }

         assert_eq!(
@@ -52,6 +53,7 @@
                 github_user_id: Some(1),
                 email_address: Some("user1@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[0].parse().unwrap(),
                 ..Default::default()
             },
             User {
@@ -60,6 +62,7 @@
                 github_user_id: Some(2),
                 email_address: Some("user2@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[1].parse().unwrap(),
                 ..Default::default()
             },
             User {
@@ -68,6 +71,7 @@
                 github_user_id: Some(3),
                 email_address: Some("user3@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[2].parse().unwrap(),
                 ..Default::default()
             },
             User {
@@ -76,6 +80,7 @@
                 github_user_id: Some(4),
                 email_address: Some("user4@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[3].parse().unwrap(),
                 ..Default::default()
             }
         ]
@@ -258,7 +263,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_1).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }],
     );
     assert!(db.has_contact(user_1, user_2).await.unwrap());
@@ -268,6 +274,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_1,
             should_notify: false,
+            busy: false,
         }]
     );

@@ -284,6 +291,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: true,
+            busy: false,
         }]
     );

@@ -296,6 +304,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: false,
+            busy: false,
         }]
     );

@@ -309,10 +318,12 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
             Contact::Accepted {
                 user_id: user_2,
                 should_notify: false,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user_3,
-                should_notify: false
+                should_notify: false,
+                busy: false,
             }
         ]
     );
@@ -320,7 +331,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );

@@ -335,14 +347,16 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_2).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );
 });
@@ -390,14 +404,14 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {

 #[test]
 fn test_fuzzy_like_string() {
-    assert_eq!(DefaultDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
-    assert_eq!(DefaultDb::fuzzy_like_string("x y"), "%x%y%");
-    assert_eq!(DefaultDb::fuzzy_like_string(" z "), "%z%");
+    assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
+    assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
+    assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
 }

 #[gpui::test]
 async fn test_fuzzy_search_users() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();
     for (i, github_login) in [
         "California",
@@ -433,7 +447,7 @@ async fn test_fuzzy_search_users() {
         &["rhode-island", "colorado", "oregon"],
     );

-    async fn fuzzy_search_user_names(db: &Db<sqlx::Postgres>, query: &str) -> Vec<String> {
+    async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
         db.fuzzy_search_users(query, 10)
             .await
             .unwrap()
@@ -445,7 +459,7 @@ async fn test_fuzzy_search_users() {

 #[gpui::test]
 async fn test_invite_codes() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();

     let NewUserResult { user_id: user1, .. } = db
@@ -504,14 +518,16 @@ async fn test_invite_codes() {
         db.get_contacts(user1).await.unwrap(),
         [Contact::Accepted {
             user_id: user2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user2).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
@@ -550,11 +566,13 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );
@@ -562,7 +580,8 @@ async fn test_invite_codes() {
         db.get_contacts(user3).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
@@ -607,15 +626,18 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user4,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );
@@ -623,7 +645,8 @@ async fn test_invite_codes() {
         db.get_contacts(user4).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
@@ -641,7 +664,7 @@ async fn test_invite_codes() {

 #[gpui::test]
 async fn test_signups() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();

     let usernames = (0..8).map(|i| format!("person-{i}")).collect::<Vec<_>>();
@@ -649,7 +672,7 @@ async fn test_signups() {
     let all_signups = usernames
         .iter()
         .enumerate()
-        .map(|(i, username)| Signup {
+        .map(|(i, username)| NewSignup {
             email_address: format!("{username}@example.com"),
             platform_mac: true,
             platform_linux: i % 2 == 0,
@@ -659,7 +682,7 @@ async fn test_signups() {
             device_id: Some(format!("device_id_{i}")),
             added_to_mailing_list: i != 0, // One user failed to subscribe
         })
-        .collect::<Vec<Signup>>();
+        .collect::<Vec<NewSignup>>();

     // people sign up on the waitlist
     for signup in &all_signups {
crates/collab/src/db/user.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use super::UserId;
use sea_orm::entity::prelude::*;
use serde::Serialize;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: UserId,
    pub github_login: String,
    pub github_user_id: Option<i32>,
    pub email_address: Option<String>,
    pub admin: bool,
    pub invite_code: Option<String>,
    pub invite_count: i32,
    pub inviter_id: Option<UserId>,
    pub connected_once: bool,
    pub metrics_id: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::access_token::Entity")]
    AccessToken,
    #[sea_orm(has_one = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    HostedProjects,
}

impl Related<super::access_token::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::AccessToken.def()
    }
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::HostedProjects.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
crates/collab/src/db/worktree.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktrees")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    pub abs_path: String,
    pub root_name: String,
    pub visible: bool,
    pub scan_id: i64,
    pub is_complete: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
crates/collab/src/db/worktree_diagnostic_summary.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_diagnostic_summaries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub path: String,
    pub language_server_id: i64,
    pub error_count: i32,
    pub warning_count: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
crates/collab/src/db/worktree_entry.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_entries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub is_dir: bool,
    pub path: String,
    pub inode: i64,
    pub mtime_seconds: i64,
    pub mtime_nanos: i32,
    pub is_symlink: bool,
    pub is_ignored: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
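Entries are keyed by (project_id, worktree_id, id), which suits an upsert when a rescan re-reports an existing entry. A sketch using sea_query's OnConflict; how updates actually arrive in the server is not shown in this excerpt, so the call pattern is an assumption:

use sea_orm::{sea_query::OnConflict, DatabaseConnection, DbErr, EntityTrait};

async fn upsert_entry(
    db: &DatabaseConnection,
    entry: worktree_entry::ActiveModel,
) -> Result<(), DbErr> {
    worktree_entry::Entity::insert(entry)
        .on_conflict(
            // On key collision, overwrite the mutable columns in place.
            OnConflict::columns([
                worktree_entry::Column::ProjectId,
                worktree_entry::Column::WorktreeId,
                worktree_entry::Column::Id,
            ])
            .update_columns([
                worktree_entry::Column::IsDir,
                worktree_entry::Column::Path,
                worktree_entry::Column::Inode,
                worktree_entry::Column::MtimeSeconds,
                worktree_entry::Column::MtimeNanos,
                worktree_entry::Column::IsSymlink,
                worktree_entry::Column::IsIgnored,
            ])
            .to_owned(),
        )
        .exec(db)
        .await?;
    Ok(())
}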
@@ -1,5 +1,5 @@
 use crate::{
-    db::{NewUserParams, ProjectId, SqliteTestDb as TestDb, UserId},
+    db::{self, NewUserParams, TestDb, UserId},
     rpc::{Executor, Server},
     AppState,
 };
@@ -31,9 +31,7 @@ use language::{
 use live_kit_client::MacOSDisplay;
 use lsp::{self, FakeLanguageServer};
 use parking_lot::Mutex;
-use project::{
-    search::SearchQuery, DiagnosticSummary, Project, ProjectPath, ProjectStore, WorktreeId,
-};
+use project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath, WorktreeId};
 use rand::prelude::*;
 use serde_json::json;
 use settings::{Formatter, Settings};
@@ -72,8 +70,6 @@ async fn test_basic_calls(
     deterministic.forbid_parking();
     let mut server = TestServer::start(cx_a.background()).await;
-
-    let start = std::time::Instant::now();

     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
     let client_c = server.create_client(cx_c, "user_c").await;
@@ -105,7 +101,7 @@ async fn test_basic_calls(
     // User B receives the call.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b.caller.github_login, "user_a");
+    assert_eq!(call_b.calling_user.github_login, "user_a");

     // User B connects via another client and also receives a ring on the newly-connected client.
     let _client_b2 = server.create_client(cx_b2, "user_b").await;
@@ -113,7 +109,7 @@ async fn test_basic_calls(
     let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b2.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_a");
+    assert_eq!(call_b2.calling_user.github_login, "user_a");

     // User B joins the room using the first client.
     active_call_b
@@ -166,7 +162,7 @@ async fn test_basic_calls(

     // User C receives the call, but declines it.
     let call_c = incoming_call_c.next().await.unwrap().unwrap();
-    assert_eq!(call_c.caller.github_login, "user_b");
+    assert_eq!(call_c.calling_user.github_login, "user_b");
     active_call_c.update(cx_c, |call, _| call.decline_incoming().unwrap());
     assert!(incoming_call_c.next().await.unwrap().is_none());

@@ -259,8 +255,6 @@ async fn test_basic_calls(
             pending: Default::default()
         }
     );
-
-    eprintln!("finished test {:?}", start.elapsed());
 }

 #[gpui::test(iterations = 10)]
@@ -309,7 +303,7 @@ async fn test_room_uniqueness(
     // User B receives the call from user A.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b1 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b1.caller.github_login, "user_a");
+    assert_eq!(call_b1.calling_user.github_login, "user_a");

     // Ensure calling users A and B from client C fails.
     active_call_c
@@ -368,7 +362,7 @@ async fn test_room_uniqueness(
         .unwrap();
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_c");
+    assert_eq!(call_b2.calling_user.github_login, "user_c");
 }

 #[gpui::test(iterations = 10)]
@@ -696,7 +690,7 @@ async fn test_share_project(
     let incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_b.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_a");
+    assert_eq!(call.calling_user.github_login, "user_a");
     let initial_project = call.initial_project.unwrap();
     active_call_b
         .update(cx_b, |call, cx| call.accept_incoming(cx))
@@ -767,7 +761,7 @@ async fn test_share_project(
     let incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_c.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_b");
+    assert_eq!(call.calling_user.github_login, "user_b");
     let initial_project = call.initial_project.unwrap();
     active_call_c
         .update(cx_c, |call, cx| call.accept_incoming(cx))
@@ -2292,7 +2286,6 @@ async fn test_leaving_project(
         project_id,
         client_b.client.clone(),
         client_b.user_store.clone(),
-        client_b.project_store.clone(),
         client_b.language_registry.clone(),
         FakeFs::new(cx.background()),
         cx,
@@ -2416,12 +2409,6 @@ async fn test_collaborating_with_diagnostics(

     // Wait for server to see the diagnostics update.
     deterministic.run_until_parked();
-    {
-        let store = server.store.lock().await;
-        let project = store.project(ProjectId::from_proto(project_id)).unwrap();
-        let worktree = project.worktrees.get(&worktree_id.to_proto()).unwrap();
-        assert!(!worktree.diagnostic_summaries.is_empty());
-    }

     // Ensure client B observes the new diagnostics.
     project_b.read_with(cx_b, |project, cx| {
@@ -2443,7 +2430,10 @@ async fn test_collaborating_with_diagnostics(

     // Join project as client C and observe the diagnostics.
     let project_c = client_c.build_remote_project(project_id, cx_c).await;
-    let project_c_diagnostic_summaries = Rc::new(RefCell::new(Vec::new()));
+    let project_c_diagnostic_summaries =
+        Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
+            project.diagnostic_summaries(cx).collect::<Vec<_>>()
+        })));
     project_c.update(cx_c, |_, cx| {
         let summaries = project_c_diagnostic_summaries.clone();
         cx.subscribe(&project_c, {
@@ -5627,18 +5617,15 @@ async fn test_random_collaboration(
     }
     for user_id in &user_ids {
         let contacts = server.app_state.db.get_contacts(*user_id).await.unwrap();
-        let contacts = server
-            .store
-            .lock()
-            .await
-            .build_initial_contacts_update(contacts)
-            .contacts;
+        let pool = server.connection_pool.lock().await;
         for contact in contacts {
-            if contact.online {
-                assert_ne!(
-                    contact.user_id, removed_guest_id.0 as u64,
-                    "removed guest is still a contact of another peer"
-                );
+            if let db::Contact::Accepted { user_id, .. } = contact {
+                if pool.is_user_online(user_id) {
+                    assert_ne!(
+                        user_id, removed_guest_id,
+                        "removed guest is still a contact of another peer"
+                    );
+                }
             }
         }
     }
@@ -5830,7 +5817,13 @@ impl TestServer {
     async fn start(background: Arc<executor::Background>) -> Self {
         static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);

-        let test_db = TestDb::new(background.clone());
+        let use_postgres = env::var("USE_POSTGRES").ok();
+        let use_postgres = use_postgres.as_deref();
+        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
+            TestDb::postgres(background.clone())
+        } else {
+            TestDb::sqlite(background.clone())
+        };
         let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
         let live_kit_server = live_kit_client::TestServer::create(
             format!("http://livekit.{}.test", live_kit_server_id),
@@ -5948,11 +5941,9 @@ impl TestServer {

         let fs = FakeFs::new(cx.background());
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
-        let project_store = cx.add_model(|_| ProjectStore::new());
         let app_state = Arc::new(workspace::AppState {
             client: client.clone(),
             user_store: user_store.clone(),
-            project_store: project_store.clone(),
             languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
             themes: ThemeRegistry::new((), cx.font_cache()),
             fs: fs.clone(),
@@ -5979,7 +5970,6 @@ impl TestServer {
             remote_projects: Default::default(),
             next_root_dir_id: 0,
             user_store,
-            project_store,
             fs,
             language_registry: Arc::new(LanguageRegistry::test()),
             buffers: Default::default(),
@@ -6085,7 +6075,6 @@ struct TestClient {
     remote_projects: Vec<ModelHandle<Project>>,
     next_root_dir_id: usize,
     pub user_store: ModelHandle<UserStore>,
-    pub project_store: ModelHandle<ProjectStore>,
     language_registry: Arc<LanguageRegistry>,
     fs: Arc<FakeFs>,
     buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
@@ -6155,7 +6144,6 @@ impl TestClient {
         Project::local(
             self.client.clone(),
             self.user_store.clone(),
-            self.project_store.clone(),
             self.language_registry.clone(),
             self.fs.clone(),
             cx,
@@ -6183,7 +6171,6 @@ impl TestClient {
             host_project_id,
             self.client.clone(),
             self.user_store.clone(),
-            self.project_store.clone(),
             self.language_registry.clone(),
             FakeFs::new(cx.background()),
             cx,
@@ -6319,7 +6306,6 @@ impl TestClient {
             remote_project_id,
             client.client.clone(),
             client.user_store.clone(),
-            client.project_store.clone(),
             client.language_registry.clone(),
             FakeFs::new(cx.background()),
             cx.to_async(),
@@ -6569,7 +6555,7 @@ impl TestClient {
                     buffers.extend(search.await?.into_keys());
                 }
             }
-            60..=69 => {
+            60..=79 => {
                 let worktree = project
                     .read_with(cx, |project, cx| {
                         project
@@ -1,9 +1,21 @@
 pub mod api;
 pub mod auth;
 pub mod db;
 pub mod env;
+#[cfg(test)]
+mod integration_tests;
 pub mod rpc;

 use axum::{http::StatusCode, response::IntoResponse};
+use db::Database;
+use serde::Deserialize;
+use std::{path::PathBuf, sync::Arc};

 pub type Result<T, E = Error> = std::result::Result<T, E>;

 pub enum Error {
     Http(StatusCode, String),
+    Database(sea_orm::error::DbErr),
     Internal(anyhow::Error),
 }

@@ -13,9 +25,9 @@ impl From<anyhow::Error> for Error {
     }
 }

-impl From<sqlx::Error> for Error {
-    fn from(error: sqlx::Error) -> Self {
-        Self::Internal(error.into())
+impl From<sea_orm::error::DbErr> for Error {
+    fn from(error: sea_orm::error::DbErr) -> Self {
+        Self::Database(error)
     }
 }

@@ -41,6 +53,9 @@ impl IntoResponse for Error {
     fn into_response(self) -> axum::response::Response {
         match self {
             Error::Http(code, message) => (code, message).into_response(),
+            Error::Database(error) => {
+                (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
+            }
             Error::Internal(error) => {
                 (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
             }
@@ -52,6 +67,7 @@ impl std::fmt::Debug for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Error::Http(code, message) => (code, message).fmt(f),
+            Error::Database(error) => error.fmt(f),
             Error::Internal(error) => error.fmt(f),
         }
     }
@@ -61,9 +77,64 @@ impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Error::Http(code, message) => write!(f, "{code}: {message}"),
+            Error::Database(error) => error.fmt(f),
             Error::Internal(error) => error.fmt(f),
         }
     }
 }

 impl std::error::Error for Error {}

+#[derive(Default, Deserialize)]
+pub struct Config {
+    pub http_port: u16,
+    pub database_url: String,
+    pub api_token: String,
+    pub invite_link_prefix: String,
+    pub live_kit_server: Option<String>,
+    pub live_kit_key: Option<String>,
+    pub live_kit_secret: Option<String>,
+    pub rust_log: Option<String>,
+    pub log_json: Option<bool>,
+}
+
+#[derive(Default, Deserialize)]
+pub struct MigrateConfig {
+    pub database_url: String,
+    pub migrations_path: Option<PathBuf>,
+}
+
+pub struct AppState {
+    pub db: Arc<Database>,
+    pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
+    pub config: Config,
+}
+
+impl AppState {
+    pub async fn new(config: Config) -> Result<Arc<Self>> {
+        let mut db_options = db::ConnectOptions::new(config.database_url.clone());
+        db_options.max_connections(5);
+        let db = Database::new(db_options).await?;
+        let live_kit_client = if let Some(((server, key), secret)) = config
+            .live_kit_server
+            .as_ref()
+            .zip(config.live_kit_key.as_ref())
+            .zip(config.live_kit_secret.as_ref())
+        {
+            Some(Arc::new(live_kit_server::api::LiveKitClient::new(
+                server.clone(),
+                key.clone(),
+                secret.clone(),
+            )) as Arc<dyn live_kit_server::api::Client>)
+        } else {
+            None
+        };
+
+        let this = Self {
+            db: Arc::new(db),
+            live_kit_client,
+            config,
+        };
+        Ok(Arc::new(this))
+    }
+}
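With the `From` impls above, `?` routes a `sea_orm::DbErr` into `Error::Database` while ad-hoc errors still land in `Error::Internal`. A hedged sketch of a handler; `get_user_by_id` and the `db::User` re-export are assumptions, not shown in this excerpt:

use crate::db::{self, UserId};

async fn require_user(state: &AppState, id: UserId) -> Result<db::User> {
    state
        .db
        .get_user_by_id(id) // assumed helper returning Result<Option<db::User>, DbErr>
        .await? // DbErr -> Error::Database via From
        .ok_or_else(|| anyhow::anyhow!("no such user").into()) // anyhow -> Error::Internal
}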
@@ -1,86 +1,18 @@
-mod api;
-mod auth;
-mod db;
-mod env;
-mod rpc;
-
-#[cfg(test)]
-mod db_tests;
-#[cfg(test)]
-mod integration_tests;
-
-use crate::rpc::ResultExt as _;
 use anyhow::anyhow;
 use axum::{routing::get, Router};
-use collab::{Error, Result};
-use db::DefaultDb as Db;
-use serde::Deserialize;
+use collab::{db, env, AppState, Config, MigrateConfig, Result};
+use db::Database;
 use std::{
     env::args,
     net::{SocketAddr, TcpListener},
-    path::{Path, PathBuf},
-    sync::Arc,
-    time::Duration,
+    path::Path,
 };
 use tokio::signal;
 use tracing_log::LogTracer;
 use tracing_subscriber::{filter::EnvFilter, fmt::format::JsonFields, Layer};
 use util::ResultExt;

 const VERSION: &'static str = env!("CARGO_PKG_VERSION");

-#[derive(Default, Deserialize)]
-pub struct Config {
-    pub http_port: u16,
-    pub database_url: String,
-    pub api_token: String,
-    pub invite_link_prefix: String,
-    pub live_kit_server: Option<String>,
-    pub live_kit_key: Option<String>,
-    pub live_kit_secret: Option<String>,
-    pub rust_log: Option<String>,
-    pub log_json: Option<bool>,
-}
-
-#[derive(Default, Deserialize)]
-pub struct MigrateConfig {
-    pub database_url: String,
-    pub migrations_path: Option<PathBuf>,
-}
-
-pub struct AppState {
-    db: Arc<Db>,
-    live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
-    config: Config,
-}
-
-impl AppState {
-    async fn new(config: Config) -> Result<Arc<Self>> {
-        let db = Db::new(&config.database_url, 5).await?;
-        let live_kit_client = if let Some(((server, key), secret)) = config
-            .live_kit_server
-            .as_ref()
-            .zip(config.live_kit_key.as_ref())
-            .zip(config.live_kit_secret.as_ref())
-        {
-            Some(Arc::new(live_kit_server::api::LiveKitClient::new(
-                server.clone(),
-                key.clone(),
-                secret.clone(),
-            )) as Arc<dyn live_kit_server::api::Client>)
-        } else {
-            None
-        };
-
-        let this = Self {
-            db: Arc::new(db),
-            live_kit_client,
-            config,
-        };
-        Ok(Arc::new(this))
-    }
-}
-
 #[tokio::main]
 async fn main() -> Result<()> {
     if let Err(error) = env::load_dotenv() {
@@ -96,7 +28,9 @@ async fn main() -> Result<()> {
         }
         Some("migrate") => {
             let config = envy::from_env::<MigrateConfig>().expect("error loading config");
-            let db = Db::new(&config.database_url, 5).await?;
+            let mut db_options = db::ConnectOptions::new(config.database_url.clone());
+            db_options.max_connections(5);
+            let db = Database::new(db_options).await?;

             let migrations_path = config
                 .migrations_path
@@ -118,18 +52,19 @@ async fn main() -> Result<()> {
             init_tracing(&config);

             let state = AppState::new(config).await?;
+            state.db.clear_stale_data().await?;

             let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port))
                 .expect("failed to bind TCP listener");

-            let rpc_server = rpc::Server::new(state.clone());
+            let rpc_server = collab::rpc::Server::new(state.clone());

-            let app = api::routes(rpc_server.clone(), state.clone())
-                .merge(rpc::routes(rpc_server.clone()))
+            let app = collab::api::routes(rpc_server.clone(), state.clone())
+                .merge(collab::rpc::routes(rpc_server.clone()))
                 .merge(Router::new().route("/", get(handle_root)));

             axum::Server::from_tcp(listener)?
                 .serve(app.into_make_service_with_connect_info::<SocketAddr>())
-                .with_graceful_shutdown(graceful_shutdown(rpc_server, state))
                 .await?;
         }
         _ => {
@@ -174,52 +109,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {

     None
 }
-
-async fn graceful_shutdown(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) {
-    let ctrl_c = async {
-        signal::ctrl_c()
-            .await
-            .expect("failed to install Ctrl+C handler");
-    };
-
-    #[cfg(unix)]
-    let terminate = async {
-        signal::unix::signal(signal::unix::SignalKind::terminate())
-            .expect("failed to install signal handler")
-            .recv()
-            .await;
-    };
-
-    #[cfg(not(unix))]
-    let terminate = std::future::pending::<()>();
-
-    tokio::select! {
-        _ = ctrl_c => {},
-        _ = terminate => {},
-    }
-
-    if let Some(live_kit) = state.live_kit_client.as_ref() {
-        let deletions = rpc_server
-            .store()
-            .await
-            .rooms()
-            .values()
-            .map(|room| {
-                let name = room.live_kit_room.clone();
-                async {
-                    live_kit.delete_room(name).await.trace_err();
-                }
-            })
-            .collect::<Vec<_>>();
-
-        tracing::info!("deleting all live-kit rooms");
-        if let Err(_) = tokio::time::timeout(
-            Duration::from_secs(10),
-            futures::future::join_all(deletions),
-        )
-        .await
-        {
-            tracing::error!("timed out waiting for live-kit room deletion");
-        }
-    }
-}
File diff suppressed because it is too large
crates/collab/src/rpc/connection_pool.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
use crate::db::UserId;
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashSet};
use rpc::ConnectionId;
use serde::Serialize;
use tracing::instrument;

#[derive(Default, Serialize)]
pub struct ConnectionPool {
    connections: BTreeMap<ConnectionId, Connection>,
    connected_users: BTreeMap<UserId, ConnectedUser>,
}

#[derive(Default, Serialize)]
struct ConnectedUser {
    connection_ids: HashSet<ConnectionId>,
}

#[derive(Serialize)]
pub struct Connection {
    pub user_id: UserId,
    pub admin: bool,
}

impl ConnectionPool {
    #[instrument(skip(self))]
    pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId, admin: bool) {
        self.connections
            .insert(connection_id, Connection { user_id, admin });
        let connected_user = self.connected_users.entry(user_id).or_default();
        connected_user.connection_ids.insert(connection_id);
    }

    #[instrument(skip(self))]
    pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
        let connection = self
            .connections
            .get_mut(&connection_id)
            .ok_or_else(|| anyhow!("no such connection"))?;

        let user_id = connection.user_id;
        let connected_user = self.connected_users.get_mut(&user_id).unwrap();
        connected_user.connection_ids.remove(&connection_id);
        if connected_user.connection_ids.is_empty() {
            self.connected_users.remove(&user_id);
        }
        self.connections.remove(&connection_id).unwrap();
        Ok(())
    }

    pub fn connections(&self) -> impl Iterator<Item = &Connection> {
        self.connections.values()
    }

    pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator<Item = ConnectionId> + '_ {
        self.connected_users
            .get(&user_id)
            .into_iter()
            .map(|state| &state.connection_ids)
            .flatten()
            .copied()
    }

    pub fn is_user_online(&self, user_id: UserId) -> bool {
        !self
            .connected_users
            .get(&user_id)
            .unwrap_or(&Default::default())
            .connection_ids
            .is_empty()
    }

    #[cfg(test)]
    pub fn check_invariants(&self) {
        for (connection_id, connection) in &self.connections {
            assert!(self
                .connected_users
                .get(&connection.user_id)
                .unwrap()
                .connection_ids
                .contains(connection_id));
        }

        for (user_id, state) in &self.connected_users {
            for connection_id in &state.connection_ids {
                assert_eq!(
                    self.connections.get(connection_id).unwrap().user_id,
                    *user_id
                );
            }
        }
    }
}
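A usage sketch: the pool is plain in-memory bookkeeping, so presence checks involve no database round trip. Constructing ids from raw integers assumes `UserId` and `ConnectionId` are simple `Copy` newtypes:

fn pool_example() -> anyhow::Result<()> {
    let mut pool = ConnectionPool::default();
    let user = UserId(1);
    pool.add_connection(ConnectionId(7), user, false);
    assert!(pool.is_user_online(user));
    assert_eq!(pool.user_connection_ids(user).count(), 1);
    pool.remove_connection(ConnectionId(7))?;
    assert!(!pool.is_user_online(user));
    Ok(())
}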
File diff suppressed because it is too large
@@ -43,7 +43,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
             project_id,
             app_state.client.clone(),
             app_state.user_store.clone(),
-            app_state.project_store.clone(),
             app_state.languages.clone(),
             app_state.fs.clone(),
             cx.clone(),
@@ -74,7 +74,7 @@ impl IncomingCallNotification {
         let active_call = ActiveCall::global(cx);
         if action.accept {
             let join = active_call.update(cx, |active_call, cx| active_call.accept_incoming(cx));
-            let caller_user_id = self.call.caller.id;
+            let caller_user_id = self.call.calling_user.id;
             let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
             cx.spawn_weak(|_, mut cx| async move {
                 join.await?;
@@ -105,7 +105,7 @@ impl IncomingCallNotification {
             .as_ref()
             .unwrap_or(&default_project);
         Flex::row()
-            .with_children(self.call.caller.avatar.clone().map(|avatar| {
+            .with_children(self.call.calling_user.avatar.clone().map(|avatar| {
                 Image::new(avatar)
                     .with_style(theme.caller_avatar)
                     .aligned()
@@ -115,7 +115,7 @@ impl IncomingCallNotification {
         Flex::column()
             .with_child(
                 Label::new(
-                    self.call.caller.github_login.clone(),
+                    self.call.calling_user.github_login.clone(),
                     theme.caller_username.text.clone(),
                 )
                 .contained()
@@ -66,21 +66,32 @@ struct DeterministicState {
     rng: rand::prelude::StdRng,
     seed: u64,
     scheduled_from_foreground: collections::HashMap<usize, Vec<ForegroundRunnable>>,
-    scheduled_from_background: Vec<Runnable>,
+    scheduled_from_background: Vec<BackgroundRunnable>,
     forbid_parking: bool,
     block_on_ticks: std::ops::RangeInclusive<usize>,
     now: std::time::Instant,
     next_timer_id: usize,
     pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>,
     waiting_backtrace: Option<backtrace::Backtrace>,
+    next_runnable_id: usize,
+    poll_history: Vec<usize>,
+    enable_runnable_backtraces: bool,
+    runnable_backtraces: collections::HashMap<usize, backtrace::Backtrace>,
 }

 #[cfg(any(test, feature = "test-support"))]
 struct ForegroundRunnable {
+    id: usize,
     runnable: Runnable,
     main: bool,
 }

+#[cfg(any(test, feature = "test-support"))]
+struct BackgroundRunnable {
+    id: usize,
+    runnable: Runnable,
+}
+
 #[cfg(any(test, feature = "test-support"))]
 pub struct Deterministic {
     state: Arc<parking_lot::Mutex<DeterministicState>>,
@@ -117,11 +128,29 @@ impl Deterministic {
             next_timer_id: Default::default(),
             pending_timers: Default::default(),
             waiting_backtrace: None,
+            next_runnable_id: 0,
+            poll_history: Default::default(),
+            enable_runnable_backtraces: false,
+            runnable_backtraces: Default::default(),
             })),
             parker: Default::default(),
         })
     }

+    pub fn runnable_history(&self) -> Vec<usize> {
+        self.state.lock().poll_history.clone()
+    }
+
+    pub fn enable_runnable_backtrace(&self) {
+        self.state.lock().enable_runnable_backtraces = true;
+    }
+
+    pub fn runnable_backtrace(&self, runnable_id: usize) -> backtrace::Backtrace {
+        let mut backtrace = self.state.lock().runnable_backtraces[&runnable_id].clone();
+        backtrace.resolve();
+        backtrace
+    }
+
     pub fn build_background(self: &Arc<Self>) -> Arc<Background> {
         Arc::new(Background::Deterministic {
             executor: self.clone(),
@@ -142,6 +171,17 @@ impl Deterministic {
         main: bool,
     ) -> AnyLocalTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn_local(future, move |runnable| {
             let mut state = state.lock();
@@ -149,7 +189,7 @@ impl Deterministic {
                 .scheduled_from_foreground
                 .entry(cx_id)
                 .or_default()
-                .push(ForegroundRunnable { runnable, main });
+                .push(ForegroundRunnable { id, runnable, main });
             unparker.unpark();
         });
         runnable.schedule();
@@ -158,10 +198,23 @@ impl Deterministic {

     fn spawn(&self, future: AnyFuture) -> AnyTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn(future, move |runnable| {
             let mut state = state.lock();
-            state.scheduled_from_background.push(runnable);
+            state
+                .scheduled_from_background
+                .push(BackgroundRunnable { id, runnable });
             unparker.unpark();
         });
         runnable.schedule();
@@ -178,15 +231,27 @@ impl Deterministic {
         let woken = Arc::new(AtomicBool::new(false));

         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, mut main_task) = unsafe {
             async_task::spawn_unchecked(main_future, move |runnable| {
-                let mut state = state.lock();
+                let state = &mut *state.lock();
                 state
                     .scheduled_from_foreground
                     .entry(cx_id)
                     .or_default()
                     .push(ForegroundRunnable {
+                        id: util::post_inc(&mut state.next_runnable_id),
                         runnable,
                         main: true,
                     });
@@ -248,9 +313,10 @@ impl Deterministic {
         if !state.scheduled_from_background.is_empty() && state.rng.gen() {
             let background_len = state.scheduled_from_background.len();
             let ix = state.rng.gen_range(0..background_len);
-            let runnable = state.scheduled_from_background.remove(ix);
+            let background_runnable = state.scheduled_from_background.remove(ix);
+            state.poll_history.push(background_runnable.id);
             drop(state);
-            runnable.run();
+            background_runnable.runnable.run();
         } else if !state.scheduled_from_foreground.is_empty() {
             let available_cx_ids = state
                 .scheduled_from_foreground
@@ -266,6 +332,7 @@ impl Deterministic {
             if scheduled_from_cx.is_empty() {
                 state.scheduled_from_foreground.remove(&cx_id_to_run);
             }
+            state.poll_history.push(foreground_runnable.id);

             drop(state);

@@ -298,9 +365,10 @@ impl Deterministic {
             let runnable_count = state.scheduled_from_background.len();
             let ix = state.rng.gen_range(0..=runnable_count);
             if ix < state.scheduled_from_background.len() {
-                let runnable = state.scheduled_from_background.remove(ix);
+                let background_runnable = state.scheduled_from_background.remove(ix);
+                state.poll_history.push(background_runnable.id);
                 drop(state);
-                runnable.run();
+                background_runnable.runnable.run();
             } else {
                 drop(state);
                 if let Poll::Ready(result) = future.poll(&mut cx) {
@@ -1,11 +1,13 @@
 use crate::{
-    elements::Empty, executor, platform, Element, ElementBox, Entity, FontCache, Handle,
-    LeakDetector, MutableAppContext, Platform, RenderContext, Subscription, TestAppContext, View,
+    elements::Empty, executor, platform, util::CwdBacktrace, Element, ElementBox, Entity,
+    FontCache, Handle, LeakDetector, MutableAppContext, Platform, RenderContext, Subscription,
+    TestAppContext, View,
 };
 use futures::StreamExt;
 use parking_lot::Mutex;
 use smol::channel;
 use std::{
+    fmt::Write,
     panic::{self, RefUnwindSafe},
     rc::Rc,
     sync::{
@@ -29,13 +31,13 @@ pub fn run_test(
     mut num_iterations: u64,
     mut starting_seed: u64,
     max_retries: usize,
+    detect_nondeterminism: bool,
     test_fn: &mut (dyn RefUnwindSafe
         + Fn(
             &mut MutableAppContext,
             Rc<platform::test::ForegroundPlatform>,
             Arc<executor::Deterministic>,
             u64,
-            bool,
         )),
     fn_name: String,
 ) {
@@ -60,16 +62,20 @@ pub fn run_test(
     let platform = Arc::new(platform::test::platform());
     let font_system = platform.fonts();
     let font_cache = Arc::new(FontCache::new(font_system));
+    let mut prev_runnable_history: Option<Vec<usize>> = None;

-    loop {
-        let seed = atomic_seed.fetch_add(1, SeqCst);
-        let is_last_iteration = seed + 1 >= starting_seed + num_iterations;
+    for _ in 0..num_iterations {
+        let seed = atomic_seed.load(SeqCst);

         if is_randomized {
             dbg!(seed);
         }

         let deterministic = executor::Deterministic::new(seed);
+        if detect_nondeterminism {
+            deterministic.enable_runnable_backtrace();
+        }

         let leak_detector = Arc::new(Mutex::new(LeakDetector::default()));
         let mut cx = TestAppContext::new(
             foreground_platform.clone(),
@@ -82,13 +88,7 @@ pub fn run_test(
             fn_name.clone(),
         );
         cx.update(|cx| {
-            test_fn(
-                cx,
-                foreground_platform.clone(),
-                deterministic.clone(),
-                seed,
-                is_last_iteration,
-            );
+            test_fn(cx, foreground_platform.clone(), deterministic.clone(), seed);
         });

         cx.update(|cx| cx.remove_all_windows());
@@ -96,8 +96,64 @@ pub fn run_test(
         cx.update(|cx| cx.clear_globals());

         leak_detector.lock().detect();
-        if is_last_iteration {
-            break;
+
+        if detect_nondeterminism {
+            let curr_runnable_history = deterministic.runnable_history();
+            if let Some(prev_runnable_history) = prev_runnable_history {
+                let mut prev_entries = prev_runnable_history.iter().fuse();
+                let mut curr_entries = curr_runnable_history.iter().fuse();
+
+                let mut nondeterministic = false;
+                let mut common_history_prefix = Vec::new();
+                let mut prev_history_suffix = Vec::new();
+                let mut curr_history_suffix = Vec::new();
+                loop {
+                    match (prev_entries.next(), curr_entries.next()) {
+                        (None, None) => break,
+                        (None, Some(curr_id)) => curr_history_suffix.push(*curr_id),
+                        (Some(prev_id), None) => prev_history_suffix.push(*prev_id),
+                        (Some(prev_id), Some(curr_id)) => {
+                            if nondeterministic {
+                                prev_history_suffix.push(*prev_id);
+                                curr_history_suffix.push(*curr_id);
+                            } else if prev_id == curr_id {
+                                common_history_prefix.push(*curr_id);
+                            } else {
+                                nondeterministic = true;
+                                prev_history_suffix.push(*prev_id);
+                                curr_history_suffix.push(*curr_id);
+                            }
+                        }
+                    }
+                }
+
+                if nondeterministic {
+                    let mut error = String::new();
+                    writeln!(&mut error, "Common prefix: {:?}", common_history_prefix)
+                        .unwrap();
+                    writeln!(&mut error, "Previous suffix: {:?}", prev_history_suffix)
+                        .unwrap();
+                    writeln!(&mut error, "Current suffix: {:?}", curr_history_suffix)
+                        .unwrap();
+
+                    let last_common_backtrace = common_history_prefix
+                        .last()
+                        .map(|runnable_id| deterministic.runnable_backtrace(*runnable_id));
+
+                    writeln!(
+                        &mut error,
+                        "Last future that ran on both executions: {:?}",
+                        last_common_backtrace.as_ref().map(CwdBacktrace)
+                    )
+                    .unwrap();
+                    panic!("Detected non-determinism.\n{}", error);
+                }
+            }
+            prev_runnable_history = Some(curr_runnable_history);
+        }
+
+        if !detect_nondeterminism {
+            atomic_seed.fetch_add(1, SeqCst);
         }
     }
 });
@@ -112,7 +168,7 @@ pub fn run_test(
         println!("retrying: attempt {}", retries);
     } else {
         if is_randomized {
-            eprintln!("failing seed: {}", atomic_seed.load(SeqCst) - 1);
+            eprintln!("failing seed: {}", atomic_seed.load(SeqCst));
         }
         panic::resume_unwind(error);
     }
@@ -14,6 +14,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
     let mut max_retries = 0;
     let mut num_iterations = 1;
     let mut starting_seed = 0;
+    let mut detect_nondeterminism = false;

     for arg in args {
         match arg {
@@ -26,6 +27,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
             let key_name = meta.path.get_ident().map(|i| i.to_string());
             let result = (|| {
                 match key_name.as_deref() {
+                    Some("detect_nondeterminism") => {
+                        detect_nondeterminism = parse_bool(&meta.lit)?
+                    }
                     Some("retries") => max_retries = parse_int(&meta.lit)?,
                     Some("iterations") => num_iterations = parse_int(&meta.lit)?,
                     Some("seed") => starting_seed = parse_int(&meta.lit)?,
@@ -77,10 +81,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                     inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                     continue;
                 }
-                Some("bool") => {
-                    inner_fn_args.extend(quote!(is_last_iteration,));
-                    continue;
-                }
                 Some("Arc") => {
                     if let syn::PathArguments::AngleBracketed(args) =
                         &last_segment.unwrap().arguments
@@ -146,7 +146,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                 #num_iterations as u64,
                 #starting_seed as u64,
                 #max_retries,
-                &mut |cx, foreground_platform, deterministic, seed, is_last_iteration| {
+                #detect_nondeterminism,
+                &mut |cx, foreground_platform, deterministic, seed| {
                     #cx_vars
                     cx.foreground().run(#inner_fn_name(#inner_fn_args));
                     #cx_teardowns
@@ -165,9 +166,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                 Some("StdRng") => {
                     inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                 }
-                Some("bool") => {
-                    inner_fn_args.extend(quote!(is_last_iteration,));
-                }
                 _ => {}
             }
         } else {
@@ -189,7 +187,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                 #num_iterations as u64,
                 #starting_seed as u64,
                 #max_retries,
-                &mut |cx, _, _, seed, is_last_iteration| #inner_fn_name(#inner_fn_args),
+                #detect_nondeterminism,
+                &mut |cx, _, _, seed| #inner_fn_name(#inner_fn_args),
                 stringify!(#outer_fn_name).to_string(),
             );
         }
@@ -209,3 +208,13 @@ fn parse_int(literal: &Lit) -> Result<usize, TokenStream> {

     result.map_err(|err| TokenStream::from(err.into_compile_error()))
 }
+
+fn parse_bool(literal: &Lit) -> Result<bool, TokenStream> {
+    let result = if let Lit::Bool(result) = &literal {
+        Ok(result.value)
+    } else {
+        Err(syn::Error::new(literal.span(), "must be a boolean"))
+    };
+
+    result.map_err(|err| TokenStream::from(err.into_compile_error()))
+}
|
||||
|
|
|
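The new key is consumed like the existing ones, so a test opts in through the attribute. A hypothetical invocation (the test name and body are illustrative, not from this PR):

#[gpui::test(iterations = 10, detect_nondeterminism = true)]
async fn test_something(cx: &mut gpui::TestAppContext) {
    // With detect_nondeterminism the runner stops advancing the seed
    // (see the atomic_seed.fetch_add guard above), so every iteration
    // replays the same seed and the execution histories can be compared.
}
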
@@ -10,7 +10,11 @@ use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt};
use futures::{
channel::{mpsc, oneshot},
future::Shared,
AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};

use gpui::{
AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,

@@ -45,12 +49,10 @@ use std::{
cell::RefCell,
cmp::{self, Ordering},
convert::TryInto,
ffi::OsString,
hash::Hash,
mem,
num::NonZeroU32,
ops::Range,
os::unix::{ffi::OsStrExt, prelude::OsStringExt},
path::{Component, Path, PathBuf},
rc::Rc,
str,

@@ -70,10 +72,6 @@ pub trait Item: Entity {
fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
projects: Vec<WeakModelHandle<Project>>,
}

// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState which can either be a task for a

@@ -102,7 +100,6 @@ pub struct Project {
next_entry_id: Arc<AtomicUsize>,
next_diagnostic_group_id: usize,
user_store: ModelHandle<UserStore>,
project_store: ModelHandle<ProjectStore>,
fs: Arc<dyn Fs>,
client_state: Option<ProjectClientState>,
collaborators: HashMap<PeerId, Collaborator>,

@@ -152,6 +149,8 @@ enum WorktreeHandle {
enum ProjectClientState {
Local {
remote_id: u64,
metadata_changed: mpsc::UnboundedSender<oneshot::Sender<()>>,
_maintain_metadata: Task<()>,
_detect_unshare: Task<Option<()>>,
},
Remote {

@@ -412,46 +411,39 @@ impl Project {
pub fn local(
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
project_store: ModelHandle<ProjectStore>,
languages: Arc<LanguageRegistry>,
fs: Arc<dyn Fs>,
cx: &mut MutableAppContext,
) -> ModelHandle<Self> {
cx.add_model(|cx: &mut ModelContext<Self>| {
let handle = cx.weak_handle();
project_store.update(cx, |store, cx| store.add_project(handle, cx));

Self {
worktrees: Default::default(),
collaborators: Default::default(),
opened_buffers: Default::default(),
shared_buffers: Default::default(),
incomplete_buffers: Default::default(),
loading_buffers: Default::default(),
loading_local_worktrees: Default::default(),
buffer_snapshots: Default::default(),
client_state: None,
opened_buffer: watch::channel(),
client_subscriptions: Vec::new(),
_subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
_maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
active_entry: None,
languages,
client,
user_store,
project_store,
fs,
next_entry_id: Default::default(),
next_diagnostic_group_id: Default::default(),
language_servers: Default::default(),
language_server_ids: Default::default(),
language_server_statuses: Default::default(),
last_workspace_edits_by_language_server: Default::default(),
language_server_settings: Default::default(),
buffers_being_formatted: Default::default(),
next_language_server_id: 0,
nonce: StdRng::from_entropy().gen(),
}
cx.add_model(|cx: &mut ModelContext<Self>| Self {
worktrees: Default::default(),
collaborators: Default::default(),
opened_buffers: Default::default(),
shared_buffers: Default::default(),
incomplete_buffers: Default::default(),
loading_buffers: Default::default(),
loading_local_worktrees: Default::default(),
buffer_snapshots: Default::default(),
client_state: None,
opened_buffer: watch::channel(),
client_subscriptions: Vec::new(),
_subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
_maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
active_entry: None,
languages,
client,
user_store,
fs,
next_entry_id: Default::default(),
next_diagnostic_group_id: Default::default(),
language_servers: Default::default(),
language_server_ids: Default::default(),
language_server_statuses: Default::default(),
last_workspace_edits_by_language_server: Default::default(),
language_server_settings: Default::default(),
buffers_being_formatted: Default::default(),
next_language_server_id: 0,
nonce: StdRng::from_entropy().gen(),
})
}

@@ -459,31 +451,28 @@ impl Project {
remote_id: u64,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
project_store: ModelHandle<ProjectStore>,
languages: Arc<LanguageRegistry>,
fs: Arc<dyn Fs>,
mut cx: AsyncAppContext,
) -> Result<ModelHandle<Self>, JoinProjectError> {
client.authenticate_and_connect(true, &cx).await?;

let subscription = client.subscribe_to_entity(remote_id);
let response = client
.request(proto::JoinProject {
project_id: remote_id,
})
.await?;
let this = cx.add_model(|cx| {
let replica_id = response.replica_id as ReplicaId;

let replica_id = response.replica_id as ReplicaId;

let mut worktrees = Vec::new();
for worktree in response.worktrees {
let worktree = cx
.update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
worktrees.push(worktree);
}

let this = cx.add_model(|cx: &mut ModelContext<Self>| {
let handle = cx.weak_handle();
project_store.update(cx, |store, cx| store.add_project(handle, cx));
let mut worktrees = Vec::new();
for worktree in response.worktrees {
let worktree = cx.update(|cx| {
Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
});
worktrees.push(worktree);
}

let mut this = Self {
worktrees: Vec::new(),

@@ -497,11 +486,10 @@ impl Project {
_maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
languages,
user_store: user_store.clone(),
project_store,
fs,
next_entry_id: Default::default(),
next_diagnostic_group_id: Default::default(),
client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
client_subscriptions: Default::default(),
_subscriptions: Default::default(),
client: client.clone(),
client_state: Some(ProjectClientState::Remote {

@@ -550,10 +538,11 @@ impl Project {
nonce: StdRng::from_entropy().gen(),
};
for worktree in worktrees {
this.add_worktree(&worktree, cx);
let _ = this.add_worktree(&worktree, cx);
}
this
});
let subscription = subscription.set_model(&this, &mut cx);

let user_ids = response
.collaborators

@@ -571,6 +560,7 @@ impl Project {

this.update(&mut cx, |this, _| {
this.collaborators = collaborators;
this.client_subscriptions.push(subscription);
});

Ok(this)

@@ -593,9 +583,7 @@ impl Project {
let http_client = client::test::FakeHttpClient::with_404_response();
let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let project_store = cx.add_model(|_| ProjectStore::new());
let project =
cx.update(|cx| Project::local(client, user_store, project_store, languages, fs, cx));
let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
for path in root_paths {
let (tree, _) = project
.update(cx, |project, cx| {

@@ -676,10 +664,6 @@ impl Project {
self.user_store.clone()
}

pub fn project_store(&self) -> ModelHandle<ProjectStore> {
self.project_store.clone()
}

#[cfg(any(test, feature = "test-support"))]
pub fn check_invariants(&self, cx: &AppContext) {
if self.is_local() {

@@ -751,53 +735,22 @@ impl Project {
}
}

fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
if let Some(ProjectClientState::Local { remote_id, .. }) = &self.client_state {
let project_id = *remote_id;
// Broadcast worktrees only if the project is online.
let worktrees = self
.worktrees
.iter()
.filter_map(|worktree| {
worktree
.upgrade(cx)
.map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
})
.collect();
self.client
.send(proto::UpdateProject {
project_id,
worktrees,
})
.log_err();

let worktrees = self.visible_worktrees(cx).collect::<Vec<_>>();
let scans_complete = futures::future::join_all(
worktrees
.iter()
.filter_map(|worktree| Some(worktree.read(cx).as_local()?.scan_complete())),
);

let worktrees = worktrees.into_iter().map(|handle| handle.downgrade());

cx.spawn_weak(move |_, cx| async move {
scans_complete.await;
cx.read(|cx| {
for worktree in worktrees {
if let Some(worktree) = worktree
.upgrade(cx)
.and_then(|worktree| worktree.read(cx).as_local())
{
worktree.send_extension_counts(project_id);
}
}
})
})
.detach();
fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
let (tx, rx) = oneshot::channel();
if let Some(ProjectClientState::Local {
metadata_changed, ..
}) = &mut self.client_state
{
let _ = metadata_changed.unbounded_send(tx);
}

self.project_store.update(cx, |_, cx| cx.notify());
cx.notify();

async move {
// If the project is shared, this will resolve when the `_maintain_metadata` task has
// a chance to update the metadata. Otherwise, it will resolve right away because `tx`
// will get dropped.
let _ = rx.await;
}
}

pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {

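The rewritten `metadata_changed` above no longer broadcasts directly: it enqueues a oneshot sender and returns a future that resolves once the `_maintain_metadata` task (wired up in the sharing hunk below) has pushed one batched `UpdateProject`. A standalone sketch of that coalescing pattern, using only the `futures` crate (names are illustrative, not the gpui task plumbing):

use futures::{
    channel::{mpsc, oneshot},
    StreamExt,
};

// One long-lived task drains every queued request, performs a single
// batched update, then acknowledges all requesters at once.
async fn maintain_metadata(mut rx: mpsc::UnboundedReceiver<oneshot::Sender<()>>) {
    while let Some(tx) = rx.next().await {
        let mut txs = vec![tx];
        // Coalesce requests that piled up while we were busy.
        while let Ok(Some(next_tx)) = rx.try_next() {
            txs.push(next_tx);
        }

        // ...send one batched update (e.g. proto::UpdateProject) here...

        for tx in txs {
            let _ = tx.send(()); // resolve every waiting caller
        }
    }
}

// Callers enqueue a sender and get a future that resolves after the
// batched update, or immediately if the task is gone and `tx` is dropped.
fn metadata_changed(
    queue: &mpsc::UnboundedSender<oneshot::Sender<()>>,
) -> impl std::future::Future<Output = ()> {
    let (tx, rx) = oneshot::channel();
    let _ = queue.unbounded_send(tx);
    async move {
        let _ = rx.await;
    }
}
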
@@ -899,7 +852,7 @@ impl Project {
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
project_id,
path: project_path.path.as_os_str().as_bytes().to_vec(),
path: project_path.path.to_string_lossy().into(),
is_directory,
})
.await?;

@@ -943,7 +896,7 @@ impl Project {
.request(proto::CopyProjectEntry {
project_id,
entry_id: entry_id.to_proto(),
new_path: new_path.as_os_str().as_bytes().to_vec(),
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response

@@ -986,7 +939,7 @@ impl Project {
.request(proto::RenameProjectEntry {
project_id,
entry_id: entry_id.to_proto(),
new_path: new_path.as_os_str().as_bytes().to_vec(),
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response

@@ -1087,15 +1040,51 @@ impl Project {
});
}

self.client_subscriptions
.push(self.client.add_model_for_remote_entity(project_id, cx));
self.metadata_changed(cx);
self.client_subscriptions.push(
self.client
.subscribe_to_entity(project_id)
.set_model(&cx.handle(), &mut cx.to_async()),
);
let _ = self.metadata_changed(cx);
cx.emit(Event::RemoteIdChanged(Some(project_id)));
cx.notify();

let mut status = self.client.status();
let (metadata_changed_tx, mut metadata_changed_rx) = mpsc::unbounded();
self.client_state = Some(ProjectClientState::Local {
remote_id: project_id,
metadata_changed: metadata_changed_tx,
_maintain_metadata: cx.spawn_weak(move |this, cx| async move {
while let Some(tx) = metadata_changed_rx.next().await {
let mut txs = vec![tx];
while let Ok(Some(next_tx)) = metadata_changed_rx.try_next() {
txs.push(next_tx);
}

let Some(this) = this.upgrade(&cx) else { break };
this.read_with(&cx, |this, cx| {
let worktrees = this
.worktrees
.iter()
.filter_map(|worktree| {
worktree.upgrade(cx).map(|worktree| {
worktree.read(cx).as_local().unwrap().metadata_proto()
})
})
.collect();
this.client.request(proto::UpdateProject {
project_id,
worktrees,
})
})
.await
.log_err();

for tx in txs {
let _ = tx.send(());
}
}
}),
_detect_unshare: cx.spawn_weak(move |this, mut cx| {
async move {
let is_connected = status.next().await.map_or(false, |s| s.is_connected());

@@ -1145,7 +1134,7 @@ impl Project {
}
}

self.metadata_changed(cx);
let _ = self.metadata_changed(cx);
cx.notify();
self.client.send(proto::UnshareProject {
project_id: remote_id,

@@ -1634,10 +1623,6 @@ impl Project {
operations: vec![language::proto::serialize_operation(operation)],
});
cx.background().spawn(request).detach_and_log_err(cx);
} else if let Some(project_id) = self.remote_id() {
let _ = self
.client
.send(proto::RegisterProjectActivity { project_id });
}
}
BufferEvent::Edited { .. } => {

@@ -3429,19 +3414,29 @@ impl Project {
position: Some(language::proto::serialize_anchor(&anchor)),
version: serialize_version(&source_buffer.version()),
};
cx.spawn_weak(|_, mut cx| async move {
cx.spawn_weak(|this, mut cx| async move {
let response = rpc.request(message).await?;

source_buffer_handle
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(response.version))
})
.await;
if this
.upgrade(&cx)
.ok_or_else(|| anyhow!("project was dropped"))?
.read_with(&cx, |this, _| this.is_read_only())
{
return Err(anyhow!(
"failed to get completions: project was disconnected"
));
} else {
source_buffer_handle
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(response.version))
})
.await;

let completions = response.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone())
});
futures::future::try_join_all(completions).await
let completions = response.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone())
});
futures::future::try_join_all(completions).await
}
})
} else {
Task::ready(Ok(Default::default()))

@@ -3618,7 +3613,7 @@ impl Project {
} else if let Some(project_id) = self.remote_id() {
let rpc = self.client.clone();
let version = buffer.version();
cx.spawn_weak(|_, mut cx| async move {
cx.spawn_weak(|this, mut cx| async move {
let response = rpc
.request(proto::GetCodeActions {
project_id,

@@ -3629,17 +3624,27 @@ impl Project {
})
.await?;

buffer_handle
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(response.version))
})
.await;
if this
.upgrade(&cx)
.ok_or_else(|| anyhow!("project was dropped"))?
.read_with(&cx, |this, _| this.is_read_only())
{
return Err(anyhow!(
"failed to get code actions: project was disconnected"
));
} else {
buffer_handle
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(response.version))
})
.await;

response
.actions
.into_iter()
.map(language::proto::deserialize_code_action)
.collect()
response
.actions
.into_iter()
.map(language::proto::deserialize_code_action)
.collect()
}
})
} else {
Task::ready(Ok(Default::default()))

@@ -4148,9 +4153,13 @@ impl Project {
let message = request.to_proto(project_id, buffer);
return cx.spawn(|this, cx| async move {
let response = rpc.request(message).await?;
request
.response_from_proto(response, this, buffer_handle, cx)
.await
if this.read_with(&cx, |this, _| this.is_read_only()) {
Err(anyhow!("disconnected before completing request"))
} else {
request
.response_from_proto(response, this, buffer_handle, cx)
.await
}
});
}
Task::ready(Ok(Default::default()))

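All three RPC paths above now apply the same guard: once the response arrives, check whether the project has become read-only (the host disconnected) before consuming the stale response. Distilled into a free function (a sketch; the error text is illustrative):

use anyhow::{anyhow, Result};

// Reject an RPC response if the project went read-only while we waited.
fn guard_response<T>(response: T, is_read_only: bool) -> Result<T> {
    if is_read_only {
        Err(anyhow!("project was disconnected before the request completed"))
    } else {
        Ok(response)
    }
}
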
@@ -4228,12 +4237,13 @@ impl Project {
});
let worktree = worktree?;

let project_id = project.update(&mut cx, |project, cx| {
project.add_worktree(&worktree, cx);
project.remote_id()
});
project
.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))
.await;

if let Some(project_id) = project_id {
if let Some(project_id) =
project.read_with(&cx, |project, _| project.remote_id())
{
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_local_mut().unwrap().share(project_id, cx)

@@ -4257,7 +4267,11 @@ impl Project {
})
}

pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
pub fn remove_worktree(
&mut self,
id_to_remove: WorktreeId,
cx: &mut ModelContext<Self>,
) -> impl Future<Output = ()> {
self.worktrees.retain(|worktree| {
if let Some(worktree) = worktree.upgrade(cx) {
let id = worktree.read(cx).id();

@@ -4271,11 +4285,14 @@ impl Project {
false
}
});
self.metadata_changed(cx);
cx.notify();
self.metadata_changed(cx)
}

fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
fn add_worktree(
&mut self,
worktree: &ModelHandle<Worktree>,
cx: &mut ModelContext<Self>,
) -> impl Future<Output = ()> {
cx.observe(worktree, |_, _, cx| cx.notify()).detach();
if worktree.read(cx).is_local() {
cx.subscribe(worktree, |this, worktree, event, cx| match event {

@@ -4299,15 +4316,13 @@ impl Project {
.push(WorktreeHandle::Weak(worktree.downgrade()));
}

self.metadata_changed(cx);
cx.observe_release(worktree, |this, worktree, cx| {
this.remove_worktree(worktree.id(), cx);
cx.notify();
let _ = this.remove_worktree(worktree.id(), cx);
})
.detach();

cx.emit(Event::WorktreeAdded);
cx.notify();
self.metadata_changed(cx)
}

fn update_local_worktree_buffers(

@@ -4624,11 +4639,11 @@ impl Project {
} else {
let worktree =
Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
this.add_worktree(&worktree, cx);
let _ = this.add_worktree(&worktree, cx);
}
}

this.metadata_changed(cx);
let _ = this.metadata_changed(cx);
for (id, _) in old_worktrees_by_id {
cx.emit(Event::WorktreeRemoved(id));
}

@@ -4670,7 +4685,7 @@ impl Project {
let entry = worktree
.update(&mut cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
let path = PathBuf::from(envelope.payload.path);
worktree.create_entry(path, envelope.payload.is_directory, cx)
})
.await?;

@@ -4694,7 +4709,7 @@ impl Project {
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
let entry = worktree
.update(&mut cx, |worktree, cx| {
let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
let new_path = PathBuf::from(envelope.payload.new_path);
worktree
.as_local_mut()
.unwrap()

@@ -4722,7 +4737,7 @@ impl Project {
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
let entry = worktree
.update(&mut cx, |worktree, cx| {
let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
let new_path = PathBuf::from(envelope.payload.new_path);
worktree
.as_local_mut()
.unwrap()

@@ -5864,48 +5879,6 @@ impl Project {
}
}

impl ProjectStore {
pub fn new() -> Self {
Self {
projects: Default::default(),
}
}

pub fn projects<'a>(
&'a self,
cx: &'a AppContext,
) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
self.projects
.iter()
.filter_map(|project| project.upgrade(cx))
}

fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
if let Err(ix) = self
.projects
.binary_search_by_key(&project.id(), WeakModelHandle::id)
{
self.projects.insert(ix, project);
}
cx.notify();
}

fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
let mut did_change = false;
self.projects.retain(|project| {
if project.is_upgradable(cx) {
true
} else {
did_change = true;
false
}
});
if did_change {
cx.notify();
}
}
}

impl WorktreeHandle {
pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
match self {

@@ -5984,16 +5957,10 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
}
}

impl Entity for ProjectStore {
type Event = ();
}

impl Entity for Project {
type Event = Event;

fn release(&mut self, cx: &mut gpui::MutableAppContext) {
self.project_store.update(cx, ProjectStore::prune_projects);

fn release(&mut self, _: &mut gpui::MutableAppContext) {
match &self.client_state {
Some(ProjectClientState::Local { remote_id, .. }) => {
self.client

@@ -2166,7 +2166,11 @@ async fn test_rescan_and_remote_updates(
proto::WorktreeMetadata {
id: initial_snapshot.id().to_proto(),
root_name: initial_snapshot.root_name().into(),
abs_path: initial_snapshot.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: initial_snapshot
.abs_path()
.as_os_str()
.to_string_lossy()
.into(),
visible: true,
},
rpc.clone(),

@@ -41,7 +41,6 @@ use std::{
future::Future,
mem,
ops::{Deref, DerefMut},
os::unix::prelude::{OsStrExt, OsStringExt},
path::{Path, PathBuf},
sync::{atomic::AtomicUsize, Arc},
task::Poll,

@@ -83,6 +82,7 @@ pub struct RemoteWorktree {
replica_id: ReplicaId,
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
visible: bool,
disconnected: bool,
}

#[derive(Clone)]

@@ -168,7 +168,7 @@ enum ScanState {
struct ShareState {
project_id: u64,
snapshots_tx: watch::Sender<LocalSnapshot>,
_maintain_remote_snapshot: Option<Task<Option<()>>>,
_maintain_remote_snapshot: Task<Option<()>>,
}

pub enum Event {

@@ -222,7 +222,7 @@ impl Worktree {
let root_name = worktree.root_name.clone();
let visible = worktree.visible;

let abs_path = PathBuf::from(OsString::from_vec(worktree.abs_path));
let abs_path = PathBuf::from(worktree.abs_path);
let snapshot = Snapshot {
id: WorktreeId(remote_id as usize),
abs_path: Arc::from(abs_path.deref()),

@@ -248,6 +248,7 @@ impl Worktree {
client: client.clone(),
diagnostic_summaries: Default::default(),
visible,
disconnected: false,
})
});

@@ -660,7 +661,7 @@ impl LocalWorktree {
id: self.id().to_proto(),
root_name: self.root_name().to_string(),
visible: self.visible,
abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
}
}

@@ -972,11 +973,10 @@ impl LocalWorktree {
let _ = share_tx.send(Ok(()));
} else {
let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
let rpc = self.client.clone();
let worktree_id = cx.model_id() as u64;

for (path, summary) in self.diagnostic_summaries.iter() {
if let Err(e) = rpc.send(proto::UpdateDiagnosticSummary {
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id,
summary: Some(summary.to_proto(&path.0)),

@@ -986,15 +986,14 @@ impl LocalWorktree {
}

let maintain_remote_snapshot = cx.background().spawn({
let rpc = rpc;

let rpc = self.client.clone();
async move {
let mut prev_snapshot = match snapshots_rx.recv().await {
Some(snapshot) => {
let update = proto::UpdateWorktree {
project_id,
worktree_id,
abs_path: snapshot.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: snapshot.abs_path().to_string_lossy().into(),
root_name: snapshot.root_name().to_string(),
updated_entries: snapshot
.entries_by_path

@@ -1034,10 +1033,11 @@ impl LocalWorktree {
}
.log_err()
});

self.share = Some(ShareState {
project_id,
snapshots_tx,
_maintain_remote_snapshot: Some(maintain_remote_snapshot),
_maintain_remote_snapshot: maintain_remote_snapshot,
});
}

@@ -1055,25 +1055,6 @@ impl LocalWorktree {
pub fn is_shared(&self) -> bool {
self.share.is_some()
}

pub fn send_extension_counts(&self, project_id: u64) {
let mut extensions = Vec::new();
let mut counts = Vec::new();

for (extension, count) in self.extension_counts() {
extensions.push(extension.to_string_lossy().to_string());
counts.push(*count as u32);
}

self.client
.send(proto::UpdateWorktreeExtensions {
project_id,
worktree_id: self.id().to_proto(),
extensions,
counts,
})
.log_err();
}
}

impl RemoteWorktree {

@@ -1090,6 +1071,7 @@ impl RemoteWorktree {
pub fn disconnected_from_host(&mut self) {
self.updates_tx.take();
self.snapshot_subscriptions.clear();
self.disconnected = true;
}

pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {

@@ -1104,10 +1086,12 @@ impl RemoteWorktree {
self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
}

fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
let (tx, rx) = oneshot::channel();
if self.observed_snapshot(scan_id) {
let _ = tx.send(());
} else if self.disconnected {
drop(tx);
} else {
match self
.snapshot_subscriptions

@@ -1118,7 +1102,8 @@ impl RemoteWorktree {
}

async move {
let _ = rx.await;
rx.await?;
Ok(())
}
}

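The signature change above is load-bearing: `wait_for_snapshot` now returns a `Result`, and `disconnected_from_host` clears `snapshot_subscriptions`, so every queued oneshot sender is dropped and each waiter's `rx.await` fails with `Canceled` instead of hanging. A reduced sketch of the mechanism (futures crate only; the struct and fields are illustrative):

use anyhow::Result;
use futures::channel::oneshot;

struct Waiter {
    subscriptions: Vec<oneshot::Sender<()>>,
    disconnected: bool,
}

impl Waiter {
    fn wait(&mut self, already_done: bool) -> impl std::future::Future<Output = Result<()>> {
        let (tx, rx) = oneshot::channel();
        if already_done {
            let _ = tx.send(());
        } else if self.disconnected {
            drop(tx); // waiter observes `Canceled` as an error
        } else {
            self.subscriptions.push(tx);
        }
        async move {
            rx.await?; // a disconnect propagates here
            Ok(())
        }
    }

    fn disconnect(&mut self) {
        self.disconnected = true;
        self.subscriptions.clear(); // drops all senders, failing every waiter
    }
}
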
@@ -1147,7 +1132,7 @@ impl RemoteWorktree {
) -> Task<Result<Entry>> {
let wait_for_snapshot = self.wait_for_snapshot(scan_id);
cx.spawn(|this, mut cx| async move {
wait_for_snapshot.await;
wait_for_snapshot.await?;
this.update(&mut cx, |worktree, _| {
let worktree = worktree.as_remote_mut().unwrap();
let mut snapshot = worktree.background_snapshot.lock();

@@ -1166,7 +1151,7 @@ impl RemoteWorktree {
) -> Task<Result<()>> {
let wait_for_snapshot = self.wait_for_snapshot(scan_id);
cx.spawn(|this, mut cx| async move {
wait_for_snapshot.await;
wait_for_snapshot.await?;
this.update(&mut cx, |worktree, _| {
let worktree = worktree.as_remote_mut().unwrap();
let mut snapshot = worktree.background_snapshot.lock();

@@ -1404,7 +1389,7 @@ impl LocalSnapshot {
proto::UpdateWorktree {
project_id,
worktree_id: self.id().to_proto(),
abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: self.abs_path().to_string_lossy().into(),
root_name,
updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
removed_entries: Default::default(),

@@ -1472,7 +1457,7 @@ impl LocalSnapshot {
proto::UpdateWorktree {
project_id,
worktree_id,
abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
abs_path: self.abs_path().to_string_lossy().into(),
root_name: self.root_name().to_string(),
updated_entries,
removed_entries,

@@ -2951,7 +2936,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
Self {
id: entry.id.to_proto(),
is_dir: entry.is_dir(),
path: entry.path.as_os_str().as_bytes().to_vec(),
path: entry.path.to_string_lossy().into(),
inode: entry.inode,
mtime: Some(entry.mtime.into()),
is_symlink: entry.is_symlink,

@@ -2969,14 +2954,10 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
EntryKind::Dir
} else {
let mut char_bag = *root_char_bag;
char_bag.extend(
String::from_utf8_lossy(&entry.path)
.chars()
.map(|c| c.to_ascii_lowercase()),
);
char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
EntryKind::File(char_bag)
};
let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
let path: Arc<Path> = PathBuf::from(entry.path).into();
Ok(Entry {
id: ProjectEntryId::from_proto(entry.id),
kind,

@@ -48,9 +48,7 @@ message Envelope {
OpenBufferForSymbolResponse open_buffer_for_symbol_response = 40;

UpdateProject update_project = 41;
RegisterProjectActivity register_project_activity = 42;
UpdateWorktree update_worktree = 43;
UpdateWorktreeExtensions update_worktree_extensions = 44;

CreateProjectEntry create_project_entry = 45;
RenameProjectEntry rename_project_entry = 46;

@@ -158,14 +156,12 @@ message JoinRoomResponse {
optional LiveKitConnectionInfo live_kit_connection_info = 2;
}

message LeaveRoom {
uint64 id = 1;
}
message LeaveRoom {}

message Room {
uint64 id = 1;
repeated Participant participants = 2;
repeated uint64 pending_participant_user_ids = 3;
repeated PendingParticipant pending_participants = 3;
string live_kit_room = 4;
}

@@ -176,6 +172,12 @@ message Participant {
ParticipantLocation location = 4;
}

message PendingParticipant {
uint64 user_id = 1;
uint64 calling_user_id = 2;
optional uint64 initial_project_id = 3;
}

message ParticipantProject {
uint64 id = 1;
repeated string worktree_root_names = 2;

@@ -199,13 +201,13 @@ message ParticipantLocation {

message Call {
uint64 room_id = 1;
uint64 recipient_user_id = 2;
uint64 called_user_id = 2;
optional uint64 initial_project_id = 3;
}

message IncomingCall {
uint64 room_id = 1;
uint64 caller_user_id = 2;
uint64 calling_user_id = 2;
repeated uint64 participant_user_ids = 3;
optional ParticipantProject initial_project = 4;
}

@@ -214,7 +216,7 @@ message CallCanceled {}

message CancelCall {
uint64 room_id = 1;
uint64 recipient_user_id = 2;
uint64 called_user_id = 2;
}

message DeclineCall {

@@ -253,10 +255,6 @@ message UpdateProject {
repeated WorktreeMetadata worktrees = 2;
}

message RegisterProjectActivity {
uint64 project_id = 1;
}

message JoinProject {
uint64 project_id = 1;
}

@@ -280,33 +278,26 @@ message UpdateWorktree {
repeated uint64 removed_entries = 5;
uint64 scan_id = 6;
bool is_last_update = 7;
bytes abs_path = 8;
}

message UpdateWorktreeExtensions {
uint64 project_id = 1;
uint64 worktree_id = 2;
repeated string extensions = 3;
repeated uint32 counts = 4;
string abs_path = 8;
}

message CreateProjectEntry {
uint64 project_id = 1;
uint64 worktree_id = 2;
bytes path = 3;
string path = 3;
bool is_directory = 4;
}

message RenameProjectEntry {
uint64 project_id = 1;
uint64 entry_id = 2;
bytes new_path = 3;
string new_path = 3;
}

message CopyProjectEntry {
uint64 project_id = 1;
uint64 entry_id = 2;
bytes new_path = 3;
string new_path = 3;
}

message DeleteProjectEntry {

@@ -894,7 +885,7 @@ message File {
message Entry {
uint64 id = 1;
bool is_dir = 2;
bytes path = 3;
string path = 3;
uint64 inode = 4;
Timestamp mtime = 5;
bool is_symlink = 6;

@@ -1078,7 +1069,7 @@ message WorktreeMetadata {
uint64 id = 1;
string root_name = 2;
bool visible = 3;
bytes abs_path = 4;
string abs_path = 4;
}

message UpdateDiffBase {

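These protocol changes replace every `bytes` path field with `string`, which is why the Rust code above now converts with `to_string_lossy` instead of `as_os_str().as_bytes()`. The round trip, in miniature (plain std; note the assumption that non-UTF-8 path components may be replaced rather than rejected):

use std::path::{Path, PathBuf};

// Outgoing: lossy conversion, so invalid UTF-8 becomes U+FFFD instead of
// failing the whole message.
fn path_to_proto(path: &Path) -> String {
    path.to_string_lossy().into()
}

// Incoming: a protobuf `string` is guaranteed UTF-8, so this is direct.
fn path_from_proto(path: String) -> PathBuf {
    PathBuf::from(path)
}
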
@@ -24,7 +24,7 @@ use std::{
};
use tracing::instrument;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ConnectionId(pub u32);

impl fmt::Display for ConnectionId {

@@ -140,12 +140,11 @@ messages!(
(OpenBufferResponse, Background),
(PerformRename, Background),
(PerformRenameResponse, Background),
(Ping, Foreground),
(PrepareRename, Background),
(PrepareRenameResponse, Background),
(ProjectEntryResponse, Foreground),
(RemoveContact, Foreground),
(Ping, Foreground),
(RegisterProjectActivity, Foreground),
(ReloadBuffers, Foreground),
(ReloadBuffersResponse, Foreground),
(RemoveProjectCollaborator, Foreground),

@@ -175,7 +174,6 @@ messages!(
(UpdateParticipantLocation, Foreground),
(UpdateProject, Foreground),
(UpdateWorktree, Foreground),
(UpdateWorktreeExtensions, Background),
(UpdateDiffBase, Background),
(GetPrivateUserInfo, Foreground),
(GetPrivateUserInfoResponse, Foreground),

@@ -231,6 +229,7 @@ request_messages!(
(Test, Test),
(UpdateBuffer, Ack),
(UpdateParticipantLocation, Ack),
(UpdateProject, Ack),
(UpdateWorktree, Ack),
);

@@ -262,7 +261,6 @@ entity_messages!(
OpenBufferForSymbol,
PerformRename,
PrepareRename,
RegisterProjectActivity,
ReloadBuffers,
RemoveProjectCollaborator,
RenameProjectEntry,

@@ -278,7 +276,6 @@ entity_messages!(
UpdateLanguageServer,
UpdateProject,
UpdateWorktree,
UpdateWorktreeExtensions,
UpdateDiffBase
);

@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;

pub const PROTOCOL_VERSION: u32 = 39;
pub const PROTOCOL_VERSION: u32 = 40;

@@ -8,7 +8,7 @@ edition = "2021"
[dependencies]
anyhow = { version = "1.0.38", features = ["backtrace"] }
indoc = "1.0.7"
libsqlite3-sys = { version = "0.25.2", features = ["bundled"] }
libsqlite3-sys = { version = "0.24", features = ["bundled"] }
smol = "1.2"
thread_local = "1.1.4"
lazy_static = "1.4"

@@ -53,7 +53,7 @@ pub use persistence::{
WorkspaceDb,
};
use postage::prelude::Stream;
use project::{Project, ProjectEntryId, ProjectPath, ProjectStore, Worktree, WorktreeId};
use project::{Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
use serde::Deserialize;
use settings::{Autosave, DockAnchor, Settings};
use shared_screen::SharedScreen;

@@ -372,7 +372,6 @@ pub struct AppState {
pub themes: Arc<ThemeRegistry>,
pub client: Arc<client::Client>,
pub user_store: ModelHandle<client::UserStore>,
pub project_store: ModelHandle<ProjectStore>,
pub fs: Arc<dyn fs::Fs>,
pub build_window_options: fn() -> WindowOptions<'static>,
pub initialize_workspace: fn(&mut Workspace, &Arc<AppState>, &mut ViewContext<Workspace>),

@@ -392,7 +391,6 @@ impl AppState {
let languages = Arc::new(LanguageRegistry::test());
let http_client = client::test::FakeHttpClient::with_404_response();
let client = Client::new(http_client.clone(), cx);
let project_store = cx.add_model(|_| ProjectStore::new());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let themes = ThemeRegistry::new((), cx.font_cache().clone());
Arc::new(Self {

@@ -401,7 +399,6 @@ impl AppState {
fs,
languages,
user_store,
project_store,
initialize_workspace: |_, _, _| {},
build_window_options: Default::default,
default_item_factory: |_, _| unimplemented!(),

@@ -663,7 +660,6 @@ impl Workspace {
let project_handle = Project::local(
app_state.client.clone(),
app_state.user_store.clone(),
app_state.project_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
cx,

@@ -1035,8 +1031,10 @@ impl Workspace {
RemoveWorktreeFromProject(worktree_id): &RemoveWorktreeFromProject,
cx: &mut ViewContext<Self>,
) {
self.project
let future = self
.project
.update(cx, |project, cx| project.remove_worktree(*worktree_id, cx));
cx.foreground().spawn(future).detach();
}

fn project_path_for_path(

@@ -2866,9 +2864,9 @@ mod tests {
);

// Remove a project folder
project.update(cx, |project, cx| {
project.remove_worktree(worktree_id, cx);
});
project
.update(cx, |project, cx| project.remove_worktree(worktree_id, cx))
.await;
assert_eq!(
cx.current_window_title(window_id).as_deref(),
Some("one.txt — root2")

@@ -23,7 +23,7 @@ use isahc::{config::Configurable, Request};
use language::LanguageRegistry;
use log::LevelFilter;
use parking_lot::Mutex;
use project::{Fs, HomeDir, ProjectStore};
use project::{Fs, HomeDir};
use serde_json::json;
use settings::{
self, settings_file::SettingsFile, KeymapFileContent, Settings, SettingsFileContent,

@@ -139,8 +139,6 @@ fn main() {
})
.detach();

let project_store = cx.add_model(|_| ProjectStore::new());

client.start_telemetry();
client.report_event("start app", Default::default());

@@ -149,7 +147,6 @@ fn main() {
themes,
client: client.clone(),
user_store,
project_store,
fs,
build_window_options,
initialize_workspace,