Start work on using sqlite in tests

Max Brunsfeld 2022-11-09 19:15:05 -08:00
parent d14dd27cdc
commit 7e02ac772a
7 changed files with 250 additions and 96 deletions

Cargo.lock (generated)

@@ -1953,6 +1953,18 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7bad48618fdb549078c333a7a8528acb57af271d0433bdecd523eb620628364e"

+[[package]]
+name = "flume"
+version = "0.10.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "pin-project",
+ "spin 0.9.4",
+]
+
 [[package]]
 name = "fnv"
 version = "1.0.7"
@@ -3022,7 +3034,7 @@ version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 dependencies = [
- "spin",
+ "spin 0.5.2",
 ]

 [[package]]
@@ -4725,7 +4737,7 @@ dependencies = [
  "cc",
  "libc",
  "once_cell",
- "spin",
+ "spin 0.5.2",
  "untrusted",
  "web-sys",
  "winapi 0.3.9",
@@ -5563,6 +5575,15 @@ version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"

+[[package]]
+name = "spin"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09"
+dependencies = [
+ "lock_api",
+]
+
 [[package]]
 name = "spsc-buffer"
 version = "0.1.1"
@@ -5583,8 +5604,6 @@ dependencies = [
 [[package]]
 name = "sqlx"
 version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9249290c05928352f71c077cc44a464d880c63f26f7534728cca008e135c0428"
 dependencies = [
  "sqlx-core",
  "sqlx-macros",
@@ -5593,8 +5612,6 @@ dependencies = [
 [[package]]
 name = "sqlx-core"
 version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcbc16ddba161afc99e14d1713a453747a2b07fc097d2009f4c300ec99286105"
 dependencies = [
  "ahash",
  "atoi",
@@ -5608,8 +5625,10 @@ dependencies = [
  "dotenvy",
  "either",
  "event-listener",
+ "flume",
  "futures-channel",
  "futures-core",
+ "futures-executor",
  "futures-intrusive",
  "futures-util",
  "hashlink",
@@ -5619,6 +5638,7 @@ dependencies = [
  "indexmap",
  "itoa",
  "libc",
+ "libsqlite3-sys",
  "log",
  "md-5",
  "memchr",
@@ -5648,8 +5668,6 @@ dependencies = [
 [[package]]
 name = "sqlx-macros"
 version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b850fa514dc11f2ee85be9d055c512aa866746adfacd1cb42d867d68e6a5b0d9"
 dependencies = [
  "dotenvy",
  "either",
@@ -5657,6 +5675,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
+ "serde_json",
  "sha2 0.10.6",
  "sqlx-core",
  "sqlx-rt",
@@ -5667,8 +5686,6 @@ dependencies = [
 [[package]]
 name = "sqlx-rt"
 version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24c5b2d25fa654cc5f841750b8e1cdedbe21189bf9a9382ee90bfa9dd3562396"
 dependencies = [
  "once_cell",
  "tokio",

crates/collab/Cargo.toml

@@ -50,8 +50,9 @@ tracing-log = "0.1.3"
 tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }

 [dependencies.sqlx]
-version = "0.6"
-features = ["runtime-tokio-rustls", "postgres", "time", "uuid"]
+# version = "0.6"
+path = "../../../sqlx"
+features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid"]

 [dev-dependencies]
 collections = { path = "../collections", features = ["test-support"] }
@@ -78,5 +79,10 @@ lazy_static = "1.4"
 serde_json = { version = "1.0", features = ["preserve_order"] }
 unindent = "0.1"

+[dev-dependencies.sqlx]
+# version = "0.6"
+path = "../../../sqlx"
+features = ["sqlite"]
+
 [features]
 seed-support = ["clap", "lipsum", "reqwest"]

crates/collab/migrations.sqlite/ (new SQLite test schema)

@@ -0,0 +1,127 @@
CREATE TABLE IF NOT EXISTS "sessions" (
"id" VARCHAR NOT NULL PRIMARY KEY,
"expires" TIMESTAMP WITH TIME ZONE NULL,
"session" TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS "users" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"github_login" VARCHAR,
"admin" BOOLEAN,
email_address VARCHAR(255) DEFAULT NULL,
invite_code VARCHAR(64),
invite_count INTEGER NOT NULL DEFAULT 0,
inviter_id INTEGER REFERENCES users (id),
connected_once BOOLEAN NOT NULL DEFAULT false,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"github_user_id" INTEGER
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
CREATE TABLE IF NOT EXISTS "access_tokens" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"user_id" INTEGER REFERENCES users (id),
"hash" VARCHAR(128)
);
CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
CREATE TABLE IF NOT EXISTS "orgs" (
"id" SERIAL PRIMARY KEY,
"name" VARCHAR NOT NULL,
"slug" VARCHAR NOT NULL
);
CREATE UNIQUE INDEX "index_orgs_slug" ON "orgs" ("slug");
CREATE TABLE IF NOT EXISTS "org_memberships" (
"id" SERIAL PRIMARY KEY,
"org_id" INTEGER REFERENCES orgs (id) NOT NULL,
"user_id" INTEGER REFERENCES users (id) NOT NULL,
"admin" BOOLEAN NOT NULL
);
CREATE INDEX "index_org_memberships_user_id" ON "org_memberships" ("user_id");
CREATE UNIQUE INDEX "index_org_memberships_org_id_and_user_id" ON "org_memberships" ("org_id", "user_id");
CREATE TABLE IF NOT EXISTS "channels" (
"id" SERIAL PRIMARY KEY,
"owner_id" INTEGER NOT NULL,
"owner_is_user" BOOLEAN NOT NULL,
"name" VARCHAR NOT NULL
);
CREATE UNIQUE INDEX "index_channels_owner_and_name" ON "channels" ("owner_is_user", "owner_id", "name");
CREATE TABLE IF NOT EXISTS "channel_memberships" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER REFERENCES channels (id) NOT NULL,
"user_id" INTEGER REFERENCES users (id) NOT NULL,
"admin" BOOLEAN NOT NULL
);
CREATE INDEX "index_channel_memberships_user_id" ON "channel_memberships" ("user_id");
CREATE UNIQUE INDEX "index_channel_memberships_channel_id_and_user_id" ON "channel_memberships" ("channel_id", "user_id");
CREATE TABLE IF NOT EXISTS "channel_messages" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER REFERENCES channels (id) NOT NULL,
"sender_id" INTEGER REFERENCES users (id) NOT NULL,
"body" TEXT NOT NULL,
"sent_at" TIMESTAMP
);
CREATE INDEX "index_channel_messages_channel_id" ON "channel_messages" ("channel_id");
CREATE TABLE IF NOT EXISTS "contacts" (
"id" SERIAL PRIMARY KEY,
"user_id_a" INTEGER REFERENCES users (id) NOT NULL,
"user_id_b" INTEGER REFERENCES users (id) NOT NULL,
"a_to_b" BOOLEAN NOT NULL,
"should_notify" BOOLEAN NOT NULL,
"accepted" BOOLEAN NOT NULL
);
CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
CREATE TABLE IF NOT EXISTS "projects" (
"id" SERIAL PRIMARY KEY,
"host_user_id" INTEGER REFERENCES users (id) NOT NULL,
"unregistered" BOOLEAN NOT NULL DEFAULT false
);
CREATE TABLE IF NOT EXISTS "worktree_extensions" (
"id" SERIAL PRIMARY KEY,
"project_id" INTEGER REFERENCES projects (id) NOT NULL,
"worktree_id" INTEGER NOT NULL,
"extension" VARCHAR(255),
"count" INTEGER NOT NULL
);
CREATE UNIQUE INDEX "index_worktree_extensions_on_project_id_and_worktree_id_and_extension" ON "worktree_extensions" ("project_id", "worktree_id", "extension");
CREATE TABLE IF NOT EXISTS "project_activity_periods" (
"id" SERIAL PRIMARY KEY,
"duration_millis" INTEGER NOT NULL,
"ended_at" TIMESTAMP NOT NULL,
"user_id" INTEGER REFERENCES users (id) NOT NULL,
"project_id" INTEGER REFERENCES projects (id) NOT NULL
);
CREATE INDEX "index_project_activity_periods_on_ended_at" ON "project_activity_periods" ("ended_at");
CREATE TABLE IF NOT EXISTS "signups" (
"id" SERIAL PRIMARY KEY,
"email_address" VARCHAR NOT NULL,
"email_confirmation_code" VARCHAR(64) NOT NULL,
"email_confirmation_sent" BOOLEAN NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"device_id" VARCHAR,
"user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE,
"inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL,
"platform_mac" BOOLEAN NOT NULL,
"platform_linux" BOOLEAN NOT NULL,
"platform_windows" BOOLEAN NOT NULL,
"platform_unknown" BOOLEAN NOT NULL,
"editor_features" VARCHAR[],
"programming_languages" VARCHAR[]
);
CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address");
CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent");

crates/collab/src/db.rs

@@ -5,7 +5,6 @@ use axum::http::StatusCode;
 use collections::HashMap;
 use futures::StreamExt;
 use serde::{Deserialize, Serialize};
-pub use sqlx::postgres::PgPoolOptions as DbOptions;
 use sqlx::{
     migrate::{Migrate as _, Migration, MigrationSource},
     types::Uuid,
@@ -181,11 +180,14 @@ pub trait Db: Send + Sync {
 pub const DEFAULT_MIGRATIONS_PATH: Option<&'static str> =
     Some(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations"));

+pub const TEST_MIGRATIONS_PATH: Option<&'static str> =
+    Some(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations.sqlite"));
+
 #[cfg(not(any(test, debug_assertions)))]
 pub const DEFAULT_MIGRATIONS_PATH: Option<&'static str> = None;

-pub struct PostgresDb {
-    pool: sqlx::PgPool,
+pub struct RealDb {
+    pool: sqlx::SqlitePool,
 }

 macro_rules! test_support {
@@ -202,13 +204,13 @@ macro_rules! test_support {
     }};
 }

-impl PostgresDb {
+impl RealDb {
     pub async fn new(url: &str, max_connections: u32) -> Result<Self> {
-        let pool = DbOptions::new()
-            .max_connections(max_connections)
+        eprintln!("{url}");
+        let pool = sqlx::sqlite::SqlitePoolOptions::new()
+            .max_connections(1)
             .connect(url)
-            .await
-            .context("failed to connect to postgres database")?;
+            .await?;
         Ok(Self { pool })
     }
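For context on the pool change above, a standalone sketch of the same setup, assuming sqlx with the "sqlite" and "runtime-tokio-rustls" features and a tokio runtime; the in-memory URL stands in for the /tmp file path the commit passes:

    use sqlx::sqlite::SqlitePoolOptions;

    #[tokio::main]
    async fn main() -> Result<(), sqlx::Error> {
        // max_connections(1) keeps the whole test on SQLite's single
        // writer instead of contending for the file lock.
        let pool = SqlitePoolOptions::new()
            .max_connections(1)
            .connect("sqlite::memory:")
            .await?;
        sqlx::query("SELECT 1").execute(&pool).await?;
        Ok(())
    }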
@@ -267,7 +269,7 @@ impl PostgresDb {
 }

 #[async_trait]
-impl Db for PostgresDb {
+impl Db for RealDb {
     // users

     async fn create_user(
@@ -280,8 +282,8 @@ impl Db for PostgresDb {
         let query = "
             INSERT INTO users (email_address, github_login, github_user_id, admin)
             VALUES ($1, $2, $3, $4)
-            ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
-            RETURNING id, metrics_id::text
+            -- ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
+            RETURNING id, 'the-metrics-id'
         ";
         let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query)
@@ -331,8 +333,18 @@ impl Db for PostgresDb {
     }

     async fn get_user_by_id(&self, id: UserId) -> Result<Option<User>> {
-        let users = self.get_users_by_ids(vec![id]).await?;
-        Ok(users.into_iter().next())
+        test_support!(self, {
+            let query = "
+                SELECT users.*
+                FROM users
+                WHERE id = $1
+                LIMIT 1
+            ";
+            Ok(sqlx::query_as(query)
+                .bind(&id)
+                .fetch_optional(&self.pool)
+                .await?)
+        })
     }

     async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
@@ -351,14 +363,13 @@ impl Db for PostgresDb {

     async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<User>> {
         test_support!(self, {
-            let ids = ids.into_iter().map(|id| id.0).collect::<Vec<_>>();
             let query = "
                 SELECT users.*
                 FROM users
-                WHERE users.id = ANY ($1)
+                WHERE users.id IN (SELECT value from json_each($1))
             ";
             Ok(sqlx::query_as(query)
-                .bind(&ids)
+                .bind(&serde_json::json!(ids))
                 .fetch_all(&self.pool)
                 .await?)
         })
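The `= ANY ($1)` rewrite above is the main dialect change here: SQLite has no array binds, so the id list is sent as a single JSON value and unpacked server-side with json_each(). A self-contained sketch of the same trick; the people/name schema and the load_names helper are made up for illustration, assuming sqlx with the "sqlite" feature plus serde_json:

    use sqlx::{Row, SqlitePool};

    async fn load_names(pool: &SqlitePool, ids: &[i64]) -> sqlx::Result<Vec<String>> {
        // Bind the whole list as one JSON array string, e.g. "[1,2,3]",
        // and let json_each() turn it back into rows.
        let rows = sqlx::query(
            "SELECT name FROM people WHERE id IN (SELECT value FROM json_each($1))",
        )
        .bind(serde_json::json!(ids).to_string())
        .fetch_all(pool)
        .await?;
        rows.iter().map(|row| row.try_get("name")).collect()
    }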
@@ -493,7 +504,7 @@ impl Db for PostgresDb {
                 device_id
             )
             VALUES
-            ($1, $2, 'f', $3, $4, $5, 'f', $6, $7, $8)
+            ($1, $2, FALSE, $3, $4, $5, FALSE, $6)
             RETURNING id
             ",
         )
@@ -502,8 +513,8 @@ impl Db for PostgresDb {
         .bind(&signup.platform_linux)
         .bind(&signup.platform_mac)
         .bind(&signup.platform_windows)
-        .bind(&signup.editor_features)
-        .bind(&signup.programming_languages)
+        // .bind(&signup.editor_features)
+        // .bind(&signup.programming_languages)
         .bind(&signup.device_id)
         .execute(&self.pool)
         .await?;
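The 'f' → FALSE rewrites in this hunk and below drop a Postgres-ism: SQLite stores booleans as the integers 0 and 1 and accepts the TRUE/FALSE keywords (SQLite 3.23+), and the two commented-out binds match the placeholder list shrinking to $1–$6 now that the VARCHAR[] columns have no SQLite equivalent to bind. A small round-trip sketch, assuming sqlx with the "sqlite" feature and tokio:

    use sqlx::sqlite::SqlitePoolOptions;

    #[tokio::main]
    async fn main() -> Result<(), sqlx::Error> {
        let pool = SqlitePoolOptions::new().connect("sqlite::memory:").await?;
        sqlx::query("CREATE TABLE flags (accepted BOOLEAN NOT NULL)")
            .execute(&pool)
            .await?;
        // TRUE/FALSE are stored as the integers 1 and 0.
        sqlx::query("INSERT INTO flags (accepted) VALUES (FALSE), (TRUE)")
            .execute(&pool)
            .await?;
        // sqlx decodes the stored integers back into Rust bools.
        let rows: Vec<(bool,)> =
            sqlx::query_as("SELECT accepted FROM flags WHERE accepted = TRUE")
                .fetch_all(&pool)
                .await?;
        assert_eq!(rows, vec![(true,)]);
        Ok(())
    }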
@@ -555,21 +566,21 @@ impl Db for PostgresDb {
     async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()> {
         test_support!(self, {
-            sqlx::query(
-                "
-                UPDATE signups
-                SET email_confirmation_sent = 't'
-                WHERE email_address = ANY ($1)
-                ",
-            )
-            .bind(
-                &invites
-                    .iter()
-                    .map(|s| s.email_address.as_str())
-                    .collect::<Vec<_>>(),
-            )
-            .execute(&self.pool)
-            .await?;
+            // sqlx::query(
+            //     "
+            //     UPDATE signups
+            //     SET email_confirmation_sent = TRUE
+            //     WHERE email_address = ANY ($1)
+            //     ",
+            // )
+            // .bind(
+            //     &invites
+            //         .iter()
+            //         .map(|s| s.email_address.as_str())
+            //         .collect::<Vec<_>>(),
+            // )
+            // .execute(&self.pool)
+            // .await?;
             Ok(())
         })
     }
@@ -611,7 +622,7 @@ impl Db for PostgresDb {
             INSERT INTO users
             (email_address, github_login, github_user_id, admin, invite_count, invite_code)
             VALUES
-            ($1, $2, $3, 'f', $4, $5)
+            ($1, $2, $3, FALSE, $4, $5)
             ON CONFLICT (github_login) DO UPDATE SET
                 email_address = excluded.email_address,
                 github_user_id = excluded.github_user_id,
@@ -664,7 +675,7 @@ impl Db for PostgresDb {
             INSERT INTO contacts
             (user_id_a, user_id_b, a_to_b, should_notify, accepted)
             VALUES
-            ($1, $2, 't', 't', 't')
+            ($1, $2, TRUE, TRUE, TRUE)
             ON CONFLICT DO NOTHING
             ",
         )
@@ -824,7 +835,7 @@ impl Db for PostgresDb {
                 device_id
             )
             VALUES
-            ($1, $2, 'f', $3, 'f', 'f', 'f', 't', $4)
+            ($1, $2, FALSE, $3, FALSE, FALSE, FALSE, TRUE, $4)
             ON CONFLICT (email_address)
             DO UPDATE SET
                 inviting_user_id = excluded.inviting_user_id
@@ -870,7 +881,7 @@ impl Db for PostgresDb {
             sqlx::query(
                 "
                 UPDATE projects
-                SET unregistered = 't'
+                SET unregistered = TRUE
                 WHERE id = $1
                 ",
             )
@@ -1274,7 +1285,7 @@ impl Db for PostgresDb {
             let query = "
                 SELECT 1 FROM contacts
-                WHERE user_id_a = $1 AND user_id_b = $2 AND accepted = 't'
+                WHERE user_id_a = $1 AND user_id_b = $2 AND accepted = TRUE
                 LIMIT 1
             ";
             Ok(sqlx::query_scalar::<_, i32>(query)
@@ -1295,11 +1306,11 @@ impl Db for PostgresDb {
         };

         let query = "
             INSERT into contacts (user_id_a, user_id_b, a_to_b, accepted, should_notify)
-            VALUES ($1, $2, $3, 'f', 't')
+            VALUES ($1, $2, $3, FALSE, TRUE)
             ON CONFLICT (user_id_a, user_id_b) DO UPDATE
             SET
-                accepted = 't',
-                should_notify = 'f'
+                accepted = TRUE,
+                should_notify = FALSE
             WHERE
                 NOT contacts.accepted AND
                 ((contacts.a_to_b = excluded.a_to_b AND contacts.user_id_a = excluded.user_id_b) OR
@@ -1359,7 +1370,7 @@ impl Db for PostgresDb {
         let query = "
             UPDATE contacts
-            SET should_notify = 'f'
+            SET should_notify = FALSE
             WHERE
                 user_id_a = $1 AND user_id_b = $2 AND
                 (
@@ -1398,7 +1409,7 @@ impl Db for PostgresDb {
         let result = if accept {
             let query = "
                 UPDATE contacts
-                SET accepted = 't', should_notify = 't'
+                SET accepted = TRUE, should_notify = TRUE
                 WHERE user_id_a = $1 AND user_id_b = $2 AND a_to_b = $3;
             ";
             sqlx::query(query)
@@ -1706,7 +1717,7 @@ impl Db for PostgresDb {
         ";
         sqlx::query(query).execute(&self.pool).await.log_err();
         self.pool.close().await;
-        <sqlx::Postgres as sqlx::migrate::MigrateDatabase>::drop_database(url)
+        <sqlx::Sqlite as sqlx::migrate::MigrateDatabase>::drop_database(url)
             .await
             .log_err();
         eprintln!("tore down database: {:?}", start.elapsed());
@@ -1929,10 +1940,9 @@ mod test {
     use anyhow::anyhow;
     use collections::BTreeMap;
     use gpui::executor::Background;
-    use lazy_static::lazy_static;
     use parking_lot::Mutex;
     use rand::prelude::*;
-    use sqlx::{migrate::MigrateDatabase, Postgres};
+    use sqlx::{migrate::MigrateDatabase, Sqlite};
     use std::sync::Arc;
     use util::post_inc;
@@ -2587,22 +2597,14 @@ mod test {
     impl TestDb {
         #[allow(clippy::await_holding_lock)]
-        pub async fn postgres() -> Self {
-            lazy_static! {
-                static ref LOCK: Mutex<()> = Mutex::new(());
-            }
-
+        pub async fn real() -> Self {
             eprintln!("creating database...");
             let start = std::time::Instant::now();
-            let _guard = LOCK.lock();
             let mut rng = StdRng::from_entropy();
-            let name = format!("zed-test-{}", rng.gen::<u128>());
-            let url = format!("postgres://postgres@localhost:5433/{}", name);
-            Postgres::create_database(&url)
-                .await
-                .expect("failed to create test db");
-            let db = PostgresDb::new(&url, 5).await.unwrap();
-            db.migrate(Path::new(DEFAULT_MIGRATIONS_PATH.unwrap()), false)
+            let url = format!("/tmp/zed-test-{}", rng.gen::<u128>());
+            Sqlite::create_database(&url).await.unwrap();
+            let db = RealDb::new(&url, 5).await.unwrap();
+            db.migrate(Path::new(TEST_MIGRATIONS_PATH.unwrap()), false)
                 .await
                 .unwrap();
@@ -2628,7 +2630,7 @@ mod test {
     impl Drop for TestDb {
         fn drop(&mut self) {
             if let Some(db) = self.db.take() {
-                futures::executor::block_on(db.teardown(&self.url));
+                std::fs::remove_file(&self.url).ok();
             }
         }
     }
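Condensed, the test-database lifecycle this file lands on looks roughly like the sketch below. This is a reduced stand-in, not the commit's code, and it adds an explicit sqlite:// scheme where the commit passes a bare /tmp path: each TestDb gets its own throwaway file, and Drop simply deletes it where the Postgres version had to block on dropping a whole database.

    use rand::prelude::*;
    use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};

    struct TestDb {
        pool: Option<SqlitePool>,
        path: String,
    }

    impl TestDb {
        async fn new() -> Self {
            // A unique throwaway file per test isolates tests without the
            // global lock the Postgres version needed.
            let path = format!("/tmp/zed-test-{}", StdRng::from_entropy().gen::<u128>());
            let url = format!("sqlite://{path}");
            Sqlite::create_database(&url).await.unwrap();
            let pool = SqlitePool::connect(&url).await.unwrap();
            Self { pool: Some(pool), path }
        }
    }

    impl Drop for TestDb {
        fn drop(&mut self) {
            // Drop the pool first, then delete the file; both best-effort.
            drop(self.pool.take());
            std::fs::remove_file(&self.path).ok();
        }
    }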

crates/collab/src/db_tests.rs

@@ -7,7 +7,7 @@ use time::OffsetDateTime;
 #[tokio::test(flavor = "multi_thread")]
 async fn test_get_users_by_ids() {
     for test_db in [
-        TestDb::postgres().await,
+        TestDb::real().await,
         TestDb::fake(build_background_executor()),
     ] {
         let db = test_db.db();
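Most tests in this file follow the shape above, running one body against both the real and the fake backend. Schematically, with stand-in types (the crate's actual TestDb, Db, and build_background_executor helpers are richer than this; assumes tokio with the rt-multi-thread and macros features):

    // Stand-in sketch of the "same assertions, two backends" pattern.
    trait Db {
        fn backend(&self) -> &'static str;
    }

    struct RealDb; // would wrap the SQLite pool
    struct FakeDb; // would wrap the in-memory fake

    impl Db for RealDb {
        fn backend(&self) -> &'static str { "real (sqlite)" }
    }

    impl Db for FakeDb {
        fn backend(&self) -> &'static str { "fake (in-memory)" }
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn test_both_backends() {
        for db in [Box::new(RealDb) as Box<dyn Db>, Box::new(FakeDb)] {
            // The same body runs against each implementation.
            assert!(!db.backend().is_empty());
        }
    }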
@@ -73,7 +73,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_get_user_by_github_account() {
     for test_db in [
-        TestDb::postgres().await,
+        TestDb::real().await,
         TestDb::fake(build_background_executor()),
     ] {
         let db = test_db.db();
@@ -132,7 +132,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_worktree_extensions() {
-    let test_db = TestDb::postgres().await;
+    let test_db = TestDb::real().await;
     let db = test_db.db();

     let user = db
@@ -204,7 +204,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_user_activity() {
-    let test_db = TestDb::postgres().await;
+    let test_db = TestDb::real().await;
     let db = test_db.db();

     let mut user_ids = Vec::new();
@@ -448,7 +448,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_recent_channel_messages() {
     for test_db in [
-        TestDb::postgres().await,
+        TestDb::real().await,
         TestDb::fake(build_background_executor()),
     ] {
         let db = test_db.db();
@@ -493,7 +493,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_channel_message_nonces() {
     for test_db in [
-        TestDb::postgres().await,
+        TestDb::real().await,
         TestDb::fake(build_background_executor()),
     ] {
         let db = test_db.db();
@@ -538,7 +538,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_create_access_tokens() {
-    let test_db = TestDb::postgres().await;
+    let test_db = TestDb::real().await;
     let db = test_db.db();
     let user = db
         .create_user(
@@ -582,14 +582,14 @@
 #[test]
 fn test_fuzzy_like_string() {
-    assert_eq!(PostgresDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
-    assert_eq!(PostgresDb::fuzzy_like_string("x y"), "%x%y%");
-    assert_eq!(PostgresDb::fuzzy_like_string(" z "), "%z%");
+    assert_eq!(RealDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
+    assert_eq!(RealDb::fuzzy_like_string("x y"), "%x%y%");
+    assert_eq!(RealDb::fuzzy_like_string(" z "), "%z%");
 }

 #[tokio::test(flavor = "multi_thread")]
 async fn test_fuzzy_search_users() {
-    let test_db = TestDb::postgres().await;
+    let test_db = TestDb::real().await;
     let db = test_db.db();
     for (i, github_login) in [
         "California",
@@ -638,7 +638,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_add_contacts() {
     for test_db in [
-        TestDb::postgres().await,
+        TestDb::real().await,
         TestDb::fake(build_background_executor()),
     ] {
         let db = test_db.db();
@@ -805,7 +805,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_invite_codes() {
-    let postgres = TestDb::postgres().await;
+    let postgres = TestDb::real().await;
     let db = postgres.db();
     let NewUserResult { user_id: user1, .. } = db
         .create_user(
@@ -1000,7 +1000,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_signups() {
-    let postgres = TestDb::postgres().await;
+    let postgres = TestDb::real().await;
     let db = postgres.db();

     // people sign up on the waitlist
@@ -1146,7 +1146,7 @@
 #[tokio::test(flavor = "multi_thread")]
 async fn test_metrics_id() {
-    let postgres = TestDb::postgres().await;
+    let postgres = TestDb::real().await;
     let db = postgres.db();
     let NewUserResult {

crates/collab/src/integration_tests.rs

@@ -53,7 +53,6 @@ use std::{
     time::Duration,
 };
 use theme::ThemeRegistry;
-use tokio::runtime::{EnterGuard, Runtime};
 use unindent::Unindent as _;
 use util::post_inc;
 use workspace::{shared_screen::SharedScreen, Item, SplitDirection, ToggleFollow, Workspace};
@@ -80,7 +79,6 @@ async fn test_basic_calls(
     let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
     let start = std::time::Instant::now();
-    eprintln!("test_basic_calls");
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
@@ -6106,7 +6104,7 @@ impl TestServer {
             .enable_time()
             .build()
             .unwrap()
-            .block_on(TestDb::postgres());
+            .block_on(TestDb::real());
         let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
         let live_kit_server = live_kit_client::TestServer::create(
             format!("http://livekit.{}.test", live_kit_server_id),
@@ -6162,7 +6160,7 @@ impl TestServer {
                 },
             )
             .await
-            .unwrap()
+            .expect("creating user failed")
             .user_id
         };
         let client_name = name.to_string();
@@ -6202,7 +6200,11 @@ impl TestServer {
             let (client_conn, server_conn, killed) =
                 Connection::in_memory(cx.background());
             let (connection_id_tx, connection_id_rx) = oneshot::channel();
-            let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
+            let user = db
+                .get_user_by_id(user_id)
+                .await
+                .expect("retrieving user failed")
+                .unwrap();
             cx.background()
                 .spawn(server.handle_connection(
                     server_conn,

crates/collab/src/main.rs

@@ -13,7 +13,7 @@ use crate::rpc::ResultExt as _;
 use anyhow::anyhow;
 use axum::{routing::get, Router};
 use collab::{Error, Result};
-use db::{Db, PostgresDb};
+use db::{Db, RealDb};
 use serde::Deserialize;
 use std::{
     env::args,
@@ -56,7 +56,7 @@ pub struct AppState {
 impl AppState {
     async fn new(config: Config) -> Result<Arc<Self>> {
-        let db = PostgresDb::new(&config.database_url, 5).await?;
+        let db = RealDb::new(&config.database_url, 5).await?;
         let live_kit_client = if let Some(((server, key), secret)) = config
             .live_kit_server
             .as_ref()
@@ -96,7 +96,7 @@ async fn main() -> Result<()> {
         }
         Some("migrate") => {
             let config = envy::from_env::<MigrateConfig>().expect("error loading config");
-            let db = PostgresDb::new(&config.database_url, 5).await?;
+            let db = RealDb::new(&config.database_url, 5).await?;
             let migrations_path = config
                 .migrations_path