Add app events (#3372)

Adds app events (`first open` and `open`). For the time being, I'm
abandoning the attempt to add `close`, after running into a number of
issues. The code is in place for me to pick that work back up, but right
now we require having the telemetry settings in hand when calling any of
the methods that log an event, so we can honor the user's preference for
sending telemetry or not. When the `on_app_quit` handler runs to send off
an app `close` event, the settings are no longer available (probably due
to teardown order?), which causes some tests to fail. I'm not sure how to
solve this yet. Maybe we keep a copy of the settings on the telemetry
struct, update it each time any event is logged, and then use that stored
copy when logging the app `close` event on shutdown.
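
As a rough sketch of that last idea (using stand-in types rather than Zed's real `Telemetry` / `TelemetrySettings` / `TelemetryState`, so the names and fields here are placeholders), caching the settings on the telemetry struct could look something like this:

```rust
use std::sync::{Arc, Mutex};

// Stand-in types, not Zed's real ones: the actual structs live in
// crates/client and carry many more fields.
#[derive(Clone, Copy, Default)]
struct TelemetrySettings {
    metrics: bool,
}

#[derive(Default)]
struct TelemetryState {
    // Cached copy of the most recently seen settings.
    settings: TelemetrySettings,
}

struct Telemetry {
    state: Mutex<TelemetryState>,
}

impl Telemetry {
    // Every event-reporting method refreshes the cached settings, so the
    // cache is as fresh as the last event that was logged.
    fn report_app_event(self: &Arc<Self>, settings: TelemetrySettings, operation: &'static str) {
        self.state.lock().unwrap().settings = settings;
        if settings.metrics {
            println!("logging app event: {operation}");
        }
    }

    // At shutdown the global settings may already be torn down, so fall
    // back to the cached copy instead of reading the app context.
    fn shutdown_telemetry(self: &Arc<Self>) {
        let settings = self.state.lock().unwrap().settings;
        if settings.metrics {
            println!("logging app event: close");
        }
    }
}

fn main() {
    let telemetry = Arc::new(Telemetry {
        state: Mutex::new(TelemetryState::default()),
    });
    telemetry.report_app_event(TelemetrySettings { metrics: true }, "open");
    telemetry.shutdown_telemetry();
}
```

The tradeoff would be that the `close` event honors whatever settings were last cached, which could be slightly stale if the user changed their telemetry preference right before quitting.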

Release Notes:

- N/A
Joseph T. Lyons 2023-11-22 23:16:28 -05:00 committed by GitHub
commit f33c0e80a6
9 changed files with 120 additions and 40 deletions

View file

@ -469,7 +469,7 @@ impl ActiveCall {
&self.pending_invites
}
pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
if let Some(room) = self.room() {
let room = room.read(cx);
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
@ -482,7 +482,7 @@ pub fn report_call_event_for_room(
room_id: u64,
channel_id: Option<u64>,
client: &Arc<Client>,
cx: &AppContext,
cx: &mut AppContext,
) {
let telemetry = client.telemetry();
let telemetry_settings = *TelemetrySettings::get_global(cx);

View file

@ -109,6 +109,10 @@ pub enum ClickhouseEvent {
virtual_memory_in_bytes: u64,
milliseconds_since_first_event: i64,
},
App {
operation: &'static str,
milliseconds_since_first_event: i64,
},
}
#[cfg(debug_assertions)]
@ -168,13 +172,8 @@ impl Telemetry {
let mut state = self.state.lock();
state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id.into());
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
drop(state);
if has_clickhouse_events {
self.flush_clickhouse_events();
}
let this = self.clone();
cx.spawn(|mut cx| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it
@ -256,7 +255,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_copilot_event(
@ -273,7 +272,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_assistant_event(
@ -290,7 +289,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_call_event(
@ -307,7 +306,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_cpu_event(
@ -322,7 +321,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_memory_event(
@ -337,7 +336,21 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
// App events are only reported at app open and app close, so we flush immediately to ensure they are sent
pub fn report_app_event(
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
) {
let event = ClickhouseEvent::App {
operation,
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, true)
}
fn milliseconds_since_first_event(&self) -> i64 {
@ -358,6 +371,7 @@ impl Telemetry {
self: &Arc<Self>,
event: ClickhouseEvent,
telemetry_settings: TelemetrySettings,
immediate_flush: bool,
) {
if !telemetry_settings.metrics {
return;
@ -370,7 +384,7 @@ impl Telemetry {
.push(ClickhouseEventWrapper { signed_in, event });
if state.installation_id.is_some() {
if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
drop(state);
self.flush_clickhouse_events();
} else {

View file

@ -382,7 +382,7 @@ impl settings::Settings for TelemetrySettings {
}
impl Client {
pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
pub fn new(http: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
Arc::new(Self {
id: AtomicU64::new(0),
peer: Peer::new(0),

View file

@ -107,6 +107,10 @@ pub enum ClickhouseEvent {
virtual_memory_in_bytes: u64,
milliseconds_since_first_event: i64,
},
App {
operation: &'static str,
milliseconds_since_first_event: i64,
},
}
#[cfg(debug_assertions)]
@ -122,12 +126,13 @@ const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);
impl Telemetry {
pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
let release_channel = if cx.has_global::<ReleaseChannel>() {
Some(cx.global::<ReleaseChannel>().display_name())
} else {
None
};
// TODO: Replace all hardware stuff with nested SystemSpecs json
let this = Arc::new(Self {
http_client: client,
@ -147,9 +152,22 @@ impl Telemetry {
}),
});
// We should only ever have one instance of Telemetry, so leak the subscription to keep it alive
// rather than storing it in TelemetryState, which would complicate spawn since subscriptions are not Send
// std::mem::forget(cx.on_app_quit({
// let this = this.clone();
// move |cx| this.shutdown_telemetry(cx)
// }));
this
}
// fn shutdown_telemetry(self: &Arc<Self>, cx: &mut AppContext) -> impl Future<Output = ()> {
// let telemetry_settings = TelemetrySettings::get_global(cx).clone();
// self.report_app_event(telemetry_settings, "close");
// Task::ready(())
// }
pub fn log_file_path(&self) -> Option<PathBuf> {
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
}
@ -163,13 +181,8 @@ impl Telemetry {
let mut state = self.state.lock();
state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id.into());
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
drop(state);
if has_clickhouse_events {
self.flush_clickhouse_events();
}
let this = self.clone();
cx.spawn(|cx| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it
@ -257,7 +270,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_copilot_event(
@ -274,7 +287,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_assistant_event(
@ -291,7 +304,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_call_event(
@ -308,7 +321,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_cpu_event(
@ -323,7 +336,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_memory_event(
@ -338,7 +351,21 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings)
self.report_clickhouse_event(event, telemetry_settings, false)
}
// App events are only reported at app open and app close, so we flush immediately to ensure they are sent
pub fn report_app_event(
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
) {
let event = ClickhouseEvent::App {
operation,
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, true)
}
fn milliseconds_since_first_event(&self) -> i64 {
@ -359,6 +386,7 @@ impl Telemetry {
self: &Arc<Self>,
event: ClickhouseEvent,
telemetry_settings: TelemetrySettings,
immediate_flush: bool,
) {
if !telemetry_settings.metrics {
return;
@ -371,7 +399,7 @@ impl Telemetry {
.push(ClickhouseEventWrapper { signed_in, event });
if state.installation_id.is_some() {
if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
drop(state);
self.flush_clickhouse_events();
} else {

View file

@ -149,7 +149,7 @@ impl TestServer {
.user_id
};
let client_name = name.to_string();
let mut client = cx.read(|cx| Client::new(http.clone(), cx));
let mut client = cx.update(|cx| Client::new(http.clone(), cx));
let server = self.server.clone();
let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone();

View file

@ -10,6 +10,7 @@ pub use entity_map::*;
pub use model_context::*;
use refineable::Refineable;
use smallvec::SmallVec;
use smol::future::FutureExt;
#[cfg(any(test, feature = "test-support"))]
pub use test_context::*;
@ -983,6 +984,22 @@ impl AppContext {
pub fn all_action_names(&self) -> &[SharedString] {
self.actions.all_action_names()
}
pub fn on_app_quit<Fut>(
&mut self,
mut on_quit: impl FnMut(&mut AppContext) -> Fut + 'static,
) -> Subscription
where
Fut: 'static + Future<Output = ()>,
{
self.quit_observers.insert(
(),
Box::new(move |cx| {
let future = on_quit(cx);
async move { future.await }.boxed_local()
}),
)
}
}
impl Context for AppContext {

View file

@ -1056,7 +1056,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let client_fake = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_fake = FakeFs::new(cx.background_executor.clone());
fs_fake
@ -1096,7 +1096,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
});
let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let client_real = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_real = Arc::new(RealFs);
let temp_root = temp_tree(json!({
@ -2181,7 +2181,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
let http_client = FakeHttpClient::with_404_response();
cx.read(|cx| Client::new(http_client, cx))
cx.update(|cx| Client::new(http_client, cx))
}
#[track_caller]

View file

@ -65,7 +65,8 @@ fn main() {
log::info!("========== starting zed ==========");
let mut app = gpui::App::new(Assets).unwrap();
let installation_id = app.background().block(installation_id()).ok();
let (installation_id, existing_installation_id_found) =
app.background().block(installation_id()).ok().unzip();
let session_id = Uuid::new_v4().to_string();
init_panic_hook(&app, installation_id.clone(), session_id.clone());
@ -166,6 +167,14 @@ fn main() {
.detach();
client.telemetry().start(installation_id, session_id, cx);
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
client
.telemetry()
.report_app_event(telemetry_settings, event_operation);
let app_state = Arc::new(AppState {
languages,
@ -317,11 +326,11 @@ async fn authenticate(client: Arc<Client>, cx: &AsyncAppContext) -> Result<()> {
Ok::<_, anyhow::Error>(())
}
async fn installation_id() -> Result<String> {
async fn installation_id() -> Result<(String, bool)> {
let legacy_key_name = "device_id";
if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) {
Ok(installation_id)
Ok((installation_id, true))
} else {
let installation_id = Uuid::new_v4().to_string();
@ -329,7 +338,7 @@ async fn installation_id() -> Result<String> {
.write_kvp(legacy_key_name.to_string(), installation_id.clone())
.await?;
Ok(installation_id)
Ok((installation_id, false))
}
}

View file

@ -71,7 +71,11 @@ fn main() {
log::info!("========== starting zed ==========");
let app = App::production(Arc::new(Assets));
let installation_id = app.background_executor().block(installation_id()).ok();
let (installation_id, existing_installation_id_found) = app
.background_executor()
.block(installation_id())
.ok()
.unzip();
let session_id = Uuid::new_v4().to_string();
init_panic_hook(&app, installation_id.clone(), session_id.clone());
@ -173,6 +177,14 @@ fn main() {
// .detach();
client.telemetry().start(installation_id, session_id, cx);
let telemetry_settings = *client::TelemetrySettings::get_global(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
client
.telemetry()
.report_app_event(telemetry_settings, event_operation);
let app_state = Arc::new(AppState {
languages,
@ -334,11 +346,11 @@ fn main() {
// Ok::<_, anyhow::Error>(())
// }
async fn installation_id() -> Result<String> {
async fn installation_id() -> Result<(String, bool)> {
let legacy_key_name = "device_id";
if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) {
Ok(installation_id)
Ok((installation_id, true))
} else {
let installation_id = Uuid::new_v4().to_string();
@ -346,7 +358,7 @@ async fn installation_id() -> Result<String> {
.write_kvp(legacy_key_name.to_string(), installation_id.clone())
.await?;
Ok(installation_id)
Ok((installation_id, false))
}
}