Revert "Convert telemetry into a model"

This reverts commit 6e4268a471.
Joseph T. Lyons 2023-11-22 16:16:44 -05:00
parent c86e99928b
commit ee2b6834bd
9 changed files with 199 additions and 208 deletions
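
Taken together, the diffs below swap the gpui Model<Telemetry> introduced in 6e4268a471 back to an Arc<Telemetry> whose mutable fields live behind a Mutex<TelemetryState>, so call sites no longer need a ModelContext or cx.update to record events. A minimal sketch of that shape, using std::sync::Mutex in place of the parking_lot::Mutex the crate imports, with the state trimmed to a few fields:

use std::sync::{Arc, Mutex};

// Simplified stand-in for the reverted Telemetry type: a shared handle with
// interior mutability via a Mutex instead of a gpui Model.
struct Telemetry {
    state: Mutex<TelemetryState>,
}

#[derive(Default)]
struct TelemetryState {
    installation_id: Option<Arc<str>>, // per app installation
    session_id: Option<Arc<str>>,      // per app launch
    is_staff: Option<bool>,
}

impl Telemetry {
    fn new() -> Arc<Self> {
        Arc::new(Self {
            state: Mutex::new(TelemetryState::default()),
        })
    }

    // Methods take &Arc<Self> and lock the state briefly, so callers need
    // neither a context object nor &mut access.
    fn start(self: &Arc<Self>, installation_id: Option<String>, session_id: String) {
        let mut state = self.state.lock().unwrap();
        state.installation_id = installation_id.map(Into::into);
        state.session_id = Some(session_id.into());
    }

    fn installation_id(self: &Arc<Self>) -> Option<Arc<str>> {
        self.state.lock().unwrap().installation_id.clone()
    }
}

fn main() {
    let telemetry = Telemetry::new();
    telemetry.start(Some("abc123".to_string()), "session-1".to_string());
    assert_eq!(telemetry.installation_id().as_deref(), Some("abc123"));
    assert!(telemetry.state.lock().unwrap().session_id.is_some());
    assert!(telemetry.state.lock().unwrap().is_staff.is_none());
}
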


@@ -302,11 +302,7 @@ impl AutoUpdater {
let mut dmg_file = File::create(&dmg_path).await?;
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
let installation_id = cx
.global::<Arc<Client>>()
.telemetry()
.read(cx)
.installation_id();
let installation_id = cx.global::<Arc<Client>>().telemetry().installation_id();
let release_channel = cx
.has_global::<ReleaseChannel>()
.then(|| cx.global::<ReleaseChannel>().display_name());


@@ -482,26 +482,27 @@ pub fn report_call_event_for_room(
let telemetry = client.telemetry();
let telemetry_settings = *TelemetrySettings::get_global(cx);
telemetry.update(cx, |this, cx| {
this.report_call_event(telemetry_settings, operation, Some(room_id), channel_id, cx)
});
telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
}
pub fn report_call_event_for_channel(
operation: &'static str,
channel_id: u64,
client: &Arc<Client>,
cx: &mut AppContext,
cx: &AppContext,
) {
let room = ActiveCall::global(cx).read(cx).room();
let room_id = room.map(|r| r.read(cx).id());
let telemetry = client.telemetry();
let telemetry_settings = *TelemetrySettings::get_global(cx);
telemetry.update(cx, |this, cx| {
this.report_call_event(telemetry_settings, operation, room_id, Some(channel_id), cx)
});
telemetry.report_call_event(
telemetry_settings,
operation,
room.map(|r| r.read(cx).id()),
Some(channel_id),
)
}
#[cfg(test)]
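
With the model wrapper gone, these helpers only read settings and the active room from the context and then call the telemetry method directly, which is why report_call_event_for_channel can take &AppContext instead of &mut AppContext. A rough sketch of that call shape with stand-in types (the settings struct and the printed body are placeholders, not the real implementations):

use std::sync::Arc;

// Stand-ins for the real types; only the call shape is of interest here.
#[derive(Clone, Copy)]
struct TelemetrySettings {
    metrics: bool,
}

struct Telemetry;

impl Telemetry {
    // Mirrors the reverted signature: immutable receiver, no context parameter.
    fn report_call_event(
        self: &Arc<Self>,
        settings: TelemetrySettings,
        operation: &'static str,
        room_id: Option<u64>,
        channel_id: Option<u64>,
    ) {
        if settings.metrics {
            println!("call event: {operation} room={room_id:?} channel={channel_id:?}");
        }
    }
}

// In Zed the room id comes from ActiveCall::global(cx); here it is passed in.
fn report_call_event_for_channel(
    operation: &'static str,
    channel_id: u64,
    telemetry: &Arc<Telemetry>,
    settings: TelemetrySettings,
    room_id: Option<u64>,
) {
    telemetry.report_call_event(settings, operation, room_id, Some(channel_id))
}

fn main() {
    let telemetry = Arc::new(Telemetry);
    let settings = TelemetrySettings { metrics: true };
    report_call_event_for_channel("join channel", 42, &telemetry, settings, None);
}
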


@@ -121,7 +121,7 @@ pub struct Client {
id: AtomicU64,
peer: Arc<Peer>,
http: Arc<dyn HttpClient>,
telemetry: Model<Telemetry>,
telemetry: Arc<Telemetry>,
state: RwLock<ClientState>,
#[allow(clippy::type_complexity)]
@@ -501,12 +501,8 @@ impl Client {
}));
}
Status::SignedOut | Status::UpgradeRequired => {
cx.update(|cx| {
self.telemetry.update(cx, |this, cx| {
this.set_authenticated_user_info(None, false, cx)
})
})
.log_err();
cx.update(|cx| self.telemetry.set_authenticated_user_info(None, false, cx))
.log_err();
state._reconnect_task.take();
}
_ => {}
@@ -1324,7 +1320,7 @@ impl Client {
}
}
pub fn telemetry(&self) -> &Model<Telemetry> {
pub fn telemetry(&self) -> &Arc<Telemetry> {
&self.telemetry
}
}
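
Handing out &Arc<Telemetry> instead of &Model<Telemetry> means a caller can clone the handle and report from a background task without going back through an AppContext. A toy illustration of that property; the record method and events field are hypothetical stand-ins for the real event queue:

use std::sync::{Arc, Mutex};
use std::thread;

struct Telemetry {
    events: Mutex<Vec<String>>, // simplified stand-in for the real event queue
}

struct Client {
    telemetry: Arc<Telemetry>,
}

impl Client {
    // Same accessor shape as the reverted code: hand out the Arc itself.
    fn telemetry(&self) -> &Arc<Telemetry> {
        &self.telemetry
    }
}

impl Telemetry {
    // Hypothetical helper standing in for the report_* methods.
    fn record(self: &Arc<Self>, event: &str) {
        self.events.lock().unwrap().push(event.to_string());
    }
}

fn main() {
    let client = Client {
        telemetry: Arc::new(Telemetry {
            events: Mutex::new(Vec::new()),
        }),
    };

    // Clone the handle and use it from another thread; no context is needed.
    let telemetry = client.telemetry().clone();
    let worker = thread::spawn(move || telemetry.record("background event"));
    worker.join().unwrap();

    client.telemetry().record("foreground event");
    assert_eq!(client.telemetry().events.lock().unwrap().len(), 2);
}
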


@@ -1,12 +1,12 @@
use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
use chrono::{DateTime, Utc};
use futures::Future;
use gpui::{serde_json, AppContext, AppMetadata, Context, Model, ModelContext, Task};
use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
use lazy_static::lazy_static;
use parking_lot::Mutex;
use serde::Serialize;
use settings::Settings;
use std::io::Write;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use std::{env, io::Write, mem, path::PathBuf, sync::Arc, time::Duration};
use sysinfo::{
CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
};
@@ -16,6 +16,11 @@ use util::{channel::ReleaseChannel, TryFutureExt};
pub struct Telemetry {
http_client: Arc<dyn HttpClient>,
executor: BackgroundExecutor,
state: Mutex<TelemetryState>,
}
struct TelemetryState {
metrics_id: Option<Arc<str>>, // Per logged-in user
installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
session_id: Option<Arc<str>>, // Per app launch
@@ -122,7 +127,7 @@ const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);
impl Telemetry {
pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Model<Self> {
pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
let release_channel = if cx.has_global::<ReleaseChannel>() {
Some(cx.global::<ReleaseChannel>().display_name())
} else {
@@ -130,48 +135,57 @@ impl Telemetry {
};
// TODO: Replace all hardware stuff with nested SystemSpecs json
let this = cx.build_model(|cx| Self {
let this = Arc::new(Self {
http_client: client,
app_metadata: cx.app_metadata(),
architecture: env::consts::ARCH,
release_channel,
installation_id: None,
metrics_id: None,
session_id: None,
clickhouse_events_queue: Default::default(),
flush_clickhouse_events_task: Default::default(),
log_file: None,
is_staff: None,
first_event_datetime: None,
executor: cx.background_executor().clone(),
state: Mutex::new(TelemetryState {
app_metadata: cx.app_metadata(),
architecture: env::consts::ARCH,
release_channel,
installation_id: None,
metrics_id: None,
session_id: None,
clickhouse_events_queue: Default::default(),
flush_clickhouse_events_task: Default::default(),
log_file: None,
is_staff: None,
first_event_datetime: None,
}),
});
// We should only ever have one instance of Telemetry, leak the subscription to keep it alive
// rather than store in TelemetryState, complicating spawn as subscriptions are not Send
std::mem::forget(this.update(cx, |_, cx| cx.on_app_quit(Self::shutdown_telemetry)));
std::mem::forget(cx.on_app_quit({
let this = this.clone();
move |cx| this.shutdown_telemetry(cx)
}));
this
}
fn shutdown_telemetry(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
fn shutdown_telemetry(self: &Arc<Self>, cx: &mut AppContext) -> impl Future<Output = ()> {
let telemetry_settings = TelemetrySettings::get_global(cx).clone();
self.report_app_event(telemetry_settings, "close", cx);
self.report_app_event(telemetry_settings, "close");
Task::ready(())
}
pub fn log_file_path(&self) -> Option<PathBuf> {
Some(self.log_file.as_ref()?.path().to_path_buf())
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
}
pub fn start(
&mut self,
self: &Arc<Self>,
installation_id: Option<String>,
session_id: String,
cx: &mut ModelContext<Self>,
cx: &mut AppContext,
) {
self.installation_id = installation_id.map(|id| id.into());
self.session_id = Some(session_id.into());
let mut state = self.state.lock();
state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id.into());
drop(state);
cx.spawn(|this, mut cx| async move {
let this = self.clone();
cx.spawn(|cx| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it
let refresh_kind = RefreshKind::new()
.with_memory() // For memory usage
@@ -207,28 +221,23 @@ impl Telemetry {
break;
};
this.update(&mut cx, |this, cx| {
this.report_memory_event(
telemetry_settings,
process.memory(),
process.virtual_memory(),
cx,
);
this.report_cpu_event(
telemetry_settings,
process.cpu_usage(),
system.cpus().len() as u32,
cx,
);
})
.ok();
this.report_memory_event(
telemetry_settings,
process.memory(),
process.virtual_memory(),
);
this.report_cpu_event(
telemetry_settings,
process.cpu_usage(),
system.cpus().len() as u32,
);
}
})
.detach();
}
pub fn set_authenticated_user_info(
&mut self,
self: &Arc<Self>,
metrics_id: Option<String>,
is_staff: bool,
cx: &AppContext,
@@ -237,20 +246,21 @@ impl Telemetry {
return;
}
let mut state = self.state.lock();
let metrics_id: Option<Arc<str>> = metrics_id.map(|id| id.into());
self.metrics_id = metrics_id.clone();
self.is_staff = Some(is_staff);
state.metrics_id = metrics_id.clone();
state.is_staff = Some(is_staff);
drop(state);
}
pub fn report_editor_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
file_extension: Option<String>,
vim_mode: bool,
operation: &'static str,
copilot_enabled: bool,
copilot_enabled_for_language: bool,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Editor {
file_extension,
@@ -261,16 +271,15 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_copilot_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
suggestion_id: Option<String>,
suggestion_accepted: bool,
file_extension: Option<String>,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Copilot {
suggestion_id,
@@ -279,16 +288,15 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_assistant_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
conversation_id: Option<String>,
kind: AssistantKind,
model: &'static str,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Assistant {
conversation_id,
@@ -297,16 +305,15 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_call_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
room_id: Option<u64>,
channel_id: Option<u64>,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Call {
operation,
@@ -315,15 +322,14 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_cpu_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
usage_as_percentage: f32,
core_count: u32,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Cpu {
usage_as_percentage,
@@ -331,15 +337,14 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
pub fn report_memory_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
memory_in_bytes: u64,
virtual_memory_in_bytes: u64,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::Memory {
memory_in_bytes,
@@ -347,90 +352,94 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, false, cx)
self.report_clickhouse_event(event, telemetry_settings, false)
}
// app_events are called at app open and app close, so flush is set to immediately send
pub fn report_app_event(
&mut self,
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
cx: &ModelContext<Self>,
) {
let event = ClickhouseEvent::App {
operation,
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, true, cx)
self.report_clickhouse_event(event, telemetry_settings, true)
}
fn milliseconds_since_first_event(&mut self) -> i64 {
match self.first_event_datetime {
fn milliseconds_since_first_event(&self) -> i64 {
let mut state = self.state.lock();
match state.first_event_datetime {
Some(first_event_datetime) => {
let now: DateTime<Utc> = Utc::now();
now.timestamp_millis() - first_event_datetime.timestamp_millis()
}
None => {
self.first_event_datetime = Some(Utc::now());
state.first_event_datetime = Some(Utc::now());
0
}
}
}
fn report_clickhouse_event(
&mut self,
self: &Arc<Self>,
event: ClickhouseEvent,
telemetry_settings: TelemetrySettings,
immediate_flush: bool,
cx: &ModelContext<Self>,
) {
if !telemetry_settings.metrics {
return;
}
let signed_in = self.metrics_id.is_some();
self.clickhouse_events_queue
let mut state = self.state.lock();
let signed_in = state.metrics_id.is_some();
state
.clickhouse_events_queue
.push(ClickhouseEventWrapper { signed_in, event });
if self.installation_id.is_some() {
if immediate_flush || self.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
self.flush_clickhouse_events(cx);
if state.installation_id.is_some() {
if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
drop(state);
self.flush_clickhouse_events();
} else {
self.flush_clickhouse_events_task = Some(cx.spawn(|this, mut cx| async move {
smol::Timer::after(DEBOUNCE_INTERVAL).await;
this.update(&mut cx, |this, cx| this.flush_clickhouse_events(cx))
.ok();
let this = self.clone();
let executor = self.executor.clone();
state.flush_clickhouse_events_task = Some(self.executor.spawn(async move {
executor.timer(DEBOUNCE_INTERVAL).await;
this.flush_clickhouse_events();
}));
}
}
}
pub fn metrics_id(&self) -> Option<Arc<str>> {
self.metrics_id.clone()
pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
self.state.lock().metrics_id.clone()
}
pub fn installation_id(&self) -> Option<Arc<str>> {
self.installation_id.clone()
pub fn installation_id(self: &Arc<Self>) -> Option<Arc<str>> {
self.state.lock().installation_id.clone()
}
pub fn is_staff(&self) -> Option<bool> {
self.is_staff
pub fn is_staff(self: &Arc<Self>) -> Option<bool> {
self.state.lock().is_staff
}
fn flush_clickhouse_events(&mut self, cx: &ModelContext<Self>) {
self.first_event_datetime = None;
let mut events = mem::take(&mut self.clickhouse_events_queue);
self.flush_clickhouse_events_task.take();
fn flush_clickhouse_events(self: &Arc<Self>) {
let mut state = self.state.lock();
state.first_event_datetime = None;
let mut events = mem::take(&mut state.clickhouse_events_queue);
state.flush_clickhouse_events_task.take();
drop(state);
let http_client = self.http_client.clone();
let this = self.clone();
self.executor
.spawn(
async move {
let mut json_bytes = Vec::new();
cx.spawn(|this, mut cx| {
async move {
let mut json_bytes = Vec::new();
this.update(&mut cx, |this, _| {
if let Some(file) = &mut this.log_file {
if let Some(file) = &mut this.state.lock().log_file {
let file = file.as_file_mut();
for event in &mut events {
json_bytes.clear();
@@ -440,43 +449,39 @@ impl Telemetry {
}
}
std::io::Result::Ok(())
})??;
{
let state = this.state.lock();
let request_body = ClickhouseEventRequestBody {
token: ZED_SECRET_CLIENT_TOKEN,
installation_id: state.installation_id.clone(),
session_id: state.session_id.clone(),
is_staff: state.is_staff.clone(),
app_version: state
.app_metadata
.app_version
.map(|version| version.to_string()),
os_name: state.app_metadata.os_name,
os_version: state
.app_metadata
.os_version
.map(|version| version.to_string()),
architecture: state.architecture,
if let Ok(Ok(json_bytes)) = this.update(&mut cx, |this, _| {
let request_body = ClickhouseEventRequestBody {
token: ZED_SECRET_CLIENT_TOKEN,
installation_id: this.installation_id.clone(),
session_id: this.session_id.clone(),
is_staff: this.is_staff.clone(),
app_version: this
.app_metadata
.app_version
.map(|version| version.to_string()),
os_name: this.app_metadata.os_name,
os_version: this
.app_metadata
.os_version
.map(|version| version.to_string()),
architecture: this.architecture,
release_channel: state.release_channel,
events,
};
dbg!(&request_body);
json_bytes.clear();
serde_json::to_writer(&mut json_bytes, &request_body)?;
}
release_channel: this.release_channel,
events,
};
json_bytes.clear();
serde_json::to_writer(&mut json_bytes, &request_body)?;
std::io::Result::Ok(json_bytes)
}) {
http_client
this.http_client
.post_json(CLICKHOUSE_EVENTS_URL.as_str(), json_bytes.into())
.await?;
anyhow::Ok(())
}
anyhow::Ok(())
}
.log_err()
})
.detach();
.log_err(),
)
.detach();
}
}
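
The queueing logic restored above debounces uploads: events accumulate under the state lock, and a flush happens immediately for app events or a full queue, otherwise after DEBOUNCE_INTERVAL on a background task. A thread-based sketch of that pattern under simplified assumptions: plain threads stand in for gpui's BackgroundExecutor, MAX_QUEUE_LEN is a made-up value (the diff references the constant but does not show it), the interval is shortened so the example finishes quickly, and the upload is a println!:

use std::mem;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;

const MAX_QUEUE_LEN: usize = 10; // assumed value; not shown in the diff
const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); // real constant is on the order of seconds

struct Telemetry {
    state: Mutex<State>,
}

#[derive(Default)]
struct State {
    queue: Vec<String>,
    flush_scheduled: bool, // stands in for the stored flush task handle
}

impl Telemetry {
    fn report_event(self: &Arc<Self>, event: String, immediate_flush: bool) {
        let mut state = self.state.lock().unwrap();
        state.queue.push(event);

        if immediate_flush || state.queue.len() >= MAX_QUEUE_LEN {
            drop(state); // release the lock before flushing, as the reverted code does
            self.flush();
        } else if !state.flush_scheduled {
            // Simplification: schedule at most one pending flush instead of
            // replacing the debounce task on every event.
            state.flush_scheduled = true;
            let this = self.clone();
            drop(state);
            thread::spawn(move || {
                thread::sleep(DEBOUNCE_INTERVAL);
                this.flush();
            });
        }
    }

    fn flush(self: &Arc<Self>) {
        let mut state = self.state.lock().unwrap();
        let events = mem::take(&mut state.queue);
        state.flush_scheduled = false;
        drop(state);
        if !events.is_empty() {
            // The real code serializes the batch and POSTs it to the events endpoint.
            println!("uploading {} event(s)", events.len());
        }
    }
}

fn main() {
    let telemetry = Arc::new(Telemetry {
        state: Mutex::new(State::default()),
    });
    telemetry.report_event("open".to_string(), true); // app events flush immediately
    telemetry.report_event("editor event".to_string(), false); // waits for the debounce
    thread::sleep(Duration::from_millis(100)); // give the debounce thread time to run
}
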


@@ -168,13 +168,11 @@ impl UserStore {
cx.update(|cx| {
if let Some(info) = info {
cx.update_flags(info.staff, info.flags);
client.telemetry.update(cx, |this, cx| {
this.set_authenticated_user_info(
Some(info.metrics_id.clone()),
info.staff,
cx,
)
})
client.telemetry.set_authenticated_user_info(
Some(info.metrics_id.clone()),
info.staff,
cx,
)
}
})?;


@@ -8954,7 +8954,7 @@ impl Editor {
&self,
suggestion_id: Option<String>,
suggestion_accepted: bool,
cx: &mut AppContext,
cx: &AppContext,
) {
let Some(project) = &self.project else { return };
@@ -8971,15 +8971,12 @@ impl Editor {
let telemetry = project.read(cx).client().telemetry().clone();
let telemetry_settings = *TelemetrySettings::get_global(cx);
telemetry.update(cx, |this, cx| {
this.report_copilot_event(
telemetry_settings,
suggestion_id,
suggestion_accepted,
file_extension,
cx,
)
});
telemetry.report_copilot_event(
telemetry_settings,
suggestion_id,
suggestion_accepted,
file_extension,
)
}
#[cfg(any(test, feature = "test-support"))]
@@ -8987,7 +8984,7 @@ impl Editor {
&self,
_operation: &'static str,
_file_extension: Option<String>,
_cx: &mut AppContext,
_cx: &AppContext,
) {
}
@@ -8996,7 +8993,7 @@ impl Editor {
&self,
operation: &'static str,
file_extension: Option<String>,
cx: &mut AppContext,
cx: &AppContext,
) {
let Some(project) = &self.project else { return };
@@ -9026,17 +9023,14 @@ impl Editor {
.show_copilot_suggestions;
let telemetry = project.read(cx).client().telemetry().clone();
telemetry.update(cx, |this, cx| {
this.report_editor_event(
telemetry_settings,
file_extension,
vim_mode,
operation,
copilot_enabled,
copilot_enabled_for_language,
cx,
)
});
telemetry.report_editor_event(
telemetry_settings,
file_extension,
vim_mode,
operation,
copilot_enabled,
copilot_enabled_for_language,
)
}
/// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines,


@@ -10,6 +10,7 @@ pub use entity_map::*;
pub use model_context::*;
use refineable::Refineable;
use smallvec::SmallVec;
use smol::future::FutureExt;
#[cfg(any(test, feature = "test-support"))]
pub use test_context::*;
@@ -984,21 +985,21 @@ impl AppContext {
self.actions.all_action_names()
}
// pub fn on_app_quit<Fut>(
// &mut self,
// mut on_quit: impl FnMut(&mut AppContext) -> Fut + 'static,
// ) -> Subscription
// where
// Fut: 'static + Future<Output = ()>,
// {
// self.quit_observers.insert(
// (),
// Box::new(move |cx| {
// let future = on_quit(cx);
// async move { future.await }.boxed_local()
// }),
// )
// }
pub fn on_app_quit<Fut>(
&mut self,
mut on_quit: impl FnMut(&mut AppContext) -> Fut + 'static,
) -> Subscription
where
Fut: 'static + Future<Output = ()>,
{
self.quit_observers.insert(
(),
Box::new(move |cx| {
let future = on_quit(cx);
async move { future.await }.boxed_local()
}),
)
}
}
impl Context for AppContext {
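
The hunk above restores on_app_quit, which Telemetry::new uses to flush a final "close" event: quit observers are boxed closures returning futures that the app awaits at shutdown. A cut-down sketch of that observer shape, assuming the futures crate's block_on in place of gpui's executor and dropping the Subscription return value and context parameter:

use std::future::Future;
use std::pin::Pin;

type QuitObserver = Box<dyn FnMut() -> Pin<Box<dyn Future<Output = ()>>>>;

#[derive(Default)]
struct App {
    quit_observers: Vec<QuitObserver>,
}

impl App {
    // Register a callback whose future is awaited on quit, mirroring the
    // shape of AppContext::on_app_quit above (minus the Subscription and cx).
    fn on_app_quit<Fut>(&mut self, mut on_quit: impl FnMut() -> Fut + 'static)
    where
        Fut: Future<Output = ()> + 'static,
    {
        self.quit_observers.push(Box::new(move || {
            let future = on_quit();
            let boxed: Pin<Box<dyn Future<Output = ()>>> = Box::pin(async move { future.await });
            boxed
        }));
    }

    fn quit(&mut self) {
        for observer in &mut self.quit_observers {
            // gpui awaits these on its own executor; block_on is enough for a sketch.
            futures::executor::block_on(observer());
        }
    }
}

fn main() {
    let mut app = App::default();
    app.on_app_quit(|| async { println!("flushing telemetry before exit") });
    app.quit();
}
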


@@ -176,15 +176,15 @@ fn main() {
// })
// .detach();
client.telemetry().update(cx, |this, cx| {
this.start(installation_id, session_id, cx);
let telemetry_settings = *client::TelemetrySettings::get_global(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
this.report_app_event(telemetry_settings, event_operation, cx);
});
client.telemetry().start(installation_id, session_id, cx);
let telemetry_settings = *client::TelemetrySettings::get_global(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
client
.telemetry()
.report_app_event(telemetry_settings, event_operation);
let app_state = Arc::new(AppState {
languages,
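
The start-up path now reports "first open" or "open" with a plain method call; the choice hinges on existing_installation_id_found, where Some(false) appears to mean no id was already on disk. A small sketch of just that decision (the helper function name is invented for illustration):

fn app_open_operation(existing_installation_id_found: Option<bool>) -> &'static str {
    match existing_installation_id_found {
        Some(false) => "first open",
        _ => "open",
    }
}

fn main() {
    assert_eq!(app_open_operation(Some(false)), "first open");
    assert_eq!(app_open_operation(Some(true)), "open");
    assert_eq!(app_open_operation(None), "open");
}
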


@@ -10,8 +10,8 @@ pub use assets::*;
use collections::VecDeque;
use editor::{Editor, MultiBuffer};
use gpui::{
actions, point, px, AppContext, AsyncAppContext, Context, FocusableView, PromptLevel,
TitlebarOptions, ViewContext, VisualContext, WindowBounds, WindowKind, WindowOptions,
actions, point, px, AppContext, Context, FocusableView, PromptLevel, TitlebarOptions,
ViewContext, VisualContext, WindowBounds, WindowKind, WindowOptions,
};
pub use only_instance::*;
pub use open_listener::*;
@@ -628,12 +628,12 @@ fn open_telemetry_log_file(workspace: &mut Workspace, cx: &mut ViewContext<Works
workspace.with_local_workspace(cx, move |workspace, cx| {
let app_state = workspace.app_state().clone();
cx.spawn(|workspace, mut cx| async move {
async fn fetch_log_string(app_state: &Arc<AppState>, cx: &AsyncAppContext) -> Option<String> {
let path = cx.update(|cx| app_state.client.telemetry().read(cx).log_file_path()).ok()??;
async fn fetch_log_string(app_state: &Arc<AppState>) -> Option<String> {
let path = app_state.client.telemetry().log_file_path()?;
app_state.fs.load(&path).await.log_err()
}
let log = fetch_log_string(&app_state, &cx).await.unwrap_or_else(|| "// No data has been collected yet".to_string());
let log = fetch_log_string(&app_state).await.unwrap_or_else(|| "// No data has been collected yet".to_string());
const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024;
let mut start_offset = log.len().saturating_sub(MAX_TELEMETRY_LOG_LEN);
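
The tail of this function keeps roughly the last MAX_TELEMETRY_LOG_LEN bytes of the log, with saturating_sub handling the short-log case. A sketch of that truncation; the advance-to-next-newline step is an assumption about the elided remainder of the function, added so the slice starts on a whole entry (and therefore on a valid UTF-8 boundary):

const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024;

// Keep roughly the last `max_len` bytes of the log, trimmed to a line boundary.
fn truncated_log(log: &str, max_len: usize) -> &str {
    let start_offset = log.len().saturating_sub(max_len);
    if start_offset == 0 {
        return log;
    }
    match log.as_bytes()[start_offset..].iter().position(|&b| b == b'\n') {
        Some(newline) => &log[start_offset + newline + 1..],
        None => "", // the tail is a single partial line; drop it in this sketch
    }
}

fn main() {
    let log = "{\"event\":\"open\"}\n{\"event\":\"memory\"}\n{\"event\":\"close\"}\n";
    // Short logs pass through untouched because saturating_sub clamps to zero.
    assert_eq!(truncated_log(log, MAX_TELEMETRY_LOG_LEN), log);
    // A tight limit drops the oldest, partially covered entry.
    assert_eq!(truncated_log(log, 20), "{\"event\":\"close\"}\n");
}
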