From 010d43b17fcf41df256d954a4c5c3772d0c200fc Mon Sep 17 00:00:00 2001
From: "Joseph T. Lyons"
Date: Wed, 22 Nov 2023 23:16:28 -0500
Subject: [PATCH] Add app events (#3372)

Adds app events (`first open` and `open`). For the time being, I'm
abandoning the attempt to add `close` after running into many issues with
it. The code is in place for me to continue that work, but at the moment we
require having the telemetry settings in hand when calling any of the
methods that log an event, so we can honor the user's preference for
sending telemetry or not. When the `on_app_close` method runs to send off
an app `close` event, the settings are no longer available (probably
because of teardown order), which causes some tests to fail. I'm not sure
how to solve this yet. One option is to keep the settings on the telemetry
struct and update them each time an event is logged; then, on app shutdown,
the app `close` event could use that stored copy (a rough sketch of this
idea follows the diff below).

Release Notes:

- N/A
---
 crates/call2/src/call2.rs               |  4 +-
 crates/client/src/telemetry.rs          | 38 +++++++++++------
 crates/client2/src/client2.rs           |  2 +-
 crates/client2/src/telemetry.rs         | 54 +++++++++++++++++++------
 crates/collab2/src/tests/test_server.rs |  2 +-
 crates/gpui2/src/app.rs                 | 17 ++++++++
 crates/project2/src/worktree_tests.rs   |  6 +--
 crates/zed/src/main.rs                  | 17 ++++++--
 crates/zed2/src/main.rs                 | 20 +++++++--
 9 files changed, 120 insertions(+), 40 deletions(-)

diff --git a/crates/call2/src/call2.rs b/crates/call2/src/call2.rs
index 1f11e0650ddf3808adce0997afbce58cdc389819..14cb28c32d6b932c8db9f2fa54b5c0125bbf8011 100644
--- a/crates/call2/src/call2.rs
+++ b/crates/call2/src/call2.rs
@@ -464,7 +464,7 @@ impl ActiveCall {
         &self.pending_invites
     }

-    pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
+    pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
         if let Some(room) = self.room() {
             let room = room.read(cx);
             report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
@@ -477,7 +477,7 @@ pub fn report_call_event_for_room(
     room_id: u64,
     channel_id: Option,
     client: &Arc,
-    cx: &AppContext,
+    cx: &mut AppContext,
 ) {
     let telemetry = client.telemetry();
     let telemetry_settings = *TelemetrySettings::get_global(cx);
diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs
index 8f7fbeb83d3e974aadcbc64dbee0f3d5482242cd..a3e7449cf8a4be5a25b4f7eaaa4f3d2379d70d68 100644
--- a/crates/client/src/telemetry.rs
+++ b/crates/client/src/telemetry.rs
@@ -109,6 +109,10 @@ pub enum ClickhouseEvent {
         virtual_memory_in_bytes: u64,
         milliseconds_since_first_event: i64,
     },
+    App {
+        operation: &'static str,
+        milliseconds_since_first_event: i64,
+    },
 }

 #[cfg(debug_assertions)]
@@ -168,13 +172,8 @@ impl Telemetry {
         let mut state = self.state.lock();
         state.installation_id = installation_id.map(|id| id.into());
         state.session_id = Some(session_id.into());
-        let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
         drop(state);

-        if has_clickhouse_events {
-            self.flush_clickhouse_events();
-        }
-
         let this = self.clone();
         cx.spawn(|mut cx| async move {
             // Avoiding calling `System::new_all()`, as there have been crashes related to it
@@ -256,7 +255,7 @@ impl Telemetry {
             milliseconds_since_first_event: self.milliseconds_since_first_event(),
         };

-        self.report_clickhouse_event(event, telemetry_settings)
+        self.report_clickhouse_event(event, telemetry_settings, false)
     }

     pub fn report_copilot_event(
@@ -273,7 +272,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_assistant_event( @@ -290,7 +289,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_call_event( @@ -307,7 +306,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_cpu_event( @@ -322,7 +321,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_memory_event( @@ -337,7 +336,21 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) + } + + // app_events are called at app open and app close, so flush is set to immediately send + pub fn report_app_event( + self: &Arc, + telemetry_settings: TelemetrySettings, + operation: &'static str, + ) { + let event = ClickhouseEvent::App { + operation, + milliseconds_since_first_event: self.milliseconds_since_first_event(), + }; + + self.report_clickhouse_event(event, telemetry_settings, true) } fn milliseconds_since_first_event(&self) -> i64 { @@ -358,6 +371,7 @@ impl Telemetry { self: &Arc, event: ClickhouseEvent, telemetry_settings: TelemetrySettings, + immediate_flush: bool, ) { if !telemetry_settings.metrics { return; @@ -370,7 +384,7 @@ impl Telemetry { .push(ClickhouseEventWrapper { signed_in, event }); if state.installation_id.is_some() { - if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { + if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { drop(state); self.flush_clickhouse_events(); } else { diff --git a/crates/client2/src/client2.rs b/crates/client2/src/client2.rs index b4279b023ecd7412d8ea0c4a69ddc0215be97fb2..4ad354f2f91bd56cdb0c1f657137d1beac9a3e4f 100644 --- a/crates/client2/src/client2.rs +++ b/crates/client2/src/client2.rs @@ -382,7 +382,7 @@ impl settings::Settings for TelemetrySettings { } impl Client { - pub fn new(http: Arc, cx: &AppContext) -> Arc { + pub fn new(http: Arc, cx: &mut AppContext) -> Arc { Arc::new(Self { id: AtomicU64::new(0), peer: Peer::new(0), diff --git a/crates/client2/src/telemetry.rs b/crates/client2/src/telemetry.rs index 9c88d1102c255d6892212e6e28d5346e5b55b3fd..37651ebcfbd5d22e8e7c372e894e820f88d7004c 100644 --- a/crates/client2/src/telemetry.rs +++ b/crates/client2/src/telemetry.rs @@ -107,6 +107,10 @@ pub enum ClickhouseEvent { virtual_memory_in_bytes: u64, milliseconds_since_first_event: i64, }, + App { + operation: &'static str, + milliseconds_since_first_event: i64, + }, } #[cfg(debug_assertions)] @@ -122,12 +126,13 @@ const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1); const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); impl Telemetry { - pub fn new(client: Arc, cx: &AppContext) -> Arc { + pub fn new(client: Arc, cx: &mut AppContext) -> Arc { let release_channel = if cx.has_global::() { Some(cx.global::().display_name()) } else { None }; + // TODO: 
Replace all hardware stuff with nested SystemSpecs json let this = Arc::new(Self { http_client: client, @@ -147,9 +152,22 @@ impl Telemetry { }), }); + // We should only ever have one instance of Telemetry, leak the subscription to keep it alive + // rather than store in TelemetryState, complicating spawn as subscriptions are not Send + // std::mem::forget(cx.on_app_quit({ + // let this = this.clone(); + // move |cx| this.shutdown_telemetry(cx) + // })); + this } + // fn shutdown_telemetry(self: &Arc, cx: &mut AppContext) -> impl Future { + // let telemetry_settings = TelemetrySettings::get_global(cx).clone(); + // self.report_app_event(telemetry_settings, "close"); + // Task::ready(()) + // } + pub fn log_file_path(&self) -> Option { Some(self.state.lock().log_file.as_ref()?.path().to_path_buf()) } @@ -163,13 +181,8 @@ impl Telemetry { let mut state = self.state.lock(); state.installation_id = installation_id.map(|id| id.into()); state.session_id = Some(session_id.into()); - let has_clickhouse_events = !state.clickhouse_events_queue.is_empty(); drop(state); - if has_clickhouse_events { - self.flush_clickhouse_events(); - } - let this = self.clone(); cx.spawn(|cx| async move { // Avoiding calling `System::new_all()`, as there have been crashes related to it @@ -257,7 +270,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_copilot_event( @@ -274,7 +287,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_assistant_event( @@ -291,7 +304,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_call_event( @@ -308,7 +321,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_cpu_event( @@ -323,7 +336,7 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) } pub fn report_memory_event( @@ -338,7 +351,21 @@ impl Telemetry { milliseconds_since_first_event: self.milliseconds_since_first_event(), }; - self.report_clickhouse_event(event, telemetry_settings) + self.report_clickhouse_event(event, telemetry_settings, false) + } + + // app_events are called at app open and app close, so flush is set to immediately send + pub fn report_app_event( + self: &Arc, + telemetry_settings: TelemetrySettings, + operation: &'static str, + ) { + let event = ClickhouseEvent::App { + operation, + milliseconds_since_first_event: self.milliseconds_since_first_event(), + }; + + self.report_clickhouse_event(event, telemetry_settings, true) } fn milliseconds_since_first_event(&self) -> i64 { @@ -359,6 +386,7 @@ impl Telemetry { self: &Arc, event: ClickhouseEvent, telemetry_settings: TelemetrySettings, + immediate_flush: bool, ) { if !telemetry_settings.metrics { return; @@ -371,7 +399,7 @@ impl Telemetry { .push(ClickhouseEventWrapper { signed_in, 
event }); if state.installation_id.is_some() { - if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { + if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { drop(state); self.flush_clickhouse_events(); } else { diff --git a/crates/collab2/src/tests/test_server.rs b/crates/collab2/src/tests/test_server.rs index 090a32d4caef33386da38f0b5cd60f83a6db5afc..6bb57e11ab1d582031930f34b8bfe67b96a2581e 100644 --- a/crates/collab2/src/tests/test_server.rs +++ b/crates/collab2/src/tests/test_server.rs @@ -149,7 +149,7 @@ impl TestServer { .user_id }; let client_name = name.to_string(); - let mut client = cx.read(|cx| Client::new(http.clone(), cx)); + let mut client = cx.update(|cx| Client::new(http.clone(), cx)); let server = self.server.clone(); let db = self.app_state.db.clone(); let connection_killers = self.connection_killers.clone(); diff --git a/crates/gpui2/src/app.rs b/crates/gpui2/src/app.rs index f64512970656909cf3f1a003a9b04acd3cc58eda..617c0b5600742a07029863468b785376c1d53224 100644 --- a/crates/gpui2/src/app.rs +++ b/crates/gpui2/src/app.rs @@ -10,6 +10,7 @@ pub use entity_map::*; pub use model_context::*; use refineable::Refineable; use smallvec::SmallVec; +use smol::future::FutureExt; #[cfg(any(test, feature = "test-support"))] pub use test_context::*; @@ -983,6 +984,22 @@ impl AppContext { pub fn all_action_names(&self) -> &[SharedString] { self.actions.all_action_names() } + + pub fn on_app_quit( + &mut self, + mut on_quit: impl FnMut(&mut AppContext) -> Fut + 'static, + ) -> Subscription + where + Fut: 'static + Future, + { + self.quit_observers.insert( + (), + Box::new(move |cx| { + let future = on_quit(cx); + async move { future.await }.boxed_local() + }), + ) + } } impl Context for AppContext { diff --git a/crates/project2/src/worktree_tests.rs b/crates/project2/src/worktree_tests.rs index df7307f694cbead126690e6fa270023ff4847926..a77f5396e1503b662c5c1c8a6a0d223ba80fc240 100644 --- a/crates/project2/src/worktree_tests.rs +++ b/crates/project2/src/worktree_tests.rs @@ -1056,7 +1056,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); - let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + let client_fake = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let fs_fake = FakeFs::new(cx.background_executor.clone()); fs_fake @@ -1096,7 +1096,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); }); - let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + let client_real = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let fs_real = Arc::new(RealFs); let temp_root = temp_tree(json!({ @@ -2181,7 +2181,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { fn build_client(cx: &mut TestAppContext) -> Arc { let http_client = FakeHttpClient::with_404_response(); - cx.read(|cx| Client::new(http_client, cx)) + cx.update(|cx| Client::new(http_client, cx)) } #[track_caller] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 5f2a7c525e0aea471583af9e361ad2063397d4fc..20b93ae6bb4f3ddf0368748fd949ddb5c6b83e63 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -65,7 +65,8 @@ fn main() { log::info!("========== starting zed =========="); let mut app = gpui::App::new(Assets).unwrap(); - let 
installation_id = app.background().block(installation_id()).ok(); + let (installation_id, existing_installation_id_found) = + app.background().block(installation_id()).ok().unzip(); let session_id = Uuid::new_v4().to_string(); init_panic_hook(&app, installation_id.clone(), session_id.clone()); @@ -166,6 +167,14 @@ fn main() { .detach(); client.telemetry().start(installation_id, session_id, cx); + let telemetry_settings = *settings::get::(cx); + let event_operation = match existing_installation_id_found { + Some(false) => "first open", + _ => "open", + }; + client + .telemetry() + .report_app_event(telemetry_settings, event_operation); let app_state = Arc::new(AppState { languages, @@ -317,11 +326,11 @@ async fn authenticate(client: Arc, cx: &AsyncAppContext) -> Result<()> { Ok::<_, anyhow::Error>(()) } -async fn installation_id() -> Result { +async fn installation_id() -> Result<(String, bool)> { let legacy_key_name = "device_id"; if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) { - Ok(installation_id) + Ok((installation_id, true)) } else { let installation_id = Uuid::new_v4().to_string(); @@ -329,7 +338,7 @@ async fn installation_id() -> Result { .write_kvp(legacy_key_name.to_string(), installation_id.clone()) .await?; - Ok(installation_id) + Ok((installation_id, false)) } } diff --git a/crates/zed2/src/main.rs b/crates/zed2/src/main.rs index c6737628a9d52f548738165ca68e39be137823b8..46be582129685b125b3a17806f6a7c265b620e08 100644 --- a/crates/zed2/src/main.rs +++ b/crates/zed2/src/main.rs @@ -71,7 +71,11 @@ fn main() { log::info!("========== starting zed =========="); let app = App::production(Arc::new(Assets)); - let installation_id = app.background_executor().block(installation_id()).ok(); + let (installation_id, existing_installation_id_found) = app + .background_executor() + .block(installation_id()) + .ok() + .unzip(); let session_id = Uuid::new_v4().to_string(); init_panic_hook(&app, installation_id.clone(), session_id.clone()); @@ -173,6 +177,14 @@ fn main() { // .detach(); client.telemetry().start(installation_id, session_id, cx); + let telemetry_settings = *client::TelemetrySettings::get_global(cx); + let event_operation = match existing_installation_id_found { + Some(false) => "first open", + _ => "open", + }; + client + .telemetry() + .report_app_event(telemetry_settings, event_operation); let app_state = Arc::new(AppState { languages, @@ -333,11 +345,11 @@ fn main() { // Ok::<_, anyhow::Error>(()) // } -async fn installation_id() -> Result { +async fn installation_id() -> Result<(String, bool)> { let legacy_key_name = "device_id"; if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) { - Ok(installation_id) + Ok((installation_id, true)) } else { let installation_id = Uuid::new_v4().to_string(); @@ -345,7 +357,7 @@ async fn installation_id() -> Result { .write_kvp(legacy_key_name.to_string(), installation_id.clone()) .await?; - Ok(installation_id) + Ok((installation_id, false)) } }
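A rough sketch of the idea mentioned in the commit message above, in case it helps when picking the `close` event back up: cache the user's `TelemetrySettings` on `TelemetryState`, refresh the cached copy whenever an event is reported, and read it from the `on_app_quit` observer so the shutdown path no longer depends on the settings global. This is not part of this patch; the `settings` field and the `shutdown_telemetry` wiring are assumptions layered on top of the commented-out code in `crates/client2/src/telemetry.rs`.

// Sketch only, not part of this change. Assumes `TelemetryState` gains a
// `settings: TelemetrySettings` field (TelemetrySettings is Copy) that is
// refreshed every time an event is reported.
impl Telemetry {
    pub fn report_app_event(
        self: &Arc<Self>,
        telemetry_settings: TelemetrySettings,
        operation: &'static str,
    ) {
        // Remember the most recently seen settings so the shutdown path can
        // honor the user's preference even after the global is torn down.
        self.state.lock().settings = telemetry_settings;

        let event = ClickhouseEvent::App {
            operation,
            milliseconds_since_first_event: self.milliseconds_since_first_event(),
        };

        self.report_clickhouse_event(event, telemetry_settings, true)
    }

    // Hooked up from `Telemetry::new` via the commented-out `cx.on_app_quit` call.
    fn shutdown_telemetry(self: &Arc<Self>, _cx: &mut AppContext) -> Task<()> {
        // Use the cached copy; the `TelemetrySettings` global may no longer be
        // available this late in teardown.
        let telemetry_settings = self.state.lock().settings;
        self.report_app_event(telemetry_settings, "close");
        Task::ready(())
    }
}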