Add tests for project discovery telemetry (#32782)
Release Notes: - N/A
parent ef61ebe049
commit 1660438a2a
3 changed files with 153 additions and 30 deletions
Cargo.lock (generated)

@@ -2822,6 +2822,7 @@ dependencies = [
  "collections",
  "credentials_provider",
  "feature_flags",
+ "fs",
  "futures 0.3.31",
  "gpui",
  "gpui_tokio",

@@ -64,11 +64,12 @@ workspace-hack.workspace = true
 [dev-dependencies]
 clock = { workspace = true, features = ["test-support"] }
 collections = { workspace = true, features = ["test-support"] }
+fs.workspace = true
 gpui = { workspace = true, features = ["test-support"] }
+http_client = { workspace = true, features = ["test-support"] }
 rpc = { workspace = true, features = ["test-support"] }
 settings = { workspace = true, features = ["test-support"] }
 util = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
 
 [target.'cfg(target_os = "windows")'.dependencies]
 windows.workspace = true

@@ -384,41 +384,47 @@ impl Telemetry {
         worktree_id: WorktreeId,
         updated_entries_set: &UpdatedEntriesSet,
     ) {
-        let project_type_names: Vec<String> = {
-            let mut state = self.state.lock();
-            state
-                .project_marker_patterns
-                .0
-                .iter_mut()
-                .filter_map(|(pattern, project_cache)| {
-                    if project_cache.worktree_ids_reported.contains(&worktree_id) {
-                        return None;
-                    }
-
-                    let project_file_found = updated_entries_set.iter().any(|(path, _, _)| {
-                        path.as_ref()
-                            .file_name()
-                            .and_then(|name| name.to_str())
-                            .map(|name_str| pattern.is_match(name_str))
-                            .unwrap_or(false)
-                    });
-
-                    if !project_file_found {
-                        return None;
-                    }
-
-                    project_cache.worktree_ids_reported.insert(worktree_id);
-
-                    Some(project_cache.name.clone())
-                })
-                .collect()
-        };
+        let project_type_names = self.detect_project_types(worktree_id, updated_entries_set);
 
         for project_type_name in project_type_names {
             telemetry::event!("Project Opened", project_type = project_type_name);
         }
     }
 
+    fn detect_project_types(
+        self: &Arc<Self>,
+        worktree_id: WorktreeId,
+        updated_entries_set: &UpdatedEntriesSet,
+    ) -> Vec<String> {
+        let mut state = self.state.lock();
+        state
+            .project_marker_patterns
+            .0
+            .iter_mut()
+            .filter_map(|(pattern, project_cache)| {
+                if project_cache.worktree_ids_reported.contains(&worktree_id) {
+                    return None;
+                }
+
+                let project_file_found = updated_entries_set.iter().any(|(path, _, _)| {
+                    path.as_ref()
+                        .file_name()
+                        .and_then(|name| name.to_str())
+                        .map(|name_str| pattern.is_match(name_str))
+                        .unwrap_or(false)
+                });
+
+                if !project_file_found {
+                    return None;
+                }
+
+                project_cache.worktree_ids_reported.insert(worktree_id);
+
+                Some(project_cache.name.clone())
+            })
+            .collect()
+    }
+
     fn report_event(self: &Arc<Self>, event: Event) {
         let mut state = self.state.lock();
         // RUST_LOG=telemetry=trace to debug telemetry events
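For readers skimming the diff: the logic that previously lived inline in the worktree-update handler is now the detect_project_types method, which matches each changed file name against the configured marker patterns and records the worktree so that a given project type is reported at most once per worktree. Below is a minimal, self-contained sketch of that idea; MarkerType, the fn-pointer matchers, and the bare u64 worktree ids are illustrative stand-ins, not this crate's actual types.

use std::collections::HashSet;

// Illustrative stand-in for a project marker: a type name to report plus a
// predicate over file names (the real code uses compiled patterns and WorktreeId).
struct MarkerType {
    name: &'static str,
    matches: fn(&str) -> bool,
    worktrees_reported: HashSet<u64>,
}

fn is_package_json(name: &str) -> bool {
    name == "package.json"
}

fn is_pnpm_lock(name: &str) -> bool {
    name == "pnpm-lock.yaml"
}

fn detect_project_types(
    markers: &mut [MarkerType],
    worktree_id: u64,
    changed_file_names: &[&str],
) -> Vec<String> {
    markers
        .iter_mut()
        .filter_map(|marker| {
            // Skip marker types already reported for this worktree.
            if marker.worktrees_reported.contains(&worktree_id) {
                return None;
            }
            // Report only if one of the changed files matches the marker.
            if !changed_file_names
                .iter()
                .copied()
                .any(|name| (marker.matches)(name))
            {
                return None;
            }
            marker.worktrees_reported.insert(worktree_id);
            Some(marker.name.to_string())
        })
        .collect()
}

fn main() {
    let mut markers = [
        MarkerType {
            name: "node",
            matches: is_package_json,
            worktrees_reported: HashSet::new(),
        },
        MarkerType {
            name: "pnpm",
            matches: is_pnpm_lock,
            worktrees_reported: HashSet::new(),
        },
    ];
    // First scan reports both types; a rescan of the same worktree reports nothing.
    let first = detect_project_types(&mut markers, 1, &["package.json", "pnpm-lock.yaml"]);
    assert_eq!(first, vec!["node", "pnpm"]);
    assert!(detect_project_types(&mut markers, 1, &["package.json"]).is_empty());
}

Extracting the method also makes the detection step directly callable from the new tests below, without going through event reporting.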

@@ -583,6 +589,7 @@ mod tests {
     use http_client::FakeHttpClient;
     use std::collections::HashMap;
     use telemetry_events::FlexibleEvent;
+    use worktree::{PathChange, ProjectEntryId, WorktreeId};
 
     #[gpui::test]
     fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {

@@ -700,6 +707,88 @@ mod tests {
         });
     }
 
+    #[gpui::test]
+    fn test_project_discovery_does_not_double_report(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+
+        let clock = Arc::new(FakeSystemClock::new());
+        let http = FakeHttpClient::with_200_response();
+        let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
+        let worktree_id = 1;
+
+        // The first scan of worktree 1 reports the detected project types.
+        test_project_discovery_helper(
+            telemetry.clone(),
+            vec!["package.json"],
+            vec!["node"],
+            worktree_id,
+        );
+
+        // A rescan of worktree 1 reports nothing, since it has already been reported.
+        test_project_discovery_helper(telemetry.clone(), vec!["package.json"], vec![], worktree_id);
+    }
+
+    #[gpui::test]
+    fn test_pnpm_project_discovery(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+
+        let clock = Arc::new(FakeSystemClock::new());
+        let http = FakeHttpClient::with_200_response();
+        let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
+
+        test_project_discovery_helper(
+            telemetry.clone(),
+            vec!["package.json", "pnpm-lock.yaml"],
+            vec!["node", "pnpm"],
+            1,
+        );
+    }
+
+    #[gpui::test]
+    fn test_yarn_project_discovery(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+
+        let clock = Arc::new(FakeSystemClock::new());
+        let http = FakeHttpClient::with_200_response();
+        let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
+
+        test_project_discovery_helper(
+            telemetry.clone(),
+            vec!["package.json", "yarn.lock"],
+            vec!["node", "yarn"],
+            1,
+        );
+    }
+
+    #[gpui::test]
+    fn test_dotnet_project_discovery(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+
+        let clock = Arc::new(FakeSystemClock::new());
+        let http = FakeHttpClient::with_200_response();
+        let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
+
+        // Use a different worktree for each marker, since the production code
+        // reports a given project type at most once per worktree.
+
+        test_project_discovery_helper(
+            telemetry.clone(),
+            vec!["global.json"],
+            vec!["dotnet"],
+            1,
+        );
+        test_project_discovery_helper(
+            telemetry.clone(),
+            vec!["Directory.Build.props"],
+            vec!["dotnet"],
+            2,
+        );
+        test_project_discovery_helper(telemetry.clone(), vec!["file.csproj"], vec!["dotnet"], 3);
+        test_project_discovery_helper(telemetry.clone(), vec!["file.fsproj"], vec!["dotnet"], 4);
+        test_project_discovery_helper(telemetry.clone(), vec!["file.vbproj"], vec!["dotnet"], 5);
+        test_project_discovery_helper(telemetry, vec!["file.sln"], vec!["dotnet"], 6);
+    }
+
     // TODO:
     // Test settings
     // Update FakeHTTPClient to keep track of the number of requests and assert on it
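The double-report test and the comment in the dotnet test rely on the same invariant: a project type is recorded per worktree and suppressed on later scans of that worktree. As a standalone illustration of just that guard (report_once and the bare u64 ids are hypothetical names for this sketch, not this crate's API):

use std::collections::HashSet;

// Once a worktree id has been recorded for a project type, later scans of the
// same worktree are skipped; other worktrees are unaffected.
fn report_once(reported: &mut HashSet<u64>, worktree_id: u64) -> bool {
    // HashSet::insert returns false if the value was already present.
    reported.insert(worktree_id)
}

fn main() {
    let mut reported = HashSet::new();
    assert!(report_once(&mut reported, 1)); // first scan reports
    assert!(!report_once(&mut reported, 1)); // rescan is suppressed
    assert!(report_once(&mut reported, 2)); // a different worktree still reports
}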

@@ -716,4 +805,36 @@ mod tests {
             && telemetry.state.lock().flush_events_task.is_none()
             && telemetry.state.lock().first_event_date_time.is_none()
     }
+
+    fn test_project_discovery_helper(
+        telemetry: Arc<Telemetry>,
+        file_paths: Vec<&str>,
+        expected_project_types: Vec<&str>,
+        worktree_id_num: usize,
+    ) {
+        let worktree_id = WorktreeId::from_usize(worktree_id_num);
+        let entries: Vec<_> = file_paths
+            .into_iter()
+            .enumerate()
+            .map(|(i, path)| {
+                (
+                    Arc::from(std::path::Path::new(path)),
+                    ProjectEntryId::from_proto(i as u64 + 1),
+                    PathChange::Added,
+                )
+            })
+            .collect();
+        let updated_entries: UpdatedEntriesSet = Arc::from(entries.as_slice());
+
+        let mut detected_types = telemetry.detect_project_types(worktree_id, &updated_entries);
+        detected_types.sort();
+
+        let mut expected_sorted = expected_project_types
+            .into_iter()
+            .map(String::from)
+            .collect::<Vec<_>>();
+        expected_sorted.sort();
+
+        assert_eq!(detected_types, expected_sorted);
+    }
 }
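The helper's only non-obvious step is turning plain file names into the Arc-backed slice of (path, entry id, change) triples that detect_project_types consumes, then sorting both sides so the assertion is order-insensitive. A rough, self-contained sketch of that conversion, with EntryId and Change standing in for the worktree crate's ProjectEntryId and PathChange:

use std::path::Path;
use std::sync::Arc;

// Stand-ins for the worktree crate's ProjectEntryId and PathChange.
type EntryId = u64;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Change {
    Added,
}

fn build_entries(file_paths: &[&str]) -> Arc<[(Arc<Path>, EntryId, Change)]> {
    let entries: Vec<(Arc<Path>, EntryId, Change)> = file_paths
        .iter()
        .enumerate()
        .map(|(i, path)| (Arc::from(Path::new(*path)), i as u64 + 1, Change::Added))
        .collect();
    // Arc::from copies the slice into a shared, immutable allocation, matching
    // the Arc<[...]> shape the telemetry code takes as its updated-entries set.
    Arc::from(entries.as_slice())
}

fn main() {
    let entries = build_entries(&["package.json", "yarn.lock"]);
    assert_eq!(entries.len(), 2);
    assert_eq!(entries[0].0.as_ref(), Path::new("package.json"));
    assert_eq!(entries[1].2, Change::Added);
}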