|
1 | 1 | use crate::prelude::*;
|
2 | 2 | use std::path::{Path, PathBuf};
|
| 3 | +use std::sync::Mutex; |
3 | 4 | use tempfile::TempDir;
|
4 | 5 |
|
5 |
| -fn setup_test_project(project_name: &str) -> (TempDir, PathBuf) { |
6 |
| - let project_path = Path::new("testdata/projects").join(project_name); |
7 |
| - let temp_dir = TempDir::new().unwrap(); |
8 |
| - let test_dir = temp_dir.path(); |
| 6 | +fn setup_test_project(project_name: &str) -> anyhow::Result<TempDir> { |
| 7 | + let project_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) |
| 8 | + .join("testdata/projects") |
| 9 | + .join(project_name); |
| 10 | + println!("Project path: {}", project_path.display()); |
9 | 11 |
|
10 |
| - crate::utils::copy_dir_all(&project_path, test_dir.join(project_name)).unwrap(); |
11 |
| - let project_test_path = test_dir.join(project_name); |
| 12 | + let temp_dir = TempDir::new()?; |
| 13 | + crate::utils::copy_dir_all(&project_path, &temp_dir)?; |
12 | 14 |
|
13 |
| - std::env::set_current_dir(&project_test_path).unwrap(); |
| 15 | + Ok(temp_dir) |
| 16 | +} |
| 17 | + |
/// Asserts that `binary_dir` exists and contains exactly `n` directory
/// entries (one per compiled benchmark binary).
///
/// # Panics
/// Panics with a diagnostic message if the directory is missing, unreadable,
/// or holds a different number of entries.
fn assert_benchmarks_created(binary_dir: &Path, n: usize) {
    assert!(
        binary_dir.exists(),
        "benchmark binary dir does not exist: {}",
        binary_dir.display()
    );

    let entries: Vec<_> = std::fs::read_dir(binary_dir)
        .unwrap()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(
        entries.len(),
        n,
        "unexpected number of benchmark binaries in {}",
        binary_dir.display()
    );
}
|
17 | 27 |
|
18 |
| -/// Integration tests with real Go projects to ensure end-to-end functionality |
19 |
| -#[cfg(test)] |
20 |
| -mod tests { |
21 |
| - use super::*; |
22 |
| - |
23 |
| - fn assert_benchmarks_created(n: usize) { |
24 |
| - let codspeed_dir = std::env::current_dir() |
25 |
| - .unwrap() |
26 |
| - .join(".codspeed") |
27 |
| - .join("walltime"); |
28 |
| - assert!(codspeed_dir.exists()); |
29 |
| - |
30 |
| - let entries: Vec<_> = std::fs::read_dir(&codspeed_dir) |
31 |
| - .unwrap() |
32 |
| - .collect::<Result<Vec<_>, _>>() |
33 |
| - .unwrap(); |
34 |
| - assert_eq!(entries.len(), n); |
35 |
| - } |
| 28 | +fn run_benchmark_for_project(project_name: &str) { |
| 29 | + let temp_dir = setup_test_project(project_name).unwrap(); |
36 | 30 |
|
37 |
| - #[test] |
38 |
| - fn test_caddy_benchmarks() { |
39 |
| - let (_temp_dir, caddy_test_path) = setup_test_project("caddy"); |
| 31 | + let binary_dir = temp_dir.path().join(".codspeed").join("walltime"); |
| 32 | + let binaries = crate::build_benchmarks(temp_dir.path(), &binary_dir).unwrap(); |
| 33 | + assert!(!binaries.is_empty(), "No benchmark binaries were created"); |
| 34 | + assert_benchmarks_created(&binary_dir, binaries.len()); |
40 | 35 |
|
41 |
| - let binaries = crate::build_benchmarks(&caddy_test_path).unwrap(); |
42 |
| - assert!(!binaries.is_empty(), "No benchmark binaries were created"); |
43 |
| - assert_benchmarks_created(binaries.len()); |
| 36 | + // Mutex to prevent concurrent tests from interfering with CODSPEED_PROFILE_FOLDER env var |
| 37 | + static ENV_MUTEX: Mutex<()> = Mutex::new(()); |
| 38 | + let _guard = ENV_MUTEX.lock().unwrap(); |
44 | 39 |
|
45 |
| - // FIXME: Find a better solution for this |
46 |
| - unsafe { |
47 |
| - std::env::set_var("CODSPEED_PROFILE_FOLDER", caddy_test_path.join("profile")); |
48 |
| - } |
49 |
| - crate::run_benchmarks(".").unwrap(); |
| 40 | + unsafe { std::env::set_var("CODSPEED_PROFILE_FOLDER", temp_dir.path().join("profile")) }; |
| 41 | + if let Err(error) = crate::run_benchmarks(".", &binary_dir) { |
| 42 | + panic!("Benchmarks couldn't run: {error}"); |
50 | 43 | }
|
| 44 | + |
| 45 | + // TODO: Assert that we have a results.json? |
| 46 | +} |
| 47 | + |
| 48 | +#[ignore = "doesn't work atm"] |
| 49 | +#[test] |
| 50 | +fn test_caddy_benchmarks() { |
| 51 | + run_benchmark_for_project("caddy"); |
| 52 | +} |
| 53 | + |
/// End-to-end: build and run the `fzf` fixture project's benchmarks.
#[test]
fn test_fzf_benchmarks() {
    run_benchmark_for_project("fzf");
}
| 58 | + |
/// End-to-end: build and run the `opentelemetry-go` fixture project's benchmarks.
#[test]
fn test_opentelemetry_go_benchmarks() {
    run_benchmark_for_project("opentelemetry-go");
}
| 63 | + |
/// End-to-end: build and run the `golang-benchmarks` fixture project's benchmarks.
#[test]
fn test_golang_benchmarks() {
    run_benchmark_for_project("golang-benchmarks");
}
|
0 commit comments