diff --git a/lib/LinearSolveAutotune/src/LinearSolveAutotune.jl b/lib/LinearSolveAutotune/src/LinearSolveAutotune.jl
index 6c6bdf0d0..e472e7f71 100644
--- a/lib/LinearSolveAutotune/src/LinearSolveAutotune.jl
+++ b/lib/LinearSolveAutotune/src/LinearSolveAutotune.jl
@@ -66,7 +66,13 @@ function Base.show(io::IO, results::AutotuneResults)
 
     # System info summary
     println(io, "\nšŸ“Š System Information:")
-    println(io, " • CPU: ", get(results.sysinfo, "cpu_name", "Unknown"))
+    # Use cpu_model if available, otherwise fall back to cpu_name
+    cpu_display = get(results.sysinfo, "cpu_model", get(results.sysinfo, "cpu_name", "Unknown"))
+    println(io, " • CPU: ", cpu_display)
+    cpu_speed = get(results.sysinfo, "cpu_speed_mhz", 0)
+    if cpu_speed > 0
+        println(io, " • Speed: ", cpu_speed, " MHz")
+    end
     println(io, " • OS: ", get(results.sysinfo, "os_name", "Unknown"), " (", get(results.sysinfo, "os", "Unknown"), ")")
     println(io, " • Julia: ", get(results.sysinfo, "julia_version", "Unknown"))
     println(io, " • Threads: ", get(results.sysinfo, "num_threads", "Unknown"), " (BLAS: ", get(results.sysinfo, "blas_num_threads", "Unknown"), ")")
diff --git a/lib/LinearSolveAutotune/src/gpu_detection.jl b/lib/LinearSolveAutotune/src/gpu_detection.jl
index e748b6bd9..6a2ad8aac 100644
--- a/lib/LinearSolveAutotune/src/gpu_detection.jl
+++ b/lib/LinearSolveAutotune/src/gpu_detection.jl
@@ -2,6 +2,7 @@
 
 using CPUSummary
 using Pkg
+using Statistics: mean
 
 """
     is_cuda_available()
@@ -82,11 +83,32 @@ function get_system_info()
     info["os_name"] = Sys.iswindows() ? "Windows" : Sys.islinux() ? "Linux" : Sys.isapple() ? "macOS" : "Other"
     info["arch"] = string(Sys.ARCH)
 
-    # Use CPUSummary where available, fallback to Sys otherwise
+    # Get detailed CPU information from Sys.cpu_info()
+    cpu_info = Sys.cpu_info()
+    if !isempty(cpu_info)
+        first_cpu = cpu_info[1]
+        info["cpu_model"] = first_cpu.model
+        info["cpu_speed_mhz"] = first_cpu.speed
+
+        # Count unique CPU models (for heterogeneous systems)
+        cpu_models = unique([cpu.model for cpu in cpu_info])
+        if length(cpu_models) > 1
+            info["cpu_models"] = join(cpu_models, ", ")
+            info["heterogeneous_cpus"] = true
+        else
+            info["heterogeneous_cpus"] = false
+        end
+    else
+        info["cpu_model"] = "Unknown"
+        info["cpu_speed_mhz"] = 0
+    end
+
+    # Legacy CPU name for backward compatibility
     try
         info["cpu_name"] = string(Sys.CPU_NAME)
     catch
-        info["cpu_name"] = "Unknown"
+        # Fallback to cpu_model if CPU_NAME not available
+        info["cpu_name"] = get(info, "cpu_model", "Unknown")
     end
 
     # CPUSummary.num_cores() returns the physical cores (as Static.StaticInt)
@@ -295,11 +317,45 @@ function get_detailed_system_info()
         system_data["machine"] = "unknown"
     end
 
-    # CPU details
+    # CPU details from Sys.cpu_info()
+    try
+        cpu_info = Sys.cpu_info()
+        if !isempty(cpu_info)
+            first_cpu = cpu_info[1]
+            system_data["cpu_model"] = first_cpu.model
+            system_data["cpu_speed_mhz"] = first_cpu.speed
+
+            # Check for heterogeneous CPUs
+            cpu_models = unique([cpu.model for cpu in cpu_info])
+            if length(cpu_models) > 1
+                system_data["cpu_models"] = join(cpu_models, ", ")
+                system_data["heterogeneous_cpus"] = true
+            else
+                system_data["heterogeneous_cpus"] = false
+            end
+
+            # Calculate average CPU speed if speeds vary
+            cpu_speeds = [cpu.speed for cpu in cpu_info]
+            if length(unique(cpu_speeds)) > 1
+                system_data["cpu_speed_avg_mhz"] = round(mean(cpu_speeds), digits=0)
+                system_data["cpu_speed_min_mhz"] = minimum(cpu_speeds)
+                system_data["cpu_speed_max_mhz"] = maximum(cpu_speeds)
+            end
+        else
+            system_data["cpu_model"] = "unknown"
+            system_data["cpu_speed_mhz"] = 0
+        end
+    catch
+        system_data["cpu_model"] = "unknown"
+        system_data["cpu_speed_mhz"] = 0
+    end
+
+    # Legacy CPU name for backward compatibility
     try
         system_data["cpu_name"] = string(Sys.CPU_NAME)
     catch
-        system_data["cpu_name"] = "unknown"
+        # Fallback to cpu_model if available
+        system_data["cpu_name"] = get(system_data, "cpu_model", "unknown")
     end
 
     try
diff --git a/lib/LinearSolveAutotune/src/telemetry.jl b/lib/LinearSolveAutotune/src/telemetry.jl
index e83d3f864..32827ac7f 100644
--- a/lib/LinearSolveAutotune/src/telemetry.jl
+++ b/lib/LinearSolveAutotune/src/telemetry.jl
@@ -206,7 +206,24 @@ function format_system_info_markdown(system_info::Dict)
     push!(lines, "- **OS**: $os_display ($os_kernel)")
     # Handle both "arch" and "architecture" keys
     push!(lines, "- **Architecture**: $(get(system_info, "architecture", get(system_info, "arch", "unknown")))")
-    push!(lines, "- **CPU**: $(get(system_info, "cpu_name", "unknown"))")
+
+    # Enhanced CPU information
+    cpu_model = get(system_info, "cpu_model", nothing)
+    if cpu_model !== nothing && cpu_model != "unknown"
+        push!(lines, "- **CPU Model**: $cpu_model")
+        cpu_speed = get(system_info, "cpu_speed_mhz", 0)
+        if cpu_speed > 0
+            push!(lines, "- **CPU Speed**: $(cpu_speed) MHz")
+        end
+        # Show if heterogeneous CPUs detected
+        if get(system_info, "heterogeneous_cpus", false)
+            push!(lines, "- **CPU Models**: $(get(system_info, "cpu_models", ""))")
+        end
+    else
+        # Fallback to legacy CPU name
+        push!(lines, "- **CPU**: $(get(system_info, "cpu_name", "unknown"))")
+    end
+
     # Handle both "num_cores" and "cpu_cores" keys
     push!(lines, "- **Cores**: $(get(system_info, "cpu_cores", get(system_info, "num_cores", "unknown")))")
     # Handle both "num_threads" and "julia_threads" keys
@@ -416,20 +433,20 @@ function upload_to_github(content::String, plot_files, auth_info::Tuple,
     target_repo = "SciML/LinearSolve.jl"
     issue_number = 669 # The existing issue for collecting autotune results
 
-    # Construct comment body
-    cpu_name = get(system_info, "cpu_name", "unknown")
+    # Construct comment body - use cpu_model if available for more specific info
+    cpu_display = get(system_info, "cpu_model", get(system_info, "cpu_name", "unknown"))
     os_name = get(system_info, "os", "unknown")
     timestamp = Dates.format(Dates.now(), "yyyy-mm-dd HH:MM")
 
     comment_body = """
-    ## Benchmark Results: $cpu_name on $os_name ($timestamp)
+    ## Benchmark Results: $cpu_display on $os_name ($timestamp)
 
     $content
 
     ---
 
     ### System Summary
-    - **CPU:** $cpu_name
+    - **CPU:** $cpu_display
    - **OS:** $os_name
     - **Timestamp:** $timestamp