Skip to content

Commit ba72b23

Browse files
committed
release: v0.1.13
Ship MCP wire-format fixes, guard-mode enforcement, and coordinator/vendor normalization improvements.
1 parent d945b42 commit ba72b23

File tree

11 files changed

+1186
-100
lines changed

11 files changed

+1186
-100
lines changed

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -6,7 +6,7 @@ members = [
66

77
[package]
88
name = "gpukill"
9-
version = "0.1.12"
9+
version = "0.1.13"
1010
edition = "2021"
1111
authors = ["Kage <info@treadie.com>"]
1212
description = "A CLI tool for GPU management and monitoring supporting NVIDIA, AMD, Intel, and Apple Silicon GPUs"

mcp/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "gpukill-mcp"
3-
version = "0.1.3"
3+
version = "0.1.4"
44
edition = "2021"
55
authors = ["GPU Kill Team"]
66
description = "MCP server for GPU Kill - AI-accessible GPU management"

mcp/src/resources.rs

Lines changed: 29 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -87,6 +87,21 @@ impl ResourceHandler {
8787

8888
async fn get_gpu_list(&self) -> anyhow::Result<ResourceContents> {
8989
let gpus = self.gpu_manager.get_all_snapshots()?;
90+
let processes = self.gpu_manager.get_all_processes()?;
91+
let mut processes_by_gpu: HashMap<u16, Vec<GpuProcess>> = HashMap::new();
92+
93+
for proc in processes {
94+
processes_by_gpu
95+
.entry(proc.gpu_index)
96+
.or_default()
97+
.push(GpuProcess {
98+
pid: proc.pid,
99+
name: proc.proc_name,
100+
memory_usage: proc.used_mem_mb as f64,
101+
user: Some(proc.user),
102+
});
103+
}
104+
90105
let gpu_info: Vec<GpuInfo> = gpus
91106
.into_iter()
92107
.map(|gpu| GpuInfo {
@@ -98,16 +113,10 @@ impl ResourceHandler {
98113
utilization: gpu.util_pct as f64,
99114
temperature: Some(gpu.temp_c as f64),
100115
power_usage: Some(gpu.power_w as f64),
101-
processes: gpu
102-
.top_proc
103-
.map(|proc| GpuProcess {
104-
pid: proc.pid,
105-
name: proc.proc_name,
106-
memory_usage: proc.used_mem_mb as f64,
107-
user: Some(proc.user),
108-
})
109-
.into_iter()
110-
.collect(),
116+
processes: processes_by_gpu
117+
.get(&gpu.gpu_index)
118+
.cloned()
119+
.unwrap_or_default(),
111120
})
112121
.collect();
113122

@@ -122,19 +131,16 @@ impl ResourceHandler {
122131
}
123132

124133
async fn get_gpu_processes(&self) -> anyhow::Result<ResourceContents> {
125-
let gpus = self.gpu_manager.get_all_snapshots()?;
126-
let mut all_processes = Vec::new();
127-
128-
for gpu in gpus {
129-
if let Some(proc) = gpu.top_proc {
130-
all_processes.push(GpuProcess {
131-
pid: proc.pid,
132-
name: proc.proc_name,
133-
memory_usage: proc.used_mem_mb as f64,
134-
user: Some(proc.user),
135-
});
136-
}
137-
}
134+
let processes = self.gpu_manager.get_all_processes()?;
135+
let all_processes: Vec<GpuProcess> = processes
136+
.into_iter()
137+
.map(|proc| GpuProcess {
138+
pid: proc.pid,
139+
name: proc.proc_name,
140+
memory_usage: proc.used_mem_mb as f64,
141+
user: Some(proc.user),
142+
})
143+
.collect();
138144

139145
let json_text = serde_json::to_string_pretty(&all_processes)?;
140146

mcp/src/server.rs

Lines changed: 15 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -5,6 +5,7 @@ use crate::tools::ToolHandler;
55
use crate::types::*;
66
use crate::MCP_VERSION;
77
use anyhow::Result;
8+
use axum::response::IntoResponse;
89
use serde_json::json;
910
use std::sync::Arc;
1011
use tokio::sync::RwLock;
@@ -29,8 +30,9 @@ impl GpuKillMCPServer {
2930
}
3031

3132
/// Handle an MCP request
32-
pub async fn handle_request(&self, request: JsonRpcRequest) -> Result<JsonRpcResponse> {
33+
pub async fn handle_request(&self, request: JsonRpcRequest) -> Result<Option<JsonRpcResponse>> {
3334
debug!("Handling MCP request: {}", request.method);
35+
let request_id = request.id.clone();
3436

3537
let result = match request.method.as_str() {
3638
"initialize" => self.handle_initialize(request.params).await,
@@ -42,24 +44,24 @@ impl GpuKillMCPServer {
4244
};
4345

4446
match result {
45-
Ok(data) => Ok(JsonRpcResponse {
47+
Ok(data) => Ok(request_id.map(|id| JsonRpcResponse {
4648
jsonrpc: "2.0".to_string(),
47-
id: request.id,
49+
id,
4850
result: Some(data),
4951
error: None,
50-
}),
52+
})),
5153
Err(e) => {
5254
error!("Error handling request {}: {}", request.method, e);
53-
Ok(JsonRpcResponse {
55+
Ok(request_id.map(|id| JsonRpcResponse {
5456
jsonrpc: "2.0".to_string(),
55-
id: request.id,
57+
id,
5658
result: None,
5759
error: Some(JsonRpcError {
5860
code: -32603,
5961
message: "Internal error".to_string(),
6062
data: Some(json!({ "details": e.to_string() })),
6163
}),
62-
})
64+
}))
6365
}
6466
}
6567
}
@@ -107,7 +109,7 @@ impl GpuKillMCPServer {
107109
.ok_or_else(|| anyhow::anyhow!("Missing uri parameter"))?;
108110

109111
let contents = self.resource_handler.get_resource(uri).await?;
110-
Ok(json!({ "contents": contents }))
112+
Ok(json!({ "contents": [contents] }))
111113
}
112114

113115
async fn handle_tools_list(&self) -> Result<serde_json::Value> {
@@ -153,21 +155,14 @@ impl GpuKillMCPServer {
153155
move |request: axum::extract::Json<JsonRpcRequest>| {
154156
let server = server.clone();
155157
async move {
156-
let request_id = request.0.id.clone();
157158
match server.handle_request(request.0).await {
158-
Ok(response) => axum::response::Json(response),
159+
Ok(Some(response)) => {
160+
axum::response::Json(response).into_response()
161+
}
162+
Ok(None) => axum::http::StatusCode::NO_CONTENT.into_response(),
159163
Err(e) => {
160164
error!("Failed to handle HTTP request: {}", e);
161-
axum::response::Json(JsonRpcResponse {
162-
jsonrpc: "2.0".to_string(),
163-
id: request_id,
164-
result: None,
165-
error: Some(JsonRpcError {
166-
code: -32603,
167-
message: "Internal error".to_string(),
168-
data: Some(json!({ "details": e.to_string() })),
169-
}),
170-
})
165+
axum::http::StatusCode::INTERNAL_SERVER_ERROR.into_response()
171166
}
172167
}
173168
}

0 commit comments

Comments (0)