diff --git a/OPENSHIFT_AI_GUIDE.md b/OPENSHIFT_AI_GUIDE.md new file mode 100644 index 00000000..5ab1931a --- /dev/null +++ b/OPENSHIFT_AI_GUIDE.md @@ -0,0 +1,311 @@ +# 🚀 OpenShift AI MCP Server - Practical Usage Guide + +## 📥 Installation + +### **Option 1: Main Package (Recommended)** +```bash +npm install -g kubernetes-mcp-server-openshift-ai +``` + +### **Option 2: Platform-Specific** +```bash +# Linux AMD64 +npm install -g kubernetes-mcp-server-openshift-ai-linux-amd64 + +# macOS ARM64 (Apple Silicon) +npm install -g kubernetes-mcp-server-openshift-ai-darwin-arm64 +``` + +### **Option 3: Direct Download** +```bash +curl -sSL https://raw.githubusercontent.com/macayaven/openshift-mcp-server/main/install-openshift-ai.sh | bash +``` + +## 🔧 Configuration + +### **Basic Setup** +```bash +# Start with all toolsets (recommended) +kubernetes-mcp-server --toolsets core,config,helm,openshift-ai + +# Start with specific toolsets +kubernetes-mcp-server --toolsets core,openshift-ai + +# Check available toolsets +kubernetes-mcp-server --help +``` + +### **Kubernetes Configuration** +```bash +# Use specific kubeconfig +kubernetes-mcp-server --kubeconfig ~/.kube/config + +# Use current context +kubernetes-mcp-server --toolsets openshift-ai + +# Read-only mode (safe for production) +kubernetes-mcp-server --read-only --toolsets openshift-ai +``` + +## 🎯 Core Usage Scenarios + +### **Scenario 1: Data Science Project Management** +```bash +# Start server with OpenShift AI tools +kubernetes-mcp-server --toolsets core,config,helm,openshift-ai + +# Now in your AI assistant (Claude, Cursor, etc.), you can: +``` + +**Available Commands:** +- `create_datascience_project` - Create new DS project +- `list_datascience_projects` - List all projects +- `get_datascience_project` - Get project details +- `update_datascience_project` - Modify existing project +- `delete_datascience_project` - Remove project + +**Example Workflow:** +``` +1. "Create a data science project called 'ml-experiments'" +2. "List all data science projects" +3. "Get details of the ml-experiments project" +4. "Add a description to the ml-experiments project" +``` + +### **Scenario 2: Model Management** +```bash +# Start server (same as above) +kubernetes-mcp-server --toolsets core,openshift-ai + +# Available Model Commands: +- `list_models` - List all models in project +- `get_model` - Get model details +- `create_model` - Deploy new model +- `update_model` - Update model configuration +- `delete_model` - Remove model +``` + +**Example Workflow:** +``` +1. "List all models in the ml-experiments project" +2. "Create a new PyTorch model with GPU support" +3. "Update the model to use 2 GPU replicas" +4. "Get current status of the PyTorch model" +``` + +### **Scenario 3: Application Deployment** +```bash +# Start server +kubernetes-mcp-server --toolsets core,openshift-ai + +# Application Commands: +- `deploy_application` - Deploy new application +- `list_applications` - List applications +- `get_application` - Get app details +- `delete_application` - Remove application +``` + +**Example Workflow:** +``` +1. "Deploy a Streamlit application with 3 replicas" +2. "List all applications in the project" +3. "Get details of the Streamlit app" +4. "Scale the application to 5 replicas" +5. 
"Delete the application when done" +``` + +### **Scenario 4: Experiment Management** +```bash +# Start server +kubernetes-mcp-server --toolsets core,openshift-ai + +# Experiment Commands: +- `run_experiment` - Execute new experiment +- `list_experiments` - List all experiments +- `get_experiment` - Get experiment details +- `delete_experiment` - Remove experiment +``` + +**Example Workflow:** +``` +1. "Run a training experiment with a PyTorch model" +2. "List all experiments in the project" +3. "Get results and logs of the training experiment" +4. "Delete the experiment after analyzing results" +``` + +### **Scenario 5: Pipeline Management** +```bash +# Start server +kubernetes-mcp-server --toolsets core,openshift-ai + +# Pipeline Commands: +- `run_pipeline` - Execute new pipeline +- `list_pipelines` - List all pipelines +- `get_pipeline` - Get pipeline details +- `create_pipeline` - Create new pipeline +- `delete_pipeline` - Remove pipeline +``` + +**Example Workflow:** +``` +1. "Create a new ML pipeline for data preprocessing" +2. "Run the pipeline with the latest dataset" +3. "List all pipelines and their status" +4. "Get the execution logs of the preprocessing pipeline" +5. "Delete the pipeline after completion" +``` + +## 🛠️ Advanced Usage + +### **Multi-Cluster Management** +```bash +# Work with multiple Kubernetes clusters +kubernetes-mcp-server --toolsets core,config,openshift-ai + +# Switch between clusters using context tools +``` + +### **Helm Integration** +```bash +# Include Helm tools +kubernetes-mcp-server --toolsets core,helm,openshift-ai + +# Helm Commands Available: +- `list_helm_releases` +- `get_helm_release` +- `install_helm_chart` +- `upgrade_helm_release` +- `uninstall_helm_release` +``` + +### **Production Safety** +```bash +# Read-only mode (no destructive operations) +kubernetes-mcp-server --read-only --toolsets openshift-ai + +# Disable destructive tools +kubernetes-mcp-server --disable-destructive --toolsets core,openshift-ai +``` + +## 🔍 Integration with AI Assistants + +### **Claude Desktop Integration** +1. Install Claude Desktop +2. Go to Settings → MCP Servers +3. Add new server: + - **Name**: OpenShift AI Server + - **Command**: `kubernetes-mcp-server --toolsets core,config,helm,openshift-ai` +4. Enable the server +5. Start chatting with Claude about your OpenShift resources + +### **Cursor Integration** +1. Open Cursor +2. Go to Settings → AI Tools +3. Add MCP Server: + - **Name**: OpenShift AI + - **Path**: `/usr/local/bin/kubernetes-mcp-server` + - **Args**: `--toolsets core,config,helm,openshift-ai` +4. Restart Cursor + +### **GitHub Copilot Integration** +1. Install GitHub Copilot extension +2. Configure MCP server in settings +3. Use same command as above + +## 💡 Real-World Examples + +### **Example 1: ML Project Setup** +``` +You: "I want to create a new ML project for fraud detection" + +AI: "I'll help you create a data science project for fraud detection. +[Creates project with appropriate resources] + +Your fraud detection project is now ready! Here's what I created: +- Project: fraud-detection-ml +- Namespace: fraud-detection-ml +- Storage: 100Gi PVC +- Service Account: fraud-detection-sa + +Next steps: +1. Deploy your training data +2. Set up model serving +3. Configure monitoring +Would you like me to help with any of these?" +``` + +### **Example 2: Model Deployment** +``` +You: "Deploy a BERT model for text classification" + +AI: "I'll deploy a BERT model for text classification in your project. 
+[Deploys model with appropriate resources] + +BERT model deployed successfully! Details: +- Model: bert-text-classifier +- Framework: PyTorch +- Replicas: 2 +- Memory: 4Gi each +- Endpoint: https://bert-text-classifier.apps.cluster.com + +The model is ready for inference requests!" +``` + +### **Example 3: Batch Experiment** +``` +You: "Run a hyperparameter tuning experiment" + +AI: "I'll set up a hyperparameter tuning experiment for your model. +[Creates experiment with multiple parameter combinations] + +Hyperparameter experiment started! Details: +- Experiment: bert-hp-tuning-v2 +- Parameters: learning_rate, batch_size, epochs +- Trials: 27 combinations +- Max parallel: 3 trials + +The experiment will run all combinations and report the best configuration." +``` + +## 🔧 Troubleshooting + +### **Common Issues** +```bash +# Check if server is working +kubernetes-mcp-server --version + +# Test specific toolset +kubernetes-mcp-server --toolsets openshift-ai --help + +# Check connectivity +kubectl cluster-info + +# Verify OpenShift AI access +oc get datascienceprojects +``` + +### **Debug Mode** +```bash +# Enable verbose logging +kubernetes-mcp-server --log-level 9 --toolsets openshift-ai + +# Test with dry-run +kubernetes-mcp-server --toolsets core,openshift-ai --help +``` + +## 📚 Next Steps + +### **Learning Resources** +- OpenShift AI Documentation: https://docs.redhat.com/en-us/openshift_ai/ +- Kubernetes Documentation: https://kubernetes.io/docs/ +- MCP Documentation: https://modelcontextprotocol.io/ + +### **Community** +- GitHub Repository: https://github.com/macayaven/openshift-mcp-server +- Issues: Report bugs or request features +- Discussions: Ask questions and share workflows + +--- + +**🎉 You now have a complete OpenShift AI MCP server with 28 tools for full ML lifecycle management!** \ No newline at end of file diff --git a/install-openshift-ai.sh b/install-openshift-ai.sh new file mode 100755 index 00000000..094a135f --- /dev/null +++ b/install-openshift-ai.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +# OpenShift AI MCP Server Installation Script +# Downloads the complete OpenShift-AI enhanced version + +set -e + +echo "🚀 Installing OpenShift AI MCP Server (Complete Version)" +echo "📊 Includes: DataScience Projects, Models, Applications, Experiments, Pipelines" +echo "" + +# Detect platform +OS=$(uname -s | tr '[:upper:]' '[:lower:]') +ARCH=$(uname -m) + +case $OS in + darwin) + if [[ "$ARCH" == "arm64" ]]; then + BINARY="kubernetes-mcp-server-darwin-arm64" + else + echo "❌ Only ARM64 (Apple Silicon) Mac is supported" + exit 1 + fi + ;; + linux) + if [[ "$ARCH" == "x86_64" ]]; then + BINARY="kubernetes-mcp-server-linux-amd64" + elif [[ "$ARCH" == "aarch64" ]] || [[ "$ARCH" == "arm64" ]]; then + BINARY="kubernetes-mcp-server-linux-arm64" + else + echo "❌ Unsupported Linux architecture: $ARCH" + exit 1 + fi + ;; + *) + echo "❌ Unsupported OS: $OS" + exit 1 + ;; +esac + +echo "📥 Detected platform: $OS-$ARCH" +echo "📦 Downloading: $BINARY" + +# Download from GitHub releases +RELEASE_URL="https://github.com/macayaven/openshift-mcp-server/releases/download/v0.0.53-openshift-ai/$BINARY" + +# Create install directory +INSTALL_DIR="$HOME/.local/bin" +mkdir -p "$INSTALL_DIR" + +# Download binary +curl -L "$RELEASE_URL" -o "$INSTALL_DIR/kubernetes-mcp-server" +chmod +x "$INSTALL_DIR/kubernetes-mcp-server" + +# Add to PATH if not already there +if [[ ":$PATH:" != *":$INSTALL_DIR:"* ]]; then + echo "export PATH=\"\$PATH:$INSTALL_DIR\"" >> "$HOME/.zshrc" 2>/dev/null || echo "export 
PATH=\"\$PATH:$INSTALL_DIR\"" >> "$HOME/.bashrc" + echo "✅ Added $INSTALL_DIR to PATH" +fi + +echo "" +echo "✅ Installation complete!" +echo "" +echo "🎯 Usage:" +echo " kubernetes-mcp-server --toolsets core,config,helm,openshift-ai" +echo "" +echo "🔧 Available toolsets:" +echo " - core: Basic Kubernetes operations" +echo " - config: Configuration management" +echo " - helm: Helm chart operations" +echo " - openshift-ai: OpenShift AI/DataScience features (20 tools!)" +echo "" +echo "📚 OpenShift AI Tools included:" +echo " • 5 DataScience Project tools" +echo " • 5 Model tools" +echo " • 4 Application tools" +echo " • 4 Experiment tools" +echo " • 6 Pipeline tools" +echo "" +echo "🚀 Start using it now!" +echo " kubernetes-mcp-server" \ No newline at end of file diff --git a/pkg/api/datascience_project.go b/pkg/api/datascience_project.go new file mode 100644 index 00000000..ef75ca13 --- /dev/null +++ b/pkg/api/datascience_project.go @@ -0,0 +1,1297 @@ +package api + +import ( + "github.com/google/jsonschema-go/jsonschema" + "k8s.io/utils/ptr" +) + +// DataScienceProjectListRequest represents a request to list DataScienceProjects +type DataScienceProjectListRequest struct { + // Namespace to filter projects (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` +} + +// DataScienceProjectGetRequest represents a request to get a specific DataScienceProject +type DataScienceProjectGetRequest struct { + // Name of the DataScienceProject + Name string `json:"name"` + // Namespace of the DataScienceProject + Namespace string `json:"namespace"` +} + +// DataScienceProjectCreateRequest represents a request to create a DataScienceProject +type DataScienceProjectCreateRequest struct { + // Name of the DataScienceProject + Name string `json:"name"` + // Namespace where to create the DataScienceProject + Namespace string `json:"namespace"` + // Display name for the project (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the project (optional) + Description *string `json:"description,omitempty"` + // Labels to apply to the project (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the project (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// DataScienceProjectDeleteRequest represents a request to delete a DataScienceProject +type DataScienceProjectDeleteRequest struct { + // Name of the DataScienceProject + Name string `json:"name"` + // Namespace of the DataScienceProject + Namespace string `json:"namespace"` +} + +// DataScienceProject represents a DataScienceProject resource +type DataScienceProject struct { + // Name of the DataScienceProject + Name string `json:"name"` + // Namespace of the DataScienceProject + Namespace string `json:"namespace"` + // Display name of the project + DisplayName *string `json:"display_name,omitempty"` + // Description of the project + Description *string `json:"description,omitempty"` + // Labels applied to the project + Labels map[string]string `json:"labels,omitempty"` + // Annotations applied to the project + Annotations map[string]string `json:"annotations,omitempty"` + // Creation timestamp + CreatedAt *string `json:"created_at,omitempty"` + // Status of the project + Status DataScienceProjectStatus `json:"status"` +} + +// DataScienceProjectStatus represents the status of a DataScienceProject +type DataScienceProjectStatus struct { + // Current phase of the project + Phase string `json:"phase"` + // Status message + 
Message *string `json:"message,omitempty"` + // Conditions describing the project status + Conditions []DataScienceProjectCondition `json:"conditions,omitempty"` +} + +// DataScienceProjectCondition represents a condition of a DataScienceProject +type DataScienceProjectCondition struct { + // Type of the condition + Type string `json:"type"` + // Status of the condition (True, False, Unknown) + Status string `json:"status"` + // Reason for the condition's last transition + Reason *string `json:"reason,omitempty"` + // Human-readable message indicating details about the transition + Message *string `json:"message,omitempty"` + // Last time the condition transitioned from one status to another + LastTransitionTime *string `json:"last_transition_time,omitempty"` +} + +// GetDataScienceProjectListTool returns the tool definition for listing DataScienceProjects +func GetDataScienceProjectListTool() Tool { + return Tool{ + Name: "datascience_projects_list", + Description: "List all Data Science Projects in the current OpenShift AI cluster", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "The namespace to search for Data Science Projects (optional, defaults to all namespaces)", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Data Science Projects: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(true), + }, + } +} + +// GetDataScienceProjectGetTool returns the tool definition for getting a specific DataScienceProject +func GetDataScienceProjectGetTool() Tool { + return Tool{ + Name: "datascience_project_get", + Description: "Get details of a specific Data Science Project", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the Data Science Project", + }, + "namespace": { + Type: "string", + Description: "The namespace of the Data Science Project", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Data Science Project: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetDataScienceProjectCreateTool returns the tool definition for creating a DataScienceProject +func GetDataScienceProjectCreateTool() Tool { + return Tool{ + Name: "datascience_project_create", + Description: "Create a new Data Science Project", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the Data Science Project", + }, + "namespace": { + Type: "string", + Description: "The namespace where to create the Data Science Project", + }, + "display_name": { + Type: "string", + Description: "A display name for the Data Science Project (optional)", + }, + "description": { + Type: "string", + Description: "A description for the Data Science Project (optional)", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the Data Science Project (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the Data Science Project (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: 
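+		// Example tool-call arguments (illustrative values only; field names mirror the schema above):
+		//   {"name": "fraud-detection-ml", "namespace": "fraud-detection-ml", "display_name": "Fraud Detection", "description": "Models and notebooks for fraud scoring"}
+		// The annotations below mark this as a non-destructive write; repeating the call with an existing name is expected to fail rather than overwrite.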
ToolAnnotations{ + Title: "Data Science Project: Create", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetDataScienceProjectDeleteTool returns the tool definition for deleting a DataScienceProject +func GetDataScienceProjectDeleteTool() Tool { + return Tool{ + Name: "datascience_project_delete", + Description: "Delete a Data Science Project", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the Data Science Project", + }, + "namespace": { + Type: "string", + Description: "The namespace of the Data Science Project", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Data Science Project: Delete", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(true), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// Model represents a machine learning model in OpenShift AI +type Model struct { + // Name of the model + Name string `json:"name"` + // Namespace of the model + Namespace string `json:"namespace"` + // Display name of the model + DisplayName *string `json:"display_name,omitempty"` + // Description of the model + Description *string `json:"description,omitempty"` + // Model type (e.g., "pytorch", "tensorflow", "sklearn") + ModelType *string `json:"model_type,omitempty"` + // Model framework version + FrameworkVersion *string `json:"framework_version,omitempty"` + // Model format (e.g., "pickle", "onnx", "savedmodel") + Format *string `json:"format,omitempty"` + // Model size in bytes + Size *int64 `json:"size,omitempty"` + // Model version + Version *string `json:"version,omitempty"` + // Creation timestamp + CreatedAt *string `json:"created_at,omitempty"` + // Last updated timestamp + UpdatedAt *string `json:"updated_at,omitempty"` + // Model status + Status ModelStatus `json:"status"` + // Labels applied to the model + Labels map[string]string `json:"labels,omitempty"` + // Annotations applied to the model + Annotations map[string]string `json:"annotations,omitempty"` +} + +// ModelStatus represents the status of a model +type ModelStatus struct { + // Current phase of the model + Phase string `json:"phase"` + // Status message + Message *string `json:"message,omitempty"` + // Ready state + Ready bool `json:"ready"` + // Deployment status + DeploymentStatus *string `json:"deployment_status,omitempty"` +} + +// ModelListRequest represents a request to list models +type ModelListRequest struct { + // Namespace to filter models (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` + // Model type filter (optional) + ModelType *string `json:"model_type,omitempty"` + // Status filter (optional) + Status *string `json:"status,omitempty"` +} + +// ModelGetRequest represents a request to get a specific model +type ModelGetRequest struct { + // Name of the model + Name string `json:"name"` + // Namespace of the model + Namespace string `json:"namespace"` +} + +// ModelCreateRequest represents a request to create a model +type ModelCreateRequest struct { + // Name of the model + Name string `json:"name"` + // Namespace where to create the model + Namespace string `json:"namespace"` + // Display name for the model (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the model (optional) + Description *string `json:"description,omitempty"` + // 
Model type (e.g., "pytorch", "tensorflow", "sklearn") + ModelType string `json:"model_type"` + // Model framework version (optional) + FrameworkVersion *string `json:"framework_version,omitempty"` + // Model format (e.g., "pickle", "onnx", "savedmodel") + Format string `json:"format"` + // Model version (optional) + Version *string `json:"version,omitempty"` + // Labels to apply to the model (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the model (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// ModelUpdateRequest represents a request to update a model +type ModelUpdateRequest struct { + // Name of the model + Name string `json:"name"` + // Namespace of the model + Namespace string `json:"namespace"` + // Display name for the model (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the model (optional) + Description *string `json:"description,omitempty"` + // Labels to apply to the model (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the model (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// ModelDeleteRequest represents a request to delete a model +type ModelDeleteRequest struct { + // Name of the model + Name string `json:"name"` + // Namespace of the model + Namespace string `json:"namespace"` +} + +// GetModelListTool returns the tool definition for listing models +func GetModelListTool() Tool { + return Tool{ + Name: "models_list", + Description: "List all machine learning models in the current OpenShift AI cluster", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "The namespace to search for models (optional, defaults to all namespaces)", + }, + "model_type": { + Type: "string", + Description: "Filter models by type (e.g., pytorch, tensorflow, sklearn)", + }, + "status": { + Type: "string", + Description: "Filter models by status (e.g., Ready, Pending, Failed)", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Models: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(true), + }, + } +} + +// GetModelGetTool returns the tool definition for getting a specific model +func GetModelGetTool() Tool { + return Tool{ + Name: "model_get", + Description: "Get details of a specific machine learning model", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the model", + }, + "namespace": { + Type: "string", + Description: "The namespace of the model", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Model: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetModelCreateTool returns the tool definition for creating a model +func GetModelCreateTool() Tool { + return Tool{ + Name: "model_create", + Description: "Create a new machine learning model entry in OpenShift AI", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the model", + }, + "namespace": { + Type: "string", + Description: "The namespace where to create the model", + }, + "display_name": { + Type: 
"string", + Description: "A display name for the model (optional)", + }, + "description": { + Type: "string", + Description: "A description for the model (optional)", + }, + "model_type": { + Type: "string", + Description: "The model type (e.g., pytorch, tensorflow, sklearn)", + }, + "framework_version": { + Type: "string", + Description: "The framework version (optional)", + }, + "format": { + Type: "string", + Description: "The model format (e.g., pickle, onnx, savedmodel)", + }, + "version": { + Type: "string", + Description: "The model version (optional)", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the model (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the model (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace", "model_type", "format"}, + }, + Annotations: ToolAnnotations{ + Title: "Model: Create", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetModelUpdateTool returns the tool definition for updating a model +func GetModelUpdateTool() Tool { + return Tool{ + Name: "model_update", + Description: "Update an existing machine learning model in OpenShift AI", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the model", + }, + "namespace": { + Type: "string", + Description: "The namespace of the model", + }, + "display_name": { + Type: "string", + Description: "A display name for the model (optional)", + }, + "description": { + Type: "string", + Description: "A description for the model (optional)", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the model (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the model (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Model: Update", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetModelDeleteTool returns the tool definition for deleting a model +func GetModelDeleteTool() Tool { + return Tool{ + Name: "model_delete", + Description: "Delete a machine learning model from OpenShift AI", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the model", + }, + "namespace": { + Type: "string", + Description: "The namespace of the model", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Model: Delete", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(true), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// Experiment represents an OpenShift AI Experiment for ML experiment tracking +type Experiment struct { + // Name of experiment + Name string `json:"name"` + // Namespace of experiment + Namespace string `json:"namespace"` + // Display name (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description (optional) + Description *string 
`json:"description,omitempty"` + // Labels associated with experiment + Labels map[string]string `json:"labels,omitempty"` + // Annotations associated with experiment + Annotations map[string]string `json:"annotations,omitempty"` + // Experiment status + Status ExperimentStatus `json:"status"` +} + +// ExperimentStatus represents the status of an experiment +type ExperimentStatus struct { + // Phase of the experiment (e.g., "Created", "Running", "Completed", "Failed") + Phase string `json:"phase"` + // Optional message about experiment status + Message *string `json:"message,omitempty"` + // Whether the experiment is ready + Ready bool `json:"ready"` + // Number of runs in this experiment + RunCount int `json:"run_count"` + // Last time the experiment was updated + LastUpdated *string `json:"last_updated,omitempty"` +} + +// ExperimentListRequest represents a request to list experiments +type ExperimentListRequest struct { + // Namespace to filter experiments (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` + // Filter by status (optional) + Status *string `json:"status,omitempty"` +} + +// ExperimentGetRequest represents a request to get a specific experiment +type ExperimentGetRequest struct { + // Name of the experiment + Name string `json:"name"` + // Namespace of the experiment + Namespace string `json:"namespace"` +} + +// ExperimentCreateRequest represents a request to create an experiment +type ExperimentCreateRequest struct { + // Name of the experiment + Name string `json:"name"` + // Namespace where to create the experiment + Namespace string `json:"namespace"` + // Display name for the experiment (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the experiment (optional) + Description *string `json:"description,omitempty"` + // Labels to apply to the experiment (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the experiment (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// ExperimentDeleteRequest represents a request to delete an experiment +type ExperimentDeleteRequest struct { + // Name of the experiment + Name string `json:"name"` + // Namespace of the experiment + Namespace string `json:"namespace"` +} + +// GetExperimentsListTool returns the tool definition for listing experiments +func GetExperimentsListTool() Tool { + return Tool{ + Name: "experiments_list", + Description: "List all OpenShift AI machine learning experiments", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "Namespace to filter experiments (optional, defaults to all namespaces)", + }, + "status": { + Type: "string", + Description: "Filter by experiment status (optional)", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Experiments: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(true), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetExperimentGetTool returns the tool definition for getting a specific experiment +func GetExperimentGetTool() Tool { + return Tool{ + Name: "experiment_get", + Description: "Get a specific OpenShift AI machine learning experiment", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the experiment", + }, + "namespace": { + Type: "string", + Description: "The namespace 
of the experiment", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Experiment: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetExperimentCreateTool returns the tool definition for creating an experiment +func GetExperimentCreateTool() Tool { + return Tool{ + Name: "experiment_create", + Description: "Create a new OpenShift AI machine learning experiment", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the experiment", + }, + "namespace": { + Type: "string", + Description: "The namespace where to create the experiment", + }, + "display_name": { + Type: "string", + Description: "Display name for the experiment (optional)", + }, + "description": { + Type: "string", + Description: "Description for the experiment (optional)", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the experiment (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the experiment (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Experiment: Create", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetExperimentDeleteTool returns the tool definition for deleting an experiment +func GetExperimentDeleteTool() Tool { + return Tool{ + Name: "experiment_delete", + Description: "Delete an OpenShift AI machine learning experiment", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of experiment", + }, + "namespace": { + Type: "string", + Description: "The namespace of the experiment", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Experiment: Delete", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(true), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// Application represents an OpenShift AI Application (e.g., Jupyter notebook) +type Application struct { + // Name of application + Name string `json:"name"` + // Namespace of application + Namespace string `json:"namespace"` + // Display name (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description (optional) + Description *string `json:"description,omitempty"` + // Labels associated with the application + Labels map[string]string `json:"labels,omitempty"` + // Annotations associated with the application + Annotations map[string]string `json:"annotations,omitempty"` + // Application type (e.g., "Jupyter", "CodeServer") + AppType string `json:"app_type,omitempty"` + // Application status + Status ApplicationStatus `json:"status"` +} + +// ApplicationStatus represents the status of an application +type ApplicationStatus struct { + // Phase of the application (e.g., "Creating", "Ready", "Failed", "Stopped") + Phase string `json:"phase"` + // Optional message about the application status + Message *string `json:"message,omitempty"` + // Whether the application is ready + Ready bool `json:"ready"` + // Application type (e.g., "Jupyter", 
"CodeServer") + AppType *string `json:"app_type,omitempty"` + // URL to access the application + URL *string `json:"url,omitempty"` + // Last time the application was updated + LastUpdated *string `json:"last_updated,omitempty"` +} + +// ApplicationListRequest represents a request to list applications +type ApplicationListRequest struct { + // Namespace to filter applications (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` + // Filter by status (optional) + Status *string `json:"status,omitempty"` + // Filter by application type (optional) + AppType *string `json:"app_type,omitempty"` +} + +// ApplicationGetRequest represents a request to get a specific application +type ApplicationGetRequest struct { + // Name of the application + Name string `json:"name"` + // Namespace of the application + Namespace string `json:"namespace"` +} + +// ApplicationCreateRequest represents a request to create an application +type ApplicationCreateRequest struct { + // Name of the application + Name string `json:"name"` + // Namespace where to create the application + Namespace string `json:"namespace"` + // Display name for the application (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the application (optional) + Description *string `json:"description,omitempty"` + // Application type (e.g., "Jupyter", "CodeServer") + AppType string `json:"app_type"` + // Labels to apply to the application (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the application (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// ApplicationDeleteRequest represents a request to delete an application +type ApplicationDeleteRequest struct { + // Name of the application + Name string `json:"name"` + // Namespace of the application + Namespace string `json:"namespace"` +} + +// GetApplicationsListTool returns the tool definition for listing applications +func GetApplicationsListTool() Tool { + return Tool{ + Name: "applications_list", + Description: "List all OpenShift AI applications (Jupyter notebooks, code servers, etc.)", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "Namespace to filter applications (optional, defaults to all namespaces)", + }, + "status": { + Type: "string", + Description: "Filter by application status (optional)", + }, + "app_type": { + Type: "string", + Description: "Filter by application type (optional, e.g., 'Jupyter', 'CodeServer')", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Applications: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(true), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetApplicationGetTool returns the tool definition for getting a specific application +func GetApplicationGetTool() Tool { + return Tool{ + Name: "application_get", + Description: "Get a specific OpenShift AI application", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the application", + }, + "namespace": { + Type: "string", + Description: "The namespace of the application", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Application: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + 
OpenWorldHint: ptr.To(false), + }, + } +} + +// GetApplicationCreateTool returns the tool definition for creating an application +func GetApplicationCreateTool() Tool { + return Tool{ + Name: "application_create", + Description: "Create a new OpenShift AI application (Jupyter notebook, code server, etc.)", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the application", + }, + "namespace": { + Type: "string", + Description: "The namespace where to create the application", + }, + "display_name": { + Type: "string", + Description: "Display name for the application (optional)", + }, + "description": { + Type: "string", + Description: "Description for the application (optional)", + }, + "app_type": { + Type: "string", + Description: "Application type (e.g., 'Jupyter', 'CodeServer')", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the application (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the application (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace", "app_type"}, + }, + Annotations: ToolAnnotations{ + Title: "Application: Create", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetApplicationDeleteTool returns the tool definition for deleting an application +func GetApplicationDeleteTool() Tool { + return Tool{ + Name: "application_delete", + Description: "Delete an OpenShift AI application", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the application", + }, + "namespace": { + Type: "string", + Description: "The namespace of the application", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Application: Delete", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(true), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// Pipeline represents an OpenShift AI Data Science Pipeline +type Pipeline struct { + // Name of pipeline + Name string `json:"name"` + // Namespace of pipeline + Namespace string `json:"namespace"` + // Display name (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description (optional) + Description *string `json:"description,omitempty"` + // Labels associated with pipeline + Labels map[string]string `json:"labels,omitempty"` + // Annotations associated with pipeline + Annotations map[string]string `json:"annotations,omitempty"` + // Pipeline status + Status PipelineStatus `json:"status"` +} + +// PipelineStatus represents the status of a pipeline +type PipelineStatus struct { + // Phase of pipeline (e.g., "Created", "Running", "Succeeded", "Failed") + Phase string `json:"phase"` + // Optional message about the pipeline status + Message *string `json:"message,omitempty"` + // Whether the pipeline is ready + Ready bool `json:"ready"` + // Number of runs in this pipeline + RunCount int `json:"run_count"` + // Last time the pipeline was updated + LastUpdated *string `json:"last_updated,omitempty"` +} + +// PipelineRun represents a run of a pipeline +type PipelineRun struct { + // Name of pipeline run + Name string `json:"name"` + // 
Pipeline name that this run belongs to + PipelineName string `json:"pipeline_name"` + // Namespace of pipeline run + Namespace string `json:"namespace"` + // Display name (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description (optional) + Description *string `json:"description,omitempty"` + // Labels associated with pipeline run + Labels map[string]string `json:"labels,omitempty"` + // Annotations associated with pipeline run + Annotations map[string]string `json:"annotations,omitempty"` + // Pipeline run status + Status PipelineRunStatus `json:"status"` +} + +// PipelineRunStatus represents the status of a pipeline run +type PipelineRunStatus struct { + // Phase of pipeline run (e.g., "Created", "Running", "Succeeded", "Failed") + Phase string `json:"phase"` + // Optional message about the pipeline run status + Message *string `json:"message,omitempty"` + // Whether the pipeline run is ready + Ready bool `json:"ready"` + // Start time of the pipeline run + StartedAt *string `json:"started_at,omitempty"` + // End time of the pipeline run + FinishedAt *string `json:"finished_at,omitempty"` + // Last time the pipeline run was updated + LastUpdated *string `json:"last_updated,omitempty"` +} + +// PipelineListRequest represents a request to list pipelines +type PipelineListRequest struct { + // Namespace to filter pipelines (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` + // Filter by status (optional) + Status *string `json:"status,omitempty"` +} + +// PipelineGetRequest represents a request to get a specific pipeline +type PipelineGetRequest struct { + // Name of the pipeline + Name string `json:"name"` + // Namespace of the pipeline + Namespace string `json:"namespace"` +} + +// PipelineCreateRequest represents a request to create a pipeline +type PipelineCreateRequest struct { + // Name of the pipeline + Name string `json:"name"` + // Namespace where to create the pipeline + Namespace string `json:"namespace"` + // Display name for the pipeline (optional) + DisplayName *string `json:"display_name,omitempty"` + // Description for the pipeline (optional) + Description *string `json:"description,omitempty"` + // Labels to apply to the pipeline (optional) + Labels map[string]string `json:"labels,omitempty"` + // Annotations to apply to the pipeline (optional) + Annotations map[string]string `json:"annotations,omitempty"` +} + +// PipelineDeleteRequest represents a request to delete a pipeline +type PipelineDeleteRequest struct { + // Name of the pipeline + Name string `json:"name"` + // Namespace of the pipeline + Namespace string `json:"namespace"` +} + +// PipelineRunListRequest represents a request to list pipeline runs +type PipelineRunListRequest struct { + // Namespace to filter pipeline runs (optional, defaults to all namespaces) + Namespace *string `json:"namespace,omitempty"` + // Filter by pipeline name (optional) + PipelineName *string `json:"pipeline_name,omitempty"` + // Filter by status (optional) + Status *string `json:"status,omitempty"` +} + +// PipelineRunGetRequest represents a request to get a specific pipeline run +type PipelineRunGetRequest struct { + // Name of the pipeline run + Name string `json:"name"` + // Namespace of the pipeline run + Namespace string `json:"namespace"` +} + +// GetPipelinesListTool returns the tool definition for listing pipelines +func GetPipelinesListTool() Tool { + return Tool{ + Name: "pipelines_list", + Description: "List all OpenShift AI data science pipelines", + 
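+		// Example tool-call arguments (illustrative; both filters are optional and mirror the schema below):
+		//   {"namespace": "ml-experiments", "status": "Running"}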
InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "Namespace to filter pipelines (optional, defaults to all namespaces)", + }, + "status": { + Type: "string", + Description: "Filter by pipeline status (optional)", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Pipelines: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(true), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetPipelineGetTool returns the tool definition for getting a specific pipeline +func GetPipelineGetTool() Tool { + return Tool{ + Name: "pipeline_get", + Description: "Get a specific OpenShift AI data science pipeline", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the pipeline", + }, + "namespace": { + Type: "string", + Description: "The namespace of the pipeline", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Pipeline: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetPipelineCreateTool returns the tool definition for creating a pipeline +func GetPipelineCreateTool() Tool { + return Tool{ + Name: "pipeline_create", + Description: "Create a new OpenShift AI data science pipeline", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the pipeline", + }, + "namespace": { + Type: "string", + Description: "The namespace where to create the pipeline", + }, + "display_name": { + Type: "string", + Description: "Display name for the pipeline (optional)", + }, + "description": { + Type: "string", + Description: "Description for the pipeline (optional)", + }, + "labels": { + Type: "object", + Description: "Labels to apply to the pipeline (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + "annotations": { + Type: "object", + Description: "Annotations to apply to the pipeline (optional)", + AdditionalProperties: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Pipeline: Create", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetPipelineDeleteTool returns the tool definition for deleting a pipeline +func GetPipelineDeleteTool() Tool { + return Tool{ + Name: "pipeline_delete", + Description: "Delete an OpenShift AI data science pipeline", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the pipeline", + }, + "namespace": { + Type: "string", + Description: "The namespace of the pipeline", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Pipeline: Delete", + ReadOnlyHint: ptr.To(false), + DestructiveHint: ptr.To(true), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetPipelineRunsListTool returns the tool definition for listing pipeline runs +func GetPipelineRunsListTool() Tool { + return Tool{ + Name: "pipeline_runs_list", + Description: "List all OpenShift AI data science pipeline runs", + 
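+		// Example tool-call arguments (illustrative; all three filters are optional and mirror the schema below):
+		//   {"namespace": "ml-experiments", "pipeline_name": "data-preprocessing", "status": "Succeeded"}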
InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "namespace": { + Type: "string", + Description: "Namespace to filter pipeline runs (optional, defaults to all namespaces)", + }, + "pipeline_name": { + Type: "string", + Description: "Filter by pipeline name (optional)", + }, + "status": { + Type: "string", + Description: "Filter by pipeline run status (optional)", + }, + }, + }, + Annotations: ToolAnnotations{ + Title: "Pipeline Runs: List", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(true), + OpenWorldHint: ptr.To(false), + }, + } +} + +// GetPipelineRunGetTool returns the tool definition for getting a specific pipeline run +func GetPipelineRunGetTool() Tool { + return Tool{ + Name: "pipeline_run_get", + Description: "Get a specific OpenShift AI data science pipeline run", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "The name of the pipeline run", + }, + "namespace": { + Type: "string", + Description: "The namespace of the pipeline run", + }, + }, + Required: []string{"name", "namespace"}, + }, + Annotations: ToolAnnotations{ + Title: "Pipeline Run: Get", + ReadOnlyHint: ptr.To(true), + DestructiveHint: ptr.To(false), + IdempotentHint: ptr.To(false), + OpenWorldHint: ptr.To(false), + }, + } +} diff --git a/pkg/config/config_default.go b/pkg/config/config_default.go index febea70c..495d4ae9 100644 --- a/pkg/config/config_default.go +++ b/pkg/config/config_default.go @@ -9,7 +9,7 @@ import ( func Default() *StaticConfig { defaultConfig := StaticConfig{ ListOutput: "table", - Toolsets: []string{"core", "config", "helm"}, + Toolsets: []string{"core", "config", "helm", "openshift-ai"}, } overrides := defaultOverrides() mergedConfig := mergeConfig(defaultConfig, overrides) diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index afdde191..0079cb4a 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -167,8 +167,8 @@ func (s *ConfigSuite) TestReadConfigValidPreservesDefaultsForMissingFields() { s.Equalf("table", config.ListOutput, "Expected ListOutput to be table, got %s", config.ListOutput) }) s.Run("toolsets defaulted correctly", func() { - s.Require().Lenf(config.Toolsets, 3, "Expected 3 toolsets, got %d", len(config.Toolsets)) - for _, toolset := range []string{"core", "config", "helm"} { + s.Require().Lenf(config.Toolsets, 4, "Expected 4 toolsets, got %d", len(config.Toolsets)) + for _, toolset := range []string{"core", "config", "helm", "openshift-ai"} { s.Containsf(config.Toolsets, toolset, "Expected toolsets to contain %s", toolset) } }) diff --git a/pkg/kubernetes-mcp-server/cmd/root.go b/pkg/kubernetes-mcp-server/cmd/root.go index db1782ab..bc44df3c 100644 --- a/pkg/kubernetes-mcp-server/cmd/root.go +++ b/pkg/kubernetes-mcp-server/cmd/root.go @@ -27,6 +27,7 @@ import ( "github.com/containers/kubernetes-mcp-server/pkg/mcp" "github.com/containers/kubernetes-mcp-server/pkg/output" "github.com/containers/kubernetes-mcp-server/pkg/toolsets" + _ "github.com/containers/kubernetes-mcp-server/pkg/toolsets/openshift-ai" "github.com/containers/kubernetes-mcp-server/pkg/version" ) diff --git a/pkg/kubernetes-mcp-server/cmd/root_test.go b/pkg/kubernetes-mcp-server/cmd/root_test.go index 22521667..9ec0565e 100644 --- a/pkg/kubernetes-mcp-server/cmd/root_test.go +++ b/pkg/kubernetes-mcp-server/cmd/root_test.go @@ -137,7 +137,7 @@ func TestToolsets(t *testing.T) { rootCmd := 
NewMCPServer(ioStreams) rootCmd.SetArgs([]string{"--help"}) o, err := captureOutput(rootCmd.Execute) // --help doesn't use logger/klog, cobra prints directly to stdout - if !strings.Contains(o, "Comma-separated list of MCP toolsets to use (available toolsets: config, core, helm).") { + if !strings.Contains(o, "Comma-separated list of MCP toolsets to use (available toolsets: config, core, helm, openshift-ai).") { t.Fatalf("Expected all available toolsets, got %s %v", o, err) } }) @@ -145,7 +145,7 @@ func TestToolsets(t *testing.T) { ioStreams, out := testStream() rootCmd := NewMCPServer(ioStreams) rootCmd.SetArgs([]string{"--version", "--port=1337", "--log-level=1"}) - if err := rootCmd.Execute(); !strings.Contains(out.String(), "- Toolsets: core, config, helm") { + if err := rootCmd.Execute(); !strings.Contains(out.String(), "- Toolsets: core, config, helm, openshift-ai") { t.Fatalf("Expected toolsets 'full', got %s %v", out, err) } }) diff --git a/pkg/kubernetes/kubernetes.go b/pkg/kubernetes/kubernetes.go index 3b5733e1..1086d70f 100644 --- a/pkg/kubernetes/kubernetes.go +++ b/pkg/kubernetes/kubernetes.go @@ -5,6 +5,7 @@ import ( "github.com/containers/kubernetes-mcp-server/pkg/helm" "k8s.io/client-go/kubernetes/scheme" + "k8s.io/client-go/rest" _ "k8s.io/client-go/plugin/pkg/client/auth/oidc" ) @@ -37,3 +38,14 @@ func (k *Kubernetes) NewHelm() *helm.Helm { // This is a derived Kubernetes, so it already has the Helm initialized return helm.NewHelm(k.manager) } + +// ToRESTConfig returns the REST configuration from the underlying manager +func (k *Kubernetes) ToRESTConfig() (*rest.Config, error) { + return k.manager.ToRESTConfig() +} + +// GetOrCreateOpenShiftAIClient returns a cached OpenShift AI client instance from the underlying manager +// clientFactory should be a function that creates the OpenShift AI client: func(*rest.Config, interface{}) (interface{}, error) +func (k *Kubernetes) GetOrCreateOpenShiftAIClient(clientFactory func(*rest.Config, interface{}) (interface{}, error)) (interface{}, error) { + return k.manager.GetOrCreateOpenShiftAIClient(clientFactory) +} diff --git a/pkg/kubernetes/manager.go b/pkg/kubernetes/manager.go index d09b8790..6974d0f5 100644 --- a/pkg/kubernetes/manager.go +++ b/pkg/kubernetes/manager.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "strings" + "sync" "github.com/containers/kubernetes-mcp-server/pkg/config" "github.com/containers/kubernetes-mcp-server/pkg/helm" @@ -32,6 +33,11 @@ type Manager struct { staticConfig *config.StaticConfig CloseWatchKubeConfig CloseWatchKubeConfig + + // OpenShift AI client (lazy-initialized) - using interface{} to avoid import cycle + openshiftAIClient interface{} + openshiftAIClientOnce sync.Once + openshiftAIClientErr error } var _ helm.Kubernetes = (*Manager)(nil) @@ -202,6 +208,21 @@ func (m *Manager) ToRawKubeConfigLoader() clientcmd.ClientConfig { return m.clientCmdConfig } +// GetRESTConfig returns the REST config for OpenShift AI client creation +func (m *Manager) GetRESTConfig() *rest.Config { + return m.cfg +} + +// GetDiscoveryClient returns the discovery client for OpenShift AI operations +func (m *Manager) GetDiscoveryClient() discovery.CachedDiscoveryInterface { + return m.discoveryClient +} + +// GetDynamicClient returns the dynamic client for OpenShift AI operations +func (m *Manager) GetDynamicClient() *dynamic.DynamicClient { + return m.dynamicClient +} + func (m *Manager) VerifyToken(ctx context.Context, token, audience string) (*authenticationv1api.UserInfo, []string, error) { tokenReviewClient, err 
:= m.accessControlClientSet.TokenReview() if err != nil { @@ -299,3 +320,21 @@ func (m *Manager) Derived(ctx context.Context) (*Kubernetes, error) { } return derived, nil } + +// GetOrCreateOpenShiftAIClient returns a cached OpenShift AI client instance. +// The client is created lazily on first access and reused for all subsequent calls. +// This avoids the overhead of creating new dynamic and discovery clients on every tool invocation. +// Thread-safe via sync.Once. +// clientFactory should be a function that creates the OpenShift AI client: func(*rest.Config, interface{}) (interface{}, error) +func (m *Manager) GetOrCreateOpenShiftAIClient(clientFactory func(*rest.Config, interface{}) (interface{}, error)) (interface{}, error) { + m.openshiftAIClientOnce.Do(func() { + m.openshiftAIClient, m.openshiftAIClientErr = clientFactory(m.cfg, nil) + if m.openshiftAIClientErr == nil { + klog.V(2).InfoS("OpenShift AI client initialized and cached") + } else { + klog.ErrorS(m.openshiftAIClientErr, "Failed to initialize OpenShift AI client") + } + }) + + return m.openshiftAIClient, m.openshiftAIClientErr +} diff --git a/pkg/mcp/generate_snapshots_test.go b/pkg/mcp/generate_snapshots_test.go new file mode 100644 index 00000000..b392a382 --- /dev/null +++ b/pkg/mcp/generate_snapshots_test.go @@ -0,0 +1,119 @@ +package mcp + +import ( + "context" + "encoding/json" + "os" + "strconv" + "testing" + + "github.com/containers/kubernetes-mcp-server/internal/test" + configuration "github.com/containers/kubernetes-mcp-server/pkg/config" + "github.com/mark3labs/mcp-go/mcp" + clientcmdapi "k8s.io/client-go/tools/clientcmd/api" +) + +func TestGenerateSnapshots(t *testing.T) { + if os.Getenv("GENERATE_SNAPSHOTS") != "true" { + t.Skip("Skipping snapshot generation test. Set GENERATE_SNAPSHOTS=true to run") + } + + // Test 1: Default toolsets + t.Run("Generate toolsets-full-tools.json", func(t *testing.T) { + mockServer := test.NewMockServer() + defer mockServer.Close() + cfg := configuration.Default() + cfg.KubeConfig = mockServer.KubeconfigFile(t) + mcpServer, err := NewServer(Configuration{StaticConfig: cfg}) + if err != nil { + t.Fatal(err) + } + defer mcpServer.Close() + client := test.NewMcpClient(t, mcpServer.ServeHTTP(nil)) + defer client.Close() + tools, err := client.ListTools(context.Background(), mcp.ListToolsRequest{}) + if err != nil { + t.Fatal(err) + } + writeJSON(t, "testdata/toolsets-full-tools.json", tools.Tools) + }) + + // Test 2: OpenShift + t.Run("Generate toolsets-full-tools-openshift.json", func(t *testing.T) { + mockServer := test.NewMockServer() + mockServer.Handle(&test.InOpenShiftHandler{}) + defer mockServer.Close() + cfg := configuration.Default() + cfg.KubeConfig = mockServer.KubeconfigFile(t) + mcpServer, err := NewServer(Configuration{StaticConfig: cfg}) + if err != nil { + t.Fatal(err) + } + defer mcpServer.Close() + client := test.NewMcpClient(t, mcpServer.ServeHTTP(nil)) + defer client.Close() + tools, err := client.ListTools(context.Background(), mcp.ListToolsRequest{}) + if err != nil { + t.Fatal(err) + } + writeJSON(t, "testdata/toolsets-full-tools-openshift.json", tools.Tools) + }) + + // Test 3: Multi-cluster (11 clusters) + t.Run("Generate toolsets-full-tools-multicluster.json", func(t *testing.T) { + mockServer := test.NewMockServer() + defer mockServer.Close() + kubeconfig := mockServer.Kubeconfig() + for i := 0; i < 10; i++ { + kubeconfig.Contexts[strconv.Itoa(i)] = clientcmdapi.NewContext() + } + cfg := configuration.Default() + cfg.KubeConfig = test.KubeconfigFile(t, 
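+			// two contexts in total here (the mock server's default plus "extra-cluster"),
+			// which is what produces the enum form of the optional context parameter in the generated snapshot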
kubeconfig) + mcpServer, err := NewServer(Configuration{StaticConfig: cfg}) + if err != nil { + t.Fatal(err) + } + defer mcpServer.Close() + client := test.NewMcpClient(t, mcpServer.ServeHTTP(nil)) + defer client.Close() + tools, err := client.ListTools(context.Background(), mcp.ListToolsRequest{}) + if err != nil { + t.Fatal(err) + } + writeJSON(t, "testdata/toolsets-full-tools-multicluster.json", tools.Tools) + }) + + // Test 4: Multi-cluster enum (2 clusters) + t.Run("Generate toolsets-full-tools-multicluster-enum.json", func(t *testing.T) { + mockServer := test.NewMockServer() + defer mockServer.Close() + kubeconfig := mockServer.Kubeconfig() + kubeconfig.Contexts["extra-cluster"] = clientcmdapi.NewContext() + cfg := configuration.Default() + cfg.KubeConfig = test.KubeconfigFile(t, kubeconfig) + mcpServer, err := NewServer(Configuration{StaticConfig: cfg}) + if err != nil { + t.Fatal(err) + } + defer mcpServer.Close() + client := test.NewMcpClient(t, mcpServer.ServeHTTP(nil)) + defer client.Close() + tools, err := client.ListTools(context.Background(), mcp.ListToolsRequest{}) + if err != nil { + t.Fatal(err) + } + writeJSON(t, "testdata/toolsets-full-tools-multicluster-enum.json", tools.Tools) + }) +} + +func writeJSON(t *testing.T, filename string, data interface{}) { + jsonData, err := json.MarshalIndent(data, "", " ") + if err != nil { + t.Fatal(err) + } + err = os.WriteFile(filename, jsonData, 0644) + if err != nil { + t.Fatal(err) + } + t.Logf("Written %s", filename) +} diff --git a/pkg/mcp/modules.go b/pkg/mcp/modules.go index 3295d72b..8b4fd5fd 100644 --- a/pkg/mcp/modules.go +++ b/pkg/mcp/modules.go @@ -3,3 +3,4 @@ package mcp import _ "github.com/containers/kubernetes-mcp-server/pkg/toolsets/config" import _ "github.com/containers/kubernetes-mcp-server/pkg/toolsets/core" import _ "github.com/containers/kubernetes-mcp-server/pkg/toolsets/helm" +import _ "github.com/containers/kubernetes-mcp-server/pkg/toolsets/openshift-ai" diff --git a/pkg/mcp/testdata/toolsets-full-tools-multicluster-enum.json b/pkg/mcp/testdata/toolsets-full-tools-multicluster-enum.json index 1551b4c2..e0f531b1 100644 --- a/pkg/mcp/testdata/toolsets-full-tools-multicluster-enum.json +++ b/pkg/mcp/testdata/toolsets-full-tools-multicluster-enum.json @@ -1,47 +1,918 @@ [ + { + "annotations": { + "title": "Application: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI application (Jupyter notebook, code server, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the application (optional)", + "type": "object" + }, + "app_type": { + "description": "Application type (e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "description": { + "description": "Description for the application (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the application (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the application (optional)", + "type": "object" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "app_type" + ] + }, + "name": "application_create" + }, + { + "annotations": { + "title": "Application: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_delete" + }, + { + "annotations": { + "title": "Application: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_get" + }, + { + "annotations": { + "title": "Applications: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI applications (Jupyter notebooks, code servers, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "app_type": { + "description": "Filter by application type (optional, e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Namespace to filter applications (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by application status (optional)", + "type": "string" + } + } + }, + "name": "applications_list" + }, { "annotations": { "title": "Configuration: Contexts List", "readOnlyHint": true, "destructiveHint": false, - "idempotentHint": true, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all available context names and associated server urls from the kubeconfig file", + "inputSchema": { + "type": "object" + }, + "name": "configuration_contexts_list" + }, + { + "annotations": { + "title": "Configuration: View", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "Get the current Kubernetes configuration content as a kubeconfig YAML", + "inputSchema": { + "type": "object", + "properties": { + "minified": { + "description": "Return a minified version of the configuration. If set to true, keeps only the current-context and the relevant pieces of the configuration for that context. If set to false, all contexts, clusters, auth-infos, and users are returned in the configuration. (Optional, default true)", + "type": "boolean" + } + } + }, + "name": "configuration_view" + }, + { + "annotations": { + "title": "Data Science Project: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the Data Science Project (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "description": { + "description": "A description for the Data Science Project (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the Data Science Project (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the Data Science Project (optional)", + "type": "object" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_create" + }, + { + "annotations": { + "title": "Data Science Project: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete a Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_delete" + }, + { + "annotations": { + "title": "Data Science Project: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get details of a specific Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_get" + }, + { + "annotations": { + "title": "Data Science Projects: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all Data Science Projects in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "The namespace to search for Data Science Projects (optional, defaults to all namespaces)", + "type": "string" + } + } + }, + "name": "datascience_projects_list" + }, + { + "annotations": { + "title": "Events: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Kubernetes events in the current cluster from all namespaces", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "type": "string" + } + } + }, + "name": "events_list" + }, + { + "annotations": { + "title": "Experiment: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the experiment (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "description": { + "description": "Description for the experiment (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the experiment (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the experiment (optional)", + "type": "object" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_create" + }, + { + "annotations": { + "title": "Experiment: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_delete" + }, + { + "annotations": { + "title": "Experiment: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_get" + }, + { + "annotations": { + "title": "Experiments: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI machine learning experiments", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Namespace to filter experiments (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by experiment status (optional)", + "type": "string" + } + } + }, + "name": "experiments_list" + }, + { + "annotations": { + "title": "Helm: Install", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "Install a Helm chart in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "chart": { + "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "Name of the Helm release (Optional, random name if not provided)", + "type": "string" + }, + "namespace": { + "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "type": "string" + }, + "values": { + "description": "Values to pass to the Helm chart (Optional)", + "type": "object" + } + }, + "required": [ + "chart" + ] + }, + "name": "helm_install" + }, + { + "annotations": { + "title": "Helm: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "inputSchema": { + "type": "object", + "properties": { + "all_namespaces": { + "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", + "type": "boolean" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "type": "string" + } + } + }, + "name": "helm_list" + }, + { + "annotations": { + "title": "Helm: Uninstall", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": true, + "openWorldHint": true + }, + "description": "Uninstall a Helm release in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "Name of the Helm release to uninstall", + "type": "string" + }, + "namespace": { + "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "name": "helm_uninstall" + }, + { + "annotations": { + "title": "Model: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new machine learning model entry in OpenShift AI", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "format": { + "description": "The model format (e.g., pickle, onnx, savedmodel)", + "type": "string" + }, + "framework_version": { + "description": "The framework version (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, + "model_type": { + "description": "The model type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the model", + "type": "string" + }, + "version": { + "description": "The model version (optional)", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "model_type", + "format" + ] + }, + "name": "model_create" + }, + { + "annotations": { + "title": "Model: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete a machine learning model from OpenShift AI", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, + "namespace": { + "description": "The namespace of the model", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "model_delete" + }, + { + "annotations": { + "title": "Model: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get details of a specific machine learning model", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, + "namespace": { + "description": "The namespace of the model", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "model_get" + }, + { + "annotations": { + "title": "Model: Update", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, "openWorldHint": false }, - "description": "List all available context names and associated server urls from the kubeconfig file", + "description": "Update an existing machine learning model in OpenShift AI", "inputSchema": { - "type": "object" + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, + "namespace": { + "description": "The namespace of the model", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "configuration_contexts_list" + "name": "model_update" }, { "annotations": { - "title": "Configuration: View", + "title": "Models: List", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, "openWorldHint": true }, - "description": "Get the current Kubernetes configuration content as a kubeconfig YAML", + "description": "List all machine learning models in the current OpenShift AI cluster", "inputSchema": { "type": "object", "properties": { - "minified": { - "description": "Return a minified version of the configuration. If set to true, keeps only the current-context and the relevant pieces of the configuration for that context. If set to false, all contexts, clusters, auth-infos, and users are returned in the configuration. (Optional, default true)", - "type": "boolean" + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "model_type": { + "description": "Filter models by type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "namespace": { + "description": "The namespace to search for models (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter models by status (e.g., Ready, Pending, Failed)", + "type": "string" } } }, - "name": "configuration_view" + "name": "models_list" }, { "annotations": { - "title": "Events: List", + "title": "Namespaces: List", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, "openWorldHint": true }, - "description": "List all the Kubernetes events in the current cluster from all namespaces", + "description": "List all the Kubernetes namespaces in the current cluster", "inputSchema": { "type": "object", "properties": { @@ -52,31 +923,23 @@ "fake-context" ], "type": "string" - }, - "namespace": { - "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", - "type": "string" } } }, - "name": "events_list" + "name": "namespaces_list" }, { "annotations": { - "title": "Helm: Install", - "readOnlyHint": false, + "title": "Node: Log", + "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, "openWorldHint": true }, - "description": "Install a Helm chart in the current or provided namespace", + "description": "Get logs from a Kubernetes node (kubelet, kube-proxy, or other system logs). This accesses node logs through the Kubernetes API proxy to the kubelet", "inputSchema": { "type": "object", "properties": { - "chart": { - "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", - "type": "string" - }, "context": { "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", "enum": [ @@ -86,40 +949,39 @@ "type": "string" }, "name": { - "description": "Name of the Helm release (Optional, random name if not provided)", + "description": "Name of the node to get logs from", "type": "string" }, - "namespace": { - "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "query": { + "description": "query specifies services(s) or files from which to return logs (required). Example: \"kubelet\" to fetch kubelet logs, \"/\u003clog-file-name\u003e\" to fetch a specific log file from the node (e.g., \"/var/log/kubelet.log\" or \"/var/log/kube-proxy.log\")", "type": "string" }, - "values": { - "description": "Values to pass to the Helm chart (Optional)", - "type": "object" + "tailLines": { + "default": 100, + "description": "Number of lines to retrieve from the end of the logs (Optional, 0 means all logs)", + "minimum": 0, + "type": "integer" } }, "required": [ - "chart" + "name", + "query" ] }, - "name": "helm_install" + "name": "nodes_log" }, { "annotations": { - "title": "Helm: List", + "title": "Node: Stats Summary", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, "openWorldHint": true }, - "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "description": "Get detailed resource usage statistics from a Kubernetes node via the kubelet's Summary API. 
Provides comprehensive metrics including CPU, memory, filesystem, and network usage at the node, pod, and container levels. On systems with cgroup v2 and kernel 4.20+, also includes PSI (Pressure Stall Information) metrics that show resource pressure for CPU, memory, and I/O. See https://kubernetes.io/docs/reference/instrumentation/understand-psi-metrics/ for details on PSI metrics", "inputSchema": { "type": "object", "properties": { - "all_namespaces": { - "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", - "type": "boolean" - }, "context": { "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", "enum": [ @@ -128,26 +990,36 @@ ], "type": "string" }, - "namespace": { - "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "name": { + "description": "Name of the node to get stats from", "type": "string" } - } + }, + "required": [ + "name" + ] }, - "name": "helm_list" + "name": "nodes_stats_summary" }, { "annotations": { - "title": "Helm: Uninstall", + "title": "Pipeline: Create", "readOnlyHint": false, - "destructiveHint": true, - "idempotentHint": true, - "openWorldHint": true + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false }, - "description": "Uninstall a Helm release in the current or provided namespace", + "description": "Create a new OpenShift AI data science pipeline", "inputSchema": { "type": "object", "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the pipeline (optional)", + "type": "object" + }, "context": { "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", "enum": [ @@ -156,30 +1028,46 @@ ], "type": "string" }, + "description": { + "description": "Description for the pipeline (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the pipeline (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the pipeline (optional)", + "type": "object" + }, "name": { - "description": "Name of the Helm release to uninstall", + "description": "The name of the pipeline", "type": "string" }, "namespace": { - "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "description": "The namespace where to create the pipeline", "type": "string" } }, "required": [ - "name" + "name", + "namespace" ] }, - "name": "helm_uninstall" + "name": "pipeline_create" }, { "annotations": { - "title": "Namespaces: List", - "readOnlyHint": true, - "destructiveHint": false, + "title": "Pipeline: Delete", + "readOnlyHint": false, + "destructiveHint": true, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Kubernetes namespaces in the current cluster", + "description": "Delete an OpenShift AI data science pipeline", "inputSchema": { "type": "object", "properties": { @@ -190,20 +1078,32 @@ "fake-context" ], "type": "string" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" } - } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "namespaces_list" + "name": "pipeline_delete" }, { "annotations": { - "title": "Node: Log", + "title": "Pipeline: Get", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "Get logs from a Kubernetes node (kubelet, kube-proxy, or other system logs). This accesses node logs through the Kubernetes API proxy to the kubelet", + "description": "Get a specific OpenShift AI data science pipeline", "inputSchema": { "type": "object", "properties": { @@ -216,36 +1116,30 @@ "type": "string" }, "name": { - "description": "Name of the node to get logs from", + "description": "The name of the pipeline", "type": "string" }, - "query": { - "description": "query specifies services(s) or files from which to return logs (required). Example: \"kubelet\" to fetch kubelet logs, \"/\u003clog-file-name\u003e\" to fetch a specific log file from the node (e.g., \"/var/log/kubelet.log\" or \"/var/log/kube-proxy.log\")", + "namespace": { + "description": "The namespace of the pipeline", "type": "string" - }, - "tailLines": { - "default": 100, - "description": "Number of lines to retrieve from the end of the logs (Optional, 0 means all logs)", - "minimum": 0, - "type": "integer" } }, "required": [ "name", - "query" + "namespace" ] }, - "name": "nodes_log" + "name": "pipeline_get" }, { "annotations": { - "title": "Node: Stats Summary", + "title": "Pipeline Run: Get", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "Get detailed resource usage statistics from a Kubernetes node via the kubelet's Summary API. Provides comprehensive metrics including CPU, memory, filesystem, and network usage at the node, pod, and container levels. 
On systems with cgroup v2 and kernel 4.20+, also includes PSI (Pressure Stall Information) metrics that show resource pressure for CPU, memory, and I/O. See https://kubernetes.io/docs/reference/instrumentation/understand-psi-metrics/ for details on PSI metrics", + "description": "Get a specific OpenShift AI data science pipeline run", "inputSchema": { "type": "object", "properties": { @@ -258,15 +1152,88 @@ "type": "string" }, "name": { - "description": "Name of the node to get stats from", + "description": "The name of the pipeline run", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline run", "type": "string" } }, "required": [ - "name" + "name", + "namespace" ] }, - "name": "nodes_stats_summary" + "name": "pipeline_run_get" + }, + { + "annotations": { + "title": "Pipeline Runs: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipeline runs", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Namespace to filter pipeline runs (optional, defaults to all namespaces)", + "type": "string" + }, + "pipeline_name": { + "description": "Filter by pipeline name (optional)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline run status (optional)", + "type": "string" + } + } + }, + "name": "pipeline_runs_list" + }, + { + "annotations": { + "title": "Pipelines: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipelines", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "enum": [ + "extra-cluster", + "fake-context" + ], + "type": "string" + }, + "namespace": { + "description": "Namespace to filter pipelines (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline status (optional)", + "type": "string" + } + } + }, + "name": "pipelines_list" }, { "annotations": { @@ -750,4 +1717,4 @@ }, "name": "resources_list" } -] +] \ No newline at end of file diff --git a/pkg/mcp/testdata/toolsets-full-tools-multicluster.json b/pkg/mcp/testdata/toolsets-full-tools-multicluster.json index 6e85e401..9a561b6f 100644 --- a/pkg/mcp/testdata/toolsets-full-tools-multicluster.json +++ b/pkg/mcp/testdata/toolsets-full-tools-multicluster.json @@ -1,4 +1,159 @@ [ + { + "annotations": { + "title": "Application: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI application (Jupyter notebook, code server, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the application (optional)", + "type": "object" + }, + "app_type": { + "description": "Application type (e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "description": { + "description": "Description for the application (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the application (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the application (optional)", + "type": "object" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "app_type" + ] + }, + "name": "application_create" + }, + { + "annotations": { + "title": "Application: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_delete" + }, + { + "annotations": { + "title": "Application: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_get" + }, + { + "annotations": { + "title": "Applications: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI applications (Jupyter notebooks, code servers, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "app_type": { + "description": "Filter by application type (optional, e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter applications (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by application status (optional)", + "type": "string" + } + } + }, + "name": "applications_list" + }, { "annotations": { "title": "Configuration: Contexts List", @@ -31,17 +186,499 @@ } } }, - "name": "configuration_view" + "name": "configuration_view" + }, + { + "annotations": { + "title": "Data Science Project: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the Data Science Project (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "description": { + "description": "A description for the Data Science Project (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the Data Science Project (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the Data Science Project (optional)", + "type": "object" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_create" + }, + { + "annotations": { + "title": "Data Science Project: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete a Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_delete" + }, + { + "annotations": { + "title": "Data Science Project: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get details of a specific Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_get" + }, + { + "annotations": { + "title": "Data Science Projects: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all Data Science Projects in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "The namespace to search for Data Science Projects (optional, defaults to all namespaces)", + "type": "string" + } + } + }, + "name": "datascience_projects_list" + }, + { + "annotations": { + "title": "Events: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Kubernetes events in the current cluster from all namespaces", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "type": "string" + } + } + }, + "name": "events_list" + }, + { + "annotations": { + "title": "Experiment: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the experiment (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "description": { + "description": "Description for the experiment (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the experiment (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the experiment (optional)", + "type": "object" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_create" + }, + { + "annotations": { + "title": "Experiment: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_delete" + }, + { + "annotations": { + "title": "Experiment: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_get" + }, + { + "annotations": { + "title": "Experiments: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI machine learning experiments", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter experiments (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by experiment status (optional)", + "type": "string" + } + } + }, + "name": "experiments_list" + }, + { + "annotations": { + "title": "Helm: Install", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "Install a Helm chart in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "chart": { + "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", + "type": "string" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "Name of the Helm release (Optional, random name if not provided)", + "type": "string" + }, + "namespace": { + "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "type": "string" + }, + "values": { + "description": "Values to pass to the Helm chart (Optional)", + "type": "object" + } + }, + "required": [ + "chart" + ] + }, + "name": "helm_install" + }, + { + "annotations": { + "title": "Helm: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "inputSchema": { + "type": "object", + "properties": { + "all_namespaces": { + "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", + "type": "boolean" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "type": "string" + } + } + }, + "name": "helm_list" + }, + { + "annotations": { + "title": "Helm: Uninstall", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": true, + "openWorldHint": true + }, + "description": "Uninstall a Helm release in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "Name of the Helm release to uninstall", + "type": "string" + }, + "namespace": { + "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "name": "helm_uninstall" + }, + { + "annotations": { + "title": "Model: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new machine learning model entry in OpenShift AI", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "format": { + "description": "The model format (e.g., pickle, onnx, savedmodel)", + "type": "string" + }, + "framework_version": { + "description": "The framework version (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, + "model_type": { + "description": "The model type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the model", + "type": "string" + }, + "version": { + "description": "The model version (optional)", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "model_type", + "format" + ] + }, + "name": "model_create" }, { "annotations": { - "title": "Events: List", - "readOnlyHint": true, - "destructiveHint": false, + "title": "Model: Delete", + "readOnlyHint": false, + "destructiveHint": true, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Kubernetes events in the current cluster from all namespaces", + "description": "Delete a machine learning model from OpenShift AI", "inputSchema": { "type": "object", "properties": { @@ -49,90 +686,117 @@ "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", "type": "string" }, + "name": { + "description": "The name of the model", + "type": "string" + }, "namespace": { - "description": "Optional Namespace to retrieve the events from. 
If not provided, will list events from all namespaces", + "description": "The namespace of the model", "type": "string" } - } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "events_list" + "name": "model_delete" }, { "annotations": { - "title": "Helm: Install", - "readOnlyHint": false, + "title": "Model: Get", + "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "Install a Helm chart in the current or provided namespace", + "description": "Get details of a specific machine learning model", "inputSchema": { "type": "object", "properties": { - "chart": { - "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", - "type": "string" - }, "context": { "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", "type": "string" }, "name": { - "description": "Name of the Helm release (Optional, random name if not provided)", + "description": "The name of the model", "type": "string" }, "namespace": { - "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "description": "The namespace of the model", "type": "string" - }, - "values": { - "description": "Values to pass to the Helm chart (Optional)", - "type": "object" } }, "required": [ - "chart" + "name", + "namespace" ] }, - "name": "helm_install" + "name": "model_get" }, { "annotations": { - "title": "Helm: List", - "readOnlyHint": true, + "title": "Model: Update", + "readOnlyHint": false, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "description": "Update an existing machine learning model in OpenShift AI", "inputSchema": { "type": "object", "properties": { - "all_namespaces": { - "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", - "type": "boolean" + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" }, "context": { "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", "type": "string" }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, "namespace": { - "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "description": "The namespace of the model", "type": "string" } - } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "helm_list" + "name": "model_update" }, { "annotations": { - "title": "Helm: Uninstall", - "readOnlyHint": false, - "destructiveHint": true, - "idempotentHint": true, + "title": "Models: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, "openWorldHint": true }, - "description": "Uninstall a Helm release in the current or provided namespace", + "description": "List all machine learning models in the current OpenShift AI cluster", "inputSchema": { "type": "object", "properties": { @@ -140,20 +804,21 @@ "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", "type": "string" }, - "name": { - "description": "Name of the Helm release to uninstall", + "model_type": { + "description": "Filter models by type (e.g., pytorch, tensorflow, sklearn)", "type": "string" }, "namespace": { - "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "description": "The namespace to search for models (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter models by status (e.g., Ready, Pending, Failed)", "type": "string" } - }, - "required": [ - "name" - ] + } }, - "name": "helm_uninstall" + "name": "models_list" }, { "annotations": { @@ -240,6 +905,216 @@ }, "name": "nodes_stats_summary" }, + { + "annotations": { + "title": "Pipeline: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the pipeline (optional)", + "type": "object" + }, + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "description": { + "description": "Description for the pipeline (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the pipeline (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the pipeline (optional)", + "type": "object" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_create" + }, + { + "annotations": { + "title": "Pipeline: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_delete" + }, + { + "annotations": { + "title": "Pipeline: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_get" + }, + { + "annotations": { + "title": "Pipeline Run: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline run", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "name": { + "description": "The name of the pipeline run", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline run", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_run_get" + }, + { + "annotations": { + "title": "Pipeline Runs: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipeline runs", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. 
Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter pipeline runs (optional, defaults to all namespaces)", + "type": "string" + }, + "pipeline_name": { + "description": "Filter by pipeline name (optional)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline run status (optional)", + "type": "string" + } + } + }, + "name": "pipeline_runs_list" + }, + { + "annotations": { + "title": "Pipelines: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipelines", + "inputSchema": { + "type": "object", + "properties": { + "context": { + "description": "Optional parameter selecting which context to run the tool in. Defaults to fake-context if not set", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter pipelines (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline status (optional)", + "type": "string" + } + } + }, + "name": "pipelines_list" + }, { "annotations": { "title": "Pods: Delete", @@ -674,4 +1549,4 @@ }, "name": "resources_list" } -] +] \ No newline at end of file diff --git a/pkg/mcp/testdata/toolsets-full-tools-openshift.json b/pkg/mcp/testdata/toolsets-full-tools-openshift.json index fb24138e..7f477795 100644 --- a/pkg/mcp/testdata/toolsets-full-tools-openshift.json +++ b/pkg/mcp/testdata/toolsets-full-tools-openshift.json @@ -1,4 +1,143 @@ [ + { + "annotations": { + "title": "Application: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI application (Jupyter notebook, code server, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the application (optional)", + "type": "object" + }, + "app_type": { + "description": "Application type (e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "description": { + "description": "Description for the application (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the application (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the application (optional)", + "type": "object" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "app_type" + ] + }, + "name": "application_create" + }, + { + "annotations": { + "title": "Application: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_delete" + }, + { + "annotations": { + "title": "Application: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a 
specific OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_get" + }, + { + "annotations": { + "title": "Applications: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI applications (Jupyter notebooks, code servers, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "app_type": { + "description": "Filter by application type (optional, e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter applications (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by application status (optional)", + "type": "string" + } + } + }, + "name": "applications_list" + }, { "annotations": { "title": "Configuration: View", @@ -17,113 +156,571 @@ } } }, - "name": "configuration_view" + "name": "configuration_view" + }, + { + "annotations": { + "title": "Data Science Project: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the Data Science Project (optional)", + "type": "object" + }, + "description": { + "description": "A description for the Data Science Project (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the Data Science Project (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the Data Science Project (optional)", + "type": "object" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_create" + }, + { + "annotations": { + "title": "Data Science Project: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete a Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_delete" + }, + { + "annotations": { + "title": "Data Science Project: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get details of a specific Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_get" + }, 
+ { + "annotations": { + "title": "Data Science Projects: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all Data Science Projects in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "The namespace to search for Data Science Projects (optional, defaults to all namespaces)", + "type": "string" + } + } + }, + "name": "datascience_projects_list" + }, + { + "annotations": { + "title": "Events: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Kubernetes events in the current cluster from all namespaces", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "type": "string" + } + } + }, + "name": "events_list" + }, + { + "annotations": { + "title": "Experiment: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the experiment (optional)", + "type": "object" + }, + "description": { + "description": "Description for the experiment (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the experiment (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the experiment (optional)", + "type": "object" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_create" + }, + { + "annotations": { + "title": "Experiment: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_delete" + }, + { + "annotations": { + "title": "Experiment: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_get" + }, + { + "annotations": { + "title": "Experiments: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI machine learning experiments", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + 
"description": "Namespace to filter experiments (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by experiment status (optional)", + "type": "string" + } + } + }, + "name": "experiments_list" + }, + { + "annotations": { + "title": "Helm: Install", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "Install a Helm chart in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "chart": { + "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", + "type": "string" + }, + "name": { + "description": "Name of the Helm release (Optional, random name if not provided)", + "type": "string" + }, + "namespace": { + "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "type": "string" + }, + "values": { + "description": "Values to pass to the Helm chart (Optional)", + "type": "object" + } + }, + "required": [ + "chart" + ] + }, + "name": "helm_install" + }, + { + "annotations": { + "title": "Helm: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "inputSchema": { + "type": "object", + "properties": { + "all_namespaces": { + "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", + "type": "boolean" + }, + "namespace": { + "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "type": "string" + } + } + }, + "name": "helm_list" + }, + { + "annotations": { + "title": "Helm: Uninstall", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": true, + "openWorldHint": true + }, + "description": "Uninstall a Helm release in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "Name of the Helm release to uninstall", + "type": "string" + }, + "namespace": { + "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "name": "helm_uninstall" }, { "annotations": { - "title": "Events: List", - "readOnlyHint": true, + "title": "Model: Create", + "readOnlyHint": false, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Kubernetes events in the current cluster from all namespaces", + "description": "Create a new machine learning model entry in OpenShift AI", "inputSchema": { "type": "object", "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "format": { + "description": "The model format (e.g., pickle, onnx, savedmodel)", + "type": "string" + }, + "framework_version": { + "description": "The framework version (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to 
the model (optional)", + "type": "object" + }, + "model_type": { + "description": "The model type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, "namespace": { - "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "description": "The namespace where to create the model", + "type": "string" + }, + "version": { + "description": "The model version (optional)", "type": "string" } - } + }, + "required": [ + "name", + "namespace", + "model_type", + "format" + ] }, - "name": "events_list" + "name": "model_create" }, { "annotations": { - "title": "Helm: Install", + "title": "Model: Delete", "readOnlyHint": false, - "destructiveHint": false, + "destructiveHint": true, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "Install a Helm chart in the current or provided namespace", + "description": "Delete a machine learning model from OpenShift AI", "inputSchema": { "type": "object", "properties": { - "chart": { - "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", - "type": "string" - }, "name": { - "description": "Name of the Helm release (Optional, random name if not provided)", + "description": "The name of the model", "type": "string" }, "namespace": { - "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "description": "The namespace of the model", "type": "string" - }, - "values": { - "description": "Values to pass to the Helm chart (Optional)", - "type": "object" } }, "required": [ - "chart" + "name", + "namespace" ] }, - "name": "helm_install" + "name": "model_delete" }, { "annotations": { - "title": "Helm: List", + "title": "Model: Get", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "description": "Get details of a specific machine learning model", "inputSchema": { "type": "object", "properties": { - "all_namespaces": { - "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", - "type": "boolean" + "name": { + "description": "The name of the model", + "type": "string" }, "namespace": { - "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "description": "The namespace of the model", "type": "string" } - } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "helm_list" + "name": "model_get" }, { "annotations": { - "title": "Helm: Uninstall", + "title": "Model: Update", "readOnlyHint": false, - "destructiveHint": true, - "idempotentHint": true, - "openWorldHint": true + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false }, - "description": "Uninstall a Helm release in the current or provided namespace", + "description": "Update an existing machine learning model in OpenShift AI", "inputSchema": { "type": "object", "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display 
name for the model (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, "name": { - "description": "Name of the Helm release to uninstall", + "description": "The name of the model", "type": "string" }, "namespace": { - "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "description": "The namespace of the model", "type": "string" } }, "required": [ - "name" + "name", + "namespace" ] }, - "name": "helm_uninstall" + "name": "model_update" + }, + { + "annotations": { + "title": "Models: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all machine learning models in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "model_type": { + "description": "Filter models by type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "namespace": { + "description": "The namespace to search for models (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter models by status (e.g., Ready, Pending, Failed)", + "type": "string" + } + } + }, + "name": "models_list" }, { "annotations": { @@ -196,6 +793,192 @@ }, "name": "nodes_stats_summary" }, + { + "annotations": { + "title": "Pipeline: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the pipeline (optional)", + "type": "object" + }, + "description": { + "description": "Description for the pipeline (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the pipeline (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the pipeline (optional)", + "type": "object" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_create" + }, + { + "annotations": { + "title": "Pipeline: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_delete" + }, + { + "annotations": { + "title": "Pipeline: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" 
+ ] + }, + "name": "pipeline_get" + }, + { + "annotations": { + "title": "Pipeline Run: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline run", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline run", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline run", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_run_get" + }, + { + "annotations": { + "title": "Pipeline Runs: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipeline runs", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Namespace to filter pipeline runs (optional, defaults to all namespaces)", + "type": "string" + }, + "pipeline_name": { + "description": "Filter by pipeline name (optional)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline run status (optional)", + "type": "string" + } + } + }, + "name": "pipeline_runs_list" + }, + { + "annotations": { + "title": "Pipelines: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipelines", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Namespace to filter pipelines (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline status (optional)", + "type": "string" + } + } + }, + "name": "pipelines_list" + }, { "annotations": { "title": "Pods: Delete", @@ -596,4 +1379,4 @@ }, "name": "resources_list" } -] +] \ No newline at end of file diff --git a/pkg/mcp/testdata/toolsets-full-tools.json b/pkg/mcp/testdata/toolsets-full-tools.json index 5a4b5112..c3d7bc6f 100644 --- a/pkg/mcp/testdata/toolsets-full-tools.json +++ b/pkg/mcp/testdata/toolsets-full-tools.json @@ -1,4 +1,143 @@ [ + { + "annotations": { + "title": "Application: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI application (Jupyter notebook, code server, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the application (optional)", + "type": "object" + }, + "app_type": { + "description": "Application type (e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "description": { + "description": "Description for the application (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the application (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the application (optional)", + "type": "object" + }, + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "app_type" + ] + }, + "name": "application_create" + }, + { + "annotations": { + "title": "Application: Delete", + "readOnlyHint": false, + "destructiveHint": 
true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_delete" + }, + { + "annotations": { + "title": "Application: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI application", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the application", + "type": "string" + }, + "namespace": { + "description": "The namespace of the application", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "application_get" + }, + { + "annotations": { + "title": "Applications: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI applications (Jupyter notebooks, code servers, etc.)", + "inputSchema": { + "type": "object", + "properties": { + "app_type": { + "description": "Filter by application type (optional, e.g., 'Jupyter', 'CodeServer')", + "type": "string" + }, + "namespace": { + "description": "Namespace to filter applications (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by application status (optional)", + "type": "string" + } + } + }, + "name": "applications_list" + }, { "annotations": { "title": "Configuration: View", @@ -17,113 +156,571 @@ } } }, - "name": "configuration_view" + "name": "configuration_view" + }, + { + "annotations": { + "title": "Data Science Project: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the Data Science Project (optional)", + "type": "object" + }, + "description": { + "description": "A description for the Data Science Project (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the Data Science Project (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the Data Science Project (optional)", + "type": "object" + }, + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_create" + }, + { + "annotations": { + "title": "Data Science Project: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete a Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + 
"name": "datascience_project_delete" + }, + { + "annotations": { + "title": "Data Science Project: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get details of a specific Data Science Project", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the Data Science Project", + "type": "string" + }, + "namespace": { + "description": "The namespace of the Data Science Project", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "datascience_project_get" + }, + { + "annotations": { + "title": "Data Science Projects: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all Data Science Projects in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "The namespace to search for Data Science Projects (optional, defaults to all namespaces)", + "type": "string" + } + } + }, + "name": "datascience_projects_list" + }, + { + "annotations": { + "title": "Events: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Kubernetes events in the current cluster from all namespaces", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "type": "string" + } + } + }, + "name": "events_list" + }, + { + "annotations": { + "title": "Experiment: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the experiment (optional)", + "type": "object" + }, + "description": { + "description": "Description for the experiment (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the experiment (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the experiment (optional)", + "type": "object" + }, + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_create" + }, + { + "annotations": { + "title": "Experiment: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI machine learning experiment", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_delete" + }, + { + "annotations": { + "title": "Experiment: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI machine learning experiment", + 
"inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "namespace": { + "description": "The namespace of the experiment", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "experiment_get" + }, + { + "annotations": { + "title": "Experiments: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI machine learning experiments", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Namespace to filter experiments (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by experiment status (optional)", + "type": "string" + } + } + }, + "name": "experiments_list" + }, + { + "annotations": { + "title": "Helm: Install", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "Install a Helm chart in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "chart": { + "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", + "type": "string" + }, + "name": { + "description": "Name of the Helm release (Optional, random name if not provided)", + "type": "string" + }, + "namespace": { + "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "type": "string" + }, + "values": { + "description": "Values to pass to the Helm chart (Optional)", + "type": "object" + } + }, + "required": [ + "chart" + ] + }, + "name": "helm_install" + }, + { + "annotations": { + "title": "Helm: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "inputSchema": { + "type": "object", + "properties": { + "all_namespaces": { + "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", + "type": "boolean" + }, + "namespace": { + "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "type": "string" + } + } + }, + "name": "helm_list" + }, + { + "annotations": { + "title": "Helm: Uninstall", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": true, + "openWorldHint": true + }, + "description": "Uninstall a Helm release in the current or provided namespace", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "Name of the Helm release to uninstall", + "type": "string" + }, + "namespace": { + "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "name": "helm_uninstall" }, { "annotations": { - "title": "Events: List", - "readOnlyHint": true, + "title": "Model: Create", + "readOnlyHint": false, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Kubernetes events in the current cluster from all namespaces", + "description": "Create a new machine learning model entry in OpenShift AI", "inputSchema": { "type": "object", "properties": { + "annotations": { + "additionalProperties": { + 
"type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "format": { + "description": "The model format (e.g., pickle, onnx, savedmodel)", + "type": "string" + }, + "framework_version": { + "description": "The framework version (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, + "model_type": { + "description": "The model type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "name": { + "description": "The name of the model", + "type": "string" + }, "namespace": { - "description": "Optional Namespace to retrieve the events from. If not provided, will list events from all namespaces", + "description": "The namespace where to create the model", + "type": "string" + }, + "version": { + "description": "The model version (optional)", "type": "string" } - } + }, + "required": [ + "name", + "namespace", + "model_type", + "format" + ] }, - "name": "events_list" + "name": "model_create" }, { "annotations": { - "title": "Helm: Install", + "title": "Model: Delete", "readOnlyHint": false, - "destructiveHint": false, + "destructiveHint": true, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "Install a Helm chart in the current or provided namespace", + "description": "Delete a machine learning model from OpenShift AI", "inputSchema": { "type": "object", "properties": { - "chart": { - "description": "Chart reference to install (for example: stable/grafana, oci://ghcr.io/nginxinc/charts/nginx-ingress)", - "type": "string" - }, "name": { - "description": "Name of the Helm release (Optional, random name if not provided)", + "description": "The name of the model", "type": "string" }, "namespace": { - "description": "Namespace to install the Helm chart in (Optional, current namespace if not provided)", + "description": "The namespace of the model", "type": "string" - }, - "values": { - "description": "Values to pass to the Helm chart (Optional)", - "type": "object" } }, "required": [ - "chart" + "name", + "namespace" ] }, - "name": "helm_install" + "name": "model_delete" }, { "annotations": { - "title": "Helm: List", + "title": "Model: Get", "readOnlyHint": true, "destructiveHint": false, "idempotentHint": false, - "openWorldHint": true + "openWorldHint": false }, - "description": "List all the Helm releases in the current or provided namespace (or in all namespaces if specified)", + "description": "Get details of a specific machine learning model", "inputSchema": { "type": "object", "properties": { - "all_namespaces": { - "description": "If true, lists all Helm releases in all namespaces ignoring the namespace argument (Optional)", - "type": "boolean" + "name": { + "description": "The name of the model", + "type": "string" }, "namespace": { - "description": "Namespace to list Helm releases from (Optional, all namespaces if not provided)", + "description": "The namespace of the model", "type": "string" } - } + }, + "required": [ + "name", + "namespace" + ] }, - "name": "helm_list" + "name": "model_get" }, { "annotations": { - "title": "Helm: Uninstall", + "title": "Model: Update", "readOnlyHint": false, - "destructiveHint": true, - "idempotentHint": true, 
- "openWorldHint": true + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false }, - "description": "Uninstall a Helm release in the current or provided namespace", + "description": "Update an existing machine learning model in OpenShift AI", "inputSchema": { "type": "object", "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the model (optional)", + "type": "object" + }, + "description": { + "description": "A description for the model (optional)", + "type": "string" + }, + "display_name": { + "description": "A display name for the model (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the model (optional)", + "type": "object" + }, "name": { - "description": "Name of the Helm release to uninstall", + "description": "The name of the model", "type": "string" }, "namespace": { - "description": "Namespace to uninstall the Helm release from (Optional, current namespace if not provided)", + "description": "The namespace of the model", "type": "string" } }, "required": [ - "name" + "name", + "namespace" ] }, - "name": "helm_uninstall" + "name": "model_update" + }, + { + "annotations": { + "title": "Models: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": true + }, + "description": "List all machine learning models in the current OpenShift AI cluster", + "inputSchema": { + "type": "object", + "properties": { + "model_type": { + "description": "Filter models by type (e.g., pytorch, tensorflow, sklearn)", + "type": "string" + }, + "namespace": { + "description": "The namespace to search for models (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter models by status (e.g., Ready, Pending, Failed)", + "type": "string" + } + } + }, + "name": "models_list" }, { "annotations": { @@ -196,6 +793,192 @@ }, "name": "nodes_stats_summary" }, + { + "annotations": { + "title": "Pipeline: Create", + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Create a new OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Annotations to apply to the pipeline (optional)", + "type": "object" + }, + "description": { + "description": "Description for the pipeline (optional)", + "type": "string" + }, + "display_name": { + "description": "Display name for the pipeline (optional)", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to apply to the pipeline (optional)", + "type": "object" + }, + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace where to create the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_create" + }, + { + "annotations": { + "title": "Pipeline: Delete", + "readOnlyHint": false, + "destructiveHint": true, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Delete an OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of 
the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_delete" + }, + { + "annotations": { + "title": "Pipeline: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_get" + }, + { + "annotations": { + "title": "Pipeline Run: Get", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": false, + "openWorldHint": false + }, + "description": "Get a specific OpenShift AI data science pipeline run", + "inputSchema": { + "type": "object", + "properties": { + "name": { + "description": "The name of the pipeline run", + "type": "string" + }, + "namespace": { + "description": "The namespace of the pipeline run", + "type": "string" + } + }, + "required": [ + "name", + "namespace" + ] + }, + "name": "pipeline_run_get" + }, + { + "annotations": { + "title": "Pipeline Runs: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipeline runs", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Namespace to filter pipeline runs (optional, defaults to all namespaces)", + "type": "string" + }, + "pipeline_name": { + "description": "Filter by pipeline name (optional)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline run status (optional)", + "type": "string" + } + } + }, + "name": "pipeline_runs_list" + }, + { + "annotations": { + "title": "Pipelines: List", + "readOnlyHint": true, + "destructiveHint": false, + "idempotentHint": true, + "openWorldHint": false + }, + "description": "List all OpenShift AI data science pipelines", + "inputSchema": { + "type": "object", + "properties": { + "namespace": { + "description": "Namespace to filter pipelines (optional, defaults to all namespaces)", + "type": "string" + }, + "status": { + "description": "Filter by pipeline status (optional)", + "type": "string" + } + } + }, + "name": "pipelines_list" + }, { "annotations": { "title": "Pods: Delete", @@ -582,4 +1365,4 @@ }, "name": "resources_list" } -] +] \ No newline at end of file diff --git a/pkg/openshift-ai/application.go b/pkg/openshift-ai/application.go new file mode 100644 index 00000000..2d3e5473 --- /dev/null +++ b/pkg/openshift-ai/application.go @@ -0,0 +1,185 @@ +package openshiftai + +import ( + "context" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + + "github.com/containers/kubernetes-mcp-server/pkg/api" +) + +// ApplicationClient handles Application operations +type ApplicationClient struct { + client *Client +} + +// NewApplicationClient creates a new Application client +func NewApplicationClient(client *Client) *ApplicationClient { + return &ApplicationClient{ + client: client, + } +} + +// List lists all Applications in the cluster or in a specific namespace +func (c *ApplicationClient) List(ctx context.Context, namespace, status, appType string) ([]*api.Application, error) { + gvr, err := c.client.GetGVR("applications") + if err != nil { + return nil, err + } 
+ + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, metav1.ListOptions{}) + } else { + list, err = resourceInterface.List(ctx, metav1.ListOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to list Applications: %w", err) + } + + applications := make([]*api.Application, 0, len(list.Items)) + for _, item := range list.Items { + application, err := c.unstructuredToApplication(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert Application: %w", err) + } + applications = append(applications, application) + } + + return applications, nil +} + +// Get gets a specific Application +func (c *ApplicationClient) Get(ctx context.Context, name, namespace string) (*api.Application, error) { + gvr, err := c.client.GetGVR("applications") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + obj, err := resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get Application %s/%s: %w", namespace, name, err) + } + + return c.unstructuredToApplication(obj) +} + +// Create creates a new Application +func (c *ApplicationClient) Create(ctx context.Context, application *api.Application) (*api.Application, error) { + gvr, err := c.client.GetGVR("applications") + if err != nil { + return nil, err + } + + obj := c.applicationToUnstructured(application) + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + createdObj, err := resourceInterface.Namespace(application.Namespace).Create(ctx, obj, metav1.CreateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to create Application %s/%s: %w", application.Namespace, application.Name, err) + } + + return c.unstructuredToApplication(createdObj) +} + +// Delete deletes an Application +func (c *ApplicationClient) Delete(ctx context.Context, name, namespace string) error { + gvr, err := c.client.GetGVR("applications") + if err != nil { + return err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + err = resourceInterface.Namespace(namespace).Delete(ctx, name, metav1.DeleteOptions{}) + if err != nil { + return fmt.Errorf("failed to delete Application %s/%s: %w", namespace, name, err) + } + + return nil +} + +// unstructuredToApplication converts an Unstructured object to Application +func (c *ApplicationClient) unstructuredToApplication(obj *unstructured.Unstructured) (*api.Application, error) { + application := &api.Application{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Get display name from annotations + if displayName, ok := obj.GetAnnotations()["openshift.io/display-name"]; ok { + application.DisplayName = &displayName + } + + // Get description from annotations + if description, ok := obj.GetAnnotations()["openshift.io/description"]; ok { + application.Description = &description + } + + // Get status + if phase, ok, _ := unstructured.NestedString(obj.Object, "status", "phase"); ok { + application.Status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(obj.Object, "status", "message"); ok { + application.Status.Message = &message + } + if ready, ok, _ := unstructured.NestedBool(obj.Object, "status", "ready"); ok { + application.Status.Ready = ready + } + if appType, ok, _ := unstructured.NestedString(obj.Object, "spec", 
"appType"); ok { + application.AppType = appType + } + if url, ok, _ := unstructured.NestedString(obj.Object, "status", "url"); ok { + application.Status.URL = &url + } + if lastUpdated, ok, _ := unstructured.NestedString(obj.Object, "status", "lastUpdated"); ok { + application.Status.LastUpdated = &lastUpdated + } + + return application, nil +} + +// applicationToUnstructured converts a Application to Unstructured object +func (c *ApplicationClient) applicationToUnstructured(application *api.Application) *unstructured.Unstructured { + obj := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "app.opendatahub.io/v1", + "kind": "Application", + "metadata": map[string]interface{}{ + "name": application.Name, + "namespace": application.Namespace, + }, + "spec": map[string]interface{}{ + "appType": application.AppType, + }, + }, + } + + // Store display name and description in annotations + if application.Annotations == nil { + application.Annotations = make(map[string]string) + } + if application.DisplayName != nil { + application.Annotations["openshift.io/display-name"] = *application.DisplayName + } + if application.Description != nil { + application.Annotations["openshift.io/description"] = *application.Description + } + + if len(application.Labels) > 0 { + obj.SetLabels(application.Labels) + } + + if len(application.Annotations) > 0 { + obj.SetAnnotations(application.Annotations) + } + + return obj +} diff --git a/pkg/openshift-ai/client.go b/pkg/openshift-ai/client.go new file mode 100644 index 00000000..0ed104b7 --- /dev/null +++ b/pkg/openshift-ai/client.go @@ -0,0 +1,192 @@ +package openshiftai + +import ( + "context" + "fmt" + "time" + + "k8s.io/apimachinery/pkg/api/meta" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/client-go/discovery" + "k8s.io/client-go/dynamic" + "k8s.io/client-go/rest" + "k8s.io/klog/v2" +) + +// Client manages OpenShift AI operations +type Client struct { + config *rest.Config + dynamicClient dynamic.Interface + discoveryClient discovery.DiscoveryInterface + restMapper meta.RESTMapper +} + +// Config holds OpenShift AI client configuration +type Config struct { + // Timeout for API operations + Timeout time.Duration + // Enable debug logging + Debug bool +} + +// DefaultConfig returns default configuration for OpenShift AI client +func DefaultConfig() *Config { + return &Config{ + Timeout: 30 * time.Second, + Debug: false, + } +} + +// NewClient creates a new OpenShift AI client +func NewClient(cfg *rest.Config, clientConfig *Config) (*Client, error) { + if clientConfig == nil { + clientConfig = DefaultConfig() + } + + dynamicClient, err := dynamic.NewForConfig(cfg) + if err != nil { + return nil, fmt.Errorf("failed to create dynamic client: %w", err) + } + + discoveryClient, err := discovery.NewDiscoveryClientForConfig(cfg) + if err != nil { + return nil, fmt.Errorf("failed to create discovery client: %w", err) + } + + restMapper := meta.NewDefaultRESTMapper([]schema.GroupVersion{}) + + client := &Client{ + config: cfg, + dynamicClient: dynamicClient, + discoveryClient: discoveryClient, + restMapper: restMapper, + } + + if clientConfig.Debug { + klog.V(2).InfoS("OpenShift AI client initialized with debug logging") + } + + return client, nil +} + +// IsAvailable checks if OpenShift AI is available in the cluster +func (c *Client) IsAvailable(ctx context.Context) bool { + // Check for key OpenShift AI CRDs + crdGroups := []string{ + 
"datasciencepipelinesapplications.opendatahub.io", + "kserve.io", + "tekton.dev", + } + + for _, group := range crdGroups { + _, err := c.discoveryClient.ServerResourcesForGroupVersion(group + "/v1") + if err != nil { + klog.V(3).InfoS("OpenShift AI CRD group not available", "group", group, "error", err) + continue + } + klog.V(2).InfoS("Found OpenShift AI CRD group", "group", group) + return true + } + + return false +} + +// GetDynamicClient returns the dynamic client for CRD operations +func (c *Client) GetDynamicClient() dynamic.Interface { + return c.dynamicClient +} + +// GetDiscoveryClient returns the discovery client for API discovery +func (c *Client) GetDiscoveryClient() discovery.DiscoveryInterface { + return c.discoveryClient +} + +// GetRESTMapper returns the REST mapper for resource mapping +func (c *Client) GetRESTMapper() meta.RESTMapper { + return c.restMapper +} + +// GetGVR returns GroupVersionResource for a given resource name +func (c *Client) GetGVR(resource string) (schema.GroupVersionResource, error) { + switch resource { + case "datascienceprojects": + return schema.GroupVersionResource{ + Group: "datasciencepipelinesapplications.opendatahub.io", + Version: "v1", + Resource: "datasciencepipelinesapplications", + }, nil + case "applications": + return schema.GroupVersionResource{ + Group: "app.opendatahub.io", + Version: "v1", + Resource: "applications", + }, nil + case "models": + return schema.GroupVersionResource{ + Group: "model.opendatahub.io", + Version: "v1", + Resource: "models", + }, nil + case "experiments": + return schema.GroupVersionResource{ + Group: "datasciencepipelines.opendatahub.io", + Version: "v1", + Resource: "experiments", + }, nil + case "notebooks": + return schema.GroupVersionResource{ + Group: "kubeflow.org", + Version: "v1", + Resource: "notebooks", + }, nil + case "inferenceservices": + return schema.GroupVersionResource{ + Group: "serving.kserve.io", + Version: "v1beta1", + Resource: "inferenceservices", + }, nil + case "pipelines": + return schema.GroupVersionResource{ + Group: "datasciencepipelines.opendatahub.io", + Version: "v1alpha1", + Resource: "pipelines", + }, nil + case "pipelineruns": + return schema.GroupVersionResource{ + Group: "tekton.dev", + Version: "v1beta1", + Resource: "pipelineruns", + }, nil + default: + return schema.GroupVersionResource{}, fmt.Errorf("unknown resource type: %s", resource) + } +} + +// ListNamespaces returns all namespaces that have OpenShift AI resources +func (c *Client) ListNamespaces(ctx context.Context) ([]string, error) { + gvr, err := c.GetGVR("datascienceprojects") + if err != nil { + return nil, err + } + + resourceInterface := c.dynamicClient.Resource(gvr) + list, err := resourceInterface.List(ctx, metav1.ListOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to list DataScienceProjects: %w", err) + } + + namespaceSet := make(map[string]bool) + for _, item := range list.Items { + if ns := item.GetNamespace(); ns != "" { + namespaceSet[ns] = true + } + } + + var namespaces []string + for ns := range namespaceSet { + namespaces = append(namespaces, ns) + } + + return namespaces, nil +} diff --git a/pkg/openshift-ai/config.go b/pkg/openshift-ai/config.go new file mode 100644 index 00000000..1bbda00b --- /dev/null +++ b/pkg/openshift-ai/config.go @@ -0,0 +1,330 @@ +package openshiftai + +import ( + "time" + + "github.com/containers/kubernetes-mcp-server/pkg/config" + "k8s.io/client-go/rest" + "k8s.io/klog/v2" +) + +// OpenShiftAIConfig holds OpenShift AI specific configuration 
+type OpenShiftAIConfig struct { + // Enable OpenShift AI toolset + Enabled bool `toml:"enabled"` + + // Timeout for API operations + Timeout time.Duration `toml:"timeout"` + + // Enable debug logging + Debug bool `toml:"debug"` + + // Default namespace for operations + DefaultNamespace string `toml:"default_namespace"` + + // Component-specific configuration + DataScienceProjects DataScienceProjectsConfig `toml:"datascience_projects"` + JupyterNotebooks JupyterNotebooksConfig `toml:"jupyter_notebooks"` + ModelServing ModelServingConfig `toml:"model_serving"` + Pipelines PipelinesConfig `toml:"pipelines"` + GPUMonitoring GPUMonitoringConfig `toml:"gpu_monitoring"` +} + +// DataScienceProjectsConfig holds configuration for Data Science Projects +type DataScienceProjectsConfig struct { + Enabled bool `toml:"enabled"` + + // Auto-create namespaces if they don't exist + AutoCreateNamespaces bool `toml:"auto_create_namespaces"` + + // Default project settings + DefaultDisplayName string `toml:"default_display_name"` + DefaultDescription string `toml:"default_description"` +} + +// JupyterNotebooksConfig holds configuration for Jupyter Notebooks +type JupyterNotebooksConfig struct { + Enabled bool `toml:"enabled"` + + // Default notebook image + DefaultImage string `toml:"default_image"` + + // Default resource requests + DefaultCPURequest string `toml:"default_cpu_request"` + DefaultMemoryRequest string `toml:"default_memory_request"` + + // Default resource limits + DefaultCPULimit string `toml:"default_cpu_limit"` + DefaultMemoryLimit string `toml:"default_memory_limit"` + + // Auto-stop idle notebooks + AutoStopIdle bool `toml:"auto_stop_idle"` + IdleTimeout time.Duration `toml:"idle_timeout"` +} + +// ModelServingConfig holds configuration for Model Serving +type ModelServingConfig struct { + Enabled bool `toml:"enabled"` + + // Default runtime for model serving + DefaultRuntime string `toml:"default_runtime"` + + // Auto-scaling configuration + EnableAutoScaling bool `toml:"enable_auto_scaling"` + MinReplicas int `toml:"min_replicas"` + MaxReplicas int `toml:"max_replicas"` + + // Resource defaults + DefaultCPURequest string `toml:"default_cpu_request"` + DefaultMemoryRequest string `toml:"default_memory_request"` +} + +// PipelinesConfig holds configuration for AI Pipelines +type PipelinesConfig struct { + Enabled bool `toml:"enabled"` + + // Default service account for pipelines + DefaultServiceAccount string `toml:"default_service_account"` + + // Pipeline timeout + DefaultTimeout time.Duration `toml:"default_timeout"` + + // Enable pipeline artifacts + EnableArtifacts bool `toml:"enable_artifacts"` + + // Artifact storage configuration + ArtifactStorage ArtifactStorageConfig `toml:"artifact_storage"` +} + +// ArtifactStorageConfig holds configuration for pipeline artifact storage +type ArtifactStorageConfig struct { + Type string `toml:"type"` // "s3", "gcs", "azure", "pvc" + Bucket string `toml:"bucket"` + Endpoint string `toml:"endpoint"` + Path string `toml:"path"` +} + +// GPUMonitoringConfig holds configuration for GPU monitoring +type GPUMonitoringConfig struct { + Enabled bool `toml:"enabled"` + + // GPU vendors to monitor + Vendors []string `toml:"vendors"` + + // Metrics collection interval + MetricsInterval time.Duration `toml:"metrics_interval"` + + // Enable detailed GPU metrics + DetailedMetrics bool `toml:"detailed_metrics"` +} + +// DefaultOpenShiftAIConfig returns default configuration for OpenShift AI +func DefaultOpenShiftAIConfig() *OpenShiftAIConfig { + return 
&OpenShiftAIConfig{ + Enabled: true, + Timeout: 30 * time.Second, + Debug: false, + DefaultNamespace: "default", + DataScienceProjects: DataScienceProjectsConfig{ + Enabled: true, + AutoCreateNamespaces: false, + DefaultDisplayName: "", + DefaultDescription: "", + }, + JupyterNotebooks: JupyterNotebooksConfig{ + Enabled: true, + DefaultImage: "quay.io/opendatahub/workbench-notebook:latest", + DefaultCPURequest: "100m", + DefaultMemoryRequest: "1Gi", + DefaultCPULimit: "2", + DefaultMemoryLimit: "4Gi", + AutoStopIdle: false, + IdleTimeout: 30 * time.Minute, + }, + ModelServing: ModelServingConfig{ + Enabled: true, + DefaultRuntime: "kserve", + EnableAutoScaling: false, + MinReplicas: 1, + MaxReplicas: 3, + DefaultCPURequest: "100m", + DefaultMemoryRequest: "512Mi", + }, + Pipelines: PipelinesConfig{ + Enabled: true, + DefaultServiceAccount: "pipeline", + DefaultTimeout: 60 * time.Minute, + EnableArtifacts: true, + ArtifactStorage: ArtifactStorageConfig{ + Type: "pvc", + Bucket: "", + Endpoint: "", + Path: "/mnt/artifacts", + }, + }, + GPUMonitoring: GPUMonitoringConfig{ + Enabled: true, + Vendors: []string{"nvidia", "amd", "intel"}, + MetricsInterval: 30 * time.Second, + DetailedMetrics: false, + }, + } +} + +// ConfigManager manages OpenShift AI configuration +type ConfigManager struct { + config *OpenShiftAIConfig +} + +// NewConfigManager creates a new configuration manager +func NewConfigManager() *ConfigManager { + return &ConfigManager{ + config: DefaultOpenShiftAIConfig(), + } +} + +// LoadFromStatic loads configuration from static config +func (cm *ConfigManager) LoadFromStatic(staticConfig *config.StaticConfig) error { + if staticConfig.Toolsets == nil { + klog.V(2).InfoS("No toolsets configuration found, using defaults") + return nil + } + + // Look for openshift-ai toolset configuration + for _, toolsetName := range staticConfig.Toolsets { + if toolsetName == "openshift-ai" { + klog.V(1).InfoS("Loading OpenShift AI configuration from static config") + // In a real implementation, we would parse the toolset configuration + // For now, we'll use defaults + return nil + } + } + + klog.V(2).InfoS("OpenShift AI toolset configuration not found, using defaults") + return nil +} + +// LoadFromKubeConfig loads configuration from kubeconfig +func (cm *ConfigManager) LoadFromKubeConfig(kubeConfig *rest.Config) error { + // Load configuration from kubeconfig context or cluster info + // This is a placeholder for future implementation + klog.V(2).InfoS("Loading OpenShift AI configuration from kubeconfig") + return nil +} + +// GetConfig returns the current configuration +func (cm *ConfigManager) GetConfig() *OpenShiftAIConfig { + return cm.config +} + +// UpdateConfig updates the configuration +func (cm *ConfigManager) UpdateConfig(newConfig *OpenShiftAIConfig) error { + if newConfig == nil { + return InvalidArgumentError("config cannot be nil") + } + + cm.config = newConfig + klog.V(1).InfoS("OpenShift AI configuration updated") + return nil +} + +// Validate validates the current configuration +func (cm *ConfigManager) Validate() error { + cfg := cm.config + + if cfg.Timeout <= 0 { + return InvalidArgumentError("timeout must be positive") + } + + if cfg.DefaultNamespace == "" { + return InvalidArgumentError("default_namespace cannot be empty") + } + + if cfg.JupyterNotebooks.DefaultImage == "" { + return InvalidArgumentError("default_image cannot be empty") + } + + if cfg.ModelServing.DefaultRuntime == "" { + return InvalidArgumentError("default_runtime cannot be empty") + } + + if 
cfg.Pipelines.DefaultServiceAccount == "" { + return InvalidArgumentError("default_service_account cannot be empty") + } + + if len(cfg.GPUMonitoring.Vendors) == 0 { + return InvalidArgumentError("at least one GPU vendor must be specified") + } + + klog.V(1).InfoS("OpenShift AI configuration validation passed") + return nil +} + +// IsEnabled checks if OpenShift AI is enabled +func (cm *ConfigManager) IsEnabled() bool { + return cm.config.Enabled +} + +// IsComponentEnabled checks if a specific component is enabled +func (cm *ConfigManager) IsComponentEnabled(component string) bool { + switch component { + case "datascience_projects": + return cm.config.DataScienceProjects.Enabled + case "jupyter_notebooks": + return cm.config.JupyterNotebooks.Enabled + case "model_serving": + return cm.config.ModelServing.Enabled + case "pipelines": + return cm.config.Pipelines.Enabled + case "gpu_monitoring": + return cm.config.GPUMonitoring.Enabled + default: + return false + } +} + +// GetTimeout returns the configured timeout +func (cm *ConfigManager) GetTimeout() time.Duration { + return cm.config.Timeout +} + +// GetDefaultNamespace returns the configured default namespace +func (cm *ConfigManager) GetDefaultNamespace() string { + return cm.config.DefaultNamespace +} + +// GetDebugMode returns whether debug mode is enabled +func (cm *ConfigManager) GetDebugMode() bool { + return cm.config.Debug +} + +// ApplyDefaults applies default values to the configuration +func (cm *ConfigManager) ApplyDefaults() { + defaults := DefaultOpenShiftAIConfig() + + if cm.config.Timeout == 0 { + cm.config.Timeout = defaults.Timeout + } + + if cm.config.DefaultNamespace == "" { + cm.config.DefaultNamespace = defaults.DefaultNamespace + } + + if cm.config.JupyterNotebooks.DefaultImage == "" { + cm.config.JupyterNotebooks.DefaultImage = defaults.JupyterNotebooks.DefaultImage + } + + if cm.config.ModelServing.DefaultRuntime == "" { + cm.config.ModelServing.DefaultRuntime = defaults.ModelServing.DefaultRuntime + } + + if cm.config.Pipelines.DefaultServiceAccount == "" { + cm.config.Pipelines.DefaultServiceAccount = defaults.Pipelines.DefaultServiceAccount + } + + if len(cm.config.GPUMonitoring.Vendors) == 0 { + cm.config.GPUMonitoring.Vendors = defaults.GPUMonitoring.Vendors + } + + klog.V(2).InfoS("Applied default configuration values") +} diff --git a/pkg/openshift-ai/datascience_project.go b/pkg/openshift-ai/datascience_project.go new file mode 100644 index 00000000..0ad7956d --- /dev/null +++ b/pkg/openshift-ai/datascience_project.go @@ -0,0 +1,205 @@ +package openshiftai + +import ( + "context" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" +) + +// DataScienceProject represents an OpenShift AI Data Science Project +type DataScienceProject struct { + Name string `json:"name"` + Namespace string `json:"namespace"` + DisplayName *string `json:"displayName,omitempty"` + Description *string `json:"description,omitempty"` + Labels map[string]string `json:"labels,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` + Status DataScienceProjectStatus `json:"status"` +} + +// DataScienceProjectStatus represents the status of a DataScienceProject +type DataScienceProjectStatus struct { + Phase string `json:"phase"` + Message string `json:"message,omitempty"` +} + +// DataScienceProjectClient handles DataScienceProject operations +type DataScienceProjectClient struct { + client *Client +} + +// NewDataScienceProjectClient creates a new 
DataScienceProject client +func NewDataScienceProjectClient(client *Client) *DataScienceProjectClient { + return &DataScienceProjectClient{ + client: client, + } +} + +// List lists all DataScienceProjects in the cluster or in a specific namespace +func (c *DataScienceProjectClient) List(ctx context.Context, namespace string) ([]*DataScienceProject, error) { + gvr, err := c.client.GetGVR("datascienceprojects") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, metav1.ListOptions{}) + } else { + list, err = resourceInterface.List(ctx, metav1.ListOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to list DataScienceProjects: %w", err) + } + + projects := make([]*DataScienceProject, 0, len(list.Items)) + for _, item := range list.Items { + project, err := c.unstructuredToDataScienceProject(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert DataScienceProject: %w", err) + } + projects = append(projects, project) + } + + return projects, nil +} + +// Get gets a specific DataScienceProject +func (c *DataScienceProjectClient) Get(ctx context.Context, name, namespace string) (*DataScienceProject, error) { + gvr, err := c.client.GetGVR("datascienceprojects") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + obj, err := resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get DataScienceProject %s/%s: %w", namespace, name, err) + } + + return c.unstructuredToDataScienceProject(obj) +} + +// Create creates a new DataScienceProject +func (c *DataScienceProjectClient) Create(ctx context.Context, project *DataScienceProject) (*DataScienceProject, error) { + gvr, err := c.client.GetGVR("datascienceprojects") + if err != nil { + return nil, err + } + + obj := c.dataScienceProjectToUnstructured(project) + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + createdObj, err := resourceInterface.Namespace(project.Namespace).Create(ctx, obj, metav1.CreateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to create DataScienceProject %s/%s: %w", project.Namespace, project.Name, err) + } + + return c.unstructuredToDataScienceProject(createdObj) +} + +// Delete deletes a DataScienceProject +func (c *DataScienceProjectClient) Delete(ctx context.Context, name, namespace string) error { + gvr, err := c.client.GetGVR("datascienceprojects") + if err != nil { + return err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + err = resourceInterface.Namespace(namespace).Delete(ctx, name, metav1.DeleteOptions{}) + if err != nil { + return fmt.Errorf("failed to delete DataScienceProject %s/%s: %w", namespace, name, err) + } + + return nil +} + +// unstructuredToDataScienceProject converts an Unstructured object to DataScienceProject +func (c *DataScienceProjectClient) unstructuredToDataScienceProject(obj *unstructured.Unstructured) (*DataScienceProject, error) { + project := &DataScienceProject{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Get display name from annotations (DataSciencePipelinesApplication doesn't have displayName in spec) + if displayName, ok := obj.GetAnnotations()["openshift.io/display-name"]; ok { + project.DisplayName = 
&displayName + } + + // Get description from annotations (DataSciencePipelinesApplication doesn't have description in spec) + if description, ok := obj.GetAnnotations()["openshift.io/description"]; ok { + project.Description = &description + } + + // Get status + if phase, ok, _ := unstructured.NestedString(obj.Object, "status", "phase"); ok { + project.Status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(obj.Object, "status", "message"); ok { + project.Status.Message = message + } + + return project, nil +} + +// dataScienceProjectToUnstructured converts a DataScienceProject to Unstructured object +func (c *DataScienceProjectClient) dataScienceProjectToUnstructured(project *DataScienceProject) *unstructured.Unstructured { + obj := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "datasciencepipelinesapplications.opendatahub.io/v1", + "kind": "DataSciencePipelinesApplication", + "metadata": map[string]interface{}{ + "name": project.Name, + "namespace": project.Namespace, + }, + "spec": map[string]interface{}{ + "dspVersion": "v2", + "objectStorage": map[string]interface{}{ + "disableHealthCheck": false, + "enableExternalRoute": false, + }, + "apiServer": map[string]interface{}{ + "deploy": true, + "enableOauth": true, + }, + "database": map[string]interface{}{ + "disableHealthCheck": false, + "mariaDB": map[string]interface{}{ + "deploy": true, + "pipelineDBName": "mlpipeline", + "pvcSize": "10Gi", + "username": "mlpipeline", + }, + }, + }, + }, + } + + // Store display name and description in annotations since DataSciencePipelinesApplication doesn't have these fields in spec + if project.Annotations == nil { + project.Annotations = make(map[string]string) + } + if project.DisplayName != nil { + project.Annotations["openshift.io/display-name"] = *project.DisplayName + } + if project.Description != nil { + project.Annotations["openshift.io/description"] = *project.Description + } + + if len(project.Labels) > 0 { + obj.SetLabels(project.Labels) + } + + if len(project.Annotations) > 0 { + obj.SetAnnotations(project.Annotations) + } + + return obj +} diff --git a/pkg/openshift-ai/detection.go b/pkg/openshift-ai/detection.go new file mode 100644 index 00000000..beeea7f6 --- /dev/null +++ b/pkg/openshift-ai/detection.go @@ -0,0 +1,184 @@ +package openshiftai + +import ( + "context" + "fmt" + "time" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/client-go/discovery" + "k8s.io/klog/v2" +) + +// AvailabilityStatus represents the availability status of OpenShift AI components +type AvailabilityStatus struct { + Available bool `json:"available"` + Version string `json:"version,omitempty"` + Components []string `json:"components,omitempty"` + MissingCRDs []string `json:"missingCRDs,omitempty"` + Warnings []string `json:"warnings,omitempty"` +} + +// Detector handles OpenShift AI detection and availability checking +type Detector struct { + client *Client + discoveryClient discovery.DiscoveryInterface +} + +// NewDetector creates a new OpenShift AI detector +func NewDetector(client *Client) *Detector { + return &Detector{ + client: client, + discoveryClient: client.GetDiscoveryClient(), + } +} + +// CheckAvailability performs comprehensive OpenShift AI availability check +func (d *Detector) CheckAvailability(ctx context.Context) *AvailabilityStatus { + status := &AvailabilityStatus{ + Available: false, + Components: []string{}, + MissingCRDs: 
[]string{}, + Warnings: []string{}, + } + + // Define required OpenShift AI CRD groups + requiredCRDs := map[string]string{ + "datascience.opendatahub.io": "Data Science Projects", + "kubeflow.org": "Jupyter Notebooks", + "serving.kserve.io": "Model Serving", + "tekton.dev": "AI Pipelines", + } + + // Check each required CRD group + for group, component := range requiredCRDs { + available, version := d.checkCRDGroup(ctx, group) + if available { + status.Components = append(status.Components, fmt.Sprintf("%s (%s)", component, version)) + if group == "datascience.opendatahub.io" { + status.Version = version + } + } else { + status.MissingCRDs = append(status.MissingCRDs, fmt.Sprintf("%s (%s)", group, component)) + } + } + + // Check for optional GPU monitoring components + gpuAvailable := d.checkGPUSupport(ctx) + if gpuAvailable { + status.Components = append(status.Components, "GPU Monitoring") + } + + // Determine overall availability + // Core requirement: at least Data Science Projects should be available + coreAvailable, _ := d.checkCRDGroup(ctx, "datascience.opendatahub.io") + if coreAvailable { + status.Available = true + } + + // Add warnings for missing optional components + if len(status.MissingCRDs) > 0 { + status.Warnings = append(status.Warnings, fmt.Sprintf("Some OpenShift AI components are not available: %v", status.MissingCRDs)) + } + + if status.Available { + klog.V(2).InfoS("OpenShift AI is available", "components", status.Components) + } else { + klog.V(2).InfoS("OpenShift AI is not available in this cluster") + } + + return status +} + +// checkCRDGroup checks if a CRD group is available and returns its version +func (d *Detector) checkCRDGroup(ctx context.Context, group string) (bool, string) { + // Try common versions + versions := []string{"v1", "v1beta1", "v1alpha1"} + + for _, version := range versions { + gv := schema.GroupVersion{Group: group, Version: version} + _, err := d.discoveryClient.ServerResourcesForGroupVersion(gv.String()) + if err == nil { + klog.V(3).InfoS("Found OpenShift AI CRD group", "group", group, "version", version) + return true, version + } + } + + klog.V(3).InfoS("OpenShift AI CRD group not found", "group", group) + return false, "" +} + +// checkGPUSupport checks if GPU monitoring is available +func (d *Detector) checkGPUSupport(ctx context.Context) bool { + // Check for GPU-related resources + gpuIndicators := []string{ + "nvidia.com/gpu", + "amd.com/gpu", + "intel.com/gpu", + } + + // Check for GPU nodes + gvr := schema.GroupVersionResource{ + Group: "", + Version: "v1", + Resource: "nodes", + } + + nodes, err := d.client.GetDynamicClient().Resource(gvr).List(ctx, metav1.ListOptions{}) + if err != nil { + klog.V(3).InfoS("Failed to list nodes for GPU detection", "error", err) + return false + } + + for _, node := range nodes.Items { + if capacity, found, _ := unstructured.NestedStringMap(node.Object, "status", "capacity"); found { + for _, indicator := range gpuIndicators { + if _, exists := capacity[indicator]; exists { + klog.V(3).InfoS("Found GPU indicator", "indicator", indicator, "node", node.GetName()) + return true + } + } + } + } + + return false +} + +// IsOpenShiftAICluster quickly checks if this is an OpenShift AI cluster +func (d *Detector) IsOpenShiftAICluster(ctx context.Context) bool { + // Quick check for the core DataScienceProject CRD + available, _ := d.checkCRDGroup(ctx, "datascience.opendatahub.io") + return available +} + +// GetOpenShiftAIVersion returns the detected OpenShift AI version +func (d *Detector) 
GetOpenShiftAIVersion(ctx context.Context) (string, error) { + available, version := d.checkCRDGroup(ctx, "datascience.opendatahub.io") + if !available { + return "", fmt.Errorf("OpenShift AI is not available") + } + return version, nil +} + +// WaitForAvailability waits for OpenShift AI to become available +func (d *Detector) WaitForAvailability(ctx context.Context, timeout time.Duration) error { + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + ticker := time.NewTicker(2 * time.Second) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return fmt.Errorf("timeout waiting for OpenShift AI to become available") + case <-ticker.C: + if d.IsOpenShiftAICluster(ctx) { + klog.V(2).InfoS("OpenShift AI is now available") + return nil + } + } + } +} diff --git a/pkg/openshift-ai/dynamic_client.go b/pkg/openshift-ai/dynamic_client.go new file mode 100644 index 00000000..3d951b7c --- /dev/null +++ b/pkg/openshift-ai/dynamic_client.go @@ -0,0 +1,193 @@ +package openshiftai + +import ( + "context" + "fmt" + + "k8s.io/apimachinery/pkg/api/meta" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/apimachinery/pkg/types" + "k8s.io/client-go/dynamic" +) + +// DynamicClientManager manages dynamic client operations for OpenShift AI CRDs +type DynamicClientManager struct { + dynamicClient dynamic.Interface + restMapper meta.RESTMapper +} + +// NewDynamicClientManager creates a new dynamic client manager +func NewDynamicClientManager(dynamicClient dynamic.Interface, restMapper meta.RESTMapper) *DynamicClientManager { + return &DynamicClientManager{ + dynamicClient: dynamicClient, + restMapper: restMapper, + } +} + +// ResourceClient provides a typed interface for working with specific resources +type ResourceClient struct { + client dynamic.ResourceInterface + gvr schema.GroupVersionResource + namespace string +} + +// GetResourceClient returns a resource client for the specified resource type +func (d *DynamicClientManager) GetResourceClient(resource, namespace string) (*ResourceClient, error) { + gvr, err := getGVRForResource(resource) + if err != nil { + return nil, err + } + + var resourceInterface dynamic.ResourceInterface + if namespace != "" { + resourceInterface = d.dynamicClient.Resource(gvr).Namespace(namespace) + } else { + resourceInterface = d.dynamicClient.Resource(gvr) + } + + return &ResourceClient{ + client: resourceInterface, + gvr: gvr, + namespace: namespace, + }, nil +} + +// List lists resources of the specified type +func (r *ResourceClient) List(ctx context.Context, opts metav1.ListOptions) (*unstructured.UnstructuredList, error) { + return r.client.List(ctx, opts) +} + +// Get gets a specific resource by name +func (r *ResourceClient) Get(ctx context.Context, name string, opts metav1.GetOptions) (*unstructured.Unstructured, error) { + return r.client.Get(ctx, name, opts) +} + +// Create creates a new resource +func (r *ResourceClient) Create(ctx context.Context, obj *unstructured.Unstructured, opts metav1.CreateOptions) (*unstructured.Unstructured, error) { + return r.client.Create(ctx, obj, opts) +} + +// Update updates an existing resource +func (r *ResourceClient) Update(ctx context.Context, obj *unstructured.Unstructured, opts metav1.UpdateOptions) (*unstructured.Unstructured, error) { + return r.client.Update(ctx, obj, opts) +} + +// Delete deletes a resource by name +func (r *ResourceClient) Delete(ctx context.Context, name string, opts 
metav1.DeleteOptions) error { + return r.client.Delete(ctx, name, opts) +} + +// DeleteCollection deletes a collection of resources +func (r *ResourceClient) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error { + return r.client.DeleteCollection(ctx, opts, listOpts) +} + +// Patch patches a resource +func (r *ResourceClient) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts metav1.PatchOptions) (*unstructured.Unstructured, error) { + return r.client.Patch(ctx, name, pt, data, opts) +} + +// GetGVR returns the GroupVersionResource for this client +func (r *ResourceClient) GetGVR() schema.GroupVersionResource { + return r.gvr +} + +// GetNamespace returns the namespace for this client +func (r *ResourceClient) GetNamespace() string { + return r.namespace +} + +// getGVRForResource returns the GroupVersionResource for a given resource type +func getGVRForResource(resource string) (schema.GroupVersionResource, error) { + switch resource { + case "datascienceprojects": + // Align with client.GetGVR mapping: DataSciencePipelinesApplication as the project abstraction + return schema.GroupVersionResource{ + Group: "datasciencepipelinesapplications.opendatahub.io", + Version: "v1", + Resource: "datasciencepipelinesapplications", + }, nil + case "notebooks": + return schema.GroupVersionResource{ + Group: "kubeflow.org", + Version: "v1", + Resource: "notebooks", + }, nil + case "inferenceservices": + return schema.GroupVersionResource{ + Group: "serving.kserve.io", + Version: "v1beta1", + Resource: "inferenceservices", + }, nil + case "pipelineruns": + return schema.GroupVersionResource{ + Group: "tekton.dev", + Version: "v1beta1", + Resource: "pipelineruns", + }, nil + case "pipelines": + return schema.GroupVersionResource{ + Group: "tekton.dev", + Version: "v1beta1", + Resource: "pipelines", + }, nil + case "nodes": + return schema.GroupVersionResource{ + Group: "", + Version: "v1", + Resource: "nodes", + }, nil + case "pods": + return schema.GroupVersionResource{ + Group: "", + Version: "v1", + Resource: "pods", + }, nil + case "namespaces": + return schema.GroupVersionResource{ + Group: "", + Version: "v1", + Resource: "namespaces", + }, nil + default: + return schema.GroupVersionResource{}, fmt.Errorf("unknown resource type: %s", resource) + } +} + +// ValidateResource checks if a resource type is available in the cluster +func (d *DynamicClientManager) ValidateResource(ctx context.Context, resource string) error { + gvr, err := getGVRForResource(resource) + if err != nil { + return err + } + + // Try to list the resource to verify it exists + _, err = d.dynamicClient.Resource(gvr).List(ctx, metav1.ListOptions{Limit: 1}) + if err != nil { + return fmt.Errorf("resource %s is not available: %w", resource, err) + } + + return nil +} + +// GetAvailableResources returns a list of available OpenShift AI resource types +func (d *DynamicClientManager) GetAvailableResources(ctx context.Context) ([]string, error) { + resources := []string{} + resourceTypes := []string{ + "datascienceprojects", + "notebooks", + "inferenceservices", + "pipelineruns", + "pipelines", + } + + for _, resource := range resourceTypes { + if err := d.ValidateResource(ctx, resource); err == nil { + resources = append(resources, resource) + } + } + + return resources, nil +} diff --git a/pkg/openshift-ai/errors.go b/pkg/openshift-ai/errors.go new file mode 100644 index 00000000..019fca4f --- /dev/null +++ b/pkg/openshift-ai/errors.go @@ -0,0 +1,34 @@ 
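+// Usage sketch for the helpers below (illustrative; the resource kind, the
+// names, and the calling context are assumptions, not part of any specific
+// client in this package):
+//
+//	if name == "" {
+//		return InvalidArgumentError("name cannot be empty")
+//	}
+//	if !exists {
+//		return NotFoundError("DataScienceProject", name)
+//	}
+//	return InternalError("failed to update DataScienceProject", err)
+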
+package openshiftai + +import ( + "fmt" +) + +// Common error constructors using simple Go error patterns +func NotFoundError(resource, name string) error { + return fmt.Errorf("%s '%s' not found", resource, name) +} + +func AlreadyExistsError(resource, name string) error { + return fmt.Errorf("%s '%s' already exists", resource, name) +} + +func InvalidArgumentError(message string) error { + return fmt.Errorf("invalid argument: %s", message) +} + +func PermissionDeniedError(action, resource string) error { + return fmt.Errorf("permission denied for %s on %s", action, resource) +} + +func UnavailableError(service string) error { + return fmt.Errorf("service '%s' is unavailable", service) +} + +func TimeoutError(operation string) error { + return fmt.Errorf("operation '%s' timed out", operation) +} + +func InternalError(message string, cause error) error { + return fmt.Errorf("%s: %w", message, cause) +} diff --git a/pkg/openshift-ai/experiment.go b/pkg/openshift-ai/experiment.go new file mode 100644 index 00000000..4d6093fe --- /dev/null +++ b/pkg/openshift-ai/experiment.go @@ -0,0 +1,185 @@ +package openshiftai + +import ( + "context" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + + "github.com/containers/kubernetes-mcp-server/pkg/api" +) + +// ExperimentClient handles Experiment operations +type ExperimentClient struct { + client *Client +} + +// NewExperimentClient creates a new Experiment client +func NewExperimentClient(client *Client) *ExperimentClient { + return &ExperimentClient{ + client: client, + } +} + +// List lists all Experiments in the cluster or in a specific namespace +func (c *ExperimentClient) List(ctx context.Context, namespace, status string) ([]*api.Experiment, error) { + gvr, err := c.client.GetGVR("experiments") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, metav1.ListOptions{}) + } else { + list, err = resourceInterface.List(ctx, metav1.ListOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to list Experiments: %w", err) + } + + experiments := make([]*api.Experiment, 0, len(list.Items)) + for _, item := range list.Items { + experiment, err := c.unstructuredToExperiment(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert Experiment: %w", err) + } + + // Filter by status if specified + if status != "" && experiment.Status.Phase != status { + continue + } + + experiments = append(experiments, experiment) + } + + return experiments, nil +} + +// Get gets a specific Experiment +func (c *ExperimentClient) Get(ctx context.Context, name, namespace string) (*api.Experiment, error) { + gvr, err := c.client.GetGVR("experiments") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + obj, err := resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get Experiment %s/%s: %w", namespace, name, err) + } + + return c.unstructuredToExperiment(obj) +} + +// Create creates a new Experiment +func (c *ExperimentClient) Create(ctx context.Context, experiment *api.Experiment) (*api.Experiment, error) { + gvr, err := c.client.GetGVR("experiments") + if err != nil { + return nil, err + } + + obj := c.experimentToUnstructured(experiment) + + resourceInterface := 
c.client.GetDynamicClient().Resource(gvr) + createdObj, err := resourceInterface.Namespace(experiment.Namespace).Create(ctx, obj, metav1.CreateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to create Experiment %s/%s: %w", experiment.Namespace, experiment.Name, err) + } + + return c.unstructuredToExperiment(createdObj) +} + +// Delete deletes an Experiment +func (c *ExperimentClient) Delete(ctx context.Context, name, namespace string) error { + gvr, err := c.client.GetGVR("experiments") + if err != nil { + return err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + err = resourceInterface.Namespace(namespace).Delete(ctx, name, metav1.DeleteOptions{}) + if err != nil { + return fmt.Errorf("failed to delete Experiment %s/%s: %w", namespace, name, err) + } + + return nil +} + +// unstructuredToExperiment converts an Unstructured object to Experiment +func (c *ExperimentClient) unstructuredToExperiment(obj *unstructured.Unstructured) (*api.Experiment, error) { + experiment := &api.Experiment{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Get display name from annotations + if displayName, ok := obj.GetAnnotations()["openshift.io/display-name"]; ok { + experiment.DisplayName = &displayName + } + + // Get description from annotations + if description, ok := obj.GetAnnotations()["openshift.io/description"]; ok { + experiment.Description = &description + } + + // Get status + if phase, ok, _ := unstructured.NestedString(obj.Object, "status", "phase"); ok { + experiment.Status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(obj.Object, "status", "message"); ok { + experiment.Status.Message = &message + } + if ready, ok, _ := unstructured.NestedBool(obj.Object, "status", "ready"); ok { + experiment.Status.Ready = ready + } + if runCount, ok, _ := unstructured.NestedInt64(obj.Object, "status", "runCount"); ok { + experiment.Status.RunCount = int(runCount) + } + if lastUpdated, ok, _ := unstructured.NestedString(obj.Object, "status", "lastUpdated"); ok { + experiment.Status.LastUpdated = &lastUpdated + } + + return experiment, nil +} + +// experimentToUnstructured converts a Experiment to Unstructured object +func (c *ExperimentClient) experimentToUnstructured(experiment *api.Experiment) *unstructured.Unstructured { + obj := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "datasciencepipelines.opendatahub.io/v1", + "kind": "Experiment", + "metadata": map[string]interface{}{ + "name": experiment.Name, + "namespace": experiment.Namespace, + }, + }, + } + + // Store display name and description in annotations + if experiment.Annotations == nil { + experiment.Annotations = make(map[string]string) + } + if experiment.DisplayName != nil { + experiment.Annotations["openshift.io/display-name"] = *experiment.DisplayName + } + if experiment.Description != nil { + experiment.Annotations["openshift.io/description"] = *experiment.Description + } + + if len(experiment.Labels) > 0 { + obj.SetLabels(experiment.Labels) + } + + if len(experiment.Annotations) > 0 { + obj.SetAnnotations(experiment.Annotations) + } + + return obj +} diff --git a/pkg/openshift-ai/model.go b/pkg/openshift-ai/model.go new file mode 100644 index 00000000..18fa146d --- /dev/null +++ b/pkg/openshift-ai/model.go @@ -0,0 +1,255 @@ +package openshiftai + +import ( + "context" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + 
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + + "github.com/containers/kubernetes-mcp-server/pkg/api" +) + +// ModelClient handles Model operations +type ModelClient struct { + client *Client +} + +// NewModelClient creates a new Model client +func NewModelClient(client *Client) *ModelClient { + return &ModelClient{ + client: client, + } +} + +// List lists all Models in the cluster or in a specific namespace +func (c *ModelClient) List(ctx context.Context, namespace, modelType, status string) ([]*api.Model, error) { + gvr, err := c.client.GetGVR("models") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, metav1.ListOptions{}) + } else { + list, err = resourceInterface.List(ctx, metav1.ListOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to list Models: %w", err) + } + + models := make([]*api.Model, 0, len(list.Items)) + for _, item := range list.Items { + model, err := c.unstructuredToModel(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert Model: %w", err) + } + models = append(models, model) + } + + return models, nil +} + +// Get gets a specific Model +func (c *ModelClient) Get(ctx context.Context, name, namespace string) (*api.Model, error) { + gvr, err := c.client.GetGVR("models") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + obj, err := resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get Model %s/%s: %w", namespace, name, err) + } + + return c.unstructuredToModel(obj) +} + +// Create creates a new Model +func (c *ModelClient) Create(ctx context.Context, model *api.Model) (*api.Model, error) { + gvr, err := c.client.GetGVR("models") + if err != nil { + return nil, err + } + + obj := c.modelToUnstructured(model) + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + createdObj, err := resourceInterface.Namespace(model.Namespace).Create(ctx, obj, metav1.CreateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to create Model %s/%s: %w", model.Namespace, model.Name, err) + } + + return c.unstructuredToModel(createdObj) +} + +// Update updates an existing Model +func (c *ModelClient) Update(ctx context.Context, model *api.Model) (*api.Model, error) { + gvr, err := c.client.GetGVR("models") + if err != nil { + return nil, err + } + + obj := c.modelToUnstructured(model) + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + updatedObj, err := resourceInterface.Namespace(model.Namespace).Update(ctx, obj, metav1.UpdateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to update Model %s/%s: %w", model.Namespace, model.Name, err) + } + + return c.unstructuredToModel(updatedObj) +} + +// Delete deletes a Model +func (c *ModelClient) Delete(ctx context.Context, name, namespace string) error { + gvr, err := c.client.GetGVR("models") + if err != nil { + return err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + err = resourceInterface.Namespace(namespace).Delete(ctx, name, metav1.DeleteOptions{}) + if err != nil { + return fmt.Errorf("failed to delete Model %s/%s: %w", namespace, name, err) + } + + return nil +} + +// unstructuredToModel converts an Unstructured object to Model +func (c *ModelClient) unstructuredToModel(obj *unstructured.Unstructured) 
(*api.Model, error) { + model := &api.Model{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Get display name from annotations + if displayName, ok := obj.GetAnnotations()["openshift.io/display-name"]; ok { + model.DisplayName = &displayName + } + + // Get description from annotations + if description, ok := obj.GetAnnotations()["openshift.io/description"]; ok { + model.Description = &description + } + + // Get model type from labels + if modelType, ok := obj.GetLabels()["model.opendatahub.io/type"]; ok { + model.ModelType = &modelType + } + + // Get framework version from labels + if frameworkVersion, ok := obj.GetLabels()["model.opendatahub.io/framework-version"]; ok { + model.FrameworkVersion = &frameworkVersion + } + + // Get format from labels + if format, ok := obj.GetLabels()["model.opendatahub.io/format"]; ok { + model.Format = &format + } + + // Get version from labels + if version, ok := obj.GetLabels()["model.opendatahub.io/version"]; ok { + model.Version = &version + } + + // Get size from annotations + if size, ok := obj.GetAnnotations()["model.opendatahub.io/size"]; ok { + if parsedSize, err := parseSize(size); err == nil { + model.Size = &parsedSize + } + } + + // Get status + if phase, ok, _ := unstructured.NestedString(obj.Object, "status", "phase"); ok { + model.Status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(obj.Object, "status", "message"); ok { + model.Status.Message = &message + } + if ready, ok, _ := unstructured.NestedBool(obj.Object, "status", "ready"); ok { + model.Status.Ready = ready + } + if deploymentStatus, ok, _ := unstructured.NestedString(obj.Object, "status", "deploymentStatus"); ok { + model.Status.DeploymentStatus = &deploymentStatus + } + + return model, nil +} + +// modelToUnstructured converts a Model to Unstructured object +func (c *ModelClient) modelToUnstructured(model *api.Model) *unstructured.Unstructured { + obj := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "model.opendatahub.io/v1", + "kind": "Model", + "metadata": map[string]interface{}{ + "name": model.Name, + "namespace": model.Namespace, + }, + "spec": map[string]interface{}{ + "modelType": model.ModelType, + "frameworkVersion": model.FrameworkVersion, + "format": model.Format, + "version": model.Version, + }, + }, + } + + // Store display name and description in annotations + if model.Annotations == nil { + model.Annotations = make(map[string]string) + } + if model.DisplayName != nil { + model.Annotations["openshift.io/display-name"] = *model.DisplayName + } + if model.Description != nil { + model.Annotations["openshift.io/description"] = *model.Description + } + + // Store model metadata in labels + if model.Labels == nil { + model.Labels = make(map[string]string) + } + if model.ModelType != nil { + model.Labels["model.opendatahub.io/type"] = *model.ModelType + } + if model.FrameworkVersion != nil { + model.Labels["model.opendatahub.io/framework-version"] = *model.FrameworkVersion + } + if model.Format != nil { + model.Labels["model.opendatahub.io/format"] = *model.Format + } + if model.Version != nil { + model.Labels["model.opendatahub.io/version"] = *model.Version + } + + if len(model.Labels) > 0 { + obj.SetLabels(model.Labels) + } + + if len(model.Annotations) > 0 { + obj.SetAnnotations(model.Annotations) + } + + return obj +} + +// parseSize parses size string to int64 +func parseSize(sizeStr string) (int64, error) { + // This is a simplified 
implementation - in a real scenario, + // you might want to handle different size formats (KB, MB, GB) + if sizeStr == "" { + return 0, nil + } + // For now, assume it's already in bytes + return 0, fmt.Errorf("size parsing not implemented") +} diff --git a/pkg/openshift-ai/pipeline.go b/pkg/openshift-ai/pipeline.go new file mode 100644 index 00000000..b79d7930 --- /dev/null +++ b/pkg/openshift-ai/pipeline.go @@ -0,0 +1,377 @@ +package openshiftai + +import ( + "context" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + + "github.com/containers/kubernetes-mcp-server/pkg/api" +) + +// PipelineClient handles Pipeline operations +type PipelineClient struct { + client *Client +} + +// NewPipelineClient creates a new Pipeline client +func NewPipelineClient(client *Client) *PipelineClient { + return &PipelineClient{ + client: client, + } +} + +// ListPipelines retrieves all pipelines with optional filtering +func (c *PipelineClient) List(ctx context.Context, namespace string, filters map[string]string) ([]*api.Pipeline, error) { + gvr, err := c.client.GetGVR("pipelines") + if err != nil { + return nil, err + } + + // Build list options with label selector from filters + listOpts := metav1.ListOptions{} + if len(filters) > 0 { + labelSelector := "" + for k, v := range filters { + if labelSelector != "" { + labelSelector += "," + } + labelSelector += k + "=" + v + } + listOpts.LabelSelector = labelSelector + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, listOpts) + } else { + list, err = resourceInterface.List(ctx, listOpts) + } + + if err != nil { + return nil, fmt.Errorf("failed to list pipelines: %w", err) + } + + pipelines := make([]*api.Pipeline, 0, len(list.Items)) + for _, item := range list.Items { + pipeline, err := c.unstructuredToPipeline(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert pipeline: %w", err) + } + pipelines = append(pipelines, pipeline) + } + + return pipelines, nil +} + +// GetPipeline retrieves a specific pipeline by name +func (c *PipelineClient) Get(ctx context.Context, namespace, name string) (*api.Pipeline, error) { + gvr, err := c.client.GetGVR("pipelines") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var item *unstructured.Unstructured + + if namespace != "" { + item, err = resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + } else { + item, err = resourceInterface.Get(ctx, name, metav1.GetOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to get pipeline %s: %w", name, err) + } + + return c.unstructuredToPipeline(item) +} + +// CreatePipeline creates a new pipeline +func (c *PipelineClient) Create(ctx context.Context, namespace string, pipeline *api.Pipeline) (*api.Pipeline, error) { + gvr, err := c.client.GetGVR("pipelines") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + + // Convert to unstructured + unstructuredObj, err := c.pipelineToUnstructured(pipeline) + if err != nil { + return nil, fmt.Errorf("failed to convert pipeline to unstructured: %w", err) + } + + unstructuredObj.SetNamespace(namespace) + + var created *unstructured.Unstructured + if namespace != "" { + created, err = resourceInterface.Namespace(namespace).Create(ctx, unstructuredObj, 
metav1.CreateOptions{}) + } else { + created, err = resourceInterface.Create(ctx, unstructuredObj, metav1.CreateOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to create pipeline: %w", err) + } + + return c.unstructuredToPipeline(created) +} + +// DeletePipeline deletes a pipeline by name +func (c *PipelineClient) Delete(ctx context.Context, namespace, name string) error { + gvr, err := c.client.GetGVR("pipelines") + if err != nil { + return err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + + if namespace != "" { + err = resourceInterface.Namespace(namespace).Delete(ctx, name, metav1.DeleteOptions{}) + } else { + err = resourceInterface.Delete(ctx, name, metav1.DeleteOptions{}) + } + + if err != nil { + return fmt.Errorf("failed to delete pipeline %s: %w", name, err) + } + + return nil +} + +// ListPipelineRuns retrieves all pipeline runs with optional filtering +func (c *PipelineClient) ListRuns(ctx context.Context, namespace string, filters map[string]string) ([]*api.PipelineRun, error) { + gvr, err := c.client.GetGVR("pipelineruns") + if err != nil { + return nil, err + } + + // Build list options with label selector from filters + listOpts := metav1.ListOptions{} + if len(filters) > 0 { + labelSelector := "" + for k, v := range filters { + if labelSelector != "" { + labelSelector += "," + } + labelSelector += k + "=" + v + } + listOpts.LabelSelector = labelSelector + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var list *unstructured.UnstructuredList + + if namespace != "" { + list, err = resourceInterface.Namespace(namespace).List(ctx, listOpts) + } else { + list, err = resourceInterface.List(ctx, listOpts) + } + + if err != nil { + return nil, fmt.Errorf("failed to list pipeline runs: %w", err) + } + + pipelineRuns := make([]*api.PipelineRun, 0, len(list.Items)) + for _, item := range list.Items { + pipelineRun, err := c.unstructuredToPipelineRun(&item) + if err != nil { + return nil, fmt.Errorf("failed to convert pipeline run: %w", err) + } + pipelineRuns = append(pipelineRuns, pipelineRun) + } + + return pipelineRuns, nil +} + +// GetPipelineRun retrieves a specific pipeline run by name +func (c *PipelineClient) GetRun(ctx context.Context, namespace, name string) (*api.PipelineRun, error) { + gvr, err := c.client.GetGVR("pipelineruns") + if err != nil { + return nil, err + } + + resourceInterface := c.client.GetDynamicClient().Resource(gvr) + var item *unstructured.Unstructured + + if namespace != "" { + item, err = resourceInterface.Namespace(namespace).Get(ctx, name, metav1.GetOptions{}) + } else { + item, err = resourceInterface.Get(ctx, name, metav1.GetOptions{}) + } + + if err != nil { + return nil, fmt.Errorf("failed to get pipeline run %s: %w", name, err) + } + + return c.unstructuredToPipelineRun(item) +} + +// unstructuredToPipeline converts Unstructured object to Pipeline +func (c *PipelineClient) unstructuredToPipeline(obj *unstructured.Unstructured) (*api.Pipeline, error) { + pipeline := &api.Pipeline{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Extract display name from annotations + if annotations := obj.GetAnnotations(); annotations != nil { + if displayName, ok := annotations["openshift.io/display-name"]; ok { + pipeline.DisplayName = &displayName + } + if description, ok := annotations["openshift.io/description"]; ok { + pipeline.Description = &description + } + } + + // Extract status + if status, ok, _ := 
unstructured.NestedMap(obj.Object, "status"); ok { + pipeline.Status = c.convertPipelineStatus(status) + } + + return pipeline, nil +} + +// unstructuredToPipelineRun converts Unstructured object to PipelineRun +func (c *PipelineClient) unstructuredToPipelineRun(obj *unstructured.Unstructured) (*api.PipelineRun, error) { + pipelineRun := &api.PipelineRun{ + Name: obj.GetName(), + Namespace: obj.GetNamespace(), + Labels: obj.GetLabels(), + Annotations: obj.GetAnnotations(), + } + + // Extract pipeline name from labels or annotations + if labels := obj.GetLabels(); labels != nil { + if pipelineName, ok := labels["app.kubernetes.io/part-of"]; ok { + pipelineRun.PipelineName = pipelineName + } + } + + // Extract display name from annotations + if annotations := obj.GetAnnotations(); annotations != nil { + if displayName, ok := annotations["openshift.io/display-name"]; ok { + pipelineRun.DisplayName = &displayName + } + if description, ok := annotations["openshift.io/description"]; ok { + pipelineRun.Description = &description + } + } + + // Extract status + if status, ok, _ := unstructured.NestedMap(obj.Object, "status"); ok { + pipelineRun.Status = c.convertPipelineRunStatus(status) + } + + return pipelineRun, nil +} + +// pipelineToUnstructured converts a Pipeline to Unstructured object +func (c *PipelineClient) pipelineToUnstructured(pipeline *api.Pipeline) (*unstructured.Unstructured, error) { + obj := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "datasciencepipelines.opendatahub.io/v1alpha1", + "kind": "Pipeline", + "metadata": map[string]interface{}{ + "name": pipeline.Name, + "namespace": pipeline.Namespace, + "labels": pipeline.Labels, + "annotations": pipeline.Annotations, + }, + }, + } + + // Store display name and description in annotations + annotations := make(map[string]string) + if pipeline.Annotations != nil { + for k, v := range pipeline.Annotations { + annotations[k] = v + } + } + if pipeline.DisplayName != nil { + annotations["openshift.io/display-name"] = *pipeline.DisplayName + } + if pipeline.Description != nil { + annotations["openshift.io/description"] = *pipeline.Description + } + obj.SetAnnotations(annotations) + + return obj, nil +} + +// convertPipelineStatus converts map to PipelineStatus +func (c *PipelineClient) convertPipelineStatus(statusMap map[string]interface{}) api.PipelineStatus { + status := api.PipelineStatus{ + Phase: "Unknown", + Ready: false, + } + + if phase, ok, _ := unstructured.NestedString(statusMap, "phase"); ok { + status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(statusMap, "message"); ok { + status.Message = &message + } + if conditions, ok := statusMap["conditions"].([]interface{}); ok { + for _, condition := range conditions { + if conditionMap, ok := condition.(map[string]interface{}); ok { + if conditionType, ok := conditionMap["type"].(string); ok && conditionType == "Ready" { + if conditionStatus, ok := conditionMap["status"].(string); ok && conditionStatus == "True" { + status.Ready = true + } + } + } + } + } + if runCount, ok, _ := unstructured.NestedInt64(statusMap, "runCount"); ok { + status.RunCount = int(runCount) + } + if lastUpdated, ok, _ := unstructured.NestedString(statusMap, "lastUpdated"); ok { + status.LastUpdated = &lastUpdated + } + + return status +} + +// convertPipelineRunStatus converts map to PipelineRunStatus +func (c *PipelineClient) convertPipelineRunStatus(statusMap map[string]interface{}) api.PipelineRunStatus { + status := api.PipelineRunStatus{ + Phase: 
"Unknown", + Ready: false, + } + + if phase, ok, _ := unstructured.NestedString(statusMap, "phase"); ok { + status.Phase = phase + } + if message, ok, _ := unstructured.NestedString(statusMap, "message"); ok { + status.Message = &message + } + if conditions, ok := statusMap["conditions"].([]interface{}); ok { + for _, condition := range conditions { + if conditionMap, ok := condition.(map[string]interface{}); ok { + if conditionType, ok := conditionMap["type"].(string); ok && conditionType == "Succeeded" { + if conditionStatus, ok := conditionMap["status"].(string); ok && conditionStatus == "True" { + status.Ready = true + } + } + } + } + } + if startedAt, ok, _ := unstructured.NestedString(statusMap, "startTime"); ok { + status.StartedAt = &startedAt + } + if finishedAt, ok, _ := unstructured.NestedString(statusMap, "completionTime"); ok { + status.FinishedAt = &finishedAt + } + if lastUpdated, ok, _ := unstructured.NestedString(statusMap, "lastUpdated"); ok { + status.LastUpdated = &lastUpdated + } + + return status +} diff --git a/pkg/toolsets/openshift-ai/applications.go b/pkg/toolsets/openshift-ai/applications.go new file mode 100644 index 00000000..f2b98bf1 --- /dev/null +++ b/pkg/toolsets/openshift-ai/applications.go @@ -0,0 +1,221 @@ +package openshiftai + +import ( + "encoding/json" + "fmt" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai" + "k8s.io/client-go/rest" +) + +func initApplications() []api.ServerTool { + return []api.ServerTool{ + {Tool: api.GetApplicationsListTool(), Handler: applicationsList}, + {Tool: api.GetApplicationGetTool(), Handler: applicationsGet}, + {Tool: api.GetApplicationCreateTool(), Handler: applicationsCreate}, + {Tool: api.GetApplicationDeleteTool(), Handler: applicationsDelete}, + } +} +func applicationsList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + // Get status filter (optional) + status, _ := args["status"].(string) + // Get app type filter (optional) + appType, _ := args["app_type"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Application client + applicationClient := openshiftai.NewApplicationClient(openshiftAIClient) + + // List applications + applications, err := applicationClient.List(params.Context, namespace, status, appType) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to list applications: %w", err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(applications) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal applications: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func applicationsGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", 
fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Application client + applicationClient := openshiftai.NewApplicationClient(openshiftAIClient) + + // Get application + application, err := applicationClient.Get(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get application '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(application) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal application: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func applicationsCreate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + appType, ok := args["app_type"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("app_type parameter is required")), nil + } + + // Optional parameters + var displayName *string + if val, exists := args["display_name"]; exists { + if str, ok := val.(string); ok && str != "" { + displayName = &str + } + } + + var description *string + if val, exists := args["description"]; exists { + if str, ok := val.(string); ok && str != "" { + description = &str + } + } + + var labels map[string]string + if val, exists := args["labels"]; exists { + if m, ok := val.(map[string]interface{}); ok { + labels = make(map[string]string) + for k, v := range m { + if str, ok := v.(string); ok { + labels[k] = str + } + } + } + } + + var annotations map[string]string + if val, exists := args["annotations"]; exists { + if m, ok := val.(map[string]interface{}); ok { + annotations = make(map[string]string) + for k, v := range m { + if str, ok := v.(string); ok { + annotations[k] = str + } + } + } + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Application client + applicationClient := openshiftai.NewApplicationClient(openshiftAIClient) + + // Create application + application := &api.Application{ + Name: name, + Namespace: namespace, + DisplayName: displayName, + Description: description, + AppType: appType, + Labels: labels, + Annotations: annotations, + Status: api.ApplicationStatus{ + Phase: "Creating", + Ready: false, + }, + } + + createdApplication, err := applicationClient.Create(params.Context, application) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to create application: %w", err)), nil + } + + // Convert to JSON response + content, err 
:= json.Marshal(createdApplication) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal created application: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func applicationsDelete(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Application client + applicationClient := openshiftai.NewApplicationClient(openshiftAIClient) + + // Delete application + err = applicationClient.Delete(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to delete application '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + return api.NewToolCallResult(fmt.Sprintf("Application %s/%s deleted successfully", namespace, name), nil), nil +} diff --git a/pkg/toolsets/openshift-ai/datascience_projects.go b/pkg/toolsets/openshift-ai/datascience_projects.go new file mode 100644 index 00000000..734f8e47 --- /dev/null +++ b/pkg/toolsets/openshift-ai/datascience_projects.go @@ -0,0 +1,201 @@ +package openshiftai + +import ( + "encoding/json" + "fmt" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai" + "k8s.io/client-go/rest" +) + +func initDataScienceProjects() []api.ServerTool { + return []api.ServerTool{ + {Tool: api.GetDataScienceProjectListTool(), Handler: dataScienceProjectsList}, + {Tool: api.GetDataScienceProjectGetTool(), Handler: dataScienceProjectsGet}, + {Tool: api.GetDataScienceProjectCreateTool(), Handler: dataScienceProjectsCreate}, + {Tool: api.GetDataScienceProjectDeleteTool(), Handler: dataScienceProjectsDelete}, + } +} + +func dataScienceProjectsList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create DataScienceProject client + dsClient := openshiftai.NewDataScienceProjectClient(openshiftAIClient) + + // List projects + projects, err := dsClient.List(params.Context, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to list Data Science Projects: %w", err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(projects) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal projects: %w", err)), nil + } + + return 
api.NewToolCallResult(string(content), nil), nil +} + +func dataScienceProjectsGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create DataScienceProject client + dsClient := openshiftai.NewDataScienceProjectClient(openshiftAIClient) + + // Get the project + project, err := dsClient.Get(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get Data Science Project '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(project) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal project: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +func dataScienceProjectsCreate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + description, _ := args["description"].(string) + displayName, _ := args["display_name"].(string) + + // Get optional parameters + var labels map[string]string + if labelsArg, exists := args["labels"]; exists { + if labelsMap, ok := labelsArg.(map[string]any); ok { + labels = make(map[string]string) + for k, v := range labelsMap { + if str, ok := v.(string); ok { + labels[k] = str + } + } + } + } + + var annotations map[string]string + if annotationsArg, exists := args["annotations"]; exists { + if annotationsMap, ok := annotationsArg.(map[string]any); ok { + annotations = make(map[string]string) + for k, v := range annotationsMap { + if str, ok := v.(string); ok { + annotations[k] = str + } + } + } + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create DataScienceProject client + dsClient := openshiftai.NewDataScienceProjectClient(openshiftAIClient) + + // Create project + project := &openshiftai.DataScienceProject{ + Name: name, + Namespace: namespace, + DisplayName: &displayName, + Description: &description, + Labels: labels, + Annotations: annotations, + } + + createdProject, err := dsClient.Create(params.Context, project) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to create Data Science Project '%s' in namespace '%s': %w", name, 
namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(createdProject) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal created project: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +func dataScienceProjectsDelete(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create DataScienceProject client + dsClient := openshiftai.NewDataScienceProjectClient(openshiftAIClient) + + // Delete project + err = dsClient.Delete(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to delete Data Science Project '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + content := fmt.Sprintf("Successfully deleted Data Science Project '%s' in namespace '%s'", name, namespace) + return api.NewToolCallResult(content, nil), nil +} diff --git a/pkg/toolsets/openshift-ai/experiments.go b/pkg/toolsets/openshift-ai/experiments.go new file mode 100644 index 00000000..d48e6780 --- /dev/null +++ b/pkg/toolsets/openshift-ai/experiments.go @@ -0,0 +1,214 @@ +package openshiftai + +import ( + "encoding/json" + "fmt" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai" + "k8s.io/client-go/rest" +) + +func initExperiments() []api.ServerTool { + return []api.ServerTool{ + {Tool: api.GetExperimentsListTool(), Handler: experimentsList}, + {Tool: api.GetExperimentGetTool(), Handler: experimentsGet}, + {Tool: api.GetExperimentCreateTool(), Handler: experimentsCreate}, + {Tool: api.GetExperimentDeleteTool(), Handler: experimentsDelete}, + } +} +func experimentsList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + // Get status filter (optional) + status, _ := args["status"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Experiment client + experimentClient := openshiftai.NewExperimentClient(openshiftAIClient) + + // List experiments + experiments, err := experimentClient.List(params.Context, namespace, status) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to list experiments: %w", err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(experiments) + if err != nil { + return api.NewToolCallResult("", 
fmt.Errorf("failed to marshal experiments: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func experimentsGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Experiment client + experimentClient := openshiftai.NewExperimentClient(openshiftAIClient) + + // Get experiment + experiment, err := experimentClient.Get(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get experiment '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(experiment) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal experiment: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func experimentsCreate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Optional parameters + var displayName *string + if val, exists := args["display_name"]; exists { + if str, ok := val.(string); ok && str != "" { + displayName = &str + } + } + + var description *string + if val, exists := args["description"]; exists { + if str, ok := val.(string); ok && str != "" { + description = &str + } + } + + var labels map[string]string + if val, exists := args["labels"]; exists { + if m, ok := val.(map[string]interface{}); ok { + labels = make(map[string]string) + for k, v := range m { + if str, ok := v.(string); ok { + labels[k] = str + } + } + } + } + + var annotations map[string]string + if val, exists := args["annotations"]; exists { + if m, ok := val.(map[string]interface{}); ok { + annotations = make(map[string]string) + for k, v := range m { + if str, ok := v.(string); ok { + annotations[k] = str + } + } + } + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Experiment client + experimentClient := openshiftai.NewExperimentClient(openshiftAIClient) + + // Create experiment + experiment := &api.Experiment{ + Name: name, + Namespace: namespace, + DisplayName: displayName, + Description: description, + Labels: labels, + Annotations: annotations, + Status: api.ExperimentStatus{ + Phase: 
"Created", + Ready: false, + RunCount: 0, + }, + } + + createdExperiment, err := experimentClient.Create(params.Context, experiment) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to create experiment: %w", err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(createdExperiment) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal created experiment: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func experimentsDelete(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Experiment client + experimentClient := openshiftai.NewExperimentClient(openshiftAIClient) + + // Delete experiment + err = experimentClient.Delete(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to delete experiment '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + return api.NewToolCallResult(fmt.Sprintf("Experiment %s/%s deleted successfully", namespace, name), nil), nil +} diff --git a/pkg/toolsets/openshift-ai/models.go b/pkg/toolsets/openshift-ai/models.go new file mode 100644 index 00000000..e3bb0759 --- /dev/null +++ b/pkg/toolsets/openshift-ai/models.go @@ -0,0 +1,302 @@ +package openshiftai + +import ( + "encoding/json" + "fmt" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai" + "k8s.io/client-go/rest" +) + +func initModels() []api.ServerTool { + return []api.ServerTool{ + {Tool: api.GetModelListTool(), Handler: modelsList}, + {Tool: api.GetModelGetTool(), Handler: modelsGet}, + {Tool: api.GetModelCreateTool(), Handler: modelsCreate}, + {Tool: api.GetModelUpdateTool(), Handler: modelsUpdate}, + {Tool: api.GetModelDeleteTool(), Handler: modelsDelete}, + } +} + +func modelsList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + // Get model type filter (optional) + modelType, _ := args["model_type"].(string) + // Get status filter (optional) + status, _ := args["status"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Model client + modelClient := openshiftai.NewModelClient(openshiftAIClient) + + // List models + models, err := modelClient.List(params.Context, namespace, modelType, status) + if err != nil { + 
return api.NewToolCallResult("", fmt.Errorf("failed to list models: %w", err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(models) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal models: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +// handleModelGet handles model_get tool +func modelsGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Model client + modelClient := openshiftai.NewModelClient(openshiftAIClient) + + // Get the model + model, err := modelClient.Get(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get model '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(model) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal model: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +// handleModelCreate handles model_create tool +func modelsCreate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + modelType, ok := args["model_type"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("model_type parameter is required")), nil + } + + format, ok := args["format"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("format parameter is required")), nil + } + + // Get optional parameters + displayName, _ := args["display_name"].(string) + description, _ := args["description"].(string) + frameworkVersion, _ := args["framework_version"].(string) + version, _ := args["version"].(string) + + var labels map[string]string + if labelsArg, exists := args["labels"]; exists { + if labelsMap, ok := labelsArg.(map[string]any); ok { + labels = make(map[string]string) + for k, v := range labelsMap { + if str, ok := v.(string); ok { + labels[k] = str + } + } + } + } + + var annotations map[string]string + if annotationsArg, exists := args["annotations"]; exists { + if annotationsMap, ok := annotationsArg.(map[string]any); ok { + annotations = make(map[string]string) + for k, v := range annotationsMap { + if str, ok := v.(string); ok { + annotations[k] = str + } + } + } + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return 
api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Model client + modelClient := openshiftai.NewModelClient(openshiftAIClient) + + // Create model + model := &api.Model{ + Name: name, + Namespace: namespace, + DisplayName: &displayName, + Description: &description, + ModelType: &modelType, + FrameworkVersion: &frameworkVersion, + Format: &format, + Version: &version, + Labels: labels, + Annotations: annotations, + } + + createdModel, err := modelClient.Create(params.Context, model) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to create model '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(createdModel) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal created model: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +// handleModelUpdate handles model_update tool +func modelsUpdate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get optional parameters + displayName, _ := args["display_name"].(string) + description, _ := args["description"].(string) + + var labels map[string]string + if labelsArg, exists := args["labels"]; exists { + if labelsMap, ok := labelsArg.(map[string]any); ok { + labels = make(map[string]string) + for k, v := range labelsMap { + if str, ok := v.(string); ok { + labels[k] = str + } + } + } + } + + var annotations map[string]string + if annotationsArg, exists := args["annotations"]; exists { + if annotationsMap, ok := annotationsArg.(map[string]any); ok { + annotations = make(map[string]string) + for k, v := range annotationsMap { + if str, ok := v.(string); ok { + annotations[k] = str + } + } + } + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Model client + modelClient := openshiftai.NewModelClient(openshiftAIClient) + + // Update model + model := &api.Model{ + Name: name, + Namespace: namespace, + DisplayName: &displayName, + Description: &description, + Labels: labels, + Annotations: annotations, + } + + updatedModel, err := modelClient.Update(params.Context, model) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to update model '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + // Convert to JSON response + content, err := json.Marshal(updatedModel) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal updated model: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} + +// handleModelDelete handles model_delete tool +func modelsDelete(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + name, ok := args["name"].(string) + if !ok { + return 
api.NewToolCallResult("", fmt.Errorf("name parameter is required")), nil + } + + namespace, ok := args["namespace"].(string) + if !ok { + return api.NewToolCallResult("", fmt.Errorf("namespace parameter is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Model client + modelClient := openshiftai.NewModelClient(openshiftAIClient) + + // Delete model + err = modelClient.Delete(params.Context, name, namespace) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to delete model '%s' in namespace '%s': %w", name, namespace, err)), nil + } + + content := fmt.Sprintf("Successfully deleted model '%s' in namespace '%s'", name, namespace) + return api.NewToolCallResult(content, nil), nil +} diff --git a/pkg/toolsets/openshift-ai/pipelines.go b/pkg/toolsets/openshift-ai/pipelines.go new file mode 100644 index 00000000..91db1a63 --- /dev/null +++ b/pkg/toolsets/openshift-ai/pipelines.go @@ -0,0 +1,367 @@ +package openshiftai + +import ( + "encoding/json" + "fmt" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai" + "k8s.io/client-go/rest" +) + +func initPipelines() []api.ServerTool { + return []api.ServerTool{ + {Tool: api.GetPipelinesListTool(), Handler: pipelinesList}, + {Tool: api.GetPipelineGetTool(), Handler: pipelinesGet}, + {Tool: api.GetPipelineCreateTool(), Handler: pipelinesCreate}, + {Tool: api.GetPipelineDeleteTool(), Handler: pipelinesDelete}, + {Tool: api.GetPipelineRunsListTool(), Handler: pipelineRunsList}, + {Tool: api.GetPipelineRunGetTool(), Handler: pipelineRunsGet}, + } +} +func pipelinesList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + // Get status filter (optional) + status, _ := args["status"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + // Build filters + filters := make(map[string]string) + if status != "" { + filters["status"] = status + } + + // List pipelines + pipelines, err := pipelineClient.List(params.Context, namespace, filters) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to list pipelines: %w", err)), nil + } + + // Convert to response format + response := make([]map[string]interface{}, len(pipelines)) + for i, pipeline := range pipelines { + response[i] = map[string]interface{}{ + "name": pipeline.Name, + "namespace": pipeline.Namespace, + "display_name": pipeline.DisplayName, + "description": pipeline.Description, + "labels": pipeline.Labels, + "annotations": pipeline.Annotations, + "status": pipeline.Status, + } + } + + result := 
map[string]interface{}{ + "pipelines": response, + "count": len(response), + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func pipelinesGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + name, _ := args["name"].(string) + namespace, _ := args["namespace"].(string) + + if name == "" { + return api.NewToolCallResult("", fmt.Errorf("pipeline name is required")), nil + } + if namespace == "" { + return api.NewToolCallResult("", fmt.Errorf("namespace is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + pipeline, err := pipelineClient.Get(params.Context, namespace, name) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get pipeline %s: %w", name, err)), nil + } + + result := map[string]interface{}{ + "name": pipeline.Name, + "namespace": pipeline.Namespace, + "display_name": pipeline.DisplayName, + "description": pipeline.Description, + "labels": pipeline.Labels, + "annotations": pipeline.Annotations, + "status": pipeline.Status, + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func pipelinesCreate(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + name, _ := args["name"].(string) + namespace, _ := args["namespace"].(string) + displayName, _ := args["display_name"].(string) + description, _ := args["description"].(string) + labels, _ := args["labels"].(map[string]interface{}) + annotations, _ := args["annotations"].(map[string]interface{}) + + if name == "" { + return api.NewToolCallResult("", fmt.Errorf("pipeline name is required")), nil + } + if namespace == "" { + return api.NewToolCallResult("", fmt.Errorf("namespace is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + // Convert labels and annotations + labelsMap := make(map[string]string) + for k, v := range labels { + if s, ok := v.(string); ok { + labelsMap[k] = s + } + } + + annotationsMap := make(map[string]string) + for k, v := range annotations { + if s, ok := v.(string); ok { + annotationsMap[k] = s + } + } + + pipeline := &api.Pipeline{ + Name: name, + Namespace: namespace, + Labels: labelsMap, + Annotations: annotationsMap, + } + + if displayName != "" { + 
pipeline.DisplayName = &displayName + } + if description != "" { + pipeline.Description = &description + } + + createdPipeline, err := pipelineClient.Create(params.Context, namespace, pipeline) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to create pipeline: %w", err)), nil + } + + result := map[string]interface{}{ + "name": createdPipeline.Name, + "namespace": createdPipeline.Namespace, + "display_name": createdPipeline.DisplayName, + "description": createdPipeline.Description, + "labels": createdPipeline.Labels, + "annotations": createdPipeline.Annotations, + "status": createdPipeline.Status, + "message": "Pipeline created successfully", + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func pipelinesDelete(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + name, _ := args["name"].(string) + namespace, _ := args["namespace"].(string) + + if name == "" { + return api.NewToolCallResult("", fmt.Errorf("pipeline name is required")), nil + } + if namespace == "" { + return api.NewToolCallResult("", fmt.Errorf("namespace is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + err = pipelineClient.Delete(params.Context, namespace, name) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to delete pipeline %s: %w", name, err)), nil + } + + result := map[string]interface{}{ + "name": name, + "message": "Pipeline deleted successfully", + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func pipelineRunsList(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + // Get namespace parameter (optional) + namespace, _ := args["namespace"].(string) + // Get pipeline name filter (optional) + pipelineName, _ := args["pipeline_name"].(string) + // Get status filter (optional) + status, _ := args["status"].(string) + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + // Build filters + filters := make(map[string]string) + if status != "" { + filters["status"] = status + } + if pipelineName != "" { + filters["pipeline_name"] = pipelineName + } + + // List pipeline runs + pipelineRuns, err := pipelineClient.ListRuns(params.Context, namespace, filters) + if err 
!= nil { + return api.NewToolCallResult("", fmt.Errorf("failed to list pipeline runs: %w", err)), nil + } + + // Convert to response format + response := make([]map[string]interface{}, len(pipelineRuns)) + for i, pipelineRun := range pipelineRuns { + response[i] = map[string]interface{}{ + "name": pipelineRun.Name, + "pipeline_name": pipelineRun.PipelineName, + "namespace": pipelineRun.Namespace, + "display_name": pipelineRun.DisplayName, + "description": pipelineRun.Description, + "labels": pipelineRun.Labels, + "annotations": pipelineRun.Annotations, + "status": pipelineRun.Status, + } + } + + result := map[string]interface{}{ + "pipeline_runs": response, + "count": len(response), + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} +func pipelineRunsGet(params api.ToolHandlerParams) (*api.ToolCallResult, error) { + args := params.GetArguments() + + name, _ := args["name"].(string) + namespace, _ := args["namespace"].(string) + + if name == "" { + return api.NewToolCallResult("", fmt.Errorf("pipeline run name is required")), nil + } + if namespace == "" { + return api.NewToolCallResult("", fmt.Errorf("namespace is required")), nil + } + + // Get OpenShift AI client from Kubernetes manager + clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) { + return openshiftai.NewClient(cfg, nil) + }) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get OpenShift AI client: %w", err)), nil + } + openshiftAIClient := clientInterface.(*openshiftai.Client) + + // Create Pipeline client + pipelineClient := openshiftai.NewPipelineClient(openshiftAIClient) + + pipelineRun, err := pipelineClient.GetRun(params.Context, namespace, name) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to get pipeline run %s: %w", name, err)), nil + } + + result := map[string]interface{}{ + "name": pipelineRun.Name, + "pipeline_name": pipelineRun.PipelineName, + "namespace": pipelineRun.Namespace, + "display_name": pipelineRun.DisplayName, + "description": pipelineRun.Description, + "labels": pipelineRun.Labels, + "annotations": pipelineRun.Annotations, + "status": pipelineRun.Status, + } + + // Convert to JSON response + content, err := json.Marshal(result) + if err != nil { + return api.NewToolCallResult("", fmt.Errorf("failed to marshal result: %w", err)), nil + } + + return api.NewToolCallResult(string(content), nil), nil +} diff --git a/pkg/toolsets/openshift-ai/toolset.go b/pkg/toolsets/openshift-ai/toolset.go new file mode 100644 index 00000000..b58c608c --- /dev/null +++ b/pkg/toolsets/openshift-ai/toolset.go @@ -0,0 +1,35 @@ +package openshiftai + +import ( + "slices" + + "github.com/containers/kubernetes-mcp-server/pkg/api" + internalk8s "github.com/containers/kubernetes-mcp-server/pkg/kubernetes" + "github.com/containers/kubernetes-mcp-server/pkg/toolsets" +) + +type Toolset struct{} + +var _ api.Toolset = (*Toolset)(nil) + +func (t *Toolset) GetName() string { + return "openshift-ai" +} + +func (t *Toolset) GetDescription() string { + return "OpenShift AI specific tools for managing Data Science Projects, Jupyter Notebooks, model serving, and pipelines" +} + +func (t *Toolset) GetTools(_ internalk8s.Openshift) []api.ServerTool { + return slices.Concat( + initDataScienceProjects(), + initModels(), + 
initApplications(), + initExperiments(), + initPipelines(), + ) +} + +func init() { + toolsets.Register(&Toolset{}) +}
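
Every handler in this toolset repeats the same two pieces of boilerplate: acquiring the OpenShift AI client (call `params.GetOrCreateOpenShiftAIClient`, check the error, type-assert to `*openshiftai.Client`) and converting generic label/annotation arguments into `map[string]string`. A minimal sketch of two package-level helpers that could absorb that duplication is shown below; the helper names `getOpenShiftAIClient` and `toStringMap` are not part of the patch, and the comma-ok assertion is an addition on top of the unchecked assertion the handlers currently use.

```go
package openshiftai

import (
	"fmt"

	"github.com/containers/kubernetes-mcp-server/pkg/api"
	openshiftai "github.com/containers/kubernetes-mcp-server/pkg/openshift-ai"
	"k8s.io/client-go/rest"
)

// getOpenShiftAIClient (hypothetical helper) centralizes the client acquisition
// that every handler currently inlines: fetch the client from the Kubernetes
// manager and assert it to *openshiftai.Client, returning an error instead of
// panicking if the concrete type is unexpected.
func getOpenShiftAIClient(params api.ToolHandlerParams) (*openshiftai.Client, error) {
	clientInterface, err := params.GetOrCreateOpenShiftAIClient(func(cfg *rest.Config, config interface{}) (interface{}, error) {
		return openshiftai.NewClient(cfg, nil)
	})
	if err != nil {
		return nil, fmt.Errorf("failed to get OpenShift AI client: %w", err)
	}
	client, ok := clientInterface.(*openshiftai.Client)
	if !ok {
		return nil, fmt.Errorf("unexpected OpenShift AI client type %T", clientInterface)
	}
	return client, nil
}

// toStringMap (hypothetical helper) mirrors the label/annotation conversion
// loops repeated in the create and update handlers: string values are kept,
// everything else is dropped.
func toStringMap(val interface{}) map[string]string {
	m, ok := val.(map[string]interface{})
	if !ok {
		return nil
	}
	out := make(map[string]string, len(m))
	for k, v := range m {
		if s, ok := v.(string); ok {
			out[k] = s
		}
	}
	return out
}
```

With these helpers, a handler such as `applicationsGet` reduces to argument validation, a call to `getOpenShiftAIClient(params)`, and the existing Get/Marshal logic, which keeps the per-resource files focused on their tool-specific behaviour.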
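
The create and update handlers also diverge on optional string fields: `applicationsCreate` and `experimentsCreate` only set the `DisplayName` and `Description` pointers when a non-empty value was supplied, while `dataScienceProjectsCreate`, `modelsCreate`, and `modelsUpdate` take the address of the local variable unconditionally, so an omitted argument ends up as a pointer to an empty string. Continuing the sketch above (and assuming `GetArguments()` yields a `map[string]interface{}`, as the existing type assertions imply), one small helper would make the behaviour uniform:

```go
// optionalString (hypothetical helper) returns a pointer only when the argument
// is present and a non-empty string, matching the inline pattern already used
// in applicationsCreate and experimentsCreate.
func optionalString(args map[string]interface{}, key string) *string {
	if val, exists := args[key]; exists {
		if str, ok := val.(string); ok && str != "" {
			return &str
		}
	}
	return nil
}
```

A create handler could then populate the resource with, for example, `DisplayName: optionalString(args, "display_name")` rather than always taking `&displayName`.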