|
| 1 | +package client |
| 2 | + |
| 3 | +import ( |
| 4 | + "archive/zip" |
| 5 | + "bytes" |
| 6 | + "context" |
| 7 | + "encoding/csv" |
| 8 | + "fmt" |
| 9 | + "io" |
| 10 | + "net/http" |
| 11 | + "strings" |
| 12 | + |
| 13 | + "github.com/harness/harness-mcp/client/dto" |
| 14 | +) |
| 15 | + |
const (
	// dashboardSearchPath is the endpoint used to list/search dashboards.
	dashboardSearchPath = "dashboard/v1/search"
	// dashboardDataPath downloads a dashboard's data as a ZIP of CSV files;
	// the %s placeholder is filled with the dashboard ID.
	dashboardDataPath = "dashboard/download/dashboards/%s/csv"
)
| 20 | + |
// DashboardService handles all dashboard-related API interactions
// (listing dashboards and downloading dashboard data exports).
type DashboardService struct {
	// Client performs the underlying HTTP calls against the Harness API.
	Client *Client
}
| 25 | + |
| 26 | +// ListDashboards fetches all dashboards from Harness |
| 27 | +func (d *DashboardService) ListDashboards(ctx context.Context, scope dto.Scope, page int, pageSize int, folderID string, tags string) (*dto.DashboardListResponse, error) { |
| 28 | + path := dashboardSearchPath |
| 29 | + params := make(map[string]string) |
| 30 | + |
| 31 | + // Add scope parameters |
| 32 | + addScope(scope, params) |
| 33 | + |
| 34 | + params["page"] = fmt.Sprintf("%d", page) |
| 35 | + params["pageSize"] = fmt.Sprintf("%d", pageSize) |
| 36 | + |
| 37 | + // Add optional parameters if they exist |
| 38 | + if folderID != "" { |
| 39 | + params["folderId"] = folderID |
| 40 | + } |
| 41 | + if tags != "" { |
| 42 | + params["tags"] = tags |
| 43 | + } |
| 44 | + |
| 45 | + response := new(dto.DashboardListResponse) |
| 46 | + err := d.Client.Get(ctx, path, params, nil, response) |
| 47 | + if err != nil { |
| 48 | + return nil, fmt.Errorf("failed to list dashboards: %w", err) |
| 49 | + } |
| 50 | + |
| 51 | + return response, nil |
| 52 | +} |
| 53 | + |
| 54 | +// GetDashboardData fetches data for a specific dashboard |
| 55 | +func (d *DashboardService) GetDashboardData(ctx context.Context, scope dto.Scope, dashboardID string, reportingTimeframe int) (*dto.DashboardData, error) { |
| 56 | + // Format the path with the dashboard ID using the standard pattern |
| 57 | + path := fmt.Sprintf(dashboardDataPath, dashboardID) |
| 58 | + |
| 59 | + // Create params map for query parameters |
| 60 | + params := make(map[string]string) |
| 61 | + |
| 62 | + // Add scope parameters including account ID |
| 63 | + if scope.AccountID == "" { |
| 64 | + return nil, fmt.Errorf("accountIdentifier cannot be null") |
| 65 | + } |
| 66 | + addScope(scope, params) |
| 67 | + |
| 68 | + // Set default reporting timeframe if not provided |
| 69 | + if reportingTimeframe <= 0 { |
| 70 | + reportingTimeframe = 30 // Default to 30 days |
| 71 | + } |
| 72 | + params["filters"] = fmt.Sprintf("Reporting+Timeframe=%d", reportingTimeframe) |
| 73 | + params["expanded_tables"] = "true" |
| 74 | + |
| 75 | + // For this specific endpoint, we need the raw response to process the ZIP file |
| 76 | + // Use the standard URL construction but handle the response manually |
| 77 | + httpReq, err := http.NewRequestWithContext( |
| 78 | + ctx, |
| 79 | + http.MethodGet, |
| 80 | + appendPath(d.Client.BaseURL.String(), path), |
| 81 | + nil, |
| 82 | + ) |
| 83 | + if err != nil { |
| 84 | + return nil, fmt.Errorf("failed to create request: %w", err) |
| 85 | + } |
| 86 | + |
| 87 | + // Add query parameters using the standard helper function |
| 88 | + addQueryParams(httpReq, params) |
| 89 | + |
| 90 | + resp, err := d.Client.Do(httpReq) |
| 91 | + if err != nil { |
| 92 | + return nil, fmt.Errorf("failed to execute request: %w", err) |
| 93 | + } |
| 94 | + defer resp.Body.Close() |
| 95 | + |
| 96 | + // Check if response status is not OK |
| 97 | + if resp.StatusCode != http.StatusOK { |
| 98 | + bodyBytes, _ := io.ReadAll(resp.Body) |
| 99 | + return nil, fmt.Errorf("unexpected status code %d: %s", resp.StatusCode, string(bodyBytes)) |
| 100 | + } |
| 101 | + |
| 102 | + // Read the response body into memory |
| 103 | + bodyBytes, err := io.ReadAll(resp.Body) |
| 104 | + if err != nil { |
| 105 | + return nil, fmt.Errorf("failed to read response body: %w", err) |
| 106 | + } |
| 107 | + |
| 108 | + // Create a reader for the ZIP content |
| 109 | + zipReader, err := zip.NewReader(bytes.NewReader(bodyBytes), int64(len(bodyBytes))) |
| 110 | + if err != nil { |
| 111 | + return nil, fmt.Errorf("failed to parse ZIP content: %w", err) |
| 112 | + } |
| 113 | + |
| 114 | + // Process the CSV files in the ZIP |
| 115 | + dashboardData := &dto.DashboardData{ |
| 116 | + Tables: make(map[string][]map[string]string), |
| 117 | + } |
| 118 | + |
| 119 | + for _, zipFile := range zipReader.File { |
| 120 | + // Skip directories and non-CSV files |
| 121 | + if zipFile.FileInfo().IsDir() || !strings.HasSuffix(zipFile.Name, ".csv") { |
| 122 | + continue |
| 123 | + } |
| 124 | + |
| 125 | + // Extract table name from file name |
| 126 | + tableName := strings.TrimSuffix(zipFile.Name, ".csv") |
| 127 | + |
| 128 | + // Open the file inside the zip |
| 129 | + rc, err := zipFile.Open() |
| 130 | + if err != nil { |
| 131 | + return nil, fmt.Errorf("failed to open file %s in ZIP: %w", zipFile.Name, err) |
| 132 | + } |
| 133 | + |
| 134 | + // Parse the CSV content |
| 135 | + csvData, err := parseCSV(rc) |
| 136 | + if err != nil { |
| 137 | + rc.Close() |
| 138 | + return nil, fmt.Errorf("failed to parse CSV file %s: %w", zipFile.Name, err) |
| 139 | + } |
| 140 | + rc.Close() |
| 141 | + |
| 142 | + // Add table data to the dashboard data |
| 143 | + dashboardData.Tables[tableName] = csvData |
| 144 | + } |
| 145 | + |
| 146 | + return dashboardData, nil |
| 147 | +} |
| 148 | + |
| 149 | +// Helper function to parse CSV data using the standard library's csv package for robust parsing |
| 150 | +func parseCSV(reader io.Reader) ([]map[string]string, error) { |
| 151 | + // Create a new CSV reader |
| 152 | + csvReader := csv.NewReader(reader) |
| 153 | + |
| 154 | + // Read all records at once |
| 155 | + records, err := csvReader.ReadAll() |
| 156 | + if err != nil { |
| 157 | + return nil, fmt.Errorf("failed to read CSV content: %w", err) |
| 158 | + } |
| 159 | + |
| 160 | + // Check if we have enough data (at least header row) |
| 161 | + if len(records) < 1 { |
| 162 | + return nil, fmt.Errorf("CSV content empty, no header row") |
| 163 | + } |
| 164 | + |
| 165 | + // Extract headers from the first row |
| 166 | + headers := records[0] |
| 167 | + for i, header := range headers { |
| 168 | + headers[i] = strings.TrimSpace(header) |
| 169 | + } |
| 170 | + |
| 171 | + // No data rows |
| 172 | + if len(records) < 2 { |
| 173 | + return []map[string]string{}, nil // Return empty result, not an error |
| 174 | + } |
| 175 | + |
| 176 | + // Process data rows |
| 177 | + results := make([]map[string]string, 0, len(records)-1) |
| 178 | + for i := 1; i < len(records); i++ { |
| 179 | + values := records[i] |
| 180 | + |
| 181 | + // Skip rows with mismatched field counts |
| 182 | + if len(values) != len(headers) { |
| 183 | + continue |
| 184 | + } |
| 185 | + |
| 186 | + // Create a map for this row |
| 187 | + row := make(map[string]string) |
| 188 | + for j, value := range values { |
| 189 | + row[headers[j]] = strings.TrimSpace(value) |
| 190 | + } |
| 191 | + |
| 192 | + results = append(results, row) |
| 193 | + } |
| 194 | + |
| 195 | + return results, nil |
| 196 | +} |
0 commit comments