Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
208 changes: 208 additions & 0 deletions integrations/source_api_processors/grafana_api_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,9 @@ def fetch_dashboards(self):
def fetch_dashboard_details(self, uid):
try:
url = '{}/api/dashboards/uid/{}'.format(self.__host, uid)
print(url)
response = requests.get(url, headers=self.headers, verify=self.__ssl_verify)
print(response.text)
if response and response.status_code == 200:
return response.json()
except Exception as e:
Expand Down Expand Up @@ -139,6 +141,212 @@ def fetch_dashboard_variable_label_values(self, promql_datasource_uid, label_nam
logger.error(f"Exception occurred while fetching promql metric labels for {label_name} with error: {e}")
return []

def get_datasource_by_uid(self, ds_uid):
    """Look up a single Grafana datasource by its unique identifier.

    :param ds_uid: Grafana datasource UID.
    :return: parsed JSON payload on HTTP 200, otherwise None
             (non-200 response or request exception).
    """
    try:
        endpoint = f'{self.__host}/api/datasources/uid/{ds_uid}'
        api_response = requests.get(endpoint, headers=self.headers, verify=self.__ssl_verify)
        if api_response and api_response.status_code == 200:
            return api_response.json()
        logger.error(f"Failed to get datasource for uid {ds_uid}. Status: {api_response.status_code}, Body: {api_response.text}")
    except Exception as e:
        logger.error(f"Exception fetching datasource {ds_uid}: {e}")
    return None

def get_default_datasource_by_type(self, ds_type):
    """Return the default datasource of the given type.

    Falls back to the first datasource of that type when none is
    flagged as default; returns None when no datasource of the type
    exists or on error.
    """
    try:
        all_sources = self.fetch_data_sources()
        if not all_sources:
            return None

        # Restrict to the requested type, then prefer the one marked default.
        candidates = [ds for ds in all_sources if ds.get('type') == ds_type]
        flagged_default = next((ds for ds in candidates if ds.get('isDefault', False)), None)
        if flagged_default is not None:
            return flagged_default
        return candidates[0] if candidates else None
    except Exception as e:
        logger.error(f"Exception fetching default datasource of type {ds_type}: {e}")
        return None

def get_dashboard_variables(self, dashboard_uid):
    """
    Fetch and resolve all template variables for a Grafana dashboard.

    Variables are resolved in declaration order so that later variables
    can reference earlier ones (e.g. a query variable scoped by a
    previously-resolved datasource variable).

    Supported variable types: query, datasource, custom, constant,
    textbox and interval; other types are skipped.

    :param dashboard_uid: UID of the dashboard to inspect.
    :return: dict with 'dashboard_title', 'dashboard_uid' and a
             'variables' mapping of variable name -> list of values,
             or {} when the dashboard cannot be fetched or an error occurs.
    """
    try:
        dashboard_data = self.fetch_dashboard_details(dashboard_uid)

        if not dashboard_data or 'dashboard' not in dashboard_data:
            logger.error("Could not fetch or parse dashboard data.")
            return {}

        dashboard_json = dashboard_data['dashboard']
        variables = dashboard_json.get('templating', {}).get('list', [])

        resolved_variables = {}

        for var in variables:
            var_name = var.get('name')
            var_type = var.get('type')

            # Malformed entries without a name or type cannot be resolved.
            if not var_name or not var_type:
                continue

            values = []
            if var_type == 'query':
                values = self._resolve_query_variable(var, resolved_variables)
            elif var_type == 'datasource':
                values = self._resolve_datasource_variable(var)
            elif var_type in ('custom', 'interval'):
                # Both types store a comma-separated list of options in 'query'.
                query = self._substitute_variables(var.get('query', ''), resolved_variables)
                values = [v.strip() for v in query.split(',')]
            elif var_type == 'constant':
                values = [self._substitute_variables(var.get('query', ''), resolved_variables)]
            elif var_type == 'textbox':
                # Prefer the user's current value; fall back to the default query text.
                # NOTE(review): assumes 'current', when present, is a dict — confirm
                # against the Grafana dashboard JSON model.
                current_val = var.get('current', {}).get('value')
                query_val = self._substitute_variables(var.get('query', ''), resolved_variables)
                values = [current_val or query_val]

            if values:
                resolved_variables[var_name] = values

        logger.info(f"For dashboard '{dashboard_json.get('title')}', fetched variable values: {resolved_variables}")
        return {
            'dashboard_title': dashboard_json.get('title'),
            'dashboard_uid': dashboard_uid,
            'variables': resolved_variables
        }
    except Exception as e:
        logger.error(f"Exception occurred while fetching dashboard variables for {dashboard_uid}: {e}")
        return {}

def _substitute_variables(self, query_string, resolved_variables):
"""Substitutes variables in query strings."""
import re

for name, value in resolved_variables.items():
sub_value = value[0] if isinstance(value, list) and value else (value if isinstance(value, str) else "")
query_string = re.sub(r'\$' + re.escape(name) + r'\b', sub_value, query_string)
query_string = re.sub(r'\$\{' + re.escape(name) + r'\}', sub_value, query_string)
return query_string

def _resolve_datasource_variable(self, var):
"""Resolves a 'datasource' type variable."""
ds_type = var.get('query')
if not ds_type:
return []

try:
datasources = self.fetch_data_sources()
if not datasources:
return []

# Get all datasources of this type
matching_datasources = [ds['uid'] for ds in datasources if ds.get('type') == ds_type]

# If the current value is 'default', we should return the UID of the default datasource
current_value = var.get('current', {}).get('value')
if current_value == 'default':
default_ds = self.get_default_datasource_by_type(ds_type)
if default_ds:
return [default_ds['uid']]

return matching_datasources
except Exception as e:
logger.error(f"Exception fetching datasources: {e}")
return []

def _resolve_query_variable(self, var, resolved_variables):
    """
    Resolve a 'query' type template variable to its list of values.

    Only Prometheus datasources are supported. Three query shapes are
    recognized, in order:
      1. label_values(label) / label_values(metric, label) -> label values
      2. metrics(pattern) -> all metric names (label_values of __name__)
      3. any other PromQL expression (including query_result(...)) ->
         executed through the Grafana datasource proxy.

    :param var: the variable definition dict from the dashboard JSON.
    :param resolved_variables: previously resolved variables, used for
        '$name' / '${name}' substitution in the datasource UID and query.
    :return: sorted list of distinct values, or [] on any failure.
    """
    import re

    datasource = var.get('datasource')
    query = var.get('query')

    if not datasource or not query:
        return []

    # The datasource may be an object ({'uid': ..., 'type': ...}) or a bare
    # UID string; either form may itself contain variable references.
    ds_uid = datasource.get('uid') if isinstance(datasource, dict) else datasource
    ds_uid = self._substitute_variables(ds_uid, resolved_variables)

    # Handle the case where ds_uid is "default" - need to resolve it to actual datasource
    if ds_uid == 'default':
        ds_type = datasource.get('type') if isinstance(datasource, dict) else 'prometheus' # assume prometheus if not specified
        datasource_details = self.get_default_datasource_by_type(ds_type)
        if datasource_details:
            ds_uid = datasource_details['uid']
        else:
            logger.warning(f"Could not find default datasource of type '{ds_type}' for query variable '{var.get('name')}'.")
            return []
    else:
        datasource_details = self.get_datasource_by_uid(ds_uid)

    # Only Prometheus-backed variables can be resolved below.
    if not datasource_details or datasource_details.get('type') != 'prometheus':
        logger.warning(f"Unsupported or unknown datasource type for query variable '{var.get('name')}'.")
        return []

    # 'query' may also be a dict in newer dashboard schemas, hence str().
    query = self._substitute_variables(str(query), resolved_variables)

    # Prometheus query handling
    # Case 1: label_values(label) or label_values(metric, label)
    # The optional non-capturing group skips a leading "metric," so the
    # captured \w+ is always the label argument.
    label_values_match = re.search(r'label_values\((?:.*\s*,\s*)?(\w+)\)', query)
    if label_values_match:
        label = label_values_match.group(1)
        return self.fetch_dashboard_variable_label_values(ds_uid, label)

    # Case 2: metrics(pattern) -> label_values(__name__)
    if re.match(r'metrics\(.*\)', query):
        return self.fetch_dashboard_variable_label_values(ds_uid, '__name__')

    # Case 3: Generic PromQL query (including query_result(query))
    try:
        # Unwrap query_result(...) to obtain the raw PromQL expression.
        if query.startswith('query_result(') and query.endswith(')'):
            query = query[len('query_result('):-1]

        # Execute the expression via Grafana's datasource proxy (instant query).
        url = f'{self.__host}/api/datasources/proxy/uid/{ds_uid}/api/v1/query'
        params = {'query': query}
        response = requests.get(url, headers=self.headers, params=params, verify=self.__ssl_verify)
        if response and response.status_code == 200:
            results = response.json().get('data', {}).get('result', [])
            values = []
            for res in results:
                metric_labels = res.get('metric', {})
                # Render the label set like a Prometheus series selector so
                # a user-supplied regex can match against it.
                metric_str = "{" + ", ".join([f'{k}="{v}"' for k,v in metric_labels.items()]) + "}"

                if 'regex' in var and var['regex']:
                    # NOTE(review): Grafana stores variable regexes as e.g.
                    # "/pattern/"; this uses the field verbatim — confirm the
                    # surrounding slashes are stripped upstream.
                    match = re.search(var['regex'], metric_str)
                    if match:
                        # Prefer the first capture group when one exists.
                        values.append(match.group(1) if len(match.groups()) > 0 else match.group(0))
                else:
                    # Default behavior: extract value of a label if there is one other than __name__
                    # otherwise, the __name__
                    non_name_labels = {k: v for k, v in metric_labels.items() if k != '__name__'}
                    if len(non_name_labels) == 1:
                        values.append(list(non_name_labels.values())[0])
                    else:
                        values.append(metric_labels.get('__name__', metric_str))
            # De-duplicate and sort for a stable, user-friendly ordering.
            return sorted(list(set(values)))
        else:
            logger.error(f"Query failed for '{query}'. Status: {response.status_code}, Body: {response.text}")
            return []
    except Exception as e:
        logger.error(f"Exception during generic query execution for '{query}': {e}")
        return []

def panel_query_datasource_api(self, tr: TimeRange, queries, interval_ms=300000):
try:
if not queries or len(queries) == 0:
Expand Down
77 changes: 77 additions & 0 deletions integrations/source_manangers/grafana_source_manager.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import logging
import re
import string
Expand Down Expand Up @@ -52,6 +53,7 @@ def __init__(self):
self.task_type_callable_map = {
Grafana.TaskType.PROMETHEUS_DATASOURCE_METRIC_EXECUTION: {
"executor": self.execute_prometheus_datasource_metric_execution,
"asset_descriptor": self.execute_prometheus_datasource_metric_asset_descriptor,
"model_types": [SourceModelType.GRAFANA_PROMETHEUS_DATASOURCE],
"result_type": PlaybookTaskResultType.API_RESPONSE,
"display_name": "Query any of your Prometheus Data Sources from Grafana",
Expand Down Expand Up @@ -95,6 +97,7 @@ def __init__(self):
},
Grafana.TaskType.QUERY_DASHBOARD_PANEL_METRIC: {
"executor": self.execute_query_dashboard_panel_metric_execution,
"asset_descriptor": self.query_dashboard_panel_metric_asset_descriptor,
"model_types": [SourceModelType.GRAFANA_DASHBOARD],
"result_type": PlaybookTaskResultType.API_RESPONSE,
"display_name": "Query any of your dashboard panels from Grafana",
Expand Down Expand Up @@ -197,6 +200,23 @@ def __init__(self):
),
],
},
Grafana.TaskType.FETCH_DASHBOARD_VARIABLES: {
"executor": self.execute_fetch_dashboard_variables,
"asset_descriptor": self.query_dashboard_panel_metric_asset_descriptor,
"model_types": [SourceModelType.GRAFANA_DASHBOARD],
"result_type": PlaybookTaskResultType.API_RESPONSE,
"display_name": "Fetch all variables and their values from a Grafana Dashboard",
"category": "Metrics",
"form_fields": [
FormField(
key_name=StringValue(value="dashboard_uid"),
display_name=StringValue(value="Dashboard UID"),
description=StringValue(value="Select Dashboard UID to fetch variables from"),
data_type=LiteralType.STRING,
form_field_type=FormFieldType.TYPING_DROPDOWN_FT,
),
],
},
}

def get_connector_processor(self, grafana_connector, **kwargs):
Expand Down Expand Up @@ -299,6 +319,63 @@ def execute_fetch_dashboard_variable_label_values(self, time_range: TimeRange, g
except Exception as e:
raise Exception(f"Error while executing Grafana fetch dashboard variable label values task: {e}") from e

def execute_fetch_dashboard_variables(self, time_range: TimeRange, grafana_task: Grafana,
                                      grafana_connector: ConnectorProto):
    """Execute the FETCH_DASHBOARD_VARIABLES task.

    Resolves every template variable on the selected dashboard via the
    connector's API processor and returns them as an API_RESPONSE result.
    Falls back to a TEXT result when the task proto is misconfigured,
    when the dashboard has no variables, or when execution fails.
    """
    try:
        if not grafana_connector:
            raise Exception("Task execution Failed:: No Grafana source found")

        # Access the task using the correct attribute name from the proto
        if not hasattr(grafana_task, 'fetch_dashboard_variables'):
            # Fallback for proto generation issues
            logger.warning("fetch_dashboard_variables attribute not found, trying alternative access")
            # Try to access by index in the oneof if the attribute is not available
            return PlaybookTaskResult(
                type=PlaybookTaskResultType.TEXT,
                text=TextResult(output=StringValue(value="Task type not properly configured in proto")),
                source=self.source,
            )

        task = grafana_task.fetch_dashboard_variables
        dashboard_uid = task.dashboard_uid.value
        api_processor = self.get_connector_processor(grafana_connector)

        print(
            f"Playbook Task Downstream Request: Type -> Grafana FETCH_DASHBOARD_VARIABLES, Dashboard_UID -> {dashboard_uid}",
            flush=True,
        )

        variables_data = api_processor.get_dashboard_variables(dashboard_uid)
        if not variables_data or not variables_data.get('variables'):
            return PlaybookTaskResult(
                type=PlaybookTaskResultType.TEXT,
                text=TextResult(output=StringValue(value=f"No variables found for dashboard: {dashboard_uid}")),
                source=self.source,
            )

        # Ensure we have a proper Struct instance
        if isinstance(variables_data, dict):
            response_struct = Struct()
            response_struct.update(variables_data)
        else:
            response_struct = dict_to_proto(variables_data, Struct)

        return PlaybookTaskResult(
            source=self.source,
            type=PlaybookTaskResultType.API_RESPONSE,
            api_response=ApiResponseResult(response_body=response_struct),
        )
    except Exception as e:
        logger.error(f"Error while executing Grafana fetch dashboard variables task: {e}")
        return PlaybookTaskResult(
            type=PlaybookTaskResultType.TEXT,
            text=TextResult(output=StringValue(value=f"Error executing dashboard variables task: {str(e)}")),
            source=self.source,
        )

def execute_query_dashboard_panel_metric_execution(self, time_range: TimeRange, grafana_task: Grafana,
grafana_connector: ConnectorProto):
try:
Expand Down
6 changes: 6 additions & 0 deletions protos/playbooks/source_task_definitions/grafana_task.proto
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,18 @@ message Grafana {
google.protobuf.StringValue label_name = 2;
}

// Task input for FETCH_DASHBOARD_VARIABLES: resolve every template
// variable (and its values) on the dashboard identified by this UID.
message FetchDashboardVariablesTask {
// UID of the Grafana dashboard whose variables should be fetched.
google.protobuf.StringValue dashboard_uid = 1;
}

// Supported Grafana task variants. Values are wire-stable: existing
// numbers must never be changed; new variants are append-only.
enum TaskType {
UNKNOWN = 0;
PROMQL_METRIC_EXECUTION = 1;
PROMETHEUS_DATASOURCE_METRIC_EXECUTION = 2;
QUERY_DASHBOARD_PANEL_METRIC = 3;
EXECUTE_ALL_DASHBOARD_PANELS = 4;
FETCH_DASHBOARD_VARIABLE_LABEL_VALUES = 5;
// Resolve all template variables (and their values) for a dashboard.
FETCH_DASHBOARD_VARIABLES = 6;
}

TaskType type = 1;
Expand All @@ -61,5 +66,6 @@ message Grafana {
QueryDashboardPanelMetricTask query_dashboard_panel_metric = 5;
ExecuteAllDashboardPanelsTask execute_all_dashboard_panels = 6;
FetchDashboardVariableLabelValuesTask fetch_dashboard_variable_label_values = 7;
FetchDashboardVariablesTask fetch_dashboard_variables = 8;
}
}
Loading