diff --git a/requirements.txt b/requirements.txt index 60aa8c3..c32b06c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ mock>=4.0.2 importlib-resources>=3.0.0 pathos>=0.2.7 plotly==5.18.0 +beautifulsoup4>=4.12.3 diff --git a/smsdk/client.py b/smsdk/client.py index 42a9db6..bb7c57f 100644 --- a/smsdk/client.py +++ b/smsdk/client.py @@ -411,6 +411,15 @@ def get_kpi_data_viz( ] = self.get_machine_source_from_clean_name(kwargs) return kpi_entity(self.session, base_url).get_kpi_data_viz(**kwargs) + def get_widget_data(self, model, url_params): + widget_entity = smsdkentities.get("dataViz") + base_url = get_url( + self.config["protocol"], self.tenant, self.config["site.domain"] + ) + return widget_entity(self.session, base_url).get_dashboard_widget_data( + model, **url_params + ) + @version_check_decorator def get_type_from_machine(self, machine_source=None, **kwargs): machine = smsdkentities.get("machine") @@ -698,6 +707,7 @@ def create_share_link( xAxis=X_AXIS_TIME, model="cycle", time_selection=ONE_WEEK_RELATIVE, + are_line_params_available=False, *args, **kwargs, ): @@ -726,21 +736,31 @@ def create_share_link( if model == "kpi" and not isinstance(yAxis, list): yAxis = [yAxis] - - if model == "line" and isinstance(yAxis, list): - newYAxis = [] - for y in yAxis: - if y.get("machineType"): - newYAxis.append(y) - else: - y["machineType"] = self.get_type_from_machine(y["machineName"]) - newYAxis.append(y) - yAxis = newYAxis - elif model == "line": - if not yAxis.get("machineType"): - yAxis["machineType"] = self.get_type_from_machine(yAxis["machineName"]) + if not are_line_params_available: + if model == "line" and isinstance(yAxis, list): + newYAxis = [] + for y in yAxis: + if y.get("machineType"): + newYAxis.append(y) + else: + y["machineType"] = self.get_type_from_machine(y["machineName"]) + newYAxis.append(y) + yAxis = newYAxis + elif model == "line": + if not yAxis.get("machineType"): + yAxis["machineType"] = self.get_type_from_machine( + yAxis["machineName"] + ) return dataViz(self.session, base_url).create_share_link( - *args, assets, chartType, yAxis, xAxis, model, time_selection, **kwargs + asset=assets, + chartType=chartType, + yAxis=yAxis, + xAxis=xAxis, + model=model, + time_selection=time_selection, + are_line_params_available=are_line_params_available, + *args, + **kwargs, ) @version_check_decorator @@ -828,7 +848,6 @@ def get_machine_type_names(self, clean_strings_out=True): machine_types = self.get_data_v1( "machine_type_v1", "get_machine_types", True, **query_params ) - if clean_strings_out: return machine_types["source_type_clean"].unique().tolist() else: @@ -861,3 +880,52 @@ def get_raw_data( kwargs["offset"] = offset return self.get_data_v1("raw_data", "get_raw_data", True, *args, **kwargs) + + @version_check_decorator + def get_dashboard_data(self, dashboard_id=""): + base_url = get_url( + self.config["protocol"], self.tenant, self.config["site.domain"] + ) + # load the entity class and initialize it + cls = smsdkentities.get("dashboard")(self.session, base_url) + panels = getattr(cls, "get_dashboards")(dashboard_id) + return panels + + @version_check_decorator + def create_widget_share_link(self, context="/analysis/datavis", **kwargs): + dataViz = smsdkentities.get("dataViz") + base_url = get_url( + self.config["protocol"], self.tenant, self.config["site.domain"] + ) + return dataViz(self.session, base_url).create_widget_share_link( + context, **kwargs + ) + + @version_check_decorator + def fetch_list_of_udf(self): + base_url = get_url( + 
self.config["protocol"], self.tenant, self.config["site.domain"] + ) + # load the entity class and initialize it + cls = smsdkentities.get("dev_udf")(self.session, base_url) + udf_list = getattr(cls, "get_list_of_udf")() + return udf_list + + @version_check_decorator + def get_udf_data(self, udf_name, **params): + if udf_name: + existing_udf_list = self.fetch_list_of_udf() + if udf_name in existing_udf_list: + base_url = get_url( + self.config["protocol"], self.tenant, self.config["site.domain"] + ) + # load the entity class and initialize it + cls = smsdkentities.get("dev_udf")(self.session, base_url) + udf_data = getattr(cls, "get_udf_data")(udf_name, params) + return udf_data + else: + log.error( + f'UDF named "{udf_name}" does not exist. Please check the name again' + ) + else: + log.error("Name of user defined function is required") diff --git a/smsdk/config/api_endpoints.json b/smsdk/config/api_endpoints.json index bd3497c..67efd80 100644 --- a/smsdk/config/api_endpoints.json +++ b/smsdk/config/api_endpoints.json @@ -33,12 +33,14 @@ "estimate_cycle": "/v1/selector/datavis/estimate/cycle", "estimate_part": "/v1/selector/datavis/estimate/part", "task": "/v1/datavis/task/async", + "analytics_task": "/v1/analytics/task/async", + "line_task": "/v1/linevis/task/async", "share_link": "/v1/obj/ui_share_state" }, "KPI": { "availible_kpis_for_asset": "/v1/selector/datavis/kpi/y_axis" }, - "Assets": { + "Assets": { "url":"/v1/selector/assets" }, "Cookbook": { @@ -52,5 +54,9 @@ }, "RawData": { "url": "/v1/datatab/raw_data" + }, + "UDF_dev": { + "url": "/v1/udf/task/async", + "list_url":"/dev/udf/notebooks" } } diff --git a/smsdk/ma_session.py b/smsdk/ma_session.py index 0488ae6..67ac84b 100644 --- a/smsdk/ma_session.py +++ b/smsdk/ma_session.py @@ -242,9 +242,11 @@ def _complete_async_task( db_mode: str = "sql", **url_params: t_.Any, ) -> t_.Any: - if url_params.get("db_mode") == None: + is_analytics = url_params.get("is_analytics") + if url_params.get("db_mode") == None and not is_analytics: url_params["db_mode"] = db_mode try: + url_params.pop("is_analytics", None) response = getattr(self.session, method.lower())(endpoint, json=url_params) if response.status_code not in [200, 201]: raise ValueError("Error - {}".format(response.text)) @@ -261,8 +263,11 @@ def _complete_async_task( data = response.json() state = data["response"]["state"] if state == "SUCCESS": - return data["response"]["meta"]["results"] - + if not is_analytics: + return data["response"]["meta"]["results"] + else: + response = data["response"]["data"] + return response if state == "FAILURE" or state == "REVOKED": raise ValueError("Error - {}".format(response.text)) except Exception as e: @@ -270,6 +275,16 @@ def _complete_async_task( except Exception as e: raise e + def _get_dashboard_panels(self, endpoint: str, method: str = "get") -> t_.Any: + try: + response = getattr(self.session, method.lower())(endpoint) + if response.status_code not in [200, 201]: + raise ValueError(f"Error - {response.text}") + data = response.json().get("panels", []) + except Exception as e: + raise ValueError("Failed to fetch dashboard panels") from e + return data + def get_json_headers(self) -> CaseInsensitiveDict: """ Headers for json requests diff --git a/smsdk/smsdk_entities/dashboard/__init__.py b/smsdk/smsdk_entities/dashboard/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/smsdk/smsdk_entities/dashboard/dashboard.py b/smsdk/smsdk_entities/dashboard/dashboard.py new file mode 100644 index 0000000..ef8b68f --- /dev/null +++ 
b/smsdk/smsdk_entities/dashboard/dashboard.py
@@ -0,0 +1,41 @@
+from typing import List, Any, Dict, Tuple
+import json
+
+import importlib.resources as pkg_resources
+from smsdk.tool_register import SmsdkEntities, smsdkentities
+from smsdk.utils import module_utility
+from smsdk import config
+from smsdk.ma_session import MaSession
+import logging
+
+log = logging.getLogger(__name__)
+
+ENDPOINTS = json.loads(pkg_resources.read_text(config, "api_endpoints.json"))
+
+
+@smsdkentities.register("dashboard")
+class DashboardData(SmsdkEntities, MaSession):
+    # Decorator to register a function as a utility
+    # Only the registered utilities are accessible
+    # to the outside world via client.get_data()
+    mod_util = module_utility()
+    log = log
+
+    def __init__(self, session: Any, base_url: str) -> None:
+        self.session = session
+        self.base_url = base_url
+
+    @mod_util
+    def get_utilities(
+        self, *args: Tuple[Any, ...], **kwargs: Dict[str, Any]
+    ) -> List[Any]:
+        return [*self.mod_util.all]
+
+    @mod_util
+    def get_dashboards(self, dashboard_id: str) -> List[Any]:
+        """
+        Utility function to get the panels data for a dashboard
+        """
+        url = "{}{}{}".format(self.base_url, "/v1/obj/dashboard/", dashboard_id)
+        panels: List[Any] = self._get_dashboard_panels(url, method="get")
+        return panels
diff --git a/smsdk/smsdk_entities/data_viz/data_viz.py b/smsdk/smsdk_entities/data_viz/data_viz.py
index 57924fd..7715f4a 100644
--- a/smsdk/smsdk_entities/data_viz/data_viz.py
+++ b/smsdk/smsdk_entities/data_viz/data_viz.py
@@ -2,6 +2,7 @@
 from datetime import datetime, timedelta
 from typing import List
 import uuid
+from smsdk.Auth.auth import X_SM_WORKSPACE_ID
 
 import numpy as np
 
@@ -43,7 +44,16 @@ def get_utilities(self, *args, **kwargs) -> List:
 
     @mod_util
     def create_share_link(
-        self, asset, chartType, yAxis, xAxis, model, time_selection, *args, **kwargs
+        self,
+        asset,
+        chartType,
+        yAxis,
+        xAxis,
+        model,
+        time_selection,
+        are_line_params_available=False,
+        *args,
+        **kwargs
     ):
         """
         Creates a share link
@@ -52,6 +62,13 @@ def create_share_link(
         url_params = {}
         url_params["state_hash"] = str(uuid.uuid4())[:8]
         url_params["context"] = "/analysis/datavis"
+        try:
+            url_params["in_use_workspace"] = self.session.headers[X_SM_WORKSPACE_ID]
+        except KeyError:
+            print(
+                "Workspace ID not found in request headers; creating the share link in the default (production) workspace"
+            )
+            pass
         if time_selection["time_type"] == "relative":
             dateRange = {
                 "mode": "relative",
@@ -67,6 +84,8 @@ def create_share_link(
                 "endDate": time_selection["end_time"],
                 "selectedTimeZone": time_selection["time_zone"],
             }
+        else:
+            dateRange = {}
         url_params["state"] = {
             "dataModel": model,
             "asset": asset,
@@ -75,7 +94,7 @@ def create_share_link(
             "dateRange": dateRange,
         }
         url_params["state"].update(kwargs)
-        if model == "line":
+        if model == "line" and not are_line_params_available:
             del url_params["state"]["asset"]
             url_params["state"]["lineProcess"] = {}
             if not isinstance(asset, List) and asset.get("assetOffsets"):
@@ -138,6 +157,49 @@ def create_share_link(
         else:
             url_params["state"]["yAxis"] = yAxis
         response = getattr(self.session, "post")(url, json=url_params)
-        return "{}/#/analysis/datavis/s/{}".format(
-            self.base_url, response.json()["state_hash"]
+        return "{}/#{}/s/{}".format(
+            self.base_url, url_params["context"], response.json()["state_hash"]
         )
+
+    @mod_util
+    def get_dashboard_widget_data(self, model, *args, **kwargs):
+        """
+        Takes the query params from a dashboard widget
+        Returns the data for that query
+        """
+        is_analytics = False
+        if model == "line":
+            endpoint = ENDPOINTS["DataViz"]["line_task"]
+        elif model == "cycle":
+            endpoint = ENDPOINTS["DataViz"]["task"]
+            kwargs["model"] = model
+        else:
+            endpoint = ENDPOINTS["DataViz"]["analytics_task"]
+            is_analytics = True
+        kwargs["is_analytics"] = is_analytics
+        url = "{}{}".format(self.base_url, endpoint)
+        records = self._complete_async_task(url, **kwargs)
+
+        if not isinstance(records, List) and not is_analytics:
+            raise ValueError("Error - {}".format(records))
+
+        return records
+
+    @mod_util
+    def create_widget_share_link(self, context, **kwargs):
+        url = "{}{}".format(self.base_url, ENDPOINTS["DataViz"]["share_link"])
+        url_params = {}
+        url_params["state_hash"] = str(uuid.uuid4())[:8]
+        url_params["context"] = context
+        try:
+            url_params["in_use_workspace"] = self.session.headers[X_SM_WORKSPACE_ID]
+        except KeyError:
+            print(
+                "Workspace ID not found in request headers; creating the share link in the default (production) workspace"
+            )
+            pass
+        url_params["state"] = kwargs
+        response = getattr(self.session, "post")(url, json=url_params)
+        return "{}/#{}/s/{}".format(
+            self.base_url, url_params["context"], response.json()["state_hash"]
+        )
diff --git a/smsdk/smsdk_entities/dev_udf/__init__.py b/smsdk/smsdk_entities/dev_udf/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/smsdk/smsdk_entities/dev_udf/dev_udf.py b/smsdk/smsdk_entities/dev_udf/dev_udf.py
new file mode 100644
index 0000000..215ae4c
--- /dev/null
+++ b/smsdk/smsdk_entities/dev_udf/dev_udf.py
@@ -0,0 +1,88 @@
+import time
+from typing import List, Any, Dict, Union, Tuple
+from bs4 import BeautifulSoup
+import json
+import importlib.resources as pkg_resources
+from smsdk.tool_register import SmsdkEntities, smsdkentities
+from smsdk.utils import module_utility
+from smsdk import config
+from smsdk.ma_session import MaSession
+import logging
+
+log = logging.getLogger(__name__)
+
+ENDPOINTS = json.loads(pkg_resources.read_text(config, "api_endpoints.json"))
+
+
+@smsdkentities.register("dev_udf")
+class UDFData(SmsdkEntities, MaSession):
+    # Decorator to register a function as a utility
+    # Only the registered utilities are accessible
+    # to the outside world via client.get_data()
+    mod_util = module_utility()
+    log = log
+
+    def __init__(self, session: Any, base_url: str) -> None:
+        self.session = session
+        self.base_url = base_url
+
+    @mod_util
+    def get_utilities(
+        self, *args: Tuple[Any, ...], **kwargs: Dict[str, Any]
+    ) -> List[Any]:
+        return [*self.mod_util.all]
+
+    @mod_util
+    def get_list_of_udf(self) -> List[Any]:
+        """
+        Utility function to get the list of UDFs present in the dev tool
+        """
+        url = "{}{}".format(self.base_url, ENDPOINTS["UDF_dev"]["list_url"])
+        response = self.session.get(url).json()
+        list_of_udfs = [udf["name"] for udf in response]
+        return list_of_udfs
+
+    @mod_util
+    def get_udf_data(self, udf_name: str, params: Dict[str, Any]) -> List[Any]:
+        """
+        Utility function to get the data after executing a UDF notebook
+        """
+        url = "{}{}".format(self.base_url, ENDPOINTS["UDF_dev"]["url"])
+        payload: Dict[str, Any] = {"name": udf_name}
+
+        if params:
+            if isinstance(params, dict):
+                payload["parameters"] = params
+            else:
+                raise TypeError("Expected 'params' to be a dictionary or None.")
+        MAX_WAIT_TIME = 60  # Maximum time to wait (in seconds)
+        POLL_INTERVAL = 5  # Time between polling attempts (in seconds)
+        start_time = time.time()
+        async_task_id: str = ""
+        while time.time() - start_time < MAX_WAIT_TIME:
+            results = self.session.post(url, json=payload)
+            if results and results.status_code in [200, 201]:
+                async_task_id = results.json().get("response").get("task_id")
+                break
+            time.sleep(POLL_INTERVAL)
+        if not async_task_id:
+            raise TimeoutError(
+                f"UDF task for '{udf_name}' was not accepted within {MAX_WAIT_TIME} seconds"
+            )
+
+        results = self.session.get(url + "/" + async_task_id).json()
+
+        response = results.get("response")
+        if (
+            not response
+            or "meta" not in response
+            or not isinstance(response["meta"], list)
+        ):
+            raise ValueError("Response does not contain a valid 'meta' structure.")
+
+        meta = response["meta"]
+        if not meta or not isinstance(meta[0], dict) or "data" not in meta[0]:
+            raise ValueError("Meta does not contain a valid 'data' key.")
+
+        data: List[Any] = meta[0]["data"]
+        return data
diff --git a/smsdk/smsdk_entities/workspace/__init__.py b/smsdk/smsdk_entities/workspace/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/smsdk/smsdk_entities/workspace/workspace.py b/smsdk/smsdk_entities/workspace/workspace.py
new file mode 100644
index 0000000..b1d10cb
--- /dev/null
+++ b/smsdk/smsdk_entities/workspace/workspace.py
@@ -0,0 +1,52 @@
+from typing import List, Any, Tuple, Dict
+import json
+import importlib.resources as pkg_resources
+from smsdk.tool_register import SmsdkEntities, smsdkentities
+from smsdk.utils import module_utility
+from smsdk import config
+from smsdk.ma_session import MaSession
+
+import logging
+
+log = logging.getLogger(__name__)
+
+ENDPOINTS = json.loads(pkg_resources.read_text(config, "api_endpoints.json"))
+
+
+@smsdkentities.register("workspace")
+class Workspace(SmsdkEntities, MaSession):
+    # Decorator to register a function as a utility
+    # Only the registered utilities are accessible
+    # to the outside world via client.get_data()
+    mod_util = module_utility()
+
+    def __init__(self, session: Any, base_url: str) -> None:
+        self.session = session
+        self.base_url = base_url
+
+    @mod_util
+    def get_utilities(
+        self, *args: Tuple[Any, ...], **kwargs: Dict[str, Any]
+    ) -> List[Any]:
+        """
+        Get the list of registered utilities by name
+        """
+        return [*self.mod_util.all]
+
+    @mod_util
+    def get_cycles(self, *args: Tuple[Any, ...], **kwargs: Any) -> List[Any]:
+        """
+        Utility function to get the cycles
+        from the MA API
+        Recommended: pass 'enable_pagination': True for larger datasets
+        """
+        url = "{}{}".format(self.base_url, ENDPOINTS["Cycle"]["alt_url"])
+
+        if "machine__source" not in kwargs and "machine__source__in" not in kwargs:
+            log.warning("Machine source not specified.")
+            return []
+
+        records = self._get_records(url, **kwargs)
+        if not isinstance(records, List):
+            raise ValueError("Error - {}".format(records))
+        return records
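
Usage sketch: a minimal example of how the client methods added above (get_dashboard_data, get_widget_data, create_widget_share_link, fetch_list_of_udf, get_udf_data) might be called together. It assumes an already-authenticated smsdk Client; the tenant, dashboard id, widget query payload, share-link state keys, and UDF name/parameters are placeholders, not values taken from this change:

    from smsdk import client

    # Placeholder tenant; authentication is assumed to have been completed
    # on this client before the calls below.
    cli = client.Client("demo-tenant")

    # Fetch the panel definitions for one dashboard (placeholder id).
    panels = cli.get_dashboard_data(dashboard_id="000000000000000000000000")

    # Re-run a widget query. url_params is the query payload stored on the
    # widget; only "model" is interpreted here ("line", "cycle", or anything
    # else, which is routed to the analytics task endpoint).
    widget_params = {"machine__source": "Demo Machine 1"}  # placeholder payload
    cycle_rows = cli.get_widget_data(model="cycle", url_params=widget_params)

    # Share a widget state at a given context path; the kwargs become the
    # saved "state" of the share link.
    link = cli.create_widget_share_link(
        context="/analysis/datavis",
        dataModel="cycle",  # placeholder state key
    )

    # List the dev-tool UDF notebooks and execute one by name (placeholder
    # name and parameters).
    udf_names = cli.fetch_list_of_udf()
    if "example_udf" in udf_names:
        rows = cli.get_udf_data("example_udf", start_date="2024-01-01")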