From 1895356e25d6b4447fe7b0fa7dbd4bf3fe5c524c Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Thu, 24 Oct 2024 12:46:19 -0400 Subject: [PATCH 1/7] gen --- .gitmodules | 2 +- hatchet_sdk/clients/rest/__init__.py | 12 + hatchet_sdk/clients/rest/api/api_token_api.py | 33 +- hatchet_sdk/clients/rest/api/default_api.py | 66 +- hatchet_sdk/clients/rest/api/event_api.py | 88 +- hatchet_sdk/clients/rest/api/github_api.py | 11 +- .../clients/rest/api/healthcheck_api.py | 8 +- hatchet_sdk/clients/rest/api/log_api.py | 11 +- hatchet_sdk/clients/rest/api/metadata_api.py | 33 +- .../clients/rest/api/rate_limits_api.py | 11 +- hatchet_sdk/clients/rest/api/slack_api.py | 22 +- hatchet_sdk/clients/rest/api/sns_api.py | 33 +- hatchet_sdk/clients/rest/api/step_run_api.py | 77 +- hatchet_sdk/clients/rest/api/tenant_api.py | 176 +- hatchet_sdk/clients/rest/api/user_api.py | 90 +- hatchet_sdk/clients/rest/api/worker_api.py | 33 +- hatchet_sdk/clients/rest/api/workflow_api.py | 2183 +++++++++++------ .../clients/rest/api/workflow_run_api.py | 44 +- hatchet_sdk/clients/rest/api_client.py | 56 +- hatchet_sdk/clients/rest/configuration.py | 18 +- hatchet_sdk/clients/rest/models/__init__.py | 12 + hatchet_sdk/clients/rest/models/api_errors.py | 6 +- .../rest/models/bulk_create_event_request.py | 6 +- .../rest/models/bulk_create_event_response.py | 6 +- .../clients/rest/models/cron_workflows.py | 105 + .../rest/models/cron_workflows_list.py | 101 + .../models/cron_workflows_order_by_field.py | 36 + hatchet_sdk/clients/rest/models/event_list.py | 6 +- .../rest/models/get_step_run_diff_response.py | 6 +- hatchet_sdk/clients/rest/models/job.py | 6 +- hatchet_sdk/clients/rest/models/job_run.py | 6 +- .../rest/models/list_api_tokens_response.py | 6 +- .../models/list_pull_requests_response.py | 6 +- .../rest/models/list_slack_webhooks.py | 6 +- .../rest/models/list_sns_integrations.py | 6 +- .../clients/rest/models/log_line_list.py | 6 +- .../clients/rest/models/rate_limit_list.py | 6 +- 
.../models/replay_workflow_runs_response.py | 6 +- .../rest/models/scheduled_workflows.py | 106 + .../rest/models/scheduled_workflows_list.py | 101 + .../scheduled_workflows_order_by_field.py | 36 + .../rest/models/step_run_archive_list.py | 6 +- .../rest/models/step_run_event_list.py | 6 +- .../models/tenant_alert_email_group_list.py | 6 +- .../clients/rest/models/tenant_invite_list.py | 6 +- .../clients/rest/models/tenant_list.py | 6 +- .../clients/rest/models/tenant_member_list.py | 6 +- .../rest/models/tenant_queue_metrics.py | 16 - .../rest/models/tenant_resource_policy.py | 6 +- .../models/tenant_step_run_queue_metrics.py | 2 +- .../models/user_tenant_memberships_list.py | 6 +- .../models/webhook_worker_list_response.py | 6 +- .../webhook_worker_request_list_response.py | 6 +- hatchet_sdk/clients/rest/models/worker.py | 18 +- .../clients/rest/models/worker_list.py | 6 +- hatchet_sdk/clients/rest/models/workflow.py | 18 +- .../clients/rest/models/workflow_list.py | 6 +- .../clients/rest/models/workflow_run.py | 6 +- .../clients/rest/models/workflow_run_list.py | 6 +- .../clients/rest/models/workflow_run_shape.py | 6 +- .../clients/rest/models/workflow_triggers.py | 12 +- .../clients/rest/models/workflow_version.py | 6 +- hatchet_sdk/clients/rest/rest.py | 11 +- 63 files changed, 2395 insertions(+), 1366 deletions(-) create mode 100644 hatchet_sdk/clients/rest/models/cron_workflows.py create mode 100644 hatchet_sdk/clients/rest/models/cron_workflows_list.py create mode 100644 hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py create mode 100644 hatchet_sdk/clients/rest/models/scheduled_workflows.py create mode 100644 hatchet_sdk/clients/rest/models/scheduled_workflows_list.py create mode 100644 hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py diff --git a/.gitmodules b/.gitmodules index 2e2e6198..9f1a616a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,4 @@ [submodule "hatchet"] path = hatchet url = 
git@github.com:hatchet-dev/hatchet.git - branch = main + branch = feat--scheduled-improvements diff --git a/hatchet_sdk/clients/rest/__init__.py b/hatchet_sdk/clients/rest/__init__.py index 31cf4825..ab4e0b16 100644 --- a/hatchet_sdk/clients/rest/__init__.py +++ b/hatchet_sdk/clients/rest/__init__.py @@ -84,6 +84,11 @@ CreateTenantInviteRequest, ) from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest +from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows +from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList +from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import ( + CronWorkflowsOrderByField, +) from hatchet_sdk.clients.rest.models.event import Event from hatchet_sdk.clients.rest.models.event_data import EventData from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList @@ -141,6 +146,13 @@ ReplayWorkflowRunsResponse, ) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest +from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows +from hatchet_sdk.clients.rest.models.scheduled_workflows_list import ( + ScheduledWorkflowsList, +) +from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import ( + ScheduledWorkflowsOrderByField, +) from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration diff --git a/hatchet_sdk/clients/rest/api/api_token_api.py b/hatchet_sdk/clients/rest/api/api_token_api.py index 054ccc6b..5b32d0aa 100644 --- a/hatchet_sdk/clients/rest/api/api_token_api.py +++ b/hatchet_sdk/clients/rest/api/api_token_api.py @@ -282,9 +282,7 @@ def _api_token_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - 
_files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -298,10 +296,9 @@ def _api_token_create_serialize( _body_params = create_api_token_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -557,9 +554,7 @@ def _api_token_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -571,10 +566,9 @@ def _api_token_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -820,9 +814,7 @@ def _api_token_update_revoke_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -834,10 +826,9 @@ def _api_token_update_revoke_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in 
_header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/default_api.py b/hatchet_sdk/clients/rest/api/default_api.py index 27b77ab7..5cd078e4 100644 --- a/hatchet_sdk/clients/rest/api/default_api.py +++ b/hatchet_sdk/clients/rest/api/default_api.py @@ -308,9 +308,7 @@ def _tenant_invite_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -324,10 +322,9 @@ def _tenant_invite_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -626,9 +623,7 @@ def _tenant_invite_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -644,10 +639,9 @@ def _tenant_invite_update_serialize( _body_params = update_tenant_invite_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( 
- ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -919,9 +913,7 @@ def _webhook_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -935,10 +927,9 @@ def _webhook_create_serialize( _body_params = webhook_worker_create_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -1197,9 +1188,7 @@ def _webhook_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1211,10 +1200,9 @@ def _webhook_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1463,9 +1451,7 @@ def _webhook_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = 
[] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1477,10 +1463,9 @@ def _webhook_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1729,9 +1714,7 @@ def _webhook_requests_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1743,10 +1726,9 @@ def _webhook_requests_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/event_api.py b/hatchet_sdk/clients/rest/api/event_api.py index bb1500d6..e7544196 100644 --- a/hatchet_sdk/clients/rest/api/event_api.py +++ b/hatchet_sdk/clients/rest/api/event_api.py @@ -303,9 +303,7 @@ def _event_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: 
Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -319,10 +317,9 @@ def _event_create_serialize( _body_params = create_event_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -600,9 +597,7 @@ def _event_create_bulk_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -616,10 +611,9 @@ def _event_create_bulk_serialize( _body_params = bulk_create_event_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -875,9 +869,7 @@ def _event_data_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -889,10 +881,9 @@ def _event_data_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = 
self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1138,9 +1129,7 @@ def _event_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1152,10 +1141,9 @@ def _event_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1401,9 +1389,7 @@ def _event_key_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1415,10 +1401,9 @@ def _event_key_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1878,9 +1863,7 @@ def _event_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, 
Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1932,10 +1915,9 @@ def _event_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -2203,9 +2185,7 @@ def _event_update_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2219,10 +2199,9 @@ def _event_update_cancel_serialize( _body_params = cancel_event_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2500,9 +2479,7 @@ def _event_update_replay_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2516,10 +2493,9 @@ def _event_update_replay_serialize( _body_params = replay_event_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - 
_header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: diff --git a/hatchet_sdk/clients/rest/api/github_api.py b/hatchet_sdk/clients/rest/api/github_api.py index 23c1b269..121441df 100644 --- a/hatchet_sdk/clients/rest/api/github_api.py +++ b/hatchet_sdk/clients/rest/api/github_api.py @@ -291,9 +291,7 @@ def _sns_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -307,10 +305,9 @@ def _sns_update_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = [] diff --git a/hatchet_sdk/clients/rest/api/healthcheck_api.py b/hatchet_sdk/clients/rest/api/healthcheck_api.py index 4b7793eb..7eb18a36 100644 --- a/hatchet_sdk/clients/rest/api/healthcheck_api.py +++ b/hatchet_sdk/clients/rest/api/healthcheck_api.py @@ -229,9 +229,7 @@ def _liveness_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -453,9 +451,7 @@ def _readiness_get_serialize( _query_params: List[Tuple[str, str]] = 
[] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/log_api.py b/hatchet_sdk/clients/rest/api/log_api.py index eaf16677..dd941af0 100644 --- a/hatchet_sdk/clients/rest/api/log_api.py +++ b/hatchet_sdk/clients/rest/api/log_api.py @@ -385,9 +385,7 @@ def _log_line_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -423,10 +421,9 @@ def _log_line_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/metadata_api.py b/hatchet_sdk/clients/rest/api/metadata_api.py index 61659069..44248e20 100644 --- a/hatchet_sdk/clients/rest/api/metadata_api.py +++ b/hatchet_sdk/clients/rest/api/metadata_api.py @@ -232,9 +232,7 @@ def _cloud_metadata_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ 
-244,10 +242,9 @@ def _cloud_metadata_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = [] @@ -462,9 +459,7 @@ def _metadata_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -474,10 +469,9 @@ def _metadata_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = [] @@ -692,9 +686,7 @@ def _metadata_list_integrations_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -704,10 +696,9 @@ def _metadata_list_integrations_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] 
= ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/rate_limits_api.py b/hatchet_sdk/clients/rest/api/rate_limits_api.py index c5e7e4ee..3445856c 100644 --- a/hatchet_sdk/clients/rest/api/rate_limits_api.py +++ b/hatchet_sdk/clients/rest/api/rate_limits_api.py @@ -365,9 +365,7 @@ def _rate_limit_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -399,10 +397,9 @@ def _rate_limit_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/slack_api.py b/hatchet_sdk/clients/rest/api/slack_api.py index 9b0e637d..6a0a0e43 100644 --- a/hatchet_sdk/clients/rest/api/slack_api.py +++ b/hatchet_sdk/clients/rest/api/slack_api.py @@ -273,9 +273,7 @@ def _slack_webhook_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -287,10 +285,9 @@ def _slack_webhook_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + 
_header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -539,9 +536,7 @@ def _slack_webhook_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -553,10 +548,9 @@ def _slack_webhook_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/sns_api.py b/hatchet_sdk/clients/rest/api/sns_api.py index bb020ceb..f3214c03 100644 --- a/hatchet_sdk/clients/rest/api/sns_api.py +++ b/hatchet_sdk/clients/rest/api/sns_api.py @@ -281,9 +281,7 @@ def _sns_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -297,10 +295,9 @@ def _sns_create_serialize( _body_params = create_sns_integration_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header 
`Content-Type` if _content_type: @@ -568,9 +565,7 @@ def _sns_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -582,10 +577,9 @@ def _sns_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -834,9 +828,7 @@ def _sns_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -848,10 +840,9 @@ def _sns_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/step_run_api.py b/hatchet_sdk/clients/rest/api/step_run_api.py index 851ed174..5c60f199 100644 --- a/hatchet_sdk/clients/rest/api/step_run_api.py +++ b/hatchet_sdk/clients/rest/api/step_run_api.py @@ -295,9 +295,7 @@ def _step_run_get_serialize( _query_params: List[Tuple[str, str]] = [] 
_header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -311,10 +309,9 @@ def _step_run_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -591,9 +588,7 @@ def _step_run_get_schema_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -607,10 +602,9 @@ def _step_run_get_schema_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -897,9 +891,7 @@ def _step_run_list_archives_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -919,10 +911,9 @@ def 
_step_run_list_archives_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1209,9 +1200,7 @@ def _step_run_list_events_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1231,10 +1220,9 @@ def _step_run_list_events_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1508,9 +1496,7 @@ def _step_run_update_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1524,10 +1510,9 @@ def _step_run_update_cancel_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # 
authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1820,9 +1805,7 @@ def _step_run_update_rerun_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1838,10 +1821,9 @@ def _step_run_update_rerun_serialize( _body_params = rerun_step_run_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2156,9 +2138,7 @@ def _workflow_run_list_step_run_events_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2176,10 +2156,9 @@ def _workflow_run_list_step_run_events_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/tenant_api.py b/hatchet_sdk/clients/rest/api/tenant_api.py index cd5e4f07..16fe9310 100644 --- a/hatchet_sdk/clients/rest/api/tenant_api.py +++ b/hatchet_sdk/clients/rest/api/tenant_api.py @@ 
-313,9 +313,7 @@ def _alert_email_group_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -329,10 +327,9 @@ def _alert_email_group_create_serialize( _body_params = create_tenant_alert_email_group_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -597,9 +594,7 @@ def _alert_email_group_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -611,10 +606,9 @@ def _alert_email_group_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -860,9 +854,7 @@ def _alert_email_group_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, 
Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -874,10 +866,9 @@ def _alert_email_group_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1154,9 +1145,7 @@ def _alert_email_group_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1170,10 +1159,9 @@ def _alert_email_group_update_serialize( _body_params = update_tenant_alert_email_group_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -1429,9 +1417,7 @@ def _tenant_alerting_settings_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1443,10 +1429,9 @@ def _tenant_alerting_settings_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = 
self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1683,9 +1668,7 @@ def _tenant_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1697,10 +1680,9 @@ def _tenant_create_serialize( _body_params = create_tenant_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -1959,9 +1941,7 @@ def _tenant_get_step_run_queue_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1973,10 +1953,9 @@ def _tenant_get_step_run_queue_metrics_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -2207,9 +2186,7 @@ def _tenant_invite_accept_serialize( _query_params: List[Tuple[str, 
str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2221,10 +2198,9 @@ def _tenant_invite_accept_serialize( _body_params = accept_invite_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2499,9 +2475,7 @@ def _tenant_invite_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2515,10 +2489,9 @@ def _tenant_invite_create_serialize( _body_params = create_tenant_invite_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2774,9 +2747,7 @@ def _tenant_invite_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2788,10 +2759,9 @@ def 
_tenant_invite_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -3022,9 +2992,7 @@ def _tenant_invite_reject_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3036,10 +3004,9 @@ def _tenant_invite_reject_serialize( _body_params = reject_invite_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -3335,9 +3302,7 @@ def _tenant_member_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3351,10 +3316,9 @@ def _tenant_member_delete_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting 
_auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -3600,9 +3564,7 @@ def _tenant_member_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3614,10 +3576,9 @@ def _tenant_member_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -3863,9 +3824,7 @@ def _tenant_resource_policy_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3877,10 +3836,9 @@ def _tenant_resource_policy_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -4145,9 +4103,7 @@ def _tenant_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], 
List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -4161,10 +4117,9 @@ def _tenant_update_serialize( _body_params = update_tenant_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -4392,9 +4347,7 @@ def _user_list_tenant_invites_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -4404,10 +4357,9 @@ def _user_list_tenant_invites_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth"] diff --git a/hatchet_sdk/clients/rest/api/user_api.py b/hatchet_sdk/clients/rest/api/user_api.py index a9e7a35f..a0617fd3 100644 --- a/hatchet_sdk/clients/rest/api/user_api.py +++ b/hatchet_sdk/clients/rest/api/user_api.py @@ -241,9 +241,7 @@ def _tenant_memberships_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path 
parameters @@ -253,10 +251,9 @@ def _tenant_memberships_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth"] @@ -490,9 +487,7 @@ def _user_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -504,10 +499,9 @@ def _user_create_serialize( _body_params = user_register_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -738,9 +732,7 @@ def _user_get_current_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -750,10 +742,9 @@ def _user_get_current_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting 
_auth_settings: List[str] = ["cookieAuth"] @@ -965,9 +956,7 @@ def _user_update_github_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1186,9 +1175,7 @@ def _user_update_github_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1407,9 +1394,7 @@ def _user_update_google_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1628,9 +1613,7 @@ def _user_update_google_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1871,9 +1854,7 @@ def _user_update_login_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], 
List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1885,10 +1866,9 @@ def _user_update_login_serialize( _body_params = user_login_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2119,9 +2099,7 @@ def _user_update_logout_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2131,10 +2109,9 @@ def _user_update_logout_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth"] @@ -2368,9 +2345,7 @@ def _user_update_password_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2382,10 +2357,9 @@ def _user_update_password_serialize( _body_params = user_change_password_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = 
self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -2607,9 +2581,7 @@ def _user_update_slack_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2856,9 +2828,7 @@ def _user_update_slack_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/worker_api.py b/hatchet_sdk/clients/rest/api/worker_api.py index f1be3e82..e50f80b9 100644 --- a/hatchet_sdk/clients/rest/api/worker_api.py +++ b/hatchet_sdk/clients/rest/api/worker_api.py @@ -263,9 +263,7 @@ def _worker_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -277,10 +275,9 @@ def _worker_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + 
["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -526,9 +523,7 @@ def _worker_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -540,10 +535,9 @@ def _worker_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -808,9 +802,7 @@ def _worker_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -824,10 +816,9 @@ def _worker_update_serialize( _body_params = update_worker_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: diff --git a/hatchet_sdk/clients/rest/api/workflow_api.py b/hatchet_sdk/clients/rest/api/workflow_api.py index 87532339..7b110f6d 100644 --- a/hatchet_sdk/clients/rest/api/workflow_api.py +++ b/hatchet_sdk/clients/rest/api/workflow_api.py @@ -20,6 +20,16 @@ from 
hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized from hatchet_sdk.clients.rest.api_response import ApiResponse +from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList +from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import ( + CronWorkflowsOrderByField, +) +from hatchet_sdk.clients.rest.models.scheduled_workflows_list import ( + ScheduledWorkflowsList, +) +from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import ( + ScheduledWorkflowsOrderByField, +) from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics from hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind @@ -57,7 +67,7 @@ def __init__(self, api_client=None) -> None: self.api_client = api_client @validate_call - async def tenant_get_queue_metrics( + async def cron_workflow_list( self, tenant: Annotated[ str, @@ -65,14 +75,27 @@ async def tenant_get_queue_metrics( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflows: Annotated[ - Optional[List[StrictStr]], - Field(description="A list of workflow IDs to filter by"), + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, + order_by_field: Annotated[ + Optional[CronWorkflowsOrderByField], Field(description="The order by field") + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, _request_timeout: Union[ 
None, Annotated[StrictFloat, Field(gt=0)], @@ -84,17 +107,25 @@ async def tenant_get_queue_metrics( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> TenantQueueMetrics: - """Get workflow metrics + ) -> CronWorkflowsList: + """Get workflow runs - Get the queue metrics for the tenant + Get all cron job workflow runs for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflows: A list of workflow IDs to filter by - :type workflows: List[str] + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] + :param order_by_field: The order by field + :type order_by_field: CronWorkflowsOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -117,10 +148,14 @@ async def tenant_get_queue_metrics( :return: Returns the result object. 
""" # noqa: E501 - _param = self._tenant_get_queue_metrics_serialize( + _param = self._cron_workflow_list_serialize( tenant=tenant, - workflows=workflows, + offset=offset, + limit=limit, + workflow_id=workflow_id, additional_metadata=additional_metadata, + order_by_field=order_by_field, + order_by_direction=order_by_direction, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -128,10 +163,9 @@ async def tenant_get_queue_metrics( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "TenantQueueMetrics", + "200": "CronWorkflowsList", "400": "APIErrors", "403": "APIErrors", - "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout @@ -143,7 +177,7 @@ async def tenant_get_queue_metrics( ).data @validate_call - async def tenant_get_queue_metrics_with_http_info( + async def cron_workflow_list_with_http_info( self, tenant: Annotated[ str, @@ -151,14 +185,27 @@ async def tenant_get_queue_metrics_with_http_info( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflows: Annotated[ - Optional[List[StrictStr]], - Field(description="A list of workflow IDs to filter by"), + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, + order_by_field: Annotated[ + Optional[CronWorkflowsOrderByField], Field(description="The order by field") + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, _request_timeout: Union[ None, 
Annotated[StrictFloat, Field(gt=0)], @@ -170,17 +217,25 @@ async def tenant_get_queue_metrics_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[TenantQueueMetrics]: - """Get workflow metrics + ) -> ApiResponse[CronWorkflowsList]: + """Get workflow runs - Get the queue metrics for the tenant + Get all cron job workflow runs for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflows: A list of workflow IDs to filter by - :type workflows: List[str] + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] + :param order_by_field: The order by field + :type order_by_field: CronWorkflowsOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -203,10 +258,14 @@ async def tenant_get_queue_metrics_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._tenant_get_queue_metrics_serialize( + _param = self._cron_workflow_list_serialize( tenant=tenant, - workflows=workflows, + offset=offset, + limit=limit, + workflow_id=workflow_id, additional_metadata=additional_metadata, + order_by_field=order_by_field, + order_by_direction=order_by_direction, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -214,10 +273,9 @@ async def tenant_get_queue_metrics_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "TenantQueueMetrics", + "200": "CronWorkflowsList", "400": "APIErrors", "403": "APIErrors", - "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout @@ -229,7 +287,7 @@ async def tenant_get_queue_metrics_with_http_info( ) @validate_call - async def tenant_get_queue_metrics_without_preload_content( + async def cron_workflow_list_without_preload_content( self, tenant: Annotated[ str, @@ -237,14 +295,27 @@ async def tenant_get_queue_metrics_without_preload_content( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflows: Annotated[ - Optional[List[StrictStr]], - Field(description="A list of workflow IDs to filter by"), + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, + order_by_field: Annotated[ + Optional[CronWorkflowsOrderByField], Field(description="The order by field") + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), 
+ ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -257,16 +328,24 @@ async def tenant_get_queue_metrics_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflow metrics + """Get workflow runs - Get the queue metrics for the tenant + Get all cron job workflow runs for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflows: A list of workflow IDs to filter by - :type workflows: List[str] + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] + :param order_by_field: The order by field + :type order_by_field: CronWorkflowsOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -289,10 +368,14 @@ async def tenant_get_queue_metrics_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._tenant_get_queue_metrics_serialize( + _param = self._cron_workflow_list_serialize( tenant=tenant, - workflows=workflows, + offset=offset, + limit=limit, + workflow_id=workflow_id, additional_metadata=additional_metadata, + order_by_field=order_by_field, + order_by_direction=order_by_direction, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -300,21 +383,24 @@ async def tenant_get_queue_metrics_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "TenantQueueMetrics", + "200": "CronWorkflowsList", "400": "APIErrors", "403": "APIErrors", - "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout ) return response_data.response - def _tenant_get_queue_metrics_serialize( + def _cron_workflow_list_serialize( self, tenant, - workflows, + offset, + limit, + workflow_id, additional_metadata, + order_by_field, + order_by_direction, _request_auth, _content_type, _headers, @@ -324,7 +410,6 @@ def _tenant_get_queue_metrics_serialize( _host = None _collection_formats: Dict[str, str] = { - "workflows": "multi", "additionalMetadata": "multi", } @@ -332,39 +417,52 @@ def _tenant_get_queue_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters if tenant is not None: _path_params["tenant"] = tenant # process the query parameters - if workflows is not None: + if offset is not None: - _query_params.append(("workflows", workflows)) + _query_params.append(("offset", offset)) + + if limit is not None: + + _query_params.append(("limit", limit)) + + if workflow_id is not None: + + _query_params.append(("workflowId", 
workflow_id)) if additional_metadata is not None: _query_params.append(("additionalMetadata", additional_metadata)) + if order_by_field is not None: + + _query_params.append(("orderByField", order_by_field.value)) + + if order_by_direction is not None: + + _query_params.append(("orderByDirection", order_by_direction.value)) + # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( method="GET", - resource_path="/api/v1/tenants/{tenant}/queue-metrics", + resource_path="/api/v1/tenants/{tenant}/workflows/crons", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -378,14 +476,22 @@ def _tenant_get_queue_metrics_serialize( ) @validate_call - async def workflow_delete( + async def tenant_get_queue_metrics( self, - workflow: Annotated[ + tenant: Annotated[ str, Field( - min_length=36, strict=True, max_length=36, description="The workflow id" + min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflows: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of workflow IDs to filter by"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -397,13 +503,17 @@ async def workflow_delete( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> None: - """Delete workflow + ) -> TenantQueueMetrics: + """Get workflow 
metrics - Delete a workflow for a tenant + Get the queue metrics for the tenant - :param workflow: The workflow id (required) - :type workflow: str + :param tenant: The tenant id (required) + :type tenant: str + :param workflows: A list of workflow IDs to filter by + :type workflows: List[str] + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -426,8 +536,10 @@ async def workflow_delete( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_delete_serialize( - workflow=workflow, + _param = self._tenant_get_queue_metrics_serialize( + tenant=tenant, + workflows=workflows, + additional_metadata=additional_metadata, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -435,7 +547,7 @@ async def workflow_delete( ) _response_types_map: Dict[str, Optional[str]] = { - "204": None, + "200": "TenantQueueMetrics", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -450,14 +562,22 @@ async def workflow_delete( ).data @validate_call - async def workflow_delete_with_http_info( + async def tenant_get_queue_metrics_with_http_info( self, - workflow: Annotated[ + tenant: Annotated[ str, Field( - min_length=36, strict=True, max_length=36, description="The workflow id" + min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflows: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of workflow IDs to filter by"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -469,13 +589,17 @@ async def workflow_delete_with_http_info( _content_type: Optional[StrictStr] = None, _headers: 
Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[None]: - """Delete workflow + ) -> ApiResponse[TenantQueueMetrics]: + """Get workflow metrics - Delete a workflow for a tenant + Get the queue metrics for the tenant - :param workflow: The workflow id (required) - :type workflow: str + :param tenant: The tenant id (required) + :type tenant: str + :param workflows: A list of workflow IDs to filter by + :type workflows: List[str] + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -498,8 +622,10 @@ async def workflow_delete_with_http_info( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_delete_serialize( - workflow=workflow, + _param = self._tenant_get_queue_metrics_serialize( + tenant=tenant, + workflows=workflows, + additional_metadata=additional_metadata, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -507,7 +633,7 @@ async def workflow_delete_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "204": None, + "200": "TenantQueueMetrics", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -522,14 +648,22 @@ async def workflow_delete_with_http_info( ) @validate_call - async def workflow_delete_without_preload_content( + async def tenant_get_queue_metrics_without_preload_content( self, - workflow: Annotated[ + tenant: Annotated[ str, Field( - min_length=36, strict=True, max_length=36, description="The workflow id" + min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflows: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of workflow IDs to filter by"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + 
Field(description="A list of metadata key value pairs to filter by"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -542,13 +676,17 @@ async def workflow_delete_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Delete workflow + """Get workflow metrics - Delete a workflow for a tenant + Get the queue metrics for the tenant - :param workflow: The workflow id (required) - :type workflow: str - :param _request_timeout: timeout setting for this request. If one + :param tenant: The tenant id (required) + :type tenant: str + :param workflows: A list of workflow IDs to filter by + :type workflows: List[str] + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. @@ -570,8 +708,10 @@ async def workflow_delete_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_delete_serialize( - workflow=workflow, + _param = self._tenant_get_queue_metrics_serialize( + tenant=tenant, + workflows=workflows, + additional_metadata=additional_metadata, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -579,7 +719,7 @@ async def workflow_delete_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "204": None, + "200": "TenantQueueMetrics", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -589,9 +729,11 @@ async def workflow_delete_without_preload_content( ) return response_data.response - def _workflow_delete_serialize( + def _tenant_get_queue_metrics_serialize( self, - workflow, + tenant, + workflows, + additional_metadata, _request_auth, _content_type, _headers, @@ -600,37 +742,45 @@ def _workflow_delete_serialize( _host = None - _collection_formats: Dict[str, str] = {} + _collection_formats: Dict[str, str] = { + "workflows": "multi", + "additionalMetadata": "multi", + } _path_params: Dict[str, str] = {} _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters - if workflow is not None: - _path_params["workflow"] = workflow + if tenant is not None: + _path_params["tenant"] = tenant # process the query parameters + if workflows is not None: + + _query_params.append(("workflows", workflows)) + + if additional_metadata is not None: + + _query_params.append(("additionalMetadata", additional_metadata)) + # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - 
["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( - method="DELETE", - resource_path="/api/v1/workflows/{workflow}", + method="GET", + resource_path="/api/v1/tenants/{tenant}/queue-metrics", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -644,7 +794,7 @@ def _workflow_delete_serialize( ) @validate_call - async def workflow_get( + async def workflow_delete( self, workflow: Annotated[ str, @@ -663,10 +813,10 @@ async def workflow_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Workflow: - """Get workflow + ) -> None: + """Delete workflow - Get a workflow for a tenant + Delete a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str @@ -692,7 +842,7 @@ async def workflow_get( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_get_serialize( + _param = self._workflow_delete_serialize( workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, @@ -701,7 +851,7 @@ async def workflow_get( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "Workflow", + "204": None, "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -716,7 +866,7 @@ async def workflow_get( ).data @validate_call - async def workflow_get_with_http_info( + async def workflow_delete_with_http_info( self, workflow: Annotated[ str, @@ -735,10 +885,10 @@ async def workflow_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Workflow]: - """Get workflow + ) -> ApiResponse[None]: + """Delete workflow - Get a workflow for a tenant + Delete a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str @@ -764,7 +914,7 @@ async def workflow_get_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_get_serialize( + _param = self._workflow_delete_serialize( workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, @@ -773,7 +923,7 @@ async def workflow_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "Workflow", + "204": None, "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -788,7 +938,7 @@ async def workflow_get_with_http_info( ) @validate_call - async def workflow_get_without_preload_content( + async def workflow_delete_without_preload_content( self, workflow: Annotated[ str, @@ -808,9 +958,9 @@ async def workflow_get_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflow + """Delete workflow - Get a workflow for a tenant + Delete a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str @@ -836,7 +986,7 @@ async def workflow_get_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_get_serialize( + _param = self._workflow_delete_serialize( workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, @@ -845,7 +995,7 @@ async def workflow_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "Workflow", + "204": None, "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -855,7 +1005,7 @@ async def workflow_get_without_preload_content( ) return response_data.response - def _workflow_get_serialize( + def _workflow_delete_serialize( self, workflow, _request_auth, @@ -872,9 +1022,7 @@ def _workflow_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -886,16 +1034,15 @@ def _workflow_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( - method="GET", + method="DELETE", resource_path="/api/v1/workflows/{workflow}", path_params=_path_params, query_params=_query_params, @@ -910,7 +1057,7 @@ def _workflow_get_serialize( ) @validate_call - async def workflow_get_metrics( + async def workflow_get( self, workflow: Annotated[ str, @@ -918,13 +1065,6 @@ async def workflow_get_metrics( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], - status: Annotated[ - Optional[WorkflowRunStatus], - Field(description="A status of workflow run statuses to filter 
by"), - ] = None, - group_key: Annotated[ - Optional[StrictStr], Field(description="A group key to filter metrics by") - ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -936,17 +1076,13 @@ async def workflow_get_metrics( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> WorkflowMetrics: - """Get workflow metrics + ) -> Workflow: + """Get workflow - Get the metrics for a workflow version + Get a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str - :param status: A status of workflow run statuses to filter by - :type status: WorkflowRunStatus - :param group_key: A group key to filter metrics by - :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -969,10 +1105,8 @@ async def workflow_get_metrics( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_get_metrics_serialize( + _param = self._workflow_get_serialize( workflow=workflow, - status=status, - group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -980,7 +1114,7 @@ async def workflow_get_metrics( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowMetrics", + "200": "Workflow", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -995,7 +1129,7 @@ async def workflow_get_metrics( ).data @validate_call - async def workflow_get_metrics_with_http_info( + async def workflow_get_with_http_info( self, workflow: Annotated[ str, @@ -1003,13 +1137,6 @@ async def workflow_get_metrics_with_http_info( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], - status: Annotated[ - Optional[WorkflowRunStatus], - Field(description="A status of workflow run statuses to filter by"), - ] = None, - group_key: Annotated[ - Optional[StrictStr], Field(description="A group key to filter metrics by") - ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1021,17 +1148,13 @@ async def workflow_get_metrics_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowMetrics]: - """Get workflow metrics + ) -> ApiResponse[Workflow]: + """Get workflow - Get the metrics for a workflow version + Get a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str - :param status: A status of workflow run statuses to filter by - :type status: WorkflowRunStatus - :param group_key: A group key to filter metrics by - :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1054,10 +1177,8 @@ async def workflow_get_metrics_with_http_info( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_get_metrics_serialize( + _param = self._workflow_get_serialize( workflow=workflow, - status=status, - group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1065,7 +1186,7 @@ async def workflow_get_metrics_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowMetrics", + "200": "Workflow", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -1080,7 +1201,7 @@ async def workflow_get_metrics_with_http_info( ) @validate_call - async def workflow_get_metrics_without_preload_content( + async def workflow_get_without_preload_content( self, workflow: Annotated[ str, @@ -1088,13 +1209,6 @@ async def workflow_get_metrics_without_preload_content( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], - status: Annotated[ - Optional[WorkflowRunStatus], - Field(description="A status of workflow run statuses to filter by"), - ] = None, - group_key: Annotated[ - Optional[StrictStr], Field(description="A group key to filter metrics by") - ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1107,16 +1221,12 @@ async def workflow_get_metrics_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflow metrics + """Get workflow - Get the metrics for a workflow version + Get a workflow for a tenant :param workflow: The workflow id (required) :type workflow: str - :param status: A status of workflow run statuses to filter by - :type status: WorkflowRunStatus - :param group_key: A group key to filter metrics by - :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1139,10 +1249,8 @@ async def workflow_get_metrics_without_preload_content( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_get_metrics_serialize( + _param = self._workflow_get_serialize( workflow=workflow, - status=status, - group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1150,7 +1258,7 @@ async def workflow_get_metrics_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowMetrics", + "200": "Workflow", "400": "APIErrors", "403": "APIErrors", "404": "APIErrors", @@ -1160,11 +1268,9 @@ async def workflow_get_metrics_without_preload_content( ) return response_data.response - def _workflow_get_metrics_serialize( + def _workflow_get_serialize( self, workflow, - status, - group_key, _request_auth, _content_type, _headers, @@ -1179,39 +1285,28 @@ def _workflow_get_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters if workflow is not None: _path_params["workflow"] = workflow # process the query parameters - if status is not None: - - _query_params.append(("status", status.value)) - - if group_key is not None: - - _query_params.append(("groupKey", group_key)) - # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return 
self.api_client.param_serialize( method="GET", - resource_path="/api/v1/workflows/{workflow}/metrics", + resource_path="/api/v1/workflows/{workflow}", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1225,20 +1320,21 @@ def _workflow_get_metrics_serialize( ) @validate_call - async def workflow_get_workers_count( + async def workflow_get_metrics( self, - tenant: Annotated[ - str, - Field( - min_length=36, strict=True, max_length=36, description="The tenant id" - ), - ], workflow: Annotated[ str, Field( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], + status: Annotated[ + Optional[WorkflowRunStatus], + Field(description="A status of workflow run statuses to filter by"), + ] = None, + group_key: Annotated[ + Optional[StrictStr], Field(description="A group key to filter metrics by") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1250,15 +1346,17 @@ async def workflow_get_workers_count( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> WorkflowWorkersCount: - """Get workflow worker count + ) -> WorkflowMetrics: + """Get workflow metrics - Get a count of the workers available for workflow + Get the metrics for a workflow version - :param tenant: The tenant id (required) - :type tenant: str :param workflow: The workflow id (required) :type workflow: str + :param status: A status of workflow run statuses to filter by + :type status: WorkflowRunStatus + :param group_key: A group key to filter metrics by + :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1281,9 +1379,10 @@ async def workflow_get_workers_count( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_get_workers_count_serialize( - tenant=tenant, + _param = self._workflow_get_metrics_serialize( workflow=workflow, + status=status, + group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1291,9 +1390,10 @@ async def workflow_get_workers_count( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowWorkersCount", + "200": "WorkflowMetrics", "400": "APIErrors", "403": "APIErrors", + "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout @@ -1305,20 +1405,21 @@ async def workflow_get_workers_count( ).data @validate_call - async def workflow_get_workers_count_with_http_info( + async def workflow_get_metrics_with_http_info( self, - tenant: Annotated[ - str, - Field( - min_length=36, strict=True, max_length=36, description="The tenant id" - ), - ], workflow: Annotated[ str, Field( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], + status: Annotated[ + Optional[WorkflowRunStatus], + Field(description="A status of workflow run statuses to filter by"), + ] = None, + group_key: Annotated[ + Optional[StrictStr], Field(description="A group key to filter metrics by") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1330,15 +1431,17 @@ async def workflow_get_workers_count_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowWorkersCount]: - """Get workflow worker count + ) -> ApiResponse[WorkflowMetrics]: + """Get workflow metrics - Get a count of the workers available for workflow + Get the metrics for a workflow version - :param tenant: The tenant id (required) - :type tenant: str :param workflow: The workflow id (required) :type workflow: str + :param status: A status of workflow run statuses to filter by + :type 
status: WorkflowRunStatus + :param group_key: A group key to filter metrics by + :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1361,9 +1464,10 @@ async def workflow_get_workers_count_with_http_info( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_get_workers_count_serialize( - tenant=tenant, + _param = self._workflow_get_metrics_serialize( workflow=workflow, + status=status, + group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1371,9 +1475,10 @@ async def workflow_get_workers_count_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowWorkersCount", + "200": "WorkflowMetrics", "400": "APIErrors", "403": "APIErrors", + "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout @@ -1385,22 +1490,23 @@ async def workflow_get_workers_count_with_http_info( ) @validate_call - async def workflow_get_workers_count_without_preload_content( + async def workflow_get_metrics_without_preload_content( self, - tenant: Annotated[ - str, - Field( - min_length=36, strict=True, max_length=36, description="The tenant id" - ), - ], workflow: Annotated[ str, Field( min_length=36, strict=True, max_length=36, description="The workflow id" ), ], - _request_timeout: Union[ - None, + status: Annotated[ + Optional[WorkflowRunStatus], + Field(description="A status of workflow run statuses to filter by"), + ] = None, + group_key: Annotated[ + Optional[StrictStr], Field(description="A group key to filter metrics by") + ] = None, + _request_timeout: Union[ + None, Annotated[StrictFloat, Field(gt=0)], Tuple[ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] @@ -1411,14 +1517,16 @@ async def workflow_get_workers_count_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = 
None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflow worker count + """Get workflow metrics - Get a count of the workers available for workflow + Get the metrics for a workflow version - :param tenant: The tenant id (required) - :type tenant: str :param workflow: The workflow id (required) :type workflow: str + :param status: A status of workflow run statuses to filter by + :type status: WorkflowRunStatus + :param group_key: A group key to filter metrics by + :type group_key: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1441,9 +1549,10 @@ async def workflow_get_workers_count_without_preload_content( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_get_workers_count_serialize( - tenant=tenant, + _param = self._workflow_get_metrics_serialize( workflow=workflow, + status=status, + group_key=group_key, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1451,19 +1560,21 @@ async def workflow_get_workers_count_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowWorkersCount", + "200": "WorkflowMetrics", "400": "APIErrors", "403": "APIErrors", + "404": "APIErrors", } response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout ) return response_data.response - def _workflow_get_workers_count_serialize( + def _workflow_get_metrics_serialize( self, - tenant, workflow, + status, + group_key, _request_auth, _content_type, _headers, @@ -1478,33 +1589,36 @@ def _workflow_get_workers_count_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} 
_body_params: Optional[bytes] = None # process the path parameters - if tenant is not None: - _path_params["tenant"] = tenant if workflow is not None: _path_params["workflow"] = workflow # process the query parameters + if status is not None: + + _query_params.append(("status", status.value)) + + if group_key is not None: + + _query_params.append(("groupKey", group_key)) + # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( method="GET", - resource_path="/api/v1/tenants/{tenant}/workflows/{workflow}/worker-count", + resource_path="/api/v1/workflows/{workflow}/metrics", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1518,7 +1632,7 @@ def _workflow_get_workers_count_serialize( ) @validate_call - async def workflow_list( + async def workflow_get_workers_count( self, tenant: Annotated[ str, @@ -1526,6 +1640,12 @@ async def workflow_list( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflow: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The workflow id" + ), + ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1537,13 +1657,15 @@ async def workflow_list( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> WorkflowList: - """Get workflows + ) -> WorkflowWorkersCount: + """Get workflow worker count - Get all workflows for a tenant + Get a count of the workers available for workflow :param tenant: The tenant id (required) 
:type tenant: str + :param workflow: The workflow id (required) + :type workflow: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1566,8 +1688,9 @@ async def workflow_list( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_list_serialize( + _param = self._workflow_get_workers_count_serialize( tenant=tenant, + workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1575,7 +1698,7 @@ async def workflow_list( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowList", + "200": "WorkflowWorkersCount", "400": "APIErrors", "403": "APIErrors", } @@ -1589,7 +1712,7 @@ async def workflow_list( ).data @validate_call - async def workflow_list_with_http_info( + async def workflow_get_workers_count_with_http_info( self, tenant: Annotated[ str, @@ -1597,6 +1720,12 @@ async def workflow_list_with_http_info( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflow: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The workflow id" + ), + ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1608,13 +1737,15 @@ async def workflow_list_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowList]: - """Get workflows + ) -> ApiResponse[WorkflowWorkersCount]: + """Get workflow worker count - Get all workflows for a tenant + Get a count of the workers available for workflow :param tenant: The tenant id (required) :type tenant: str + :param workflow: The workflow id (required) + :type workflow: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1637,8 +1768,9 @@ async def workflow_list_with_http_info( :return: Returns the result object. """ # noqa: E501 - _param = self._workflow_list_serialize( + _param = self._workflow_get_workers_count_serialize( tenant=tenant, + workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1646,7 +1778,7 @@ async def workflow_list_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowList", + "200": "WorkflowWorkersCount", "400": "APIErrors", "403": "APIErrors", } @@ -1660,7 +1792,7 @@ async def workflow_list_with_http_info( ) @validate_call - async def workflow_list_without_preload_content( + async def workflow_get_workers_count_without_preload_content( self, tenant: Annotated[ str, @@ -1668,6 +1800,12 @@ async def workflow_list_without_preload_content( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], + workflow: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The workflow id" + ), + ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1680,12 +1818,14 @@ async def workflow_list_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflows + """Get workflow worker count - Get all workflows for a tenant + Get a count of the workers available for workflow :param tenant: The tenant id (required) :type tenant: str + :param workflow: The workflow id (required) + :type workflow: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1708,8 +1848,9 @@ async def workflow_list_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_list_serialize( + _param = self._workflow_get_workers_count_serialize( tenant=tenant, + workflow=workflow, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1717,7 +1858,7 @@ async def workflow_list_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowList", + "200": "WorkflowWorkersCount", "400": "APIErrors", "403": "APIErrors", } @@ -1726,9 +1867,10 @@ async def workflow_list_without_preload_content( ) return response_data.response - def _workflow_list_serialize( + def _workflow_get_workers_count_serialize( self, tenant, + workflow, _request_auth, _content_type, _headers, @@ -1743,31 +1885,30 @@ def _workflow_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters if tenant is not None: _path_params["tenant"] = tenant + if workflow is not None: + _path_params["workflow"] = workflow # process the query parameters # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( method="GET", - resource_path="/api/v1/tenants/{tenant}/workflows", + resource_path="/api/v1/tenants/{tenant}/workflows/{workflow}/worker-count", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1781,7 +1922,7 @@ def 
_workflow_list_serialize( ) @validate_call - async def workflow_run_get( + async def workflow_list( self, tenant: Annotated[ str, @@ -1789,15 +1930,6 @@ async def workflow_run_get( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflow_run: Annotated[ - str, - Field( - min_length=36, - strict=True, - max_length=36, - description="The workflow run id", - ), - ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1809,15 +1941,13 @@ async def workflow_run_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> WorkflowRun: - """Get workflow run + ) -> WorkflowList: + """Get workflows - Get a workflow run for a tenant + Get all workflows for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflow_run: The workflow run id (required) - :type workflow_run: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1840,9 +1970,8 @@ async def workflow_run_get( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_get_serialize( + _param = self._workflow_list_serialize( tenant=tenant, - workflow_run=workflow_run, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1850,7 +1979,7 @@ async def workflow_run_get( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRun", + "200": "WorkflowList", "400": "APIErrors", "403": "APIErrors", } @@ -1864,7 +1993,7 @@ async def workflow_run_get( ).data @validate_call - async def workflow_run_get_with_http_info( + async def workflow_list_with_http_info( self, tenant: Annotated[ str, @@ -1872,15 +2001,6 @@ async def workflow_run_get_with_http_info( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflow_run: Annotated[ - str, - Field( - min_length=36, - strict=True, - max_length=36, - description="The workflow run id", - ), - ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1892,15 +2012,13 @@ async def workflow_run_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowRun]: - """Get workflow run + ) -> ApiResponse[WorkflowList]: + """Get workflows - Get a workflow run for a tenant + Get all workflows for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflow_run: The workflow run id (required) - :type workflow_run: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1923,9 +2041,8 @@ async def workflow_run_get_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_get_serialize( + _param = self._workflow_list_serialize( tenant=tenant, - workflow_run=workflow_run, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1933,7 +2050,7 @@ async def workflow_run_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRun", + "200": "WorkflowList", "400": "APIErrors", "403": "APIErrors", } @@ -1947,7 +2064,7 @@ async def workflow_run_get_with_http_info( ) @validate_call - async def workflow_run_get_without_preload_content( + async def workflow_list_without_preload_content( self, tenant: Annotated[ str, @@ -1955,15 +2072,6 @@ async def workflow_run_get_without_preload_content( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - workflow_run: Annotated[ - str, - Field( - min_length=36, - strict=True, - max_length=36, - description="The workflow run id", - ), - ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1976,14 +2084,12 @@ async def workflow_run_get_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get workflow run + """Get workflows - Get a workflow run for a tenant + Get all workflows for a tenant :param tenant: The tenant id (required) :type tenant: str - :param workflow_run: The workflow run id (required) - :type workflow_run: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2006,9 +2112,8 @@ async def workflow_run_get_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_get_serialize( + _param = self._workflow_list_serialize( tenant=tenant, - workflow_run=workflow_run, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2016,7 +2121,7 @@ async def workflow_run_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRun", + "200": "WorkflowList", "400": "APIErrors", "403": "APIErrors", } @@ -2025,10 +2130,9 @@ async def workflow_run_get_without_preload_content( ) return response_data.response - def _workflow_run_get_serialize( + def _workflow_list_serialize( self, tenant, - workflow_run, _request_auth, _content_type, _headers, @@ -2043,33 +2147,28 @@ def _workflow_run_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters if tenant is not None: _path_params["tenant"] = tenant - if workflow_run is not None: - _path_params["workflow-run"] = workflow_run # process the query parameters # process the header parameters # process the form parameters # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( method="GET", - resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}", + resource_path="/api/v1/tenants/{tenant}/workflows", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -2083,7 +2182,7 @@ def 
_workflow_run_get_serialize( ) @validate_call - async def workflow_run_get_metrics( + async def workflow_run_get( self, tenant: Annotated[ str, @@ -2091,34 +2190,15 @@ async def workflow_run_get_metrics( min_length=36, strict=True, max_length=36, description="The tenant id" ), ], - event_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The event id to get runs for."), - ] = None, - workflow_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The workflow id to get runs for."), - ] = None, - parent_workflow_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent workflow run id"), - ] = None, - parent_step_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent step run id"), - ] = None, - additional_metadata: Annotated[ - Optional[List[StrictStr]], - Field(description="A list of metadata key value pairs to filter by"), - ] = None, - created_after: Annotated[ - Optional[datetime], - Field(description="The time after the workflow run was created"), - ] = None, - created_before: Annotated[ - Optional[datetime], - Field(description="The time before the workflow run was created"), - ] = None, + workflow_run: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The workflow run id", + ), + ], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2130,27 +2210,15 @@ async def workflow_run_get_metrics( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> WorkflowRunsMetrics: - """Get workflow runs + ) -> WorkflowRun: + """Get workflow run - Get a summary of workflow run metrics for a tenant + Get a workflow run for a tenant :param tenant: The 
tenant id (required) :type tenant: str - :param event_id: The event id to get runs for. - :type event_id: str - :param workflow_id: The workflow id to get runs for. - :type workflow_id: str - :param parent_workflow_run_id: The parent workflow run id - :type parent_workflow_run_id: str - :param parent_step_run_id: The parent step run id - :type parent_step_run_id: str - :param additional_metadata: A list of metadata key value pairs to filter by - :type additional_metadata: List[str] - :param created_after: The time after the workflow run was created - :type created_after: datetime - :param created_before: The time before the workflow run was created - :type created_before: datetime + :param workflow_run: The workflow run id (required) + :type workflow_run: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2173,15 +2241,9 @@ async def workflow_run_get_metrics( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_get_metrics_serialize( + _param = self._workflow_run_get_serialize( tenant=tenant, - event_id=event_id, - workflow_id=workflow_id, - parent_workflow_run_id=parent_workflow_run_id, - parent_step_run_id=parent_step_run_id, - additional_metadata=additional_metadata, - created_after=created_after, - created_before=created_before, + workflow_run=workflow_run, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2189,7 +2251,7 @@ async def workflow_run_get_metrics( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRunsMetrics", + "200": "WorkflowRun", "400": "APIErrors", "403": "APIErrors", } @@ -2203,7 +2265,223 @@ async def workflow_run_get_metrics( ).data @validate_call - async def workflow_run_get_metrics_with_http_info( + async def workflow_run_get_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + workflow_run: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The workflow run id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[WorkflowRun]: + """Get workflow run + + Get a workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param workflow_run: The workflow run id (required) + :type workflow_run: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_run_get_serialize( + tenant=tenant, + workflow_run=workflow_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRun", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def workflow_run_get_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + workflow_run: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The workflow run id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get workflow run + + Get a workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param workflow_run: The workflow run id (required) + :type workflow_run: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_get_serialize( + tenant=tenant, + workflow_run=workflow_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRun", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_run_get_serialize( + self, + tenant, + workflow_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + if workflow_run is not None: + _path_params["workflow-run"] = workflow_run + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + async def workflow_run_get_metrics( self, tenant: Annotated[ str, @@ -2250,7 
+2528,7 @@ async def workflow_run_get_metrics_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowRunsMetrics]: + ) -> WorkflowRunsMetrics: """Get workflow runs Get a summary of workflow run metrics for a tenant @@ -2320,10 +2598,10 @@ async def workflow_run_get_metrics_with_http_info( return self.api_client.response_deserialize( response_data=response_data, response_types_map=_response_types_map, - ) + ).data @validate_call - async def workflow_run_get_metrics_without_preload_content( + async def workflow_run_get_metrics_with_http_info( self, tenant: Annotated[ str, @@ -2370,7 +2648,7 @@ async def workflow_run_get_metrics_without_preload_content( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: + ) -> ApiResponse[WorkflowRunsMetrics]: """Get workflow runs Get a summary of workflow run metrics for a tenant @@ -2436,19 +2714,139 @@ async def workflow_run_get_metrics_without_preload_content( response_data = await self.api_client.call_api( *_param, _request_timeout=_request_timeout ) - return response_data.response + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - def _workflow_run_get_metrics_serialize( + @validate_call + async def workflow_run_get_metrics_without_preload_content( self, - tenant, - event_id, - workflow_id, - parent_workflow_run_id, - parent_step_run_id, - additional_metadata, - created_after, - created_before, - _request_auth, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + event_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The event id to get 
runs for."), + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), + ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent step run id"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, + created_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was created"), + ] = None, + created_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was created"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get workflow runs + + Get a summary of workflow run metrics for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param event_id: The event id to get runs for. + :type event_id: str + :param workflow_id: The workflow id to get runs for. 
+ :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param created_after: The time after the workflow run was created + :type created_after: datetime + :param created_before: The time before the workflow run was created + :type created_before: datetime + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_get_metrics_serialize( + tenant=tenant, + event_id=event_id, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + additional_metadata=additional_metadata, + created_after=created_after, + created_before=created_before, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRunsMetrics", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_run_get_metrics_serialize( + self, + tenant, + event_id, + workflow_id, + parent_workflow_run_id, + parent_step_run_id, + additional_metadata, + created_after, + created_before, + _request_auth, _content_type, _headers, _host_index, @@ -2464,9 +2862,7 @@ def _workflow_run_get_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2520,10 +2916,9 @@ def _workflow_run_get_metrics_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -2806,9 +3201,7 @@ def _workflow_run_get_shape_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: 
List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2822,10 +3215,9 @@ def _workflow_run_get_shape_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -2947,16 +3339,640 @@ async def workflow_run_list( :type kinds: List[WorkflowKind] :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] - :param created_after: The time after the workflow run was created - :type created_after: datetime - :param created_before: The time before the workflow run was created - :type created_before: datetime - :param finished_after: The time after the workflow run was finished - :type finished_after: datetime - :param finished_before: The time before the workflow run was finished - :type finished_before: datetime + :param created_after: The time after the workflow run was created + :type created_after: datetime + :param created_before: The time before the workflow run was created + :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime + :param order_by_field: The order by field + :type order_by_field: WorkflowRunOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_run_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + event_id=event_id, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + statuses=statuses, + kinds=kinds, + additional_metadata=additional_metadata, + created_after=created_after, + created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRunList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + async def workflow_run_list_with_http_info( + self, 
+ tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + event_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The event id to get runs for."), + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), + ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent step run id"), + ] = None, + statuses: Annotated[ + Optional[List[WorkflowRunStatus]], + Field(description="A list of workflow run statuses to filter by"), + ] = None, + kinds: Annotated[ + Optional[List[WorkflowKind]], + Field(description="A list of workflow kinds to filter by"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, + created_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was created"), + ] = None, + created_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was created"), + ] = None, + finished_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was finished"), + ] = None, + finished_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was finished"), + ] = None, + order_by_field: Annotated[ + 
Optional[WorkflowRunOrderByField], Field(description="The order by field") + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[WorkflowRunList]: + """Get workflow runs + + Get all workflow runs for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param event_id: The event id to get runs for. + :type event_id: str + :param workflow_id: The workflow id to get runs for. 
+ :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param statuses: A list of workflow run statuses to filter by + :type statuses: List[WorkflowRunStatus] + :param kinds: A list of workflow kinds to filter by + :type kinds: List[WorkflowKind] + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param created_after: The time after the workflow run was created + :type created_after: datetime + :param created_before: The time before the workflow run was created + :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime + :param order_by_field: The order by field + :type order_by_field: WorkflowRunOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_run_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + event_id=event_id, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + statuses=statuses, + kinds=kinds, + additional_metadata=additional_metadata, + created_after=created_after, + created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRunList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def workflow_run_list_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + event_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The event id to get runs for."), + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id 
to get runs for."), + ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent step run id"), + ] = None, + statuses: Annotated[ + Optional[List[WorkflowRunStatus]], + Field(description="A list of workflow run statuses to filter by"), + ] = None, + kinds: Annotated[ + Optional[List[WorkflowKind]], + Field(description="A list of workflow kinds to filter by"), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, + created_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was created"), + ] = None, + created_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was created"), + ] = None, + finished_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was finished"), + ] = None, + finished_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was finished"), + ] = None, + order_by_field: Annotated[ + Optional[WorkflowRunOrderByField], Field(description="The order by field") + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get 
workflow runs + + Get all workflow runs for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param event_id: The event id to get runs for. + :type event_id: str + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param statuses: A list of workflow run statuses to filter by + :type statuses: List[WorkflowRunStatus] + :param kinds: A list of workflow kinds to filter by + :type kinds: List[WorkflowKind] + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param created_after: The time after the workflow run was created + :type created_after: datetime + :param created_before: The time before the workflow run was created + :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime + :param order_by_field: The order by field + :type order_by_field: WorkflowRunOrderByField + :param order_by_direction: The order by direction + :type order_by_direction: WorkflowRunOrderByDirection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_run_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + event_id=event_id, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + statuses=statuses, + kinds=kinds, + additional_metadata=additional_metadata, + created_after=created_after, + created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "WorkflowRunList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_run_list_serialize( + self, + tenant, + offset, + limit, + event_id, + workflow_id, + parent_workflow_run_id, + parent_step_run_id, + statuses, + kinds, + additional_metadata, + created_after, + created_before, + finished_after, + finished_before, + order_by_field, + order_by_direction, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + "statuses": "multi", + "kinds": "multi", + "additionalMetadata": "multi", + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, 
Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + if offset is not None: + + _query_params.append(("offset", offset)) + + if limit is not None: + + _query_params.append(("limit", limit)) + + if event_id is not None: + + _query_params.append(("eventId", event_id)) + + if workflow_id is not None: + + _query_params.append(("workflowId", workflow_id)) + + if parent_workflow_run_id is not None: + + _query_params.append(("parentWorkflowRunId", parent_workflow_run_id)) + + if parent_step_run_id is not None: + + _query_params.append(("parentStepRunId", parent_step_run_id)) + + if statuses is not None: + + _query_params.append(("statuses", statuses)) + + if kinds is not None: + + _query_params.append(("kinds", kinds)) + + if additional_metadata is not None: + + _query_params.append(("additionalMetadata", additional_metadata)) + + if created_after is not None: + if isinstance(created_after, datetime): + _query_params.append( + ( + "createdAfter", + created_after.isoformat(), + ) + ) + else: + _query_params.append(("createdAfter", created_after)) + + if created_before is not None: + if isinstance(created_before, datetime): + _query_params.append( + ( + "createdBefore", + created_before.isoformat(), + ) + ) + else: + _query_params.append(("createdBefore", created_before)) + + if finished_after is not None: + if isinstance(finished_after, datetime): + _query_params.append( + ( + "finishedAfter", + finished_after.strftime( + self.api_client.configuration.datetime_format + ), + ) + ) + else: + _query_params.append(("finishedAfter", finished_after)) + + if finished_before is not None: + if isinstance(finished_before, datetime): + _query_params.append( + ( + "finishedBefore", + finished_before.strftime( + self.api_client.configuration.datetime_format 
+ ), + ) + ) + else: + _query_params.append(("finishedBefore", finished_before)) + + if order_by_field is not None: + + _query_params.append(("orderByField", order_by_field.value)) + + if order_by_direction is not None: + + _query_params.append(("orderByDirection", order_by_direction.value)) + + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/tenants/{tenant}/workflows/runs", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + async def workflow_scheduled_list( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The workflow id to get runs for."), + ] = None, + additional_metadata: Annotated[ + Optional[List[StrictStr]], + Field(description="A list of metadata key value pairs to filter by"), + ] = None, + order_by_field: Annotated[ + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, + _request_timeout: Union[ + 
None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ScheduledWorkflowsList: + """Get workflow runs + + Get all scheduled workflow runs for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] :param order_by_field: The order by field - :type order_by_field: WorkflowRunOrderByField + :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one @@ -2981,21 +3997,12 @@ async def workflow_run_list( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_list_serialize( + _param = self._workflow_scheduled_list_serialize( tenant=tenant, offset=offset, limit=limit, - event_id=event_id, workflow_id=workflow_id, - parent_workflow_run_id=parent_workflow_run_id, - parent_step_run_id=parent_step_run_id, - statuses=statuses, - kinds=kinds, additional_metadata=additional_metadata, - created_after=created_after, - created_before=created_before, - finished_after=finished_after, - finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3005,7 +4012,7 @@ async def workflow_run_list( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRunList", + "200": "ScheduledWorkflowsList", "400": "APIErrors", "403": "APIErrors", } @@ -3019,7 +4026,7 @@ async def workflow_run_list( ).data @validate_call - async def workflow_run_list_with_http_info( + async def workflow_scheduled_list_with_http_info( self, tenant: Annotated[ str, @@ -3033,52 +4040,17 @@ async def workflow_run_list_with_http_info( limit: Annotated[ Optional[StrictInt], Field(description="The number to limit by") ] = None, - event_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The event id to get runs for."), - ] = None, workflow_id: Annotated[ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The workflow id to get runs for."), ] = None, - parent_workflow_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent workflow run id"), - ] = None, - parent_step_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent step run id"), - ] = None, - statuses: Annotated[ - Optional[List[WorkflowRunStatus]], - Field(description="A list of workflow run statuses to filter by"), - ] = None, - 
kinds: Annotated[ - Optional[List[WorkflowKind]], - Field(description="A list of workflow kinds to filter by"), - ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, - created_after: Annotated[ - Optional[datetime], - Field(description="The time after the workflow run was created"), - ] = None, - created_before: Annotated[ - Optional[datetime], - Field(description="The time before the workflow run was created"), - ] = None, - finished_after: Annotated[ - Optional[datetime], - Field(description="The time after the workflow run was finished"), - ] = None, - finished_before: Annotated[ - Optional[datetime], - Field(description="The time before the workflow run was finished"), - ] = None, order_by_field: Annotated[ - Optional[WorkflowRunOrderByField], Field(description="The order by field") + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), ] = None, order_by_direction: Annotated[ Optional[WorkflowRunOrderByDirection], @@ -3095,10 +4067,10 @@ async def workflow_run_list_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[WorkflowRunList]: + ) -> ApiResponse[ScheduledWorkflowsList]: """Get workflow runs - Get all workflow runs for a tenant + Get all scheduled workflow runs for a tenant :param tenant: The tenant id (required) :type tenant: str @@ -3106,30 +4078,12 @@ async def workflow_run_list_with_http_info( :type offset: int :param limit: The number to limit by :type limit: int - :param event_id: The event id to get runs for. - :type event_id: str :param workflow_id: The workflow id to get runs for. 
:type workflow_id: str - :param parent_workflow_run_id: The parent workflow run id - :type parent_workflow_run_id: str - :param parent_step_run_id: The parent step run id - :type parent_step_run_id: str - :param statuses: A list of workflow run statuses to filter by - :type statuses: List[WorkflowRunStatus] - :param kinds: A list of workflow kinds to filter by - :type kinds: List[WorkflowKind] :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] - :param created_after: The time after the workflow run was created - :type created_after: datetime - :param created_before: The time before the workflow run was created - :type created_before: datetime - :param finished_after: The time after the workflow run was finished - :type finished_after: datetime - :param finished_before: The time before the workflow run was finished - :type finished_before: datetime :param order_by_field: The order by field - :type order_by_field: WorkflowRunOrderByField + :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one @@ -3154,21 +4108,12 @@ async def workflow_run_list_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_list_serialize( + _param = self._workflow_scheduled_list_serialize( tenant=tenant, offset=offset, limit=limit, - event_id=event_id, workflow_id=workflow_id, - parent_workflow_run_id=parent_workflow_run_id, - parent_step_run_id=parent_step_run_id, - statuses=statuses, - kinds=kinds, additional_metadata=additional_metadata, - created_after=created_after, - created_before=created_before, - finished_after=finished_after, - finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3178,7 +4123,7 @@ async def workflow_run_list_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRunList", + "200": "ScheduledWorkflowsList", "400": "APIErrors", "403": "APIErrors", } @@ -3192,7 +4137,7 @@ async def workflow_run_list_with_http_info( ) @validate_call - async def workflow_run_list_without_preload_content( + async def workflow_scheduled_list_without_preload_content( self, tenant: Annotated[ str, @@ -3206,52 +4151,17 @@ async def workflow_run_list_without_preload_content( limit: Annotated[ Optional[StrictInt], Field(description="The number to limit by") ] = None, - event_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The event id to get runs for."), - ] = None, workflow_id: Annotated[ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The workflow id to get runs for."), ] = None, - parent_workflow_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent workflow run id"), - ] = None, - parent_step_run_id: Annotated[ - Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], - Field(description="The parent step run id"), - ] = None, - statuses: Annotated[ - Optional[List[WorkflowRunStatus]], - Field(description="A list of 
workflow run statuses to filter by"), - ] = None, - kinds: Annotated[ - Optional[List[WorkflowKind]], - Field(description="A list of workflow kinds to filter by"), - ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, - created_after: Annotated[ - Optional[datetime], - Field(description="The time after the workflow run was created"), - ] = None, - created_before: Annotated[ - Optional[datetime], - Field(description="The time before the workflow run was created"), - ] = None, - finished_after: Annotated[ - Optional[datetime], - Field(description="The time after the workflow run was finished"), - ] = None, - finished_before: Annotated[ - Optional[datetime], - Field(description="The time before the workflow run was finished"), - ] = None, order_by_field: Annotated[ - Optional[WorkflowRunOrderByField], Field(description="The order by field") + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), ] = None, order_by_direction: Annotated[ Optional[WorkflowRunOrderByDirection], @@ -3271,7 +4181,7 @@ async def workflow_run_list_without_preload_content( ) -> RESTResponseType: """Get workflow runs - Get all workflow runs for a tenant + Get all scheduled workflow runs for a tenant :param tenant: The tenant id (required) :type tenant: str @@ -3279,30 +4189,12 @@ async def workflow_run_list_without_preload_content( :type offset: int :param limit: The number to limit by :type limit: int - :param event_id: The event id to get runs for. - :type event_id: str :param workflow_id: The workflow id to get runs for. 
:type workflow_id: str - :param parent_workflow_run_id: The parent workflow run id - :type parent_workflow_run_id: str - :param parent_step_run_id: The parent step run id - :type parent_step_run_id: str - :param statuses: A list of workflow run statuses to filter by - :type statuses: List[WorkflowRunStatus] - :param kinds: A list of workflow kinds to filter by - :type kinds: List[WorkflowKind] :param additional_metadata: A list of metadata key value pairs to filter by :type additional_metadata: List[str] - :param created_after: The time after the workflow run was created - :type created_after: datetime - :param created_before: The time before the workflow run was created - :type created_before: datetime - :param finished_after: The time after the workflow run was finished - :type finished_after: datetime - :param finished_before: The time before the workflow run was finished - :type finished_before: datetime :param order_by_field: The order by field - :type order_by_field: WorkflowRunOrderByField + :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection :param _request_timeout: timeout setting for this request. If one @@ -3327,21 +4219,12 @@ async def workflow_run_list_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._workflow_run_list_serialize( + _param = self._workflow_scheduled_list_serialize( tenant=tenant, offset=offset, limit=limit, - event_id=event_id, workflow_id=workflow_id, - parent_workflow_run_id=parent_workflow_run_id, - parent_step_run_id=parent_step_run_id, - statuses=statuses, - kinds=kinds, additional_metadata=additional_metadata, - created_after=created_after, - created_before=created_before, - finished_after=finished_after, - finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3351,7 +4234,7 @@ async def workflow_run_list_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - "200": "WorkflowRunList", + "200": "ScheduledWorkflowsList", "400": "APIErrors", "403": "APIErrors", } @@ -3360,22 +4243,13 @@ async def workflow_run_list_without_preload_content( ) return response_data.response - def _workflow_run_list_serialize( + def _workflow_scheduled_list_serialize( self, tenant, offset, limit, - event_id, workflow_id, - parent_workflow_run_id, - parent_step_run_id, - statuses, - kinds, additional_metadata, - created_after, - created_before, - finished_after, - finished_before, order_by_field, order_by_direction, _request_auth, @@ -3387,8 +4261,6 @@ def _workflow_run_list_serialize( _host = None _collection_formats: Dict[str, str] = { - "statuses": "multi", - "kinds": "multi", "additionalMetadata": "multi", } @@ -3396,9 +4268,7 @@ def _workflow_run_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3413,82 +4283,14 @@ def _workflow_run_list_serialize( _query_params.append(("limit", limit)) - if event_id is not 
None: - - _query_params.append(("eventId", event_id)) - if workflow_id is not None: _query_params.append(("workflowId", workflow_id)) - if parent_workflow_run_id is not None: - - _query_params.append(("parentWorkflowRunId", parent_workflow_run_id)) - - if parent_step_run_id is not None: - - _query_params.append(("parentStepRunId", parent_step_run_id)) - - if statuses is not None: - - _query_params.append(("statuses", statuses)) - - if kinds is not None: - - _query_params.append(("kinds", kinds)) - if additional_metadata is not None: _query_params.append(("additionalMetadata", additional_metadata)) - if created_after is not None: - if isinstance(created_after, datetime): - _query_params.append( - ( - "createdAfter", - created_after.isoformat(), - ) - ) - else: - _query_params.append(("createdAfter", created_after)) - - if created_before is not None: - if isinstance(created_before, datetime): - _query_params.append( - ( - "createdBefore", - created_before.isoformat(), - ) - ) - else: - _query_params.append(("createdBefore", created_before)) - - if finished_after is not None: - if isinstance(finished_after, datetime): - _query_params.append( - ( - "finishedAfter", - finished_after.strftime( - self.api_client.configuration.datetime_format - ), - ) - ) - else: - _query_params.append(("finishedAfter", finished_after)) - - if finished_before is not None: - if isinstance(finished_before, datetime): - _query_params.append( - ( - "finishedBefore", - finished_before.strftime( - self.api_client.configuration.datetime_format - ), - ) - ) - else: - _query_params.append(("finishedBefore", finished_before)) - if order_by_field is not None: _query_params.append(("orderByField", order_by_field.value)) @@ -3502,17 +4304,16 @@ def _workflow_run_list_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = 
self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] return self.api_client.param_serialize( method="GET", - resource_path="/api/v1/tenants/{tenant}/workflows/runs", + resource_path="/api/v1/tenants/{tenant}/workflows/scheduled", path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -3770,9 +4571,7 @@ def _workflow_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3786,10 +4585,9 @@ def _workflow_update_serialize( _body_params = workflow_update_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -4076,9 +4874,7 @@ def _workflow_version_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -4094,10 +4890,9 @@ def _workflow_version_get_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting 
_auth_settings: List[str] = ["cookieAuth", "bearerAuth"] diff --git a/hatchet_sdk/clients/rest/api/workflow_run_api.py b/hatchet_sdk/clients/rest/api/workflow_run_api.py index d0e1aeb4..7a4c6c6b 100644 --- a/hatchet_sdk/clients/rest/api/workflow_run_api.py +++ b/hatchet_sdk/clients/rest/api/workflow_run_api.py @@ -298,9 +298,7 @@ def _workflow_run_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -314,10 +312,9 @@ def _workflow_run_cancel_serialize( _body_params = workflow_runs_cancel_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -629,9 +626,7 @@ def _workflow_run_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -649,10 +644,9 @@ def _workflow_run_create_serialize( _body_params = trigger_workflow_run_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: @@ -948,9 +942,7 @@ def 
_workflow_run_get_input_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -964,10 +956,9 @@ def _workflow_run_get_input_serialize( # process the body parameter # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # authentication setting _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] @@ -1238,9 +1229,7 @@ def _workflow_run_update_replay_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} + _files: Dict[str, Union[str, bytes]] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1254,10 +1243,9 @@ def _workflow_run_update_replay_serialize( _body_params = replay_workflow_runs_request # set the HTTP header `Accept` - if "Accept" not in _header_params: - _header_params["Accept"] = self.api_client.select_header_accept( - ["application/json"] - ) + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # set the HTTP header `Content-Type` if _content_type: diff --git a/hatchet_sdk/clients/rest/api_client.py b/hatchet_sdk/clients/rest/api_client.py index 76446dda..2980d8c1 100644 --- a/hatchet_sdk/clients/rest/api_client.py +++ b/hatchet_sdk/clients/rest/api_client.py @@ -13,7 +13,6 @@ import datetime -import decimal import json import mimetypes import os @@ -69,7 +68,6 @@ class ApiClient: "bool": 
bool, "date": datetime.date, "datetime": datetime.datetime, - "decimal": decimal.Decimal, "object": object, } _pool = None @@ -222,7 +220,7 @@ def param_serialize( body = self.sanitize_for_serialization(body) # request url - if _host is None or self.configuration.ignore_operation_servers: + if _host is None: url = self.configuration.host + resource_path else: # use server/host defined in path or operation instead @@ -313,9 +311,12 @@ def response_deserialize( match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) encoding = match.group(1) if match else "utf-8" response_text = response_data.data.decode(encoding) - return_data = self.deserialize( - response_text, response_type, content_type - ) + if response_type in ["bytearray", "str"]: + return_data = self.__deserialize_primitive( + response_text, response_type + ) + else: + return_data = self.deserialize(response_text, response_type) finally: if not 200 <= response_data.status <= 299: raise ApiException.from_response( @@ -339,7 +340,6 @@ def sanitize_for_serialization(self, obj): If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date convert to string in iso8601 format. - If obj is decimal.Decimal return string representation. If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. 
@@ -361,8 +361,6 @@ def sanitize_for_serialization(self, obj): return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj) elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() - elif isinstance(obj, decimal.Decimal): - return str(obj) elif isinstance(obj, dict): obj_dict = obj @@ -381,40 +379,21 @@ def sanitize_for_serialization(self, obj): key: self.sanitize_for_serialization(val) for key, val in obj_dict.items() } - def deserialize( - self, response_text: str, response_type: str, content_type: Optional[str] - ): + def deserialize(self, response_text, response_type): """Deserializes response into an object. :param response: RESTResponse object to be deserialized. :param response_type: class literal for deserialized object, or string of class name. - :param content_type: content type of response. :return: deserialized object. """ # fetch data from response object - if content_type is None: - try: - data = json.loads(response_text) - except ValueError: - data = response_text - elif re.match( - r"^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)", - content_type, - re.IGNORECASE, - ): - if response_text == "": - data = "" - else: - data = json.loads(response_text) - elif re.match(r"^text\/[a-z.+-]+\s*(;|$)", content_type, re.IGNORECASE): + try: + data = json.loads(response_text) + except ValueError: data = response_text - else: - raise ApiException( - status=0, reason="Unsupported content type: {0}".format(content_type) - ) return self.__deserialize(data, response_type) @@ -456,8 +435,6 @@ def __deserialize(self, data, klass): return self.__deserialize_date(data) elif klass == datetime.datetime: return self.__deserialize_datetime(data) - elif klass == decimal.Decimal: - return decimal.Decimal(data) elif issubclass(klass, Enum): return self.__deserialize_enum(data, klass) else: @@ -531,10 +508,7 @@ def parameters_to_url_query(self, params, collection_formats): return "&".join(["=".join(map(str, item)) for item in new_params]) - 
def files_parameters( - self, - files: Dict[str, Union[str, bytes, List[str], List[bytes], Tuple[str, bytes]]], - ): + def files_parameters(self, files: Dict[str, Union[str, bytes]]): """Builds form parameters. :param files: File parameters. @@ -549,12 +523,6 @@ def files_parameters( elif isinstance(v, bytes): filename = k filedata = v - elif isinstance(v, tuple): - filename, filedata = v - elif isinstance(v, list): - for file_param in v: - params.extend(self.files_parameters({k: file_param})) - continue else: raise ValueError("Unsupported file value") mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" diff --git a/hatchet_sdk/clients/rest/configuration.py b/hatchet_sdk/clients/rest/configuration.py index 03743d0b..e33efa68 100644 --- a/hatchet_sdk/clients/rest/configuration.py +++ b/hatchet_sdk/clients/rest/configuration.py @@ -39,9 +39,6 @@ class Configuration: """This class contains various settings of the API client. :param host: Base url. - :param ignore_operation_servers - Boolean to ignore operation servers for the API client. - Config will use `host` as the base url regardless of the operation servers. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. @@ -64,7 +61,6 @@ class Configuration: values before. :param ssl_ca_cert: str - the path to a file of concatenated CA certificates in PEM format. - :param retries: Number of retries for API requests. 
:Example: @@ -102,11 +98,7 @@ def __init__( server_variables=None, server_operation_index=None, server_operation_variables=None, - ignore_operation_servers=False, ssl_ca_cert=None, - retries=None, - *, - debug: Optional[bool] = None ) -> None: """Constructor""" self._base_path = "http://localhost" if host is None else host @@ -120,9 +112,6 @@ def __init__( self.server_operation_variables = server_operation_variables or {} """Default server variables """ - self.ignore_operation_servers = ignore_operation_servers - """Ignore operation servers - """ self.temp_folder_path = None """Temp file folder for downloading files """ @@ -166,10 +155,7 @@ def __init__( self.logger_file = None """Debug file location """ - if debug is not None: - self.debug = debug - else: - self.__debug = False + self.debug = False """Debug switch """ @@ -209,7 +195,7 @@ def __init__( self.safe_chars_for_path_param = "" """Safe chars for path_param """ - self.retries = retries + self.retries = None """Adding retries to override urllib3 default value 3 """ # Enable client side validation diff --git a/hatchet_sdk/clients/rest/models/__init__.py b/hatchet_sdk/clients/rest/models/__init__.py index 9c550c2e..f82be7c3 100644 --- a/hatchet_sdk/clients/rest/models/__init__.py +++ b/hatchet_sdk/clients/rest/models/__init__.py @@ -51,6 +51,11 @@ CreateTenantInviteRequest, ) from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest +from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows +from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList +from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import ( + CronWorkflowsOrderByField, +) from hatchet_sdk.clients.rest.models.event import Event from hatchet_sdk.clients.rest.models.event_data import EventData from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList @@ -108,6 +113,13 @@ ReplayWorkflowRunsResponse, ) from 
hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest +from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows +from hatchet_sdk.clients.rest.models.scheduled_workflows_list import ( + ScheduledWorkflowsList, +) +from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import ( + ScheduledWorkflowsOrderByField, +) from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration diff --git a/hatchet_sdk/clients/rest/models/api_errors.py b/hatchet_sdk/clients/rest/models/api_errors.py index e4dfed11..e41cf5fc 100644 --- a/hatchet_sdk/clients/rest/models/api_errors.py +++ b/hatchet_sdk/clients/rest/models/api_errors.py @@ -73,9 +73,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in errors (list) _items = [] if self.errors: - for _item_errors in self.errors: - if _item_errors: - _items.append(_item_errors.to_dict()) + for _item in self.errors: + if _item: + _items.append(_item.to_dict()) _dict["errors"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/bulk_create_event_request.py b/hatchet_sdk/clients/rest/models/bulk_create_event_request.py index 8d08d394..2c053ee1 100644 --- a/hatchet_sdk/clients/rest/models/bulk_create_event_request.py +++ b/hatchet_sdk/clients/rest/models/bulk_create_event_request.py @@ -73,9 +73,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in events (list) _items = [] if self.events: - for _item_events in self.events: - if _item_events: - _items.append(_item_events.to_dict()) + for _item in self.events: + if _item: + _items.append(_item.to_dict()) _dict["events"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/bulk_create_event_response.py 
b/hatchet_sdk/clients/rest/models/bulk_create_event_response.py index 768c5c90..5fd1f3a1 100644 --- a/hatchet_sdk/clients/rest/models/bulk_create_event_response.py +++ b/hatchet_sdk/clients/rest/models/bulk_create_event_response.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in events (list) _items = [] if self.events: - for _item_events in self.events: - if _item_events: - _items.append(_item_events.to_dict()) + for _item in self.events: + if _item: + _items.append(_item.to_dict()) _dict["events"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/cron_workflows.py b/hatchet_sdk/clients/rest/models/cron_workflows.py new file mode 100644 index 00000000..cbc551d6 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/cron_workflows.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta +from typing import Optional, Set +from typing_extensions import Self + +class CronWorkflows(BaseModel): + """ + CronWorkflows + """ # noqa: E501 + metadata: APIResourceMeta + tenant_id: StrictStr = Field(alias="tenantId") + workflow_version_id: StrictStr = Field(alias="workflowVersionId") + workflow_id: StrictStr = Field(alias="workflowId") + workflow_name: StrictStr = Field(alias="workflowName") + cron: StrictStr + input: Optional[Dict[str, Any]] = None + additional_metadata: Optional[Dict[str, Any]] = Field(default=None, alias="additionalMetadata") + __properties: ClassVar[List[str]] = ["metadata", "tenantId", "workflowVersionId", "workflowId", "workflowName", "cron", "input", "additionalMetadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CronWorkflows from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CronWorkflows from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "tenantId": obj.get("tenantId"), + "workflowVersionId": obj.get("workflowVersionId"), + "workflowId": obj.get("workflowId"), + "workflowName": obj.get("workflowName"), + "cron": obj.get("cron"), + "input": obj.get("input"), + "additionalMetadata": obj.get("additionalMetadata") + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/cron_workflows_list.py b/hatchet_sdk/clients/rest/models/cron_workflows_list.py new file mode 100644 index 00000000..312030d6 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/cron_workflows_list.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows +from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse +from typing import Optional, Set +from typing_extensions import Self + +class CronWorkflowsList(BaseModel): + """ + CronWorkflowsList + """ # noqa: E501 + rows: Optional[List[CronWorkflows]] = None + pagination: Optional[PaginationResponse] = None + __properties: ClassVar[List[str]] = ["rows", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CronWorkflowsList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in rows (list) + _items = [] + if self.rows: + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) + _dict['rows'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CronWorkflowsList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": [CronWorkflows.from_dict(_item) for _item in obj["rows"]] if obj.get("rows") is not None else None, + "pagination": PaginationResponse.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py b/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py new file mode 100644 index 00000000..41d6567f --- /dev/null +++ b/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py @@ -0,0 +1,36 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class CronWorkflowsOrderByField(str, Enum): + """ + CronWorkflowsOrderByField + """ + + """ + allowed enum values + """ + CREATEDAT = 'createdAt' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of CronWorkflowsOrderByField from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/hatchet_sdk/clients/rest/models/event_list.py b/hatchet_sdk/clients/rest/models/event_list.py index 5c928005..e12aa656 100644 --- a/hatchet_sdk/clients/rest/models/event_list.py +++ b/hatchet_sdk/clients/rest/models/event_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py b/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py index b9dbc435..c01018b6 100644 --- a/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py +++ b/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py @@ -73,9 +73,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in diffs (list) _items = [] if self.diffs: - for _item_diffs in self.diffs: - if _item_diffs: - _items.append(_item_diffs.to_dict()) + for _item in self.diffs: + if _item: + _items.append(_item.to_dict()) _dict["diffs"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/job.py b/hatchet_sdk/clients/rest/models/job.py index c412ef68..aceaf6f4 100644 --- a/hatchet_sdk/clients/rest/models/job.py +++ b/hatchet_sdk/clients/rest/models/job.py @@ -95,9 +95,9 @@ def 
to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in steps (list) _items = [] if self.steps: - for _item_steps in self.steps: - if _item_steps: - _items.append(_item_steps.to_dict()) + for _item in self.steps: + if _item: + _items.append(_item.to_dict()) _dict["steps"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/job_run.py b/hatchet_sdk/clients/rest/models/job_run.py index 3a7ec051..a9b0da3b 100644 --- a/hatchet_sdk/clients/rest/models/job_run.py +++ b/hatchet_sdk/clients/rest/models/job_run.py @@ -117,9 +117,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in step_runs (list) _items = [] if self.step_runs: - for _item_step_runs in self.step_runs: - if _item_step_runs: - _items.append(_item_step_runs.to_dict()) + for _item in self.step_runs: + if _item: + _items.append(_item.to_dict()) _dict["stepRuns"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/list_api_tokens_response.py b/hatchet_sdk/clients/rest/models/list_api_tokens_response.py index b3590ab3..df9b60ac 100644 --- a/hatchet_sdk/clients/rest/models/list_api_tokens_response.py +++ b/hatchet_sdk/clients/rest/models/list_api_tokens_response.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/list_pull_requests_response.py b/hatchet_sdk/clients/rest/models/list_pull_requests_response.py index 589d4c45..6cfd61bb 100644 --- a/hatchet_sdk/clients/rest/models/list_pull_requests_response.py +++ b/hatchet_sdk/clients/rest/models/list_pull_requests_response.py @@ -73,9 +73,9 @@ def 
to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in pull_requests (list) _items = [] if self.pull_requests: - for _item_pull_requests in self.pull_requests: - if _item_pull_requests: - _items.append(_item_pull_requests.to_dict()) + for _item in self.pull_requests: + if _item: + _items.append(_item.to_dict()) _dict["pullRequests"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/list_slack_webhooks.py b/hatchet_sdk/clients/rest/models/list_slack_webhooks.py index e86956d3..647bc276 100644 --- a/hatchet_sdk/clients/rest/models/list_slack_webhooks.py +++ b/hatchet_sdk/clients/rest/models/list_slack_webhooks.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/list_sns_integrations.py b/hatchet_sdk/clients/rest/models/list_sns_integrations.py index 130e9127..ecf67484 100644 --- a/hatchet_sdk/clients/rest/models/list_sns_integrations.py +++ b/hatchet_sdk/clients/rest/models/list_sns_integrations.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/log_line_list.py b/hatchet_sdk/clients/rest/models/log_line_list.py index e05d186a..306ee2c7 100644 --- a/hatchet_sdk/clients/rest/models/log_line_list.py +++ b/hatchet_sdk/clients/rest/models/log_line_list.py @@ -78,9 +78,9 @@ def 
to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/rate_limit_list.py b/hatchet_sdk/clients/rest/models/rate_limit_list.py index e9f2847d..24df2f3a 100644 --- a/hatchet_sdk/clients/rest/models/rate_limit_list.py +++ b/hatchet_sdk/clients/rest/models/rate_limit_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py index d8a9609d..6f0f780f 100644 --- a/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py +++ b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py @@ -73,9 +73,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in workflow_runs (list) _items = [] if self.workflow_runs: - for _item_workflow_runs in self.workflow_runs: - if _item_workflow_runs: - _items.append(_item_workflow_runs.to_dict()) + for _item in self.workflow_runs: + if _item: + _items.append(_item.to_dict()) _dict["workflowRuns"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows.py b/hatchet_sdk/clients/rest/models/scheduled_workflows.py new file mode 100644 index 00000000..a92a252f --- /dev/null +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows.py @@ -0,0 +1,106 
@@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta +from typing import Optional, Set +from typing_extensions import Self + +class ScheduledWorkflows(BaseModel): + """ + ScheduledWorkflows + """ # noqa: E501 + metadata: APIResourceMeta + tenant_id: StrictStr = Field(alias="tenantId") + workflow_version_id: StrictStr = Field(alias="workflowVersionId") + workflow_id: StrictStr = Field(alias="workflowId") + workflow_name: StrictStr = Field(alias="workflowName") + trigger_at: datetime = Field(alias="triggerAt") + input: Optional[Dict[str, Any]] = None + additional_metadata: Optional[Dict[str, Any]] = Field(default=None, alias="additionalMetadata") + __properties: ClassVar[List[str]] = ["metadata", "tenantId", "workflowVersionId", "workflowId", "workflowName", "triggerAt", "input", "additionalMetadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ScheduledWorkflows from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def 
to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ScheduledWorkflows from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "tenantId": obj.get("tenantId"), + "workflowVersionId": obj.get("workflowVersionId"), + "workflowId": obj.get("workflowId"), + "workflowName": obj.get("workflowName"), + "triggerAt": obj.get("triggerAt"), + "input": obj.get("input"), + "additionalMetadata": obj.get("additionalMetadata") + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py b/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py new file mode 100644 index 00000000..7702a435 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse +from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows +from typing import Optional, Set +from typing_extensions import Self + +class ScheduledWorkflowsList(BaseModel): + """ + ScheduledWorkflowsList + """ # noqa: E501 + rows: Optional[List[ScheduledWorkflows]] = None + pagination: Optional[PaginationResponse] = None + __properties: ClassVar[List[str]] = ["rows", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ScheduledWorkflowsList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in rows (list) + _items = [] + if self.rows: + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) + _dict['rows'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ScheduledWorkflowsList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": [ScheduledWorkflows.from_dict(_item) for _item in obj["rows"]] if obj.get("rows") is not None else None, + "pagination": PaginationResponse.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py new file mode 100644 index 00000000..1b99b85b --- /dev/null +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py @@ -0,0 +1,36 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ScheduledWorkflowsOrderByField(str, Enum): + """ + ScheduledWorkflowsOrderByField + """ + + """ + allowed enum values + """ + TRIGGERAT = 'triggerAt' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ScheduledWorkflowsOrderByField from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/hatchet_sdk/clients/rest/models/step_run_archive_list.py b/hatchet_sdk/clients/rest/models/step_run_archive_list.py index eb4bcef2..fcc1419c 100644 --- a/hatchet_sdk/clients/rest/models/step_run_archive_list.py +++ b/hatchet_sdk/clients/rest/models/step_run_archive_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/step_run_event_list.py b/hatchet_sdk/clients/rest/models/step_run_event_list.py index f146eb8e..a46f2089 100644 --- a/hatchet_sdk/clients/rest/models/step_run_event_list.py +++ b/hatchet_sdk/clients/rest/models/step_run_event_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py b/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py index 73d67df4..9e1a4fc1 100644 --- 
a/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py +++ b/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py @@ -80,9 +80,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_invite_list.py b/hatchet_sdk/clients/rest/models/tenant_invite_list.py index 0ed078ef..95e4ba4d 100644 --- a/hatchet_sdk/clients/rest/models/tenant_invite_list.py +++ b/hatchet_sdk/clients/rest/models/tenant_invite_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_list.py b/hatchet_sdk/clients/rest/models/tenant_list.py index 2dbb320e..623d6206 100644 --- a/hatchet_sdk/clients/rest/models/tenant_list.py +++ b/hatchet_sdk/clients/rest/models/tenant_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_member_list.py b/hatchet_sdk/clients/rest/models/tenant_member_list.py index 5aabdcd1..6627c281 100644 --- a/hatchet_sdk/clients/rest/models/tenant_member_list.py +++ 
b/hatchet_sdk/clients/rest/models/tenant_member_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py b/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py index 4043d47f..02807090 100644 --- a/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py +++ b/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py @@ -77,13 +77,6 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of total if self.total: _dict["total"] = self.total.to_dict() - # override the default output from pydantic by calling `to_dict()` of each value in workflow (dict) - _field_dict = {} - if self.workflow: - for _key_workflow in self.workflow: - if self.workflow[_key_workflow]: - _field_dict[_key_workflow] = self.workflow[_key_workflow].to_dict() - _dict["workflow"] = _field_dict return _dict @classmethod @@ -102,15 +95,6 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if obj.get("total") is not None else None ), - "workflow": ( - dict( - (_k, QueueMetrics.from_dict(_v)) - for _k, _v in obj["workflow"].items() - ) - if obj.get("workflow") is not None - else None - ), - "queues": obj.get("queues"), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/tenant_resource_policy.py b/hatchet_sdk/clients/rest/models/tenant_resource_policy.py index b9e5181f..c8f10af0 100644 --- a/hatchet_sdk/clients/rest/models/tenant_resource_policy.py +++ b/hatchet_sdk/clients/rest/models/tenant_resource_policy.py @@ -75,9 +75,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each 
item in limits (list) _items = [] if self.limits: - for _item_limits in self.limits: - if _item_limits: - _items.append(_item_limits.to_dict()) + for _item in self.limits: + if _item: + _items.append(_item.to_dict()) _dict["limits"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py b/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py index 4b9bfc81..90f85ae6 100644 --- a/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py +++ b/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py @@ -79,5 +79,5 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if not isinstance(obj, dict): return cls.model_validate(obj) - _obj = cls.model_validate({"queues": obj.get("queues")}) + _obj = cls.model_validate({}) return _obj diff --git a/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py b/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py index 98b8041b..1f45b260 100644 --- a/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py +++ b/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py b/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py index a221e182..2d9e08c7 100644 --- a/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py +++ b/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: 
- for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py b/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py index ec813a38..30915cd0 100644 --- a/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py +++ b/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py @@ -75,9 +75,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in requests (list) _items = [] if self.requests: - for _item_requests in self.requests: - if _item_requests: - _items.append(_item_requests.to_dict()) + for _item in self.requests: + if _item: + _items.append(_item.to_dict()) _dict["requests"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/worker.py b/hatchet_sdk/clients/rest/models/worker.py index 0d89492a..48e6eda8 100644 --- a/hatchet_sdk/clients/rest/models/worker.py +++ b/hatchet_sdk/clients/rest/models/worker.py @@ -169,23 +169,23 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in slots (list) _items = [] if self.slots: - for _item_slots in self.slots: - if _item_slots: - _items.append(_item_slots.to_dict()) + for _item in self.slots: + if _item: + _items.append(_item.to_dict()) _dict["slots"] = _items # override the default output from pydantic by calling `to_dict()` of each item in recent_step_runs (list) _items = [] if self.recent_step_runs: - for _item_recent_step_runs in self.recent_step_runs: - if _item_recent_step_runs: - _items.append(_item_recent_step_runs.to_dict()) + for _item in self.recent_step_runs: + if _item: + _items.append(_item.to_dict()) _dict["recentStepRuns"] = _items # override the default output from pydantic by calling `to_dict()` 
of each item in labels (list) _items = [] if self.labels: - for _item_labels in self.labels: - if _item_labels: - _items.append(_item_labels.to_dict()) + for _item in self.labels: + if _item: + _items.append(_item.to_dict()) _dict["labels"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/worker_list.py b/hatchet_sdk/clients/rest/models/worker_list.py index bb02d792..3ffa4349 100644 --- a/hatchet_sdk/clients/rest/models/worker_list.py +++ b/hatchet_sdk/clients/rest/models/worker_list.py @@ -78,9 +78,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/workflow.py b/hatchet_sdk/clients/rest/models/workflow.py index f3107144..42fd94dd 100644 --- a/hatchet_sdk/clients/rest/models/workflow.py +++ b/hatchet_sdk/clients/rest/models/workflow.py @@ -100,23 +100,23 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in versions (list) _items = [] if self.versions: - for _item_versions in self.versions: - if _item_versions: - _items.append(_item_versions.to_dict()) + for _item in self.versions: + if _item: + _items.append(_item.to_dict()) _dict["versions"] = _items # override the default output from pydantic by calling `to_dict()` of each item in tags (list) _items = [] if self.tags: - for _item_tags in self.tags: - if _item_tags: - _items.append(_item_tags.to_dict()) + for _item in self.tags: + if _item: + _items.append(_item.to_dict()) _dict["tags"] = _items # override the default output from pydantic by calling `to_dict()` of each item in jobs (list) _items = [] if self.jobs: - for _item_jobs in self.jobs: - if _item_jobs: - 
_items.append(_item_jobs.to_dict()) + for _item in self.jobs: + if _item: + _items.append(_item.to_dict()) _dict["jobs"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/workflow_list.py b/hatchet_sdk/clients/rest/models/workflow_list.py index 9eb14aee..72f9fb90 100644 --- a/hatchet_sdk/clients/rest/models/workflow_list.py +++ b/hatchet_sdk/clients/rest/models/workflow_list.py @@ -80,9 +80,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items # override the default output from pydantic by calling `to_dict()` of pagination if self.pagination: diff --git a/hatchet_sdk/clients/rest/models/workflow_run.py b/hatchet_sdk/clients/rest/models/workflow_run.py index 903da30f..3362b830 100644 --- a/hatchet_sdk/clients/rest/models/workflow_run.py +++ b/hatchet_sdk/clients/rest/models/workflow_run.py @@ -125,9 +125,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in job_runs (list) _items = [] if self.job_runs: - for _item_job_runs in self.job_runs: - if _item_job_runs: - _items.append(_item_job_runs.to_dict()) + for _item in self.job_runs: + if _item: + _items.append(_item.to_dict()) _dict["jobRuns"] = _items # override the default output from pydantic by calling `to_dict()` of triggered_by if self.triggered_by: diff --git a/hatchet_sdk/clients/rest/models/workflow_run_list.py b/hatchet_sdk/clients/rest/models/workflow_run_list.py index a56d3feb..e57a14f4 100644 --- a/hatchet_sdk/clients/rest/models/workflow_run_list.py +++ b/hatchet_sdk/clients/rest/models/workflow_run_list.py @@ -75,9 +75,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of 
each item in rows (list) _items = [] if self.rows: - for _item_rows in self.rows: - if _item_rows: - _items.append(_item_rows.to_dict()) + for _item in self.rows: + if _item: + _items.append(_item.to_dict()) _dict["rows"] = _items # override the default output from pydantic by calling `to_dict()` of pagination if self.pagination: diff --git a/hatchet_sdk/clients/rest/models/workflow_run_shape.py b/hatchet_sdk/clients/rest/models/workflow_run_shape.py index 426e7ef9..dec6a4fd 100644 --- a/hatchet_sdk/clients/rest/models/workflow_run_shape.py +++ b/hatchet_sdk/clients/rest/models/workflow_run_shape.py @@ -128,9 +128,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in job_runs (list) _items = [] if self.job_runs: - for _item_job_runs in self.job_runs: - if _item_job_runs: - _items.append(_item_job_runs.to_dict()) + for _item in self.job_runs: + if _item: + _items.append(_item.to_dict()) _dict["jobRuns"] = _items # override the default output from pydantic by calling `to_dict()` of triggered_by if self.triggered_by: diff --git a/hatchet_sdk/clients/rest/models/workflow_triggers.py b/hatchet_sdk/clients/rest/models/workflow_triggers.py index fd2f07ef..d3fff3f1 100644 --- a/hatchet_sdk/clients/rest/models/workflow_triggers.py +++ b/hatchet_sdk/clients/rest/models/workflow_triggers.py @@ -92,16 +92,16 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in events (list) _items = [] if self.events: - for _item_events in self.events: - if _item_events: - _items.append(_item_events.to_dict()) + for _item in self.events: + if _item: + _items.append(_item.to_dict()) _dict["events"] = _items # override the default output from pydantic by calling `to_dict()` of each item in crons (list) _items = [] if self.crons: - for _item_crons in self.crons: - if _item_crons: - _items.append(_item_crons.to_dict()) + for _item in self.crons: + if _item: + 
_items.append(_item.to_dict()) _dict["crons"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/models/workflow_version.py b/hatchet_sdk/clients/rest/models/workflow_version.py index 47554e56..87d0cb29 100644 --- a/hatchet_sdk/clients/rest/models/workflow_version.py +++ b/hatchet_sdk/clients/rest/models/workflow_version.py @@ -117,9 +117,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of each item in jobs (list) _items = [] if self.jobs: - for _item_jobs in self.jobs: - if _item_jobs: - _items.append(_item_jobs.to_dict()) + for _item in self.jobs: + if _item: + _items.append(_item.to_dict()) _dict["jobs"] = _items return _dict diff --git a/hatchet_sdk/clients/rest/rest.py b/hatchet_sdk/clients/rest/rest.py index 56286e14..67566fa1 100644 --- a/hatchet_sdk/clients/rest/rest.py +++ b/hatchet_sdk/clients/rest/rest.py @@ -81,7 +81,7 @@ def __init__(self, configuration) -> None: self.retry_client = aiohttp_retry.RetryClient( client_session=self.pool_manager, retry_options=aiohttp_retry.ExponentialRetry( - attempts=retries, factor=2.0, start_timeout=0.1, max_timeout=120.0 + attempts=retries, factor=0.0, start_timeout=0.0, max_timeout=120.0 ), ) else: @@ -156,18 +156,13 @@ async def request( if isinstance(v, tuple) and len(v) == 3: data.add_field(k, value=v[1], filename=v[0], content_type=v[2]) else: - # Ensures that dict objects are serialized - if isinstance(v, dict): - v = json.dumps(v) - elif isinstance(v, int): - v = str(v) data.add_field(k, v) args["data"] = data - # Pass a `bytes` or `str` parameter directly in the body to support + # Pass a `bytes` parameter directly in the body to support # other content types than Json when `body` argument is provided # in serialized form - elif isinstance(body, str) or isinstance(body, bytes): + elif isinstance(body, bytes): args["data"] = body else: # Cannot generate the request from given parameters From 9c03d1cc5e366f080984f962e475889d0c8b6889 Mon Sep 17 
00:00:00 2001 From: gabriel ruttner Date: Thu, 24 Oct 2024 12:48:08 -0400 Subject: [PATCH 2/7] gen --- .../clients/rest/models/cron_workflows.py | 62 +++++++++++------- .../rest/models/cron_workflows_list.py | 41 +++++++----- .../models/cron_workflows_order_by_field.py | 6 +- .../rest/models/scheduled_workflows.py | 64 ++++++++++++------- .../rest/models/scheduled_workflows_list.py | 41 +++++++----- .../scheduled_workflows_order_by_field.py | 6 +- hatchet_sdk/contracts/workflows_pb2.py | 64 +++++++++---------- hatchet_sdk/contracts/workflows_pb2.pyi | 6 +- 8 files changed, 171 insertions(+), 119 deletions(-) diff --git a/hatchet_sdk/clients/rest/models/cron_workflows.py b/hatchet_sdk/clients/rest/models/cron_workflows.py index cbc551d6..cb407070 100644 --- a/hatchet_sdk/clients/rest/models/cron_workflows.py +++ b/hatchet_sdk/clients/rest/models/cron_workflows.py @@ -13,20 +13,23 @@ from __future__ import annotations + +import json import pprint import re # noqa: F401 -import json +from typing import Any, ClassVar, Dict, List, Optional, Set from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta -from typing import Optional, Set from typing_extensions import Self +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta + + class CronWorkflows(BaseModel): """ CronWorkflows - """ # noqa: E501 + """ # noqa: E501 + metadata: APIResourceMeta tenant_id: StrictStr = Field(alias="tenantId") workflow_version_id: StrictStr = Field(alias="workflowVersionId") @@ -34,8 +37,19 @@ class CronWorkflows(BaseModel): workflow_name: StrictStr = Field(alias="workflowName") cron: StrictStr input: Optional[Dict[str, Any]] = None - additional_metadata: Optional[Dict[str, Any]] = Field(default=None, alias="additionalMetadata") - __properties: ClassVar[List[str]] = ["metadata", "tenantId", "workflowVersionId", "workflowId", 
"workflowName", "cron", "input", "additionalMetadata"] + additional_metadata: Optional[Dict[str, Any]] = Field( + default=None, alias="additionalMetadata" + ) + __properties: ClassVar[List[str]] = [ + "metadata", + "tenantId", + "workflowVersionId", + "workflowId", + "workflowName", + "cron", + "input", + "additionalMetadata", + ] model_config = ConfigDict( populate_by_name=True, @@ -43,7 +57,6 @@ class CronWorkflows(BaseModel): protected_namespaces=(), ) - def to_str(self) -> str: """Returns the string representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True)) @@ -68,8 +81,7 @@ def to_dict(self) -> Dict[str, Any]: were set at model initialization. Other fields with value `None` are ignored. """ - excluded_fields: Set[str] = set([ - ]) + excluded_fields: Set[str] = set([]) _dict = self.model_dump( by_alias=True, @@ -78,7 +90,7 @@ def to_dict(self) -> Dict[str, Any]: ) # override the default output from pydantic by calling `to_dict()` of metadata if self.metadata: - _dict['metadata'] = self.metadata.to_dict() + _dict["metadata"] = self.metadata.to_dict() return _dict @classmethod @@ -90,16 +102,20 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if not isinstance(obj, dict): return cls.model_validate(obj) - _obj = cls.model_validate({ - "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, - "tenantId": obj.get("tenantId"), - "workflowVersionId": obj.get("workflowVersionId"), - "workflowId": obj.get("workflowId"), - "workflowName": obj.get("workflowName"), - "cron": obj.get("cron"), - "input": obj.get("input"), - "additionalMetadata": obj.get("additionalMetadata") - }) + _obj = cls.model_validate( + { + "metadata": ( + APIResourceMeta.from_dict(obj["metadata"]) + if obj.get("metadata") is not None + else None + ), + "tenantId": obj.get("tenantId"), + "workflowVersionId": obj.get("workflowVersionId"), + "workflowId": obj.get("workflowId"), + "workflowName": 
obj.get("workflowName"), + "cron": obj.get("cron"), + "input": obj.get("input"), + "additionalMetadata": obj.get("additionalMetadata"), + } + ) return _obj - - diff --git a/hatchet_sdk/clients/rest/models/cron_workflows_list.py b/hatchet_sdk/clients/rest/models/cron_workflows_list.py index 312030d6..5d794b79 100644 --- a/hatchet_sdk/clients/rest/models/cron_workflows_list.py +++ b/hatchet_sdk/clients/rest/models/cron_workflows_list.py @@ -13,21 +13,24 @@ from __future__ import annotations + +import json import pprint import re # noqa: F401 -import json +from typing import Any, ClassVar, Dict, List, Optional, Set from pydantic import BaseModel, ConfigDict -from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Self + from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse -from typing import Optional, Set -from typing_extensions import Self + class CronWorkflowsList(BaseModel): """ CronWorkflowsList - """ # noqa: E501 + """ # noqa: E501 + rows: Optional[List[CronWorkflows]] = None pagination: Optional[PaginationResponse] = None __properties: ClassVar[List[str]] = ["rows", "pagination"] @@ -38,7 +41,6 @@ class CronWorkflowsList(BaseModel): protected_namespaces=(), ) - def to_str(self) -> str: """Returns the string representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True)) @@ -63,8 +65,7 @@ def to_dict(self) -> Dict[str, Any]: were set at model initialization. Other fields with value `None` are ignored. 
""" - excluded_fields: Set[str] = set([ - ]) + excluded_fields: Set[str] = set([]) _dict = self.model_dump( by_alias=True, @@ -77,10 +78,10 @@ def to_dict(self) -> Dict[str, Any]: for _item in self.rows: if _item: _items.append(_item.to_dict()) - _dict['rows'] = _items + _dict["rows"] = _items # override the default output from pydantic by calling `to_dict()` of pagination if self.pagination: - _dict['pagination'] = self.pagination.to_dict() + _dict["pagination"] = self.pagination.to_dict() return _dict @classmethod @@ -92,10 +93,18 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if not isinstance(obj, dict): return cls.model_validate(obj) - _obj = cls.model_validate({ - "rows": [CronWorkflows.from_dict(_item) for _item in obj["rows"]] if obj.get("rows") is not None else None, - "pagination": PaginationResponse.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None - }) + _obj = cls.model_validate( + { + "rows": ( + [CronWorkflows.from_dict(_item) for _item in obj["rows"]] + if obj.get("rows") is not None + else None + ), + "pagination": ( + PaginationResponse.from_dict(obj["pagination"]) + if obj.get("pagination") is not None + else None + ), + } + ) return _obj - - diff --git a/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py b/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py index 41d6567f..9b550ddb 100644 --- a/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py +++ b/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py @@ -13,8 +13,10 @@ from __future__ import annotations + import json from enum import Enum + from typing_extensions import Self @@ -26,11 +28,9 @@ class CronWorkflowsOrderByField(str, Enum): """ allowed enum values """ - CREATEDAT = 'createdAt' + CREATEDAT = "createdAt" @classmethod def from_json(cls, json_str: str) -> Self: """Create an instance of CronWorkflowsOrderByField from a JSON string""" return cls(json.loads(json_str)) - - diff --git 
a/hatchet_sdk/clients/rest/models/scheduled_workflows.py b/hatchet_sdk/clients/rest/models/scheduled_workflows.py index a92a252f..786df24c 100644 --- a/hatchet_sdk/clients/rest/models/scheduled_workflows.py +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows.py @@ -13,21 +13,24 @@ from __future__ import annotations + +import json import pprint import re # noqa: F401 -import json - from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta -from typing import Optional, Set from typing_extensions import Self +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta + + class ScheduledWorkflows(BaseModel): """ ScheduledWorkflows - """ # noqa: E501 + """ # noqa: E501 + metadata: APIResourceMeta tenant_id: StrictStr = Field(alias="tenantId") workflow_version_id: StrictStr = Field(alias="workflowVersionId") @@ -35,8 +38,19 @@ class ScheduledWorkflows(BaseModel): workflow_name: StrictStr = Field(alias="workflowName") trigger_at: datetime = Field(alias="triggerAt") input: Optional[Dict[str, Any]] = None - additional_metadata: Optional[Dict[str, Any]] = Field(default=None, alias="additionalMetadata") - __properties: ClassVar[List[str]] = ["metadata", "tenantId", "workflowVersionId", "workflowId", "workflowName", "triggerAt", "input", "additionalMetadata"] + additional_metadata: Optional[Dict[str, Any]] = Field( + default=None, alias="additionalMetadata" + ) + __properties: ClassVar[List[str]] = [ + "metadata", + "tenantId", + "workflowVersionId", + "workflowId", + "workflowName", + "triggerAt", + "input", + "additionalMetadata", + ] model_config = ConfigDict( populate_by_name=True, @@ -44,7 +58,6 @@ class ScheduledWorkflows(BaseModel): protected_namespaces=(), ) - def to_str(self) -> str: """Returns the string 
representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True)) @@ -69,8 +82,7 @@ def to_dict(self) -> Dict[str, Any]: were set at model initialization. Other fields with value `None` are ignored. """ - excluded_fields: Set[str] = set([ - ]) + excluded_fields: Set[str] = set([]) _dict = self.model_dump( by_alias=True, @@ -79,7 +91,7 @@ def to_dict(self) -> Dict[str, Any]: ) # override the default output from pydantic by calling `to_dict()` of metadata if self.metadata: - _dict['metadata'] = self.metadata.to_dict() + _dict["metadata"] = self.metadata.to_dict() return _dict @classmethod @@ -91,16 +103,20 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if not isinstance(obj, dict): return cls.model_validate(obj) - _obj = cls.model_validate({ - "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, - "tenantId": obj.get("tenantId"), - "workflowVersionId": obj.get("workflowVersionId"), - "workflowId": obj.get("workflowId"), - "workflowName": obj.get("workflowName"), - "triggerAt": obj.get("triggerAt"), - "input": obj.get("input"), - "additionalMetadata": obj.get("additionalMetadata") - }) + _obj = cls.model_validate( + { + "metadata": ( + APIResourceMeta.from_dict(obj["metadata"]) + if obj.get("metadata") is not None + else None + ), + "tenantId": obj.get("tenantId"), + "workflowVersionId": obj.get("workflowVersionId"), + "workflowId": obj.get("workflowId"), + "workflowName": obj.get("workflowName"), + "triggerAt": obj.get("triggerAt"), + "input": obj.get("input"), + "additionalMetadata": obj.get("additionalMetadata"), + } + ) return _obj - - diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py b/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py index 7702a435..0594756f 100644 --- a/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py @@ -13,21 +13,24 @@ from 
__future__ import annotations + +import json import pprint import re # noqa: F401 -import json +from typing import Any, ClassVar, Dict, List, Optional, Set from pydantic import BaseModel, ConfigDict -from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Self + from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows -from typing import Optional, Set -from typing_extensions import Self + class ScheduledWorkflowsList(BaseModel): """ ScheduledWorkflowsList - """ # noqa: E501 + """ # noqa: E501 + rows: Optional[List[ScheduledWorkflows]] = None pagination: Optional[PaginationResponse] = None __properties: ClassVar[List[str]] = ["rows", "pagination"] @@ -38,7 +41,6 @@ class ScheduledWorkflowsList(BaseModel): protected_namespaces=(), ) - def to_str(self) -> str: """Returns the string representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True)) @@ -63,8 +65,7 @@ def to_dict(self) -> Dict[str, Any]: were set at model initialization. Other fields with value `None` are ignored. 
""" - excluded_fields: Set[str] = set([ - ]) + excluded_fields: Set[str] = set([]) _dict = self.model_dump( by_alias=True, @@ -77,10 +78,10 @@ def to_dict(self) -> Dict[str, Any]: for _item in self.rows: if _item: _items.append(_item.to_dict()) - _dict['rows'] = _items + _dict["rows"] = _items # override the default output from pydantic by calling `to_dict()` of pagination if self.pagination: - _dict['pagination'] = self.pagination.to_dict() + _dict["pagination"] = self.pagination.to_dict() return _dict @classmethod @@ -92,10 +93,18 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if not isinstance(obj, dict): return cls.model_validate(obj) - _obj = cls.model_validate({ - "rows": [ScheduledWorkflows.from_dict(_item) for _item in obj["rows"]] if obj.get("rows") is not None else None, - "pagination": PaginationResponse.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None - }) + _obj = cls.model_validate( + { + "rows": ( + [ScheduledWorkflows.from_dict(_item) for _item in obj["rows"]] + if obj.get("rows") is not None + else None + ), + "pagination": ( + PaginationResponse.from_dict(obj["pagination"]) + if obj.get("pagination") is not None + else None + ), + } + ) return _obj - - diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py index 1b99b85b..bf880006 100644 --- a/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py @@ -13,8 +13,10 @@ from __future__ import annotations + import json from enum import Enum + from typing_extensions import Self @@ -26,11 +28,9 @@ class ScheduledWorkflowsOrderByField(str, Enum): """ allowed enum values """ - TRIGGERAT = 'triggerAt' + TRIGGERAT = "triggerAt" @classmethod def from_json(cls, json_str: str) -> Self: """Create an instance of ScheduledWorkflowsOrderByField from a JSON string""" return 
cls(json.loads(json_str)) - - diff --git a/hatchet_sdk/contracts/workflows_pb2.py b/hatchet_sdk/contracts/workflows_pb2.py index 113609bf..833a901c 100644 --- a/hatchet_sdk/contracts/workflows_pb2.py +++ b/hatchet_sdk/contracts/workflows_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 
\x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 
\x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 
\x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 
\x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 
\x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\xcd\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 
\x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 
\x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,16 +25,16 @@ _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._options = None _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' - _globals['_STICKYSTRATEGY']._serialized_start=3107 - _globals['_STICKYSTRATEGY']._serialized_end=3143 - _globals['_WORKFLOWKIND']._serialized_start=3145 - 
_globals['_WORKFLOWKIND']._serialized_end=3195 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3197 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3305 - _globals['_WORKERLABELCOMPARATOR']._serialized_start=3308 - _globals['_WORKERLABELCOMPARATOR']._serialized_end=3441 - _globals['_RATELIMITDURATION']._serialized_start=3443 - _globals['_RATELIMITDURATION']._serialized_end=3536 + _globals['_STICKYSTRATEGY']._serialized_start=3165 + _globals['_STICKYSTRATEGY']._serialized_end=3201 + _globals['_WORKFLOWKIND']._serialized_start=3203 + _globals['_WORKFLOWKIND']._serialized_end=3253 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3255 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3363 + _globals['_WORKERLABELCOMPARATOR']._serialized_start=3366 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=3499 + _globals['_RATELIMITDURATION']._serialized_start=3501 + _globals['_RATELIMITDURATION']._serialized_end=3594 _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 @@ -54,25 +54,25 @@ _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1826 _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1848 _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1851 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2126 - _globals['_WORKFLOWVERSION']._serialized_start=2129 - _globals['_WORKFLOWVERSION']._serialized_end=2307 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2309 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2372 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2374 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2431 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2433 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2505 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2507 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2562 - 
_globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2565 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2940 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=2942 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=2992 - _globals['_PUTRATELIMITREQUEST']._serialized_start=2994 - _globals['_PUTRATELIMITREQUEST']._serialized_end=3081 - _globals['_PUTRATELIMITRESPONSE']._serialized_start=3083 - _globals['_PUTRATELIMITRESPONSE']._serialized_end=3105 - _globals['_WORKFLOWSERVICE']._serialized_start=3539 - _globals['_WORKFLOWSERVICE']._serialized_end=3887 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2184 + _globals['_WORKFLOWVERSION']._serialized_start=2187 + _globals['_WORKFLOWVERSION']._serialized_end=2365 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2367 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2430 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2432 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2489 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2491 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2563 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2565 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2620 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2623 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2998 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3000 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3050 + _globals['_PUTRATELIMITREQUEST']._serialized_start=3052 + _globals['_PUTRATELIMITREQUEST']._serialized_end=3139 + _globals['_PUTRATELIMITRESPONSE']._serialized_start=3141 + _globals['_PUTRATELIMITRESPONSE']._serialized_end=3163 + _globals['_WORKFLOWSERVICE']._serialized_start=3597 + _globals['_WORKFLOWSERVICE']._serialized_end=3945 # @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/contracts/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi index 219c12bf..c395b89b 100644 --- 
a/hatchet_sdk/contracts/workflows_pb2.pyi +++ b/hatchet_sdk/contracts/workflows_pb2.pyi @@ -190,7 +190,7 @@ class ListWorkflowsRequest(_message.Message): def __init__(self) -> None: ... class ScheduleWorkflowRequest(_message.Message): - __slots__ = ("name", "schedules", "input", "parent_id", "parent_step_run_id", "child_index", "child_key") + __slots__ = ("name", "schedules", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata") NAME_FIELD_NUMBER: _ClassVar[int] SCHEDULES_FIELD_NUMBER: _ClassVar[int] INPUT_FIELD_NUMBER: _ClassVar[int] @@ -198,6 +198,7 @@ class ScheduleWorkflowRequest(_message.Message): PARENT_STEP_RUN_ID_FIELD_NUMBER: _ClassVar[int] CHILD_INDEX_FIELD_NUMBER: _ClassVar[int] CHILD_KEY_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] name: str schedules: _containers.RepeatedCompositeFieldContainer[_timestamp_pb2.Timestamp] input: str @@ -205,7 +206,8 @@ class ScheduleWorkflowRequest(_message.Message): parent_step_run_id: str child_index: int child_key: str - def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ...) -> None: ... + additional_metadata: str + def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ...) -> None: ... 
class WorkflowVersion(_message.Message): __slots__ = ("id", "created_at", "updated_at", "version", "order", "workflow_id") From 6323f60a7bfe305dee076f41399f271c04a49cf7 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Thu, 24 Oct 2024 12:51:55 -0400 Subject: [PATCH 3/7] fix: trigger --- examples/delayed/event.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/examples/delayed/event.py b/examples/delayed/event.py index cdc16ac6..34d63045 100644 --- a/examples/delayed/event.py +++ b/examples/delayed/event.py @@ -1,9 +1,16 @@ +from datetime import datetime, timedelta + from dotenv import load_dotenv -from hatchet_sdk import new_client +from hatchet_sdk import Hatchet load_dotenv() -client = new_client() +hatchet = Hatchet() -client.event.push("printer:schedule", {"message": "test"}) +hatchet.admin.schedule_workflow( + "PrintPrinter", + [datetime.now() + timedelta(seconds=15)], + {"message": "test"}, + options={"additional_metadata": {"triggeredBy": "script"}}, +) From 12bbb21f237485b0689ff91a0ab69e0030e4987b Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Thu, 24 Oct 2024 12:52:07 -0400 Subject: [PATCH 4/7] feat: expose addl meta on schedule --- hatchet_sdk/clients/admin.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py index 18141bc9..14424500 100644 --- a/hatchet_sdk/clients/admin.py +++ b/hatchet_sdk/clients/admin.py @@ -39,6 +39,7 @@ class ScheduleTriggerWorkflowOptions(TypedDict): parent_step_run_id: Optional[str] child_index: Optional[int] child_key: Optional[str] + additional_metadata: Dict[str, str] | None = None namespace: Optional[str] @@ -145,6 +146,11 @@ def _prepare_schedule_workflow_request( "Invalid schedule type. Must be datetime or timestamp_pb2.Timestamp." 
) + if options is not None and "additional_metadata" in options: + options["additional_metadata"] = json.dumps( + options["additional_metadata"] + ).encode("utf-8") + return ScheduleWorkflowRequest( name=name, schedules=timestamp_schedules, From 7fe2c0a9f109af3bfff5ab569c850d42c044857f Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Fri, 25 Oct 2024 08:44:56 -0400 Subject: [PATCH 5/7] chore: generate --- hatchet | 2 +- hatchet_sdk/clients/rest/__init__.py | 1 + hatchet_sdk/clients/rest/api/workflow_api.py | 776 ++++++++++++++++-- hatchet_sdk/clients/rest/models/__init__.py | 1 + .../rest/models/scheduled_run_status.py | 42 + .../rest/models/scheduled_workflows.py | 23 +- .../scheduled_workflows_order_by_field.py | 1 + hatchet_sdk/contracts/workflows_pb2.py | 64 +- hatchet_sdk/contracts/workflows_pb2.pyi | 14 +- 9 files changed, 842 insertions(+), 82 deletions(-) create mode 100644 hatchet_sdk/clients/rest/models/scheduled_run_status.py diff --git a/hatchet b/hatchet index dd5bc904..37b71f95 160000 --- a/hatchet +++ b/hatchet @@ -1 +1 @@ -Subproject commit dd5bc9049759fdb44aef08256b5d4142325a70fe +Subproject commit 37b71f95846165649afc11d7bff7963ae3507c5e diff --git a/hatchet_sdk/clients/rest/__init__.py b/hatchet_sdk/clients/rest/__init__.py index ab4e0b16..cb694fd1 100644 --- a/hatchet_sdk/clients/rest/__init__.py +++ b/hatchet_sdk/clients/rest/__init__.py @@ -146,6 +146,7 @@ ReplayWorkflowRunsResponse, ) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest +from hatchet_sdk.clients.rest.models.scheduled_run_status import ScheduledRunStatus from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows from hatchet_sdk.clients.rest.models.scheduled_workflows_list import ( ScheduledWorkflowsList, diff --git a/hatchet_sdk/clients/rest/api/workflow_api.py b/hatchet_sdk/clients/rest/api/workflow_api.py index 7b110f6d..c7f85e70 100644 --- a/hatchet_sdk/clients/rest/api/workflow_api.py +++ 
b/hatchet_sdk/clients/rest/api/workflow_api.py @@ -24,6 +24,8 @@ from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import ( CronWorkflowsOrderByField, ) +from hatchet_sdk.clients.rest.models.scheduled_run_status import ScheduledRunStatus +from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows from hatchet_sdk.clients.rest.models.scheduled_workflows_list import ( ScheduledWorkflowsList, ) @@ -3914,6 +3916,607 @@ def _workflow_run_list_serialize( _request_auth=_request_auth, ) + @validate_call + async def workflow_scheduled_delete( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete tenant alert email group + + Delete a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_scheduled_delete_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "204": None, + "400": "APIErrors", + "403": "APIError", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + async def workflow_scheduled_delete_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete tenant alert email group + + Delete a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled 
workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_scheduled_delete_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "204": None, + "400": "APIErrors", + "403": "APIError", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def workflow_scheduled_delete_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + 
Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete tenant alert email group + + Delete a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_scheduled_delete_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "204": None, + "400": "APIErrors", + "403": "APIError", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_scheduled_delete_serialize( + self, + tenant, + scheduled_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + if scheduled_id is not None: + _path_params["scheduledId"] = scheduled_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/api/v1/tenants/{tenant}/workflows/scheduled/{scheduledId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + async def workflow_scheduled_get( + self, + tenant: Annotated[ 
+ str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ScheduledWorkflows: + """Get workflow runs + + Get a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_scheduled_get_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ScheduledWorkflows", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + async def workflow_scheduled_get_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ScheduledWorkflows]: + """Get workflow runs + + Get a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._workflow_scheduled_get_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ScheduledWorkflows", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def workflow_scheduled_get_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + scheduled_id: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The scheduled workflow id", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get workflow runs + + Get a scheduled workflow run for a tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param scheduled_id: The scheduled workflow id (required) + :type scheduled_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_scheduled_get_serialize( + tenant=tenant, + scheduled_id=scheduled_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ScheduledWorkflows", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_scheduled_get_serialize( + self, + tenant, + scheduled_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + if scheduled_id is not None: + _path_params["scheduledId"] = scheduled_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/tenants/{tenant}/workflows/scheduled/{scheduledId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + @validate_call async def workflow_scheduled_list( 
self, @@ -3929,21 +4532,33 @@ async def workflow_scheduled_list( limit: Annotated[ Optional[StrictInt], Field(description="The number to limit by") ] = None, + order_by_field: Annotated[ + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, workflow_id: Annotated[ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The workflow id to get runs for."), ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent step run id"), + ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, - order_by_field: Annotated[ - Optional[ScheduledWorkflowsOrderByField], - Field(description="The order by field"), - ] = None, - order_by_direction: Annotated[ - Optional[WorkflowRunOrderByDirection], - Field(description="The order by direction"), + statuses: Annotated[ + Optional[List[ScheduledRunStatus]], + Field(description="A list of scheduled run statuses to filter by"), ] = None, _request_timeout: Union[ None, @@ -3967,14 +4582,20 @@ async def workflow_scheduled_list( :type offset: int :param limit: The number to limit by :type limit: int - :param workflow_id: The workflow id to get runs for. 
- :type workflow_id: str - :param additional_metadata: A list of metadata key value pairs to filter by - :type additional_metadata: List[str] :param order_by_field: The order by field :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param statuses: A list of scheduled run statuses to filter by + :type statuses: List[ScheduledRunStatus] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -4001,10 +4622,13 @@ async def workflow_scheduled_list( tenant=tenant, offset=offset, limit=limit, - workflow_id=workflow_id, - additional_metadata=additional_metadata, order_by_field=order_by_field, order_by_direction=order_by_direction, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + additional_metadata=additional_metadata, + statuses=statuses, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -4040,21 +4664,33 @@ async def workflow_scheduled_list_with_http_info( limit: Annotated[ Optional[StrictInt], Field(description="The number to limit by") ] = None, + order_by_field: Annotated[ + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, workflow_id: Annotated[ Optional[Annotated[str, Field(min_length=36, strict=True, 
max_length=36)]], Field(description="The workflow id to get runs for."), ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent step run id"), + ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, - order_by_field: Annotated[ - Optional[ScheduledWorkflowsOrderByField], - Field(description="The order by field"), - ] = None, - order_by_direction: Annotated[ - Optional[WorkflowRunOrderByDirection], - Field(description="The order by direction"), + statuses: Annotated[ + Optional[List[ScheduledRunStatus]], + Field(description="A list of scheduled run statuses to filter by"), ] = None, _request_timeout: Union[ None, @@ -4078,14 +4714,20 @@ async def workflow_scheduled_list_with_http_info( :type offset: int :param limit: The number to limit by :type limit: int - :param workflow_id: The workflow id to get runs for. - :type workflow_id: str - :param additional_metadata: A list of metadata key value pairs to filter by - :type additional_metadata: List[str] :param order_by_field: The order by field :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection + :param workflow_id: The workflow id to get runs for. 
+ :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param statuses: A list of scheduled run statuses to filter by + :type statuses: List[ScheduledRunStatus] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -4112,10 +4754,13 @@ async def workflow_scheduled_list_with_http_info( tenant=tenant, offset=offset, limit=limit, - workflow_id=workflow_id, - additional_metadata=additional_metadata, order_by_field=order_by_field, order_by_direction=order_by_direction, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + additional_metadata=additional_metadata, + statuses=statuses, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -4151,21 +4796,33 @@ async def workflow_scheduled_list_without_preload_content( limit: Annotated[ Optional[StrictInt], Field(description="The number to limit by") ] = None, + order_by_field: Annotated[ + Optional[ScheduledWorkflowsOrderByField], + Field(description="The order by field"), + ] = None, + order_by_direction: Annotated[ + Optional[WorkflowRunOrderByDirection], + Field(description="The order by direction"), + ] = None, workflow_id: Annotated[ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The workflow id to get runs for."), ] = None, + parent_workflow_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The parent workflow run id"), + ] = None, + parent_step_run_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + 
Field(description="The parent step run id"), + ] = None, additional_metadata: Annotated[ Optional[List[StrictStr]], Field(description="A list of metadata key value pairs to filter by"), ] = None, - order_by_field: Annotated[ - Optional[ScheduledWorkflowsOrderByField], - Field(description="The order by field"), - ] = None, - order_by_direction: Annotated[ - Optional[WorkflowRunOrderByDirection], - Field(description="The order by direction"), + statuses: Annotated[ + Optional[List[ScheduledRunStatus]], + Field(description="A list of scheduled run statuses to filter by"), ] = None, _request_timeout: Union[ None, @@ -4189,14 +4846,20 @@ async def workflow_scheduled_list_without_preload_content( :type offset: int :param limit: The number to limit by :type limit: int - :param workflow_id: The workflow id to get runs for. - :type workflow_id: str - :param additional_metadata: A list of metadata key value pairs to filter by - :type additional_metadata: List[str] :param order_by_field: The order by field :type order_by_field: ScheduledWorkflowsOrderByField :param order_by_direction: The order by direction :type order_by_direction: WorkflowRunOrderByDirection + :param workflow_id: The workflow id to get runs for. + :type workflow_id: str + :param parent_workflow_run_id: The parent workflow run id + :type parent_workflow_run_id: str + :param parent_step_run_id: The parent step run id + :type parent_step_run_id: str + :param additional_metadata: A list of metadata key value pairs to filter by + :type additional_metadata: List[str] + :param statuses: A list of scheduled run statuses to filter by + :type statuses: List[ScheduledRunStatus] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -4223,10 +4886,13 @@ async def workflow_scheduled_list_without_preload_content( tenant=tenant, offset=offset, limit=limit, - workflow_id=workflow_id, - additional_metadata=additional_metadata, order_by_field=order_by_field, order_by_direction=order_by_direction, + workflow_id=workflow_id, + parent_workflow_run_id=parent_workflow_run_id, + parent_step_run_id=parent_step_run_id, + additional_metadata=additional_metadata, + statuses=statuses, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -4248,10 +4914,13 @@ def _workflow_scheduled_list_serialize( tenant, offset, limit, - workflow_id, - additional_metadata, order_by_field, order_by_direction, + workflow_id, + parent_workflow_run_id, + parent_step_run_id, + additional_metadata, + statuses, _request_auth, _content_type, _headers, @@ -4262,6 +4931,7 @@ def _workflow_scheduled_list_serialize( _collection_formats: Dict[str, str] = { "additionalMetadata": "multi", + "statuses": "multi", } _path_params: Dict[str, str] = {} @@ -4283,21 +4953,33 @@ def _workflow_scheduled_list_serialize( _query_params.append(("limit", limit)) + if order_by_field is not None: + + _query_params.append(("orderByField", order_by_field.value)) + + if order_by_direction is not None: + + _query_params.append(("orderByDirection", order_by_direction.value)) + if workflow_id is not None: _query_params.append(("workflowId", workflow_id)) - if additional_metadata is not None: + if parent_workflow_run_id is not None: - _query_params.append(("additionalMetadata", additional_metadata)) + _query_params.append(("parentWorkflowRunId", parent_workflow_run_id)) - if order_by_field is not None: + if parent_step_run_id is not None: - _query_params.append(("orderByField", order_by_field.value)) + _query_params.append(("parentStepRunId", parent_step_run_id)) - if order_by_direction is not None: + if additional_metadata is not None: - _query_params.append(("orderByDirection", 
class ScheduledRunStatus(str, Enum):
    """Lifecycle status of a scheduled workflow run.

    Generated model for the Hatchet REST API (OpenAPI document 1.0.0).
    The members mirror the server-side scheduled-run states exactly;
    because the enum subclasses ``str``, members compare equal to their
    raw string values when serialized.
    """

    # Allowed enum values — one member per server-side state.
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    CANCELLED = "CANCELLED"
    QUEUED = "QUEUED"
    SCHEDULED = "SCHEDULED"

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Build a ScheduledRunStatus from a JSON-encoded string value.

        :param json_str: a JSON string literal, e.g. ``'"PENDING"'``
        :return: the matching enum member
        :raises ValueError: if the decoded value is not a known status
        """
        decoded = json.loads(json_str)
        return cls(decoded)
"triggerAt", "input", "additionalMetadata", + "workflowRunCreatedAt", + "workflowRunName", + "workflowRunStatus", + "workflowRunId", ] model_config = ConfigDict( @@ -117,6 +134,10 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "triggerAt": obj.get("triggerAt"), "input": obj.get("input"), "additionalMetadata": obj.get("additionalMetadata"), + "workflowRunCreatedAt": obj.get("workflowRunCreatedAt"), + "workflowRunName": obj.get("workflowRunName"), + "workflowRunStatus": obj.get("workflowRunStatus"), + "workflowRunId": obj.get("workflowRunId"), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py index bf880006..0372abd3 100644 --- a/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py +++ b/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py @@ -29,6 +29,7 @@ class ScheduledWorkflowsOrderByField(str, Enum): allowed enum values """ TRIGGERAT = "triggerAt" + CREATEDAT = "createdAt" @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/hatchet_sdk/contracts/workflows_pb2.py b/hatchet_sdk/contracts/workflows_pb2.py index 833a901c..de0412e2 100644 --- a/hatchet_sdk/contracts/workflows_pb2.py +++ b/hatchet_sdk/contracts/workflows_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 
\x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 
\x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\xcd\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 
\x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 
\x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 
\x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 
\x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\xcd\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"O\n\x11ScheduledWorkflow\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ntrigger_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xe3\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x03\x12\x13\n\x0bworkflow_id\x18\x07 
\x01(\t\x12/\n\x13scheduled_workflows\x18\x08 \x03(\x0b\x32\x12.ScheduledWorkflow\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 
\x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,16 +25,16 @@ _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._options = None _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' - _globals['_STICKYSTRATEGY']._serialized_start=3165 - _globals['_STICKYSTRATEGY']._serialized_end=3201 - _globals['_WORKFLOWKIND']._serialized_start=3203 - 
_globals['_WORKFLOWKIND']._serialized_end=3253 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3255 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3363 - _globals['_WORKERLABELCOMPARATOR']._serialized_start=3366 - _globals['_WORKERLABELCOMPARATOR']._serialized_end=3499 - _globals['_RATELIMITDURATION']._serialized_start=3501 - _globals['_RATELIMITDURATION']._serialized_end=3594 + _globals['_STICKYSTRATEGY']._serialized_start=3295 + _globals['_STICKYSTRATEGY']._serialized_end=3331 + _globals['_WORKFLOWKIND']._serialized_start=3333 + _globals['_WORKFLOWKIND']._serialized_end=3383 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3385 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3493 + _globals['_WORKERLABELCOMPARATOR']._serialized_start=3496 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=3629 + _globals['_RATELIMITDURATION']._serialized_start=3631 + _globals['_RATELIMITDURATION']._serialized_end=3724 _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 @@ -55,24 +55,26 @@ _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1848 _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1851 _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2184 - _globals['_WORKFLOWVERSION']._serialized_start=2187 - _globals['_WORKFLOWVERSION']._serialized_end=2365 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2367 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2430 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2432 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2489 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2491 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2563 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2565 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2620 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2623 - 
_globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2998 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3000 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3050 - _globals['_PUTRATELIMITREQUEST']._serialized_start=3052 - _globals['_PUTRATELIMITREQUEST']._serialized_end=3139 - _globals['_PUTRATELIMITRESPONSE']._serialized_start=3141 - _globals['_PUTRATELIMITRESPONSE']._serialized_end=3163 - _globals['_WORKFLOWSERVICE']._serialized_start=3597 - _globals['_WORKFLOWSERVICE']._serialized_end=3945 + _globals['_SCHEDULEDWORKFLOW']._serialized_start=2186 + _globals['_SCHEDULEDWORKFLOW']._serialized_end=2265 + _globals['_WORKFLOWVERSION']._serialized_start=2268 + _globals['_WORKFLOWVERSION']._serialized_end=2495 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2497 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2560 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2562 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2619 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2621 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2693 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2695 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2750 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2753 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=3128 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3130 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3180 + _globals['_PUTRATELIMITREQUEST']._serialized_start=3182 + _globals['_PUTRATELIMITREQUEST']._serialized_end=3269 + _globals['_PUTRATELIMITRESPONSE']._serialized_start=3271 + _globals['_PUTRATELIMITRESPONSE']._serialized_end=3293 + _globals['_WORKFLOWSERVICE']._serialized_start=3727 + _globals['_WORKFLOWSERVICE']._serialized_end=4075 # @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/contracts/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi index c395b89b..abc8fbff 100644 --- 
a/hatchet_sdk/contracts/workflows_pb2.pyi +++ b/hatchet_sdk/contracts/workflows_pb2.pyi @@ -209,21 +209,31 @@ class ScheduleWorkflowRequest(_message.Message): additional_metadata: str def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ...) -> None: ... +class ScheduledWorkflow(_message.Message): + __slots__ = ("id", "trigger_at") + ID_FIELD_NUMBER: _ClassVar[int] + TRIGGER_AT_FIELD_NUMBER: _ClassVar[int] + id: str + trigger_at: _timestamp_pb2.Timestamp + def __init__(self, id: _Optional[str] = ..., trigger_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + class WorkflowVersion(_message.Message): - __slots__ = ("id", "created_at", "updated_at", "version", "order", "workflow_id") + __slots__ = ("id", "created_at", "updated_at", "version", "order", "workflow_id", "scheduled_workflows") ID_FIELD_NUMBER: _ClassVar[int] CREATED_AT_FIELD_NUMBER: _ClassVar[int] UPDATED_AT_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] ORDER_FIELD_NUMBER: _ClassVar[int] WORKFLOW_ID_FIELD_NUMBER: _ClassVar[int] + SCHEDULED_WORKFLOWS_FIELD_NUMBER: _ClassVar[int] id: str created_at: _timestamp_pb2.Timestamp updated_at: _timestamp_pb2.Timestamp version: str order: int workflow_id: str - def __init__(self, id: _Optional[str] = ..., created_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., updated_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., version: _Optional[str] = ..., order: _Optional[int] = ..., workflow_id: _Optional[str] = ...) -> None: ... 
+ scheduled_workflows: _containers.RepeatedCompositeFieldContainer[ScheduledWorkflow] + def __init__(self, id: _Optional[str] = ..., created_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., updated_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., version: _Optional[str] = ..., order: _Optional[int] = ..., workflow_id: _Optional[str] = ..., scheduled_workflows: _Optional[_Iterable[_Union[ScheduledWorkflow, _Mapping]]] = ...) -> None: ... class WorkflowTriggerEventRef(_message.Message): __slots__ = ("parent_id", "event_key") From 63f2bbc57257f67a6cce9ac32c1d015564d6eac3 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Fri, 25 Oct 2024 09:02:27 -0400 Subject: [PATCH 6/7] feat: expose delete and get --- examples/delayed/{event.py => script.py} | 11 ++++++++++- hatchet_sdk/clients/admin.py | 7 +++++-- hatchet_sdk/clients/rest_client.py | 22 ++++++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) rename examples/delayed/{event.py => script.py} (52%) diff --git a/examples/delayed/event.py b/examples/delayed/script.py similarity index 52% rename from examples/delayed/event.py rename to examples/delayed/script.py index 34d63045..cf669283 100644 --- a/examples/delayed/event.py +++ b/examples/delayed/script.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from time import sleep from dotenv import load_dotenv @@ -8,9 +9,17 @@ hatchet = Hatchet() -hatchet.admin.schedule_workflow( +scheduled_run = hatchet.admin.schedule_workflow( "PrintPrinter", [datetime.now() + timedelta(seconds=15)], {"message": "test"}, options={"additional_metadata": {"triggeredBy": "script"}}, ) + +print("Scheduled run at: " + scheduled_run.trigger_at.ToDatetime().strftime("%Y-%m-%d %H:%M:%S") + "UTC") + +sleep(5) + +hatchet.rest.scheduled_run_delete(scheduled_run.id) + +print("Scheduled run deleted") \ No newline at end of file diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py index 14424500..54578c50 100644 --- 
async def scheduled_run_get(self, schedule_id: str) -> ScheduledWorkflows:
    """Fetch a single scheduled workflow run for the current tenant.

    :param schedule_id: id of the scheduled run to fetch
    :return: the ScheduledWorkflows resource returned by the workflow API
    """
    return await self.workflow_api.workflow_scheduled_get(
        tenant=self.tenant_id,
        scheduled_id=schedule_id,
    )

async def scheduled_run_delete(self, schedule_id: str) -> None:
    """Delete a scheduled workflow run for the current tenant.

    Fix: removed leftover debug ``print`` calls that wrote the schedule id
    and the tenant id to stdout on every delete — noisy and a potential
    information leak in shared logs.

    :param schedule_id: id of the scheduled run to delete
    """
    return await self.workflow_api.workflow_scheduled_delete(
        tenant=self.tenant_id,
        scheduled_id=schedule_id,
    )
def scheduled_run_get(self, schedule_id: str) -> ScheduledWorkflows:
    """Synchronous wrapper around the async ``scheduled_run_get`` client call.

    :param schedule_id: id of the scheduled run to fetch
    :return: the ScheduledWorkflows resource for this schedule id
    """
    pending = self.aio.scheduled_run_get(schedule_id)
    return self._run_coroutine(pending)

def scheduled_run_delete(self, schedule_id: str) -> None:
    """Synchronous wrapper around the async ``scheduled_run_delete`` client call.

    :param schedule_id: id of the scheduled run to delete
    """
    pending = self.aio.scheduled_run_delete(schedule_id)
    return self._run_coroutine(pending)
b/hatchet_sdk/clients/rest/models/scheduled_run_status.py index b1ce7ab0..f0f2a17f 100644 --- a/hatchet_sdk/clients/rest/models/scheduled_run_status.py +++ b/hatchet_sdk/clients/rest/models/scheduled_run_status.py @@ -13,8 +13,10 @@ from __future__ import annotations + import json from enum import Enum + from typing_extensions import Self @@ -26,17 +28,15 @@ class ScheduledRunStatus(str, Enum): """ allowed enum values """ - PENDING = 'PENDING' - RUNNING = 'RUNNING' - SUCCEEDED = 'SUCCEEDED' - FAILED = 'FAILED' - CANCELLED = 'CANCELLED' - QUEUED = 'QUEUED' - SCHEDULED = 'SCHEDULED' + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCEEDED = "SUCCEEDED" + FAILED = "FAILED" + CANCELLED = "CANCELLED" + QUEUED = "QUEUED" + SCHEDULED = "SCHEDULED" @classmethod def from_json(cls, json_str: str) -> Self: """Create an instance of ScheduledRunStatus from a JSON string""" return cls(json.loads(json_str)) - -