From 19eb98851e12b11abe7c34200f4e32a0d25fae09 Mon Sep 17 00:00:00 2001
From: Sean Reilly
Date: Wed, 23 Oct 2024 15:20:19 -0700
Subject: [PATCH] add a run_workflows method to run many workflows in bulk
 (#222)

* add a run_workflows method to run many workflows in bulk

* lint

* use the dedupe exceptions when we have a dedupe

* add spawn_workflows and run_workflows with an example

* think this is how the testing works

* Add a test, although it hangs on the second one - I think because of
  event loop stuff

* is this how I bump version

* maybe

---------

Co-authored-by: Sean Reilly
---
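Usage sketch for the new bulk API (a minimal sketch mirroring
examples/bulk_fanout/bulk_trigger.py below; assumes a configured client and a
registered "BulkParent" workflow):

    import asyncio

    from hatchet_sdk import new_client

    async def main():
        hatchet = new_client()
        # one dict per run; run_workflows triggers them in a single call
        refs = hatchet.admin.run_workflows(
            [
                {"workflow_name": "BulkParent", "input": {"n": i}, "options": {}}
                for i in range(3)
            ]
        )
        # one WorkflowRunRef per entry; collect all results together
        print(await asyncio.gather(*[ref.result() for ref in refs]))

    asyncio.run(main())
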
 examples/bulk_fanout/bulk_trigger.py          |  54 +++
 examples/bulk_fanout/stream.py                |  45 ++
 examples/bulk_fanout/test_bulk_fanout.py      |  25 ++
 examples/bulk_fanout/trigger.py               |  26 ++
 examples/bulk_fanout/worker.py                |  86 ++++
 hatchet_sdk/clients/admin.py                  | 132 ++++++
 hatchet_sdk/clients/rest/__init__.py          |  15 +
 hatchet_sdk/clients/rest/api/__init__.py      |   1 +
 hatchet_sdk/clients/rest/api/api_token_api.py |  12 +-
 hatchet_sdk/clients/rest/api/default_api.py   |  24 +-
 hatchet_sdk/clients/rest/api/event_api.py     |  32 +-
 hatchet_sdk/clients/rest/api/github_api.py    |   4 +-
 hatchet_sdk/clients/rest/api/healthcheck_api.py |  8 +-
 hatchet_sdk/clients/rest/api/log_api.py       |   4 +-
 hatchet_sdk/clients/rest/api/metadata_api.py  |  12 +-
 hatchet_sdk/clients/rest/api/rate_limits_api.py | 423 ++++++++++++++++++
 hatchet_sdk/clients/rest/api/slack_api.py     |   8 +-
 hatchet_sdk/clients/rest/api/sns_api.py       |  12 +-
 hatchet_sdk/clients/rest/api/step_run_api.py  |  28 +-
 hatchet_sdk/clients/rest/api/tenant_api.py    | 329 +++++++++++++-
 hatchet_sdk/clients/rest/api/user_api.py      |  48 +-
 hatchet_sdk/clients/rest/api/worker_api.py    |  12 +-
 hatchet_sdk/clients/rest/api/workflow_api.py  | 411 ++++++++++++++++-
 hatchet_sdk/clients/rest/api/workflow_run_api.py | 16 +-
 hatchet_sdk/clients/rest/api_client.py        |  19 +-
 hatchet_sdk/clients/rest/models/__init__.py   |  14 +
 hatchet_sdk/clients/rest/models/bulk_create_event_response.py | 12 +-
 hatchet_sdk/clients/rest/models/rate_limit.py | 117 +++++
 hatchet_sdk/clients/rest/models/rate_limit_list.py | 110 +++++
 hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py | 37 ++
 hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py | 38 ++
 hatchet_sdk/clients/rest/models/step_run_event_reason.py | 1 +
 hatchet_sdk/clients/rest/models/tenant_queue_metrics.py | 6 +-
 hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py | 83 ++++
 hatchet_sdk/clients/rest/models/workflow.py   |   7 +-
 hatchet_sdk/clients/rest/models/workflow_update_request.py | 85 ++++
 hatchet_sdk/clients/rest/rest.py              |   5 +
 hatchet_sdk/clients/workflow_listener.py      |   9 +-
 hatchet_sdk/context/context.py                |  35 ++
 hatchet_sdk/contracts/dispatcher_pb2.py       |  20 +-
 hatchet_sdk/contracts/dispatcher_pb2.pyi      |   2 +
 hatchet_sdk/contracts/workflows_pb2.py        |  70 +--
 hatchet_sdk/contracts/workflows_pb2.pyi       |  24 +-
 hatchet_sdk/contracts/workflows_pb2_grpc.py   |  33 ++
 hatchet_sdk/workflow_run.py                   |   2 +-
 pyproject.toml                                |   3 +-
 46 files changed, 2353 insertions(+), 146 deletions(-)
 create mode 100644 examples/bulk_fanout/bulk_trigger.py
 create mode 100644 examples/bulk_fanout/stream.py
 create mode 100644 examples/bulk_fanout/test_bulk_fanout.py
 create mode 100644 examples/bulk_fanout/trigger.py
 create mode 100644 examples/bulk_fanout/worker.py
 create mode 100644 hatchet_sdk/clients/rest/api/rate_limits_api.py
 create mode 100644 hatchet_sdk/clients/rest/models/rate_limit.py
 create mode 100644 hatchet_sdk/clients/rest/models/rate_limit_list.py
 create mode 100644 hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py
 create mode 100644 hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py
 create mode 100644 hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py
 create mode 100644 hatchet_sdk/clients/rest/models/workflow_update_request.py

diff --git a/examples/bulk_fanout/bulk_trigger.py b/examples/bulk_fanout/bulk_trigger.py
new file mode 100644
index 00000000..65d7c245
--- /dev/null
+++ b/examples/bulk_fanout/bulk_trigger.py
@@ -0,0 +1,54 @@
+import asyncio
+import base64
+import json
+import os
+
+from dotenv import load_dotenv
+
+from hatchet_sdk import new_client
+from hatchet_sdk.clients.admin import TriggerWorkflowOptions
+from hatchet_sdk.clients.admin import WorkflowRunDict
+from hatchet_sdk.clients.run_event_listener import StepRunEventType
+
+
+async def main():
+    load_dotenv()
+    hatchet = new_client()
+
+    workflowRuns: list[WorkflowRunDict] = []
+
+    # Run the BulkParent workflow 20 times; run i is passed n=i and spawns i BulkChild workflows
+    for i in range(20):
+        workflowRuns.append(
+            {
+                "workflow_name": "BulkParent",
+                "input": {"n": i},
+                "options": {
+                    "additional_metadata": {
+                        "bulk-trigger": str(i),
+                        f"hello-{i}": f"earth-{i}",
+                    },
+                },
+            }
+        )
+
+    workflowRunRefs = hatchet.admin.run_workflows(
+        workflowRuns,
+    )
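+    # run_workflows batches all 20 triggers into a single BulkTriggerWorkflow
+    # gRPC call and returns one WorkflowRunRef per entry, awaited below.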
+
+    results = await asyncio.gather(
+        *[workflowRunRef.result() for workflowRunRef in workflowRunRefs],
+        return_exceptions=True,
+    )
+
+    for result in results:
+        if isinstance(result, Exception):
+            print(f"An error occurred: {result}")  # Handle the exception here
+        else:
+            print(result)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/examples/bulk_fanout/stream.py b/examples/bulk_fanout/stream.py
new file mode 100644
index 00000000..8c2593e1
--- /dev/null
+++ b/examples/bulk_fanout/stream.py
@@ -0,0 +1,45 @@
+import asyncio
+import base64
+import json
+import os
+import random
+
+from dotenv import load_dotenv
+
+from hatchet_sdk import new_client
+from hatchet_sdk.clients.admin import TriggerWorkflowOptions
+from hatchet_sdk.clients.run_event_listener import StepRunEventType
+from hatchet_sdk.v2.hatchet import Hatchet
+
+
+async def main():
+    load_dotenv()
+    hatchet = Hatchet()
+
+    # Generate a random stream key to use to track all
+    # stream events for this workflow run.
+
+    streamKey = "streamKey"
+    streamVal = f"sk-{random.randint(1, 100)}"
+
+    # Specify the stream key as additional metadata
+    # when running the workflow.
+
+    # This key gets propagated to all child workflows
+    # and can have an arbitrary property name.
+
+    workflowRun = hatchet.admin.run_workflow(
+        "BulkParent",
+        {"n": 2},
+        options={"additional_metadata": {streamKey: streamVal}},
+    )
+
+    # Stream all events for the additional meta key value
+    listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)
+
+    async for event in listener:
+        print(event.type, event.payload)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/examples/bulk_fanout/test_bulk_fanout.py b/examples/bulk_fanout/test_bulk_fanout.py
new file mode 100644
index 00000000..87343074
--- /dev/null
+++ b/examples/bulk_fanout/test_bulk_fanout.py
@@ -0,0 +1,25 @@
+import pytest
+
+from hatchet_sdk import Hatchet
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "bulk_fanout"])
+
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet: Hatchet):
+    run = hatchet.admin.run_workflow("BulkParent", {"n": 12})
+    result = await run.result()
+    print(result)
+    assert len(result["spawn"]["results"]) == 12
+
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run2(hatchet: Hatchet):
+    run = hatchet.admin.run_workflow("BulkParent", {"n": 10})
+    result = await run.result()
+    assert len(result["spawn"]["results"]) == 10
diff --git a/examples/bulk_fanout/trigger.py b/examples/bulk_fanout/trigger.py
new file mode 100644
index 00000000..a02eced0
--- /dev/null
+++ b/examples/bulk_fanout/trigger.py
@@ -0,0 +1,26 @@
+import asyncio
+import base64
+import json
+import os
+
+from dotenv import load_dotenv
+
+from hatchet_sdk import new_client
+from hatchet_sdk.clients.admin import TriggerWorkflowOptions
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.run_event_listener import StepRunEventType
+
+
+async def main():
+    load_dotenv()
+    hatchet = new_client()
+
+    # BulkParent subscribes to "parent:create"; one push triggers a run with n=999
+
+    event = hatchet.event.push(
+        "parent:create", {"n": 999}, {"additional_metadata": {"no-dedupe": "world"}}
+    )
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/examples/bulk_fanout/worker.py b/examples/bulk_fanout/worker.py
new file mode 100644
index 00000000..c02cc4b2
--- /dev/null
+++ b/examples/bulk_fanout/worker.py
@@ -0,0 +1,86 @@
+import asyncio
+from typing import List
+
+from dotenv import load_dotenv
+
+from hatchet_sdk import Context, Hatchet
+from hatchet_sdk.clients.admin import ChildWorkflowRunDict
+
+load_dotenv()
+
+hatchet = Hatchet(debug=True)
+
+
+@hatchet.workflow(on_events=["parent:create"])
+class BulkParent:
+    @hatchet.step(timeout="5m")
+    async def spawn(self, context: Context):
+        print("spawning children")
+
+        context.put_stream("spawning...")
+        results = []
+
+        n = context.workflow_input().get("n", 100)
+
+        child_workflow_runs: List[ChildWorkflowRunDict] = []
+
+        for i in range(n):
+
+            child_workflow_runs.append(
+                {
+                    "workflow_name": "BulkChild",
+                    "input": {"a": str(i)},
+                    "key": f"child{i}",
+                    "options": {"additional_metadata": {"hello": "earth"}},
+                }
+            )
+
+        if len(child_workflow_runs) == 0:
+            return
+
+        spawn_results = await context.aio.spawn_workflows(child_workflow_runs)
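+        # spawn_workflows registers every child in a single call; the returned
+        # refs are gathered below to collect each child's output.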
error occurred: {result}") + else: + print(result) + + return {"results": results} + + +@hatchet.workflow(on_events=["child:create"]) +class BulkChild: + @hatchet.step() + def process(self, context: Context): + a = context.workflow_input()["a"] + print(f"child process {a}") + context.put_stream("child 1...") + return {"status": "success " + a} + + @hatchet.step() + def process2(self, context: Context): + print("child process2") + context.put_stream("child 2...") + return {"status2": "success"} + + +def main(): + + worker = hatchet.worker("fanout-worker", max_runs=40) + worker.register_workflow(BulkParent()) + worker.register_workflow(BulkChild()) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py index 5037a7c5..702ea892 100644 --- a/hatchet_sdk/clients/admin.py +++ b/hatchet_sdk/clients/admin.py @@ -5,11 +5,14 @@ import grpc from google.protobuf import timestamp_pb2 +from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry from hatchet_sdk.clients.run_event_listener import new_listener from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.connection import new_conn from hatchet_sdk.contracts.workflows_pb2 import ( + BulkTriggerWorkflowRequest, + BulkTriggerWorkflowResponse, CreateWorkflowVersionOpts, PutRateLimitRequest, PutWorkflowRequest, @@ -44,6 +47,19 @@ class ChildTriggerWorkflowOptions(TypedDict): sticky: bool | None = None +class WorkflowRunDict(TypedDict): + workflow_name: str + input: Any + options: Optional[dict] + + +class ChildWorkflowRunDict(TypedDict): + workflow_name: str + input: Any + options: ChildTriggerWorkflowOptions[dict] + key: str + + class TriggerWorkflowOptions(ScheduleTriggerWorkflowOptions, TypedDict): additional_metadata: Dict[str, str] | None = None desired_worker_id: str | None = None @@ -203,6 +219,65 @@ async def run_workflow( raise ValueError(f"gRPC error: {e}") + @tenacity_retry + async def run_workflows( + self, workflows: List[WorkflowRunDict], options: TriggerWorkflowOptions = None + ) -> List[WorkflowRunRef]: + + if len(workflows) == 0: + raise ValueError("No workflows to run") + try: + if not self.pooled_workflow_listener: + self.pooled_workflow_listener = PooledWorkflowRunListener(self.config) + + namespace = self.namespace + + if ( + options is not None + and "namespace" in options + and options["namespace"] is not None + ): + namespace = options["namespace"] + del options["namespace"] + + workflow_run_requests: TriggerWorkflowRequest = [] + + for workflow in workflows: + + workflow_name = workflow["workflow_name"] + input_data = workflow["input"] + options = workflow["options"] + + if namespace != "" and not workflow_name.startswith(self.namespace): + workflow_name = f"{namespace}{workflow_name}" + + # Prepare and trigger workflow for each workflow name and input + request = self._prepare_workflow_request( + workflow_name, input_data, options + ) + workflow_run_requests.append(request) + + request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests) + + resp: BulkTriggerWorkflowResponse = ( + await self.aio_client.BulkTriggerWorkflow( + request, + metadata=get_metadata(self.token), + ) + ) + + return [ + WorkflowRunRef( + workflow_run_id=workflow_run_id, + workflow_listener=self.pooled_workflow_listener, + workflow_run_event_listener=self.listener_client, + ) + for workflow_run_id in resp.workflow_run_ids + ] + + except grpc.RpcError as e: + 
raise ValueError(f"gRPC error: {e}") + @tenacity_retry async def put_workflow( self, @@ -398,6 +473,61 @@ def run_workflow( raise ValueError(f"gRPC error: {e}") + @tenacity_retry + def run_workflows( + self, workflows: List[WorkflowRunDict], options: TriggerWorkflowOptions = None + ) -> list[WorkflowRunRef]: + + workflow_run_requests: TriggerWorkflowRequest = [] + try: + if not self.pooled_workflow_listener: + self.pooled_workflow_listener = PooledWorkflowRunListener(self.config) + + for workflow in workflows: + + workflow_name = workflow["workflow_name"] + input_data = workflow["input"] + options = workflow["options"] + + namespace = self.namespace + + if ( + options is not None + and "namespace" in options + and options["namespace"] is not None + ): + namespace = options["namespace"] + del options["namespace"] + + if namespace != "" and not workflow_name.startswith(self.namespace): + workflow_name = f"{namespace}{workflow_name}" + + # Prepare and trigger workflow for each workflow name and input + request = self._prepare_workflow_request( + workflow_name, input_data, options + ) + + workflow_run_requests.append(request) + + request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests) + + resp: BulkTriggerWorkflowResponse = self.client.BulkTriggerWorkflow( + request, + metadata=get_metadata(self.token), + ) + + except grpc.RpcError as e: + raise ValueError(f"gRPC error: {e}") + + return [ + WorkflowRunRef( + workflow_run_id=workflow_run_id, + workflow_listener=self.pooled_workflow_listener, + workflow_run_event_listener=self.listener_client, + ) + for workflow_run_id in resp.workflow_run_ids + ] + def run( self, function: Union[str, Callable[[Any], T]], diff --git a/hatchet_sdk/clients/rest/__init__.py b/hatchet_sdk/clients/rest/__init__.py index a83c1cdc..31cf4825 100644 --- a/hatchet_sdk/clients/rest/__init__.py +++ b/hatchet_sdk/clients/rest/__init__.py @@ -24,6 +24,7 @@ from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi from hatchet_sdk.clients.rest.api.log_api import LogApi from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi +from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi from hatchet_sdk.clients.rest.api.slack_api import SlackApi from hatchet_sdk.clients.rest.api.sns_api import SNSApi from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi @@ -122,6 +123,14 @@ from hatchet_sdk.clients.rest.models.pull_request import PullRequest from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics +from hatchet_sdk.clients.rest.models.rate_limit import RateLimit +from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList +from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import ( + RateLimitOrderByDirection, +) +from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import ( + RateLimitOrderByField, +) from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest @@ -165,6 +174,9 @@ from hatchet_sdk.clients.rest.models.tenant_resource import TenantResource from hatchet_sdk.clients.rest.models.tenant_resource_limit import TenantResourceLimit from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy +from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import ( + 
TenantStepRunQueueMetrics, +) from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import ( TriggerWorkflowRunRequest, ) @@ -240,6 +252,9 @@ WorkflowTriggerEventRef, ) from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers +from hatchet_sdk.clients.rest.models.workflow_update_request import ( + WorkflowUpdateRequest, +) from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion from hatchet_sdk.clients.rest.models.workflow_version_definition import ( WorkflowVersionDefinition, diff --git a/hatchet_sdk/clients/rest/api/__init__.py b/hatchet_sdk/clients/rest/api/__init__.py index bc8c788d..f6ecbe38 100644 --- a/hatchet_sdk/clients/rest/api/__init__.py +++ b/hatchet_sdk/clients/rest/api/__init__.py @@ -8,6 +8,7 @@ from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi from hatchet_sdk.clients.rest.api.log_api import LogApi from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi +from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi from hatchet_sdk.clients.rest.api.slack_api import SlackApi from hatchet_sdk.clients.rest.api.sns_api import SNSApi from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi diff --git a/hatchet_sdk/clients/rest/api/api_token_api.py b/hatchet_sdk/clients/rest/api/api_token_api.py index 32faf798..054ccc6b 100644 --- a/hatchet_sdk/clients/rest/api/api_token_api.py +++ b/hatchet_sdk/clients/rest/api/api_token_api.py @@ -282,7 +282,9 @@ def _api_token_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -555,7 +557,9 @@ def _api_token_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -816,7 +820,9 @@ def _api_token_update_revoke_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/default_api.py b/hatchet_sdk/clients/rest/api/default_api.py index bdb00ebe..27b77ab7 100644 --- a/hatchet_sdk/clients/rest/api/default_api.py +++ b/hatchet_sdk/clients/rest/api/default_api.py @@ -308,7 +308,9 @@ def _tenant_invite_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -624,7 +626,9 @@ def _tenant_invite_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, 
bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -915,7 +919,9 @@ def _webhook_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1191,7 +1197,9 @@ def _webhook_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1455,7 +1463,9 @@ def _webhook_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1719,7 +1729,9 @@ def _webhook_requests_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/event_api.py b/hatchet_sdk/clients/rest/api/event_api.py index febdf436..bb1500d6 100644 --- a/hatchet_sdk/clients/rest/api/event_api.py +++ b/hatchet_sdk/clients/rest/api/event_api.py @@ -303,7 +303,9 @@ def _event_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -598,7 +600,9 @@ def _event_create_bulk_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -871,7 +875,9 @@ def _event_data_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1132,7 +1138,9 @@ def _event_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] 
+ ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1393,7 +1401,9 @@ def _event_key_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1868,7 +1878,9 @@ def _event_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2191,7 +2203,9 @@ def _event_update_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2486,7 +2500,9 @@ def _event_update_replay_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/github_api.py b/hatchet_sdk/clients/rest/api/github_api.py index 6d584c01..23c1b269 100644 --- a/hatchet_sdk/clients/rest/api/github_api.py +++ b/hatchet_sdk/clients/rest/api/github_api.py @@ -291,7 +291,9 @@ def _sns_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/healthcheck_api.py b/hatchet_sdk/clients/rest/api/healthcheck_api.py index 7eb18a36..4b7793eb 100644 --- a/hatchet_sdk/clients/rest/api/healthcheck_api.py +++ b/hatchet_sdk/clients/rest/api/healthcheck_api.py @@ -229,7 +229,9 @@ def _liveness_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -451,7 +453,9 @@ def _readiness_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/log_api.py b/hatchet_sdk/clients/rest/api/log_api.py index 8643d563..eaf16677 100644 --- a/hatchet_sdk/clients/rest/api/log_api.py 
+++ b/hatchet_sdk/clients/rest/api/log_api.py @@ -385,7 +385,9 @@ def _log_line_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/metadata_api.py b/hatchet_sdk/clients/rest/api/metadata_api.py index 2954440a..61659069 100644 --- a/hatchet_sdk/clients/rest/api/metadata_api.py +++ b/hatchet_sdk/clients/rest/api/metadata_api.py @@ -232,7 +232,9 @@ def _cloud_metadata_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -460,7 +462,9 @@ def _metadata_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -688,7 +692,9 @@ def _metadata_list_integrations_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/rate_limits_api.py b/hatchet_sdk/clients/rest/api/rate_limits_api.py new file mode 100644 index 00000000..c5e7e4ee --- /dev/null +++ b/hatchet_sdk/clients/rest/api/rate_limits_api.py @@ -0,0 +1,423 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from typing import Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call +from typing_extensions import Annotated + +from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized +from hatchet_sdk.clients.rest.api_response import ApiResponse +from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList +from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import ( + RateLimitOrderByDirection, +) +from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import ( + RateLimitOrderByField, +) +from hatchet_sdk.clients.rest.rest import RESTResponseType + + +class RateLimitsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + @validate_call + async def rate_limit_list( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + search: Annotated[ + Optional[StrictStr], Field(description="The search query to filter for") + ] = None, + order_by_field: Annotated[ + Optional[RateLimitOrderByField], Field(description="What to order by") + ] = None, + order_by_direction: Annotated[ + Optional[RateLimitOrderByDirection], + Field(description="The order direction"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RateLimitList: + """List rate limits + + Lists all rate limits for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param search: The search query to filter for + :type search: str + :param order_by_field: What to order by + :type order_by_field: RateLimitOrderByField + :param order_by_direction: The order direction + :type order_by_direction: RateLimitOrderByDirection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._rate_limit_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + search=search, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "RateLimitList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + async def rate_limit_list_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + search: Annotated[ + Optional[StrictStr], Field(description="The search query to filter for") + ] = None, + order_by_field: Annotated[ + Optional[RateLimitOrderByField], Field(description="What to order by") + ] = None, + order_by_direction: Annotated[ + Optional[RateLimitOrderByDirection], + Field(description="The order direction"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RateLimitList]: + """List rate limits + + Lists all rate limits for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param search: The search query to filter for + :type search: str + :param order_by_field: What to order by + :type order_by_field: RateLimitOrderByField + :param order_by_direction: The order direction + :type order_by_direction: RateLimitOrderByDirection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._rate_limit_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + search=search, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "RateLimitList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def rate_limit_list_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + search: Annotated[ + Optional[StrictStr], Field(description="The search query to filter for") + ] = None, + order_by_field: Annotated[ + Optional[RateLimitOrderByField], Field(description="What to order by") + ] = None, + order_by_direction: Annotated[ + Optional[RateLimitOrderByDirection], + Field(description="The order direction"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List rate limits + + Lists all rate limits for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param search: The search query to filter for + :type search: str + :param order_by_field: What to order by + :type order_by_field: RateLimitOrderByField + :param order_by_direction: The order direction + :type order_by_direction: RateLimitOrderByDirection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._rate_limit_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + search=search, + order_by_field=order_by_field, + order_by_direction=order_by_direction, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "RateLimitList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _rate_limit_list_serialize( + self, + tenant, + offset, + limit, + search, + order_by_field, + order_by_direction, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + if offset is not None: + + _query_params.append(("offset", offset)) + + if limit is not None: + + _query_params.append(("limit", limit)) + + if search is not None: + + _query_params.append(("search", search)) + + if order_by_field is not None: + + _query_params.append(("orderByField", order_by_field.value)) + + if order_by_direction is not None: + + _query_params.append(("orderByDirection", order_by_direction.value)) + + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/tenants/{tenant}/rate-limits", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) diff --git a/hatchet_sdk/clients/rest/api/slack_api.py b/hatchet_sdk/clients/rest/api/slack_api.py index e12b65ab..9b0e637d 100644 --- a/hatchet_sdk/clients/rest/api/slack_api.py +++ b/hatchet_sdk/clients/rest/api/slack_api.py @@ -273,7 +273,9 @@ def _slack_webhook_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -537,7 +539,9 @@ def _slack_webhook_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git 
a/hatchet_sdk/clients/rest/api/sns_api.py b/hatchet_sdk/clients/rest/api/sns_api.py index d09763f7..bb020ceb 100644 --- a/hatchet_sdk/clients/rest/api/sns_api.py +++ b/hatchet_sdk/clients/rest/api/sns_api.py @@ -281,7 +281,9 @@ def _sns_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -566,7 +568,9 @@ def _sns_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -830,7 +834,9 @@ def _sns_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/step_run_api.py b/hatchet_sdk/clients/rest/api/step_run_api.py index 1a3db17c..851ed174 100644 --- a/hatchet_sdk/clients/rest/api/step_run_api.py +++ b/hatchet_sdk/clients/rest/api/step_run_api.py @@ -295,7 +295,9 @@ def _step_run_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -589,7 +591,9 @@ def _step_run_get_schema_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -893,7 +897,9 @@ def _step_run_list_archives_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1203,7 +1209,9 @@ def _step_run_list_events_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1500,7 +1508,9 @@ def _step_run_update_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, 
List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1810,7 +1820,9 @@ def _step_run_update_rerun_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2144,7 +2156,9 @@ def _workflow_run_list_step_run_events_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/tenant_api.py b/hatchet_sdk/clients/rest/api/tenant_api.py index cacd78b8..cd5e4f07 100644 --- a/hatchet_sdk/clients/rest/api/tenant_api.py +++ b/hatchet_sdk/clients/rest/api/tenant_api.py @@ -43,6 +43,9 @@ from hatchet_sdk.clients.rest.models.tenant_member import TenantMember from hatchet_sdk.clients.rest.models.tenant_member_list import TenantMemberList from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy +from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import ( + TenantStepRunQueueMetrics, +) from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import ( UpdateTenantAlertEmailGroupRequest, ) @@ -310,7 +313,9 @@ def _alert_email_group_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -592,7 +597,9 @@ def _alert_email_group_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -853,7 +860,9 @@ def _alert_email_group_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1145,7 +1154,9 @@ def _alert_email_group_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1418,7 +1429,9 @@ def _tenant_alerting_settings_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: 
Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1670,7 +1683,9 @@ def _tenant_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1715,6 +1730,272 @@ def _tenant_create_serialize( _request_auth=_request_auth, ) + @validate_call + async def tenant_get_step_run_queue_metrics( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TenantStepRunQueueMetrics: + """Get step run metrics + + Get the queue metrics for the tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._tenant_get_step_run_queue_metrics_serialize( + tenant=tenant, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "TenantStepRunQueueMetrics", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + async def tenant_get_step_run_queue_metrics_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[TenantStepRunQueueMetrics]: + """Get step run metrics + + Get the queue metrics for the tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._tenant_get_step_run_queue_metrics_serialize( + tenant=tenant, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "TenantStepRunQueueMetrics", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + await response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + async def tenant_get_step_run_queue_metrics_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get step run metrics + + Get the queue metrics for the tenant + + :param tenant: The tenant id (required) + :type tenant: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._tenant_get_step_run_queue_metrics_serialize( + tenant=tenant, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "TenantStepRunQueueMetrics", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _tenant_get_step_run_queue_metrics_serialize( + self, + tenant, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/tenants/{tenant}/step-run-queue-metrics", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + @validate_call async def tenant_invite_accept( self, @@ -1926,7 +2207,9 @@ def _tenant_invite_accept_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2216,7 +2499,9 @@ def _tenant_invite_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2489,7 +2774,9 @@ def _tenant_invite_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2735,7 +3022,9 @@ def _tenant_invite_reject_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] 
= {} _body_params: Optional[bytes] = None # process the path parameters @@ -3046,7 +3335,9 @@ def _tenant_member_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3309,7 +3600,9 @@ def _tenant_member_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3570,7 +3863,9 @@ def _tenant_resource_policy_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -3850,7 +4145,9 @@ def _tenant_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -4095,7 +4392,9 @@ def _user_list_tenant_invites_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/user_api.py b/hatchet_sdk/clients/rest/api/user_api.py index 2c32e668..a9e7a35f 100644 --- a/hatchet_sdk/clients/rest/api/user_api.py +++ b/hatchet_sdk/clients/rest/api/user_api.py @@ -241,7 +241,9 @@ def _tenant_memberships_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -488,7 +490,9 @@ def _user_create_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -734,7 +738,9 @@ def _user_get_current_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path 
parameters @@ -959,7 +965,9 @@ def _user_update_github_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1178,7 +1186,9 @@ def _user_update_github_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1397,7 +1407,9 @@ def _user_update_google_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1616,7 +1628,9 @@ def _user_update_google_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1857,7 +1871,9 @@ def _user_update_login_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2103,7 +2119,9 @@ def _user_update_logout_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2350,7 +2368,9 @@ def _user_update_password_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2587,7 +2607,9 @@ def _user_update_slack_oauth_callback_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2834,7 +2856,9 @@ def _user_update_slack_oauth_start_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - 
_files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/worker_api.py b/hatchet_sdk/clients/rest/api/worker_api.py index 81bc7696..f1be3e82 100644 --- a/hatchet_sdk/clients/rest/api/worker_api.py +++ b/hatchet_sdk/clients/rest/api/worker_api.py @@ -263,7 +263,9 @@ def _worker_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -524,7 +526,9 @@ def _worker_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -804,7 +808,9 @@ def _worker_update_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/workflow_api.py b/hatchet_sdk/clients/rest/api/workflow_api.py index 19d150e9..87532339 100644 --- a/hatchet_sdk/clients/rest/api/workflow_api.py +++ b/hatchet_sdk/clients/rest/api/workflow_api.py @@ -36,6 +36,9 @@ from hatchet_sdk.clients.rest.models.workflow_run_shape import WorkflowRunShape from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus from hatchet_sdk.clients.rest.models.workflow_runs_metrics import WorkflowRunsMetrics +from hatchet_sdk.clients.rest.models.workflow_update_request import ( + WorkflowUpdateRequest, +) from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion from hatchet_sdk.clients.rest.models.workflow_workers_count import WorkflowWorkersCount from hatchet_sdk.clients.rest.rest import RESTResponseType @@ -329,7 +332,9 @@ def _tenant_get_queue_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -601,7 +606,9 @@ def _workflow_delete_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -865,7 +872,9 @@ def _workflow_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, 
List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1170,7 +1179,9 @@ def _workflow_get_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1467,7 +1478,9 @@ def _workflow_get_workers_count_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1730,7 +1743,9 @@ def _workflow_list_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2028,7 +2043,9 @@ def _workflow_run_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2447,7 +2464,9 @@ def _workflow_run_get_metrics_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2787,7 +2806,9 @@ def _workflow_run_get_shape_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -2875,6 +2896,14 @@ async def workflow_run_list( Optional[datetime], Field(description="The time before the workflow run was created"), ] = None, + finished_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was finished"), + ] = None, + finished_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was finished"), + ] = None, order_by_field: Annotated[ Optional[WorkflowRunOrderByField], Field(description="The order by field") ] = None, @@ -2922,6 +2951,10 @@ async def workflow_run_list( :type created_after: datetime :param created_before: The time before the workflow run was created :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime :param order_by_field: The order by field :type 
order_by_field: WorkflowRunOrderByField :param order_by_direction: The order by direction @@ -2961,6 +2994,8 @@ async def workflow_run_list( additional_metadata=additional_metadata, created_after=created_after, created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3034,6 +3069,14 @@ async def workflow_run_list_with_http_info( Optional[datetime], Field(description="The time before the workflow run was created"), ] = None, + finished_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was finished"), + ] = None, + finished_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was finished"), + ] = None, order_by_field: Annotated[ Optional[WorkflowRunOrderByField], Field(description="The order by field") ] = None, @@ -3081,6 +3124,10 @@ async def workflow_run_list_with_http_info( :type created_after: datetime :param created_before: The time before the workflow run was created :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime :param order_by_field: The order by field :type order_by_field: WorkflowRunOrderByField :param order_by_direction: The order by direction @@ -3120,6 +3167,8 @@ async def workflow_run_list_with_http_info( additional_metadata=additional_metadata, created_after=created_after, created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3193,6 +3242,14 @@ async def workflow_run_list_without_preload_content( Optional[datetime], Field(description="The time before the workflow run was created"), ] = None, + finished_after: Annotated[ + Optional[datetime], + Field(description="The time after the workflow run was finished"), + ] = None, + finished_before: Annotated[ + Optional[datetime], + Field(description="The time before the workflow run was finished"), + ] = None, order_by_field: Annotated[ Optional[WorkflowRunOrderByField], Field(description="The order by field") ] = None, @@ -3240,6 +3297,10 @@ async def workflow_run_list_without_preload_content( :type created_after: datetime :param created_before: The time before the workflow run was created :type created_before: datetime + :param finished_after: The time after the workflow run was finished + :type finished_after: datetime + :param finished_before: The time before the workflow run was finished + :type finished_before: datetime :param order_by_field: The order by field :type order_by_field: WorkflowRunOrderByField :param order_by_direction: The order by direction @@ -3279,6 +3340,8 @@ async def workflow_run_list_without_preload_content( additional_metadata=additional_metadata, created_after=created_after, created_before=created_before, + finished_after=finished_after, + finished_before=finished_before, order_by_field=order_by_field, order_by_direction=order_by_direction, _request_auth=_request_auth, @@ -3311,6 +3374,8 @@ def _workflow_run_list_serialize( additional_metadata, created_after, created_before, + finished_after, + finished_before, order_by_field, order_by_direction, _request_auth, @@ -3331,7 +3396,9 @@ def _workflow_run_list_serialize( _query_params: 
List[Tuple[str, str]] = []
        _header_params: Dict[str, Optional[str]] = _headers or {}
        _form_params: List[Tuple[str, str]] = []
-        _files: Dict[str, Union[str, bytes]] = {}
+        _files: Dict[
+            str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+        ] = {}
        _body_params: Optional[bytes] = None

        # process the path parameters
@@ -3396,6 +3463,32 @@ def _workflow_run_list_serialize(
            else:
                _query_params.append(("createdBefore", created_before))

+        if finished_after is not None:
+            if isinstance(finished_after, datetime):
+                _query_params.append(
+                    (
+                        "finishedAfter",
+                        finished_after.strftime(
+                            self.api_client.configuration.datetime_format
+                        ),
+                    )
+                )
+            else:
+                _query_params.append(("finishedAfter", finished_after))
+
+        if finished_before is not None:
+            if isinstance(finished_before, datetime):
+                _query_params.append(
+                    (
+                        "finishedBefore",
+                        finished_before.strftime(
+                            self.api_client.configuration.datetime_format
+                        ),
+                    )
+                )
+            else:
+                _query_params.append(("finishedBefore", finished_before))
+
        if order_by_field is not None:
            _query_params.append(("orderByField", order_by_field.value))

@@ -3432,6 +3525,300 @@ def _workflow_run_list_serialize(
            _request_auth=_request_auth,
        )

+    @validate_call
+    async def workflow_update(
+        self,
+        workflow: Annotated[
+            str,
+            Field(
+                min_length=36, strict=True, max_length=36, description="The workflow id"
+            ),
+        ],
+        workflow_update_request: Annotated[
+            WorkflowUpdateRequest, Field(description="The input to update the workflow")
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> Workflow:
+        """Update workflow
+
+        Update a workflow for a tenant
+
+        :param workflow: The workflow id (required)
+        :type workflow: str
+        :param workflow_update_request: The input to update the workflow (required)
+        :type workflow_update_request: WorkflowUpdateRequest
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """  # noqa: E501
+
+        _param = self._workflow_update_serialize(
+            workflow=workflow,
+            workflow_update_request=workflow_update_request,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index,
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            "200": "Workflow",
+            "400": "APIErrors",
+            "403": "APIErrors",
+        }
+        response_data = await self.api_client.call_api(
+            *_param, _request_timeout=_request_timeout
+        )
+        await response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        ).data
+
+    @validate_call
+    async def workflow_update_with_http_info(
+        self,
+        workflow: Annotated[
+            str,
+            Field(
+                min_length=36, strict=True, max_length=36, description="The workflow id"
+            ),
+        ],
+        workflow_update_request: Annotated[
+            WorkflowUpdateRequest, Field(description="The input to update the workflow")
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[Workflow]:
+        """Update workflow
+
+        Update a workflow for a tenant
+
+        :param workflow: The workflow id (required)
+        :type workflow: str
+        :param workflow_update_request: The input to update the workflow (required)
+        :type workflow_update_request: WorkflowUpdateRequest
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """  # noqa: E501
+
+        _param = self._workflow_update_serialize(
+            workflow=workflow,
+            workflow_update_request=workflow_update_request,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index,
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            "200": "Workflow",
+            "400": "APIErrors",
+            "403": "APIErrors",
+        }
+        response_data = await self.api_client.call_api(
+            *_param, _request_timeout=_request_timeout
+        )
+        await response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        )
+
+    @validate_call
+    async def workflow_update_without_preload_content(
+        self,
+        workflow: Annotated[
+            str,
+            Field(
+                min_length=36, strict=True, max_length=36, description="The workflow id"
+            ),
+        ],
+        workflow_update_request: Annotated[
+            WorkflowUpdateRequest, Field(description="The input to update the workflow")
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> RESTResponseType:
+        """Update workflow
+
+        Update a workflow for a tenant
+
+        :param workflow: The workflow id (required)
+        :type workflow: str
+        :param workflow_update_request: The input to update the workflow (required)
+        :type workflow_update_request: WorkflowUpdateRequest
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._workflow_update_serialize( + workflow=workflow, + workflow_update_request=workflow_update_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "Workflow", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = await self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _workflow_update_serialize( + self, + workflow, + workflow_update_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if workflow is not None: + _path_params["workflow"] = workflow + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if workflow_update_request is not None: + _body_params = workflow_update_request + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type( + ["application/json"] + ) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="PATCH", + resource_path="/api/v1/workflows/{workflow}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + @validate_call async def workflow_version_get( self, @@ -3689,7 +4076,9 @@ def _workflow_version_get_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api/workflow_run_api.py b/hatchet_sdk/clients/rest/api/workflow_run_api.py index 5583e223..d0e1aeb4 100644 --- a/hatchet_sdk/clients/rest/api/workflow_run_api.py +++ b/hatchet_sdk/clients/rest/api/workflow_run_api.py @@ -298,7 +298,9 @@ def _workflow_run_cancel_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -627,7 +629,9 @@ def _workflow_run_create_serialize( _query_params: List[Tuple[str, str]] = [] 
_header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -944,7 +948,9 @@ def _workflow_run_get_input_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters @@ -1232,7 +1238,9 @@ def _workflow_run_update_replay_serialize( _query_params: List[Tuple[str, str]] = [] _header_params: Dict[str, Optional[str]] = _headers or {} _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} _body_params: Optional[bytes] = None # process the path parameters diff --git a/hatchet_sdk/clients/rest/api_client.py b/hatchet_sdk/clients/rest/api_client.py index a35d765c..76446dda 100644 --- a/hatchet_sdk/clients/rest/api_client.py +++ b/hatchet_sdk/clients/rest/api_client.py @@ -400,12 +400,16 @@ def deserialize( data = json.loads(response_text) except ValueError: data = response_text - elif content_type.startswith("application/json"): + elif re.match( + r"^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)", + content_type, + re.IGNORECASE, + ): if response_text == "": data = "" else: data = json.loads(response_text) - elif content_type.startswith("text/plain"): + elif re.match(r"^text\/[a-z.+-]+\s*(;|$)", content_type, re.IGNORECASE): data = response_text else: raise ApiException( @@ -527,7 +531,10 @@ def parameters_to_url_query(self, params, collection_formats): return "&".join(["=".join(map(str, item)) for item in new_params]) - def files_parameters(self, files: Dict[str, Union[str, bytes]]): + def files_parameters( + self, + files: Dict[str, Union[str, bytes, List[str], List[bytes], Tuple[str, bytes]]], + ): """Builds form parameters. :param files: File parameters. 
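The `_files` widening repeated across the serializers above pairs with the `files_parameters` change in the next hunk: a multipart field may now be a bare `str`/`bytes` value, a `(filename, data)` tuple, or a list of either. A minimal sketch of the accepted shapes, assuming an `ApiClient` instance named `api_client` (the field names here are made up for illustration):

    files = {
        "report": b"raw bytes",                    # bare bytes: the field name doubles as the filename
        "avatar": ("avatar.png", b"<png bytes>"),  # (filename, data) tuple, unpacked by the new branch
        "attachments": [                           # a list fans out into one form part per entry
            ("a.txt", b"first"),
            ("b.txt", b"second"),
        ],
    }
    post_params = api_client.files_parameters(files)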
@@ -542,6 +549,12 @@ def files_parameters(self, files: Dict[str, Union[str, bytes]]): elif isinstance(v, bytes): filename = k filedata = v + elif isinstance(v, tuple): + filename, filedata = v + elif isinstance(v, list): + for file_param in v: + params.extend(self.files_parameters({k: file_param})) + continue else: raise ValueError("Unsupported file value") mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" diff --git a/hatchet_sdk/clients/rest/models/__init__.py b/hatchet_sdk/clients/rest/models/__init__.py index fab0ace1..9c550c2e 100644 --- a/hatchet_sdk/clients/rest/models/__init__.py +++ b/hatchet_sdk/clients/rest/models/__init__.py @@ -90,6 +90,14 @@ from hatchet_sdk.clients.rest.models.pull_request import PullRequest from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics +from hatchet_sdk.clients.rest.models.rate_limit import RateLimit +from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList +from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import ( + RateLimitOrderByDirection, +) +from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import ( + RateLimitOrderByField, +) from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest @@ -133,6 +141,9 @@ from hatchet_sdk.clients.rest.models.tenant_resource import TenantResource from hatchet_sdk.clients.rest.models.tenant_resource_limit import TenantResourceLimit from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy +from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import ( + TenantStepRunQueueMetrics, +) from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import ( TriggerWorkflowRunRequest, ) @@ -208,6 +219,9 @@ WorkflowTriggerEventRef, ) from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers +from hatchet_sdk.clients.rest.models.workflow_update_request import ( + WorkflowUpdateRequest, +) from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion from hatchet_sdk.clients.rest.models.workflow_version_definition import ( WorkflowVersionDefinition, diff --git a/hatchet_sdk/clients/rest/models/bulk_create_event_response.py b/hatchet_sdk/clients/rest/models/bulk_create_event_response.py index fd084252..768c5c90 100644 --- a/hatchet_sdk/clients/rest/models/bulk_create_event_response.py +++ b/hatchet_sdk/clients/rest/models/bulk_create_event_response.py @@ -24,7 +24,6 @@ from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta from hatchet_sdk.clients.rest.models.event import Event -from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse class BulkCreateEventResponse(BaseModel): @@ -34,8 +33,7 @@ class BulkCreateEventResponse(BaseModel): metadata: APIResourceMeta events: List[Event] = Field(description="The events.") - pagination: PaginationResponse = Field(description="The pagination information.") - __properties: ClassVar[List[str]] = ["metadata", "events", "pagination"] + __properties: ClassVar[List[str]] = ["metadata", "events"] model_config = ConfigDict( populate_by_name=True, @@ -84,9 +82,6 @@ def to_dict(self) -> Dict[str, Any]: if _item_events: _items.append(_item_events.to_dict()) _dict["events"] = _items - # override the default output 
from pydantic by calling `to_dict()` of pagination - if self.pagination: - _dict["pagination"] = self.pagination.to_dict() return _dict @classmethod @@ -110,11 +105,6 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if obj.get("events") is not None else None ), - "pagination": ( - PaginationResponse.from_dict(obj["pagination"]) - if obj.get("pagination") is not None - else None - ), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/rate_limit.py b/hatchet_sdk/clients/rest/models/rate_limit.py new file mode 100644 index 00000000..0bf88522 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/rate_limit.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + + +class RateLimit(BaseModel): + """ + RateLimit + """ # noqa: E501 + + key: StrictStr = Field(description="The key for the rate limit.") + tenant_id: StrictStr = Field( + description="The ID of the tenant associated with this rate limit.", + alias="tenantId", + ) + limit_value: StrictInt = Field( + description="The maximum number of requests allowed within the window.", + alias="limitValue", + ) + value: StrictInt = Field( + description="The current number of requests made within the window." + ) + window: StrictStr = Field( + description="The window of time in which the limitValue is enforced." + ) + last_refill: datetime = Field( + description="The last time the rate limit was refilled.", alias="lastRefill" + ) + __properties: ClassVar[List[str]] = [ + "key", + "tenantId", + "limitValue", + "value", + "window", + "lastRefill", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RateLimit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RateLimit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "key": obj.get("key"), + "tenantId": obj.get("tenantId"), + "limitValue": obj.get("limitValue"), + "value": obj.get("value"), + "window": obj.get("window"), + "lastRefill": obj.get("lastRefill"), + } + ) + return _obj diff --git a/hatchet_sdk/clients/rest/models/rate_limit_list.py b/hatchet_sdk/clients/rest/models/rate_limit_list.py new file mode 100644 index 00000000..e9f2847d --- /dev/null +++ b/hatchet_sdk/clients/rest/models/rate_limit_list.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict +from typing_extensions import Self + +from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse +from hatchet_sdk.clients.rest.models.rate_limit import RateLimit + + +class RateLimitList(BaseModel): + """ + RateLimitList + """ # noqa: E501 + + pagination: Optional[PaginationResponse] = None + rows: Optional[List[RateLimit]] = None + __properties: ClassVar[List[str]] = ["pagination", "rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RateLimitList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict["pagination"] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in rows (list) + _items = [] + if self.rows: + for _item_rows in self.rows: + if _item_rows: + _items.append(_item_rows.to_dict()) + _dict["rows"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RateLimitList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "pagination": ( + PaginationResponse.from_dict(obj["pagination"]) + if obj.get("pagination") is not None + else None + ), + "rows": ( + [RateLimit.from_dict(_item) for _item in obj["rows"]] + if obj.get("rows") is not None + else None + ), + } + ) + return _obj diff --git a/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py b/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py new file mode 100644 index 00000000..64451da9 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py @@ -0,0 +1,37 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +from enum import Enum + +from typing_extensions import Self + + +class RateLimitOrderByDirection(str, Enum): + """ + RateLimitOrderByDirection + """ + + """ + allowed enum values + """ + ASC = "asc" + DESC = "desc" + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RateLimitOrderByDirection from a JSON string""" + return cls(json.loads(json_str)) diff --git a/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py b/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py new file mode 100644 index 00000000..6b5077be --- /dev/null +++ b/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations + +import json +from enum import Enum + +from typing_extensions import Self + + +class RateLimitOrderByField(str, Enum): + """ + RateLimitOrderByField + """ + + """ + allowed enum values + """ + KEY = "key" + VALUE = "value" + LIMITVALUE = "limitValue" + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RateLimitOrderByField from a JSON string""" + return cls(json.loads(json_str)) diff --git a/hatchet_sdk/clients/rest/models/step_run_event_reason.py b/hatchet_sdk/clients/rest/models/step_run_event_reason.py index 348f7596..487fde06 100644 --- a/hatchet_sdk/clients/rest/models/step_run_event_reason.py +++ b/hatchet_sdk/clients/rest/models/step_run_event_reason.py @@ -33,6 +33,7 @@ class StepRunEventReason(str, Enum): SCHEDULING_TIMED_OUT = "SCHEDULING_TIMED_OUT" ASSIGNED = "ASSIGNED" STARTED = "STARTED" + ACKNOWLEDGED = "ACKNOWLEDGED" FINISHED = "FINISHED" FAILED = "FAILED" RETRYING = "RETRYING" diff --git a/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py b/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py index 5b83ae45..4043d47f 100644 --- a/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py +++ b/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py @@ -19,7 +19,7 @@ import re # noqa: F401 from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing_extensions import Self from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics @@ -34,7 +34,8 @@ class TenantQueueMetrics(BaseModel): default=None, description="The total queue metrics." ) workflow: Optional[Dict[str, QueueMetrics]] = None - __properties: ClassVar[List[str]] = ["total", "workflow"] + queues: Optional[Dict[str, StrictInt]] = None + __properties: ClassVar[List[str]] = ["total", "workflow", "queues"] model_config = ConfigDict( populate_by_name=True, @@ -109,6 +110,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if obj.get("workflow") is not None else None ), + "queues": obj.get("queues"), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py b/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py new file mode 100644 index 00000000..4b9bfc81 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py @@ -0,0 +1,83 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing_extensions import Self + + +class TenantStepRunQueueMetrics(BaseModel): + """ + TenantStepRunQueueMetrics + """ # noqa: E501 + + queues: Optional[Dict[str, StrictInt]] = None + __properties: ClassVar[List[str]] = ["queues"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TenantStepRunQueueMetrics from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TenantStepRunQueueMetrics from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"queues": obj.get("queues")}) + return _obj diff --git a/hatchet_sdk/clients/rest/models/workflow.py b/hatchet_sdk/clients/rest/models/workflow.py index 59e07b08..f3107144 100644 --- a/hatchet_sdk/clients/rest/models/workflow.py +++ b/hatchet_sdk/clients/rest/models/workflow.py @@ -19,7 +19,7 @@ import re # noqa: F401 from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr from typing_extensions import Self from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta @@ -37,6 +37,9 @@ class Workflow(BaseModel): description: Optional[StrictStr] = Field( default=None, description="The description of the workflow." ) + is_paused: Optional[StrictBool] = Field( + default=None, description="Whether the workflow is paused.", alias="isPaused" + ) versions: Optional[List[WorkflowVersionMeta]] = None tags: Optional[List[WorkflowTag]] = Field( default=None, description="The tags of the workflow." 
@@ -48,6 +51,7 @@ class Workflow(BaseModel): "metadata", "name", "description", + "isPaused", "versions", "tags", "jobs", @@ -134,6 +138,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: ), "name": obj.get("name"), "description": obj.get("description"), + "isPaused": obj.get("isPaused"), "versions": ( [WorkflowVersionMeta.from_dict(_item) for _item in obj["versions"]] if obj.get("versions") is not None diff --git a/hatchet_sdk/clients/rest/models/workflow_update_request.py b/hatchet_sdk/clients/rest/models/workflow_update_request.py new file mode 100644 index 00000000..5ec56835 --- /dev/null +++ b/hatchet_sdk/clients/rest/models/workflow_update_request.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing_extensions import Self + + +class WorkflowUpdateRequest(BaseModel): + """ + WorkflowUpdateRequest + """ # noqa: E501 + + is_paused: Optional[StrictBool] = Field( + default=None, description="Whether the workflow is paused.", alias="isPaused" + ) + __properties: ClassVar[List[str]] = ["isPaused"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkflowUpdateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkflowUpdateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"isPaused": obj.get("isPaused")}) + return _obj diff --git a/hatchet_sdk/clients/rest/rest.py b/hatchet_sdk/clients/rest/rest.py index 35284aab..56286e14 100644 --- a/hatchet_sdk/clients/rest/rest.py +++ b/hatchet_sdk/clients/rest/rest.py @@ -156,6 +156,11 @@ async def request( if isinstance(v, tuple) and len(v) == 3: data.add_field(k, value=v[1], filename=v[0], content_type=v[2]) else: + # Ensures that dict objects are serialized + if isinstance(v, dict): + v = json.dumps(v) + elif isinstance(v, int): + v = str(v) data.add_field(k, v) args["data"] = data diff --git a/hatchet_sdk/clients/workflow_listener.py b/hatchet_sdk/clients/workflow_listener.py index 0abbe39f..b1131587 100644 --- a/hatchet_sdk/clients/workflow_listener.py +++ b/hatchet_sdk/clients/workflow_listener.py @@ -22,6 +22,8 @@ DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT = 5 DEFAULT_WORKFLOW_LISTENER_INTERRUPT_INTERVAL = 1800 # 30 minutes +DEDUPE_MESSAGE = "DUPLICATE_WORKFLOW_RUN" + class _Subscription: def __init__(self, id: int, workflow_run_id: str): @@ -223,6 +225,8 @@ async def subscribe(self, workflow_run_id: str): self.cleanup_subscription(subscription_id) async def result(self, workflow_run_id: str): + from hatchet_sdk.clients.admin import DedupeViolationErr + event = await self.subscribe(workflow_run_id) errors = [] @@ -231,7 +235,10 @@ async def result(self, workflow_run_id: str): errors = [result.error for result in event.results if result.error] if errors: - raise Exception(f"Workflow Errors: {errors}") + if DEDUPE_MESSAGE in errors[0]: + raise DedupeViolationErr(errors[0]) + else: + raise Exception(f"Workflow Errors: {errors}") results = { result.stepReadableId: json.loads(result.output) diff --git a/hatchet_sdk/context/context.py b/hatchet_sdk/context/context.py index e0bb8486..5f6c8726 100644 --- a/hatchet_sdk/context/context.py +++ b/hatchet_sdk/context/context.py @@ -2,6 +2,7 @@ import json import traceback from concurrent.futures import Future, ThreadPoolExecutor +from typing import List from hatchet_sdk.clients.events import EventClient from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry @@ -10,12 +11,18 @@ from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.context.worker_context import WorkerContext from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData +from hatchet_sdk.contracts.workflows_pb2 import ( + BulkTriggerWorkflowRequest, + TriggerWorkflowRequest, +) from hatchet_sdk.workflow_run import WorkflowRunRef from ..clients.admin import ( AdminClient, ChildTriggerWorkflowOptions, + ChildWorkflowRunDict, TriggerWorkflowOptions, + WorkflowRunDict, ) from ..clients.dispatcher.dispatcher import Action, DispatcherClient from ..logger import logger @@ -109,6 +116,34 @@ async def spawn_workflow( workflow_name, input, trigger_options ) + @tenacity_retry + async def spawn_workflows( + self, child_workflow_runs: List[ChildWorkflowRunDict] + ) -> List[WorkflowRunRef]: + + if len(child_workflow_runs) == 0: + raise Exception("no child workflows to spawn") + + worker_id = self.worker.id() + + bulk_trigger_workflow_runs: 
WorkflowRunDict = [] + for child_workflow_run in child_workflow_runs: + workflow_name = child_workflow_run["workflow_name"] + input = child_workflow_run["input"] + + key = child_workflow_run.get("key") + options = child_workflow_run.get("options", {}) + + trigger_options = self._prepare_workflow_options(key, options, worker_id) + + bulk_trigger_workflow_runs.append( + WorkflowRunDict( + workflow_name=workflow_name, input=input, options=trigger_options + ) + ) + + return await self.admin_client.aio.run_workflows(bulk_trigger_workflow_runs) + class Context(BaseContext): spawn_index = -1 diff --git a/hatchet_sdk/contracts/dispatcher_pb2.py b/hatchet_sdk/contracts/dispatcher_pb2.py index 699cfe33..68d2b5ec 100644 --- a/hatchet_sdk/contracts/dispatcher_pb2.py +++ b/hatchet_sdk/contracts/dispatcher_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\x88\x02\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x16\n\twebhookId\x18\x06 \x01(\tH\x01\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRunsB\x0c\n\n_webhookId\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\x86\x04\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_id\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 
\x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xc0\x01\n SubscribeToWorkflowEventsRequest\x12\x1a\n\rworkflowRunId\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1e\n\x11\x61\x64\x64itionalMetaKey\x18\x02 \x01(\tH\x01\x88\x01\x01\x12 \n\x13\x61\x64\x64itionalMetaValue\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x10\n\x0e_workflowRunIdB\x14\n\x12_additionalMetaKeyB\x16\n\x14_additionalMetaValue\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 
\n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\x88\x02\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x16\n\twebhookId\x18\x06 \x01(\tH\x01\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRunsB\x0c\n\n_webhookId\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 
\x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\x86\x04\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_id\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xc0\x01\n SubscribeToWorkflowEventsRequest\x12\x1a\n\rworkflowRunId\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1e\n\x11\x61\x64\x64itionalMetaKey\x18\x02 \x01(\tH\x01\x88\x01\x01\x12 \n\x13\x61\x64\x64itionalMetaValue\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x10\n\x0e_workflowRunIdB\x14\n\x12_additionalMetaKeyB\x16\n\x14_additionalMetaValue\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 
\x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 \n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n 
WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -32,13 +32,13 @@ _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=3306 _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=3468 _globals['_STEPACTIONEVENTTYPE']._serialized_start=3471 - _globals['_STEPACTIONEVENTTYPE']._serialized_end=3609 - _globals['_RESOURCETYPE']._serialized_start=3611 - _globals['_RESOURCETYPE']._serialized_end=3712 - _globals['_RESOURCEEVENTTYPE']._serialized_start=3715 - _globals['_RESOURCEEVENTTYPE']._serialized_end=3969 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=3971 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4031 + _globals['_STEPACTIONEVENTTYPE']._serialized_end=3643 + _globals['_RESOURCETYPE']._serialized_start=3645 + _globals['_RESOURCETYPE']._serialized_end=3746 + _globals['_RESOURCEEVENTTYPE']._serialized_start=3749 + _globals['_RESOURCEEVENTTYPE']._serialized_end=4003 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4005 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4065 _globals['_WORKERLABELS']._serialized_start=53 _globals['_WORKERLABELS']._serialized_end=139 _globals['_WORKERREGISTERREQUEST']._serialized_start=142 @@ -93,6 +93,6 @@ _globals['_RELEASESLOTREQUEST']._serialized_end=3200 _globals['_RELEASESLOTRESPONSE']._serialized_start=3202 _globals['_RELEASESLOTRESPONSE']._serialized_end=3223 - _globals['_DISPATCHER']._serialized_start=4034 - _globals['_DISPATCHER']._serialized_end=4922 + _globals['_DISPATCHER']._serialized_start=4068 + _globals['_DISPATCHER']._serialized_end=4956 # @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/contracts/dispatcher_pb2.pyi b/hatchet_sdk/contracts/dispatcher_pb2.pyi index 206dd4b5..96a996ee 100644 --- a/hatchet_sdk/contracts/dispatcher_pb2.pyi +++ b/hatchet_sdk/contracts/dispatcher_pb2.pyi @@ -26,6 +26,7 @@ class StepActionEventType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): STEP_EVENT_TYPE_STARTED: _ClassVar[StepActionEventType] STEP_EVENT_TYPE_COMPLETED: _ClassVar[StepActionEventType] STEP_EVENT_TYPE_FAILED: _ClassVar[StepActionEventType] + STEP_EVENT_TYPE_ACKNOWLEDGED: _ClassVar[StepActionEventType] class ResourceType(int, 
metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () @@ -57,6 +58,7 @@ STEP_EVENT_TYPE_UNKNOWN: StepActionEventType STEP_EVENT_TYPE_STARTED: StepActionEventType STEP_EVENT_TYPE_COMPLETED: StepActionEventType STEP_EVENT_TYPE_FAILED: StepActionEventType +STEP_EVENT_TYPE_ACKNOWLEDGED: StepActionEventType RESOURCE_TYPE_UNKNOWN: ResourceType RESOURCE_TYPE_STEP_RUN: ResourceType RESOURCE_TYPE_WORKFLOW_RUN: ResourceType diff --git a/hatchet_sdk/contracts/workflows_pb2.py b/hatchet_sdk/contracts/workflows_pb2.py index 0d4a0c6e..113609bf 100644 --- a/hatchet_sdk/contracts/workflows_pb2.py +++ b/hatchet_sdk/contracts/workflows_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"1\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x05\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\x8a\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 
\x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,16 +25,16 @@ 
_globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._options = None _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' - _globals['_STICKYSTRATEGY']._serialized_start=2774 - _globals['_STICKYSTRATEGY']._serialized_end=2810 - _globals['_WORKFLOWKIND']._serialized_start=2812 - _globals['_WORKFLOWKIND']._serialized_end=2862 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=2864 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=2972 - _globals['_WORKERLABELCOMPARATOR']._serialized_start=2975 - _globals['_WORKERLABELCOMPARATOR']._serialized_end=3108 - _globals['_RATELIMITDURATION']._serialized_start=3110 - _globals['_RATELIMITDURATION']._serialized_end=3203 + _globals['_STICKYSTRATEGY']._serialized_start=3107 + _globals['_STICKYSTRATEGY']._serialized_end=3143 + _globals['_WORKFLOWKIND']._serialized_start=3145 + _globals['_WORKFLOWKIND']._serialized_end=3195 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3197 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3305 + _globals['_WORKERLABELCOMPARATOR']._serialized_start=3308 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=3441 + _globals['_RATELIMITDURATION']._serialized_start=3443 + _globals['_RATELIMITDURATION']._serialized_end=3536 _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 @@ -49,26 +49,30 @@ _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1571 _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1498 _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1571 - _globals['_CREATESTEPRATELIMIT']._serialized_start=1573 - _globals['_CREATESTEPRATELIMIT']._serialized_end=1622 - _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1624 - _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1646 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1649 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=1924 - _globals['_WORKFLOWVERSION']._serialized_start=1927 - _globals['_WORKFLOWVERSION']._serialized_end=2105 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2107 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2170 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2172 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2229 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2232 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2607 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=2609 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=2659 - _globals['_PUTRATELIMITREQUEST']._serialized_start=2661 - _globals['_PUTRATELIMITREQUEST']._serialized_end=2748 - _globals['_PUTRATELIMITRESPONSE']._serialized_start=2750 - _globals['_PUTRATELIMITRESPONSE']._serialized_end=2772 - _globals['_WORKFLOWSERVICE']._serialized_start=3206 - _globals['_WORKFLOWSERVICE']._serialized_end=3472 + _globals['_CREATESTEPRATELIMIT']._serialized_start=1574 + _globals['_CREATESTEPRATELIMIT']._serialized_end=1824 + _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1826 + _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1848 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1851 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2126 + _globals['_WORKFLOWVERSION']._serialized_start=2129 + _globals['_WORKFLOWVERSION']._serialized_end=2307 + 
_globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2309 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2372 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2374 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2431 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2433 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2505 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2507 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2562 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2565 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2940 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=2942 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=2992 + _globals['_PUTRATELIMITREQUEST']._serialized_start=2994 + _globals['_PUTRATELIMITREQUEST']._serialized_end=3081 + _globals['_PUTRATELIMITRESPONSE']._serialized_start=3083 + _globals['_PUTRATELIMITRESPONSE']._serialized_end=3105 + _globals['_WORKFLOWSERVICE']._serialized_start=3539 + _globals['_WORKFLOWSERVICE']._serialized_end=3887 # @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/contracts/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi index bd9f058a..219c12bf 100644 --- a/hatchet_sdk/contracts/workflows_pb2.pyi +++ b/hatchet_sdk/contracts/workflows_pb2.pyi @@ -170,12 +170,20 @@ class CreateWorkflowStepOpts(_message.Message): def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ...) -> None: ... class CreateStepRateLimit(_message.Message): - __slots__ = ("key", "units") + __slots__ = ("key", "units", "key_expr", "units_expr", "limit_values_expr", "duration") KEY_FIELD_NUMBER: _ClassVar[int] UNITS_FIELD_NUMBER: _ClassVar[int] + KEY_EXPR_FIELD_NUMBER: _ClassVar[int] + UNITS_EXPR_FIELD_NUMBER: _ClassVar[int] + LIMIT_VALUES_EXPR_FIELD_NUMBER: _ClassVar[int] + DURATION_FIELD_NUMBER: _ClassVar[int] key: str units: int - def __init__(self, key: _Optional[str] = ..., units: _Optional[int] = ...) -> None: ... + key_expr: str + units_expr: str + limit_values_expr: str + duration: RateLimitDuration + def __init__(self, key: _Optional[str] = ..., units: _Optional[int] = ..., key_expr: _Optional[str] = ..., units_expr: _Optional[str] = ..., limit_values_expr: _Optional[str] = ..., duration: _Optional[_Union[RateLimitDuration, str]] = ...) -> None: ... class ListWorkflowsRequest(_message.Message): __slots__ = () @@ -231,6 +239,18 @@ class WorkflowTriggerCronRef(_message.Message): cron: str def __init__(self, parent_id: _Optional[str] = ..., cron: _Optional[str] = ...) -> None: ... +class BulkTriggerWorkflowRequest(_message.Message): + __slots__ = ("workflows",) + WORKFLOWS_FIELD_NUMBER: _ClassVar[int] + workflows: _containers.RepeatedCompositeFieldContainer[TriggerWorkflowRequest] + def __init__(self, workflows: _Optional[_Iterable[_Union[TriggerWorkflowRequest, _Mapping]]] = ...) -> None: ... + +class BulkTriggerWorkflowResponse(_message.Message): + __slots__ = ("workflow_run_ids",) + WORKFLOW_RUN_IDS_FIELD_NUMBER: _ClassVar[int] + workflow_run_ids: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, workflow_run_ids: _Optional[_Iterable[str]] = ...) -> None: ... 
+ class TriggerWorkflowRequest(_message.Message): __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "desired_worker_id", "priority") NAME_FIELD_NUMBER: _ClassVar[int] diff --git a/hatchet_sdk/contracts/workflows_pb2_grpc.py b/hatchet_sdk/contracts/workflows_pb2_grpc.py index 3fbf53b3..7f0b419a 100644 --- a/hatchet_sdk/contracts/workflows_pb2_grpc.py +++ b/hatchet_sdk/contracts/workflows_pb2_grpc.py @@ -30,6 +30,11 @@ def __init__(self, channel): request_serializer=workflows__pb2.TriggerWorkflowRequest.SerializeToString, response_deserializer=workflows__pb2.TriggerWorkflowResponse.FromString, ) + self.BulkTriggerWorkflow = channel.unary_unary( + '/WorkflowService/BulkTriggerWorkflow', + request_serializer=workflows__pb2.BulkTriggerWorkflowRequest.SerializeToString, + response_deserializer=workflows__pb2.BulkTriggerWorkflowResponse.FromString, + ) self.PutRateLimit = channel.unary_unary( '/WorkflowService/PutRateLimit', request_serializer=workflows__pb2.PutRateLimitRequest.SerializeToString, @@ -59,6 +64,12 @@ def TriggerWorkflow(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def BulkTriggerWorkflow(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def PutRateLimit(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -83,6 +94,11 @@ def add_WorkflowServiceServicer_to_server(servicer, server): request_deserializer=workflows__pb2.TriggerWorkflowRequest.FromString, response_serializer=workflows__pb2.TriggerWorkflowResponse.SerializeToString, ), + 'BulkTriggerWorkflow': grpc.unary_unary_rpc_method_handler( + servicer.BulkTriggerWorkflow, + request_deserializer=workflows__pb2.BulkTriggerWorkflowRequest.FromString, + response_serializer=workflows__pb2.BulkTriggerWorkflowResponse.SerializeToString, + ), 'PutRateLimit': grpc.unary_unary_rpc_method_handler( servicer.PutRateLimit, request_deserializer=workflows__pb2.PutRateLimitRequest.FromString, @@ -150,6 +166,23 @@ def TriggerWorkflow(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def BulkTriggerWorkflow(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/WorkflowService/BulkTriggerWorkflow', + workflows__pb2.BulkTriggerWorkflowRequest.SerializeToString, + workflows__pb2.BulkTriggerWorkflowResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def PutRateLimit(request, target, diff --git a/hatchet_sdk/workflow_run.py b/hatchet_sdk/workflow_run.py index 43452b7e..51a23821 100644 --- a/hatchet_sdk/workflow_run.py +++ b/hatchet_sdk/workflow_run.py @@ -1,5 +1,5 @@ import asyncio -from typing import Coroutine, Generic, TypeVar +from typing import Any, Coroutine, Generic, Optional, TypedDict, TypeVar from hatchet_sdk.clients.run_event_listener import ( RunEventListener, diff --git a/pyproject.toml b/pyproject.toml index f118cc8c..9a5b0904 100644 --- a/pyproject.toml +++ 
b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "hatchet-sdk"
-version = "0.38.3"
+version = "0.39.0a0"
 description = ""
 authors = ["Alexander Belanger "]
 readme = "README.md"
@@ -67,3 +67,4 @@ simple = "examples.simple.worker:main"
 timeout = "examples.timeout.worker:main"
 blocked = "examples.blocked_async.worker:main"
 existing_loop = "examples.worker_existing_loop.worker:main"
+bulk_fanout = "examples.bulk_fanout.worker:main"
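
A minimal sketch of how the worker-side bulk API added in hatchet_sdk/context/context.py above might be used from a parent step. The BulkParent/Child workflow names, the event key, the metadata, and the step body are illustrative assumptions; spawn_workflows, the ChildWorkflowRunDict keys (workflow_name, input, key, options), and DedupeViolationErr are the pieces this patch actually introduces, and a Child workflow is assumed to be registered elsewhere.

import asyncio

from hatchet_sdk import Context, Hatchet
from hatchet_sdk.clients.admin import DedupeViolationErr

hatchet = Hatchet()


@hatchet.workflow(on_events=["parent:create"])
class BulkParent:
    @hatchet.step(timeout="5m")
    async def spawn(self, context: Context):
        # One ChildWorkflowRunDict per child. "key" is optional: reusing a
        # key makes the corresponding ref.result() raise DedupeViolationErr,
        # surfaced via the workflow_listener.py change above.
        children = [
            {
                "workflow_name": "Child",  # assumed to be registered elsewhere
                "input": {"n": i},
                "key": f"child-{i}",
                "options": {"additional_metadata": {"batch": "demo"}},
            }
            for i in range(10)
        ]

        # A single BulkTriggerWorkflow RPC starts every child at once,
        # instead of one TriggerWorkflow round trip per child.
        refs = await context.spawn_workflows(children)

        results = await asyncio.gather(
            *[ref.result() for ref in refs],
            return_exceptions=True,
        )

        completed = 0
        for res in results:
            if isinstance(res, DedupeViolationErr):
                print(f"duplicate child run skipped: {res}")
            elif isinstance(res, Exception):
                print(f"child run failed: {res}")
            else:
                completed += 1
        return {"completed": completed}


def main():
    # Register the parent on a worker as in the other example entrypoints.
    worker = hatchet.worker("bulk-fanout-worker", max_runs=5)
    worker.register_workflow(BulkParent())
    worker.start()

The design choice worth noting: callers build plain dicts, and spawn_workflows converts each into a trigger request before issuing one bulk gRPC call, so per-child failures come back individually through each WorkflowRunRef rather than failing the whole batch.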