From 3780fa149bdc48f5f48267207b363f0f9749f1ce Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Tue, 23 Jul 2024 14:16:50 -0400
Subject: [PATCH 01/19] add tests

---
 examples/fanout/worker.py |  8 +++++---
 hatchet                   |  2 +-
 tests/test_client.py      | 17 -----------------
 tests/test_fanout.py      | 37 +++++++++++++++++++++++++++++++++++++
 4 files changed, 43 insertions(+), 21 deletions(-)
 create mode 100644 tests/test_fanout.py

diff --git a/examples/fanout/worker.py b/examples/fanout/worker.py
index cda2f6bb..9b0b3c30 100644
--- a/examples/fanout/worker.py
+++ b/examples/fanout/worker.py
@@ -17,7 +17,9 @@ async def spawn(self, context: Context):
 
         results = []
 
-        for i in range(100):
+        n = context.workflow_input().get("n", 100)
+
+        for i in range(n):
             results.append(
                 (
                     await context.aio.spawn_workflow(
@@ -38,13 +40,13 @@
 @hatchet.workflow(on_events=["child:create"])
 class Child:
     @hatchet.step()
-    async def process(self, context: Context):
+    def process(self, context: Context):
         a = context.workflow_input()["a"]
         print(f"child process {a}")
         return {"status": "success " + a}
 
     @hatchet.step()
-    async def process2(self, context: Context):
+    def process2(self, context: Context):
         print("child process2")
         return {"status2": "success"}
 
diff --git a/hatchet b/hatchet
index f8981809..466b843d 160000
--- a/hatchet
+++ b/hatchet
@@ -1 +1 @@
-Subproject commit f8981809f79306c0e445f2cdb4a71afb96b96d95
+Subproject commit 466b843dac96bb73305ac52c671553c823c2eabe
diff --git a/tests/test_client.py b/tests/test_client.py
index 2bd0fb87..9aaf0b26 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -12,20 +12,3 @@ def hatchet():
 
 def test_client(hatchet):
     assert hatchet
-
-
-# requires scope module or higher for shared event loop
-@pytest.mark.asyncio(scope="session")
-async def test_listen(hatchet):
-    run = hatchet.client.admin.get_workflow_run("839e089c-6708-4708-ae79-bad47c832580")
-    result = await run.result()
-    print(result)
-    assert result
-
-
-@pytest.mark.asyncio(scope="session")
-async def test_listen2(hatchet):
-    run = hatchet.client.admin.get_workflow_run("839e089c-6708-4708-ae79-bad47c832580")
-    result = await run.result()
-    print(result)
-    assert result
diff --git a/tests/test_fanout.py b/tests/test_fanout.py
new file mode 100644
index 00000000..8770ebff
--- /dev/null
+++ b/tests/test_fanout.py
@@ -0,0 +1,37 @@
+import subprocess
+import time
+from hatchet_sdk import Hatchet
+import pytest
+
+
+@pytest.fixture
+def hatchet():
+    return Hatchet(debug=True)
+
+@pytest.fixture(scope="session", autouse=True)
+def fixture_background_hatchet_worker():
+    proc = subprocess.Popen(["poetry", "run", "python3", "./examples/fanout/worker.py"])
+
+    # sleep long enough to make sure we are up and running
+    # it would be nice to NOT do this, but we need to ensure the worker is running before we trigger any events
+    time.sleep(5)
+
+    yield
+
+    proc.terminate()
+    proc.wait()
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet):
+    run = hatchet.client.admin.run_workflow("Parent", {"n": 2})
+    result = await run.result()
+    assert len(result["spawn"]["results"]) == 2
+
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run2(hatchet):
+    run = hatchet.client.admin.run_workflow("Parent", {"n": 2})
+    result = await run.result()
+    assert len(result["spawn"]["results"]) == 2
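Note: the startup fixture above blocks on a fixed time.sleep(5), and its own comment concedes that a real readiness signal would be better. A minimal sketch of that idea, assuming any cheap API call fails until the server is reachable; the probe below reuses the pre-refactor rest accessor seen elsewhere in this series, and the exact signal for "worker registered" is an assumption:

    import time

    def wait_until(check, timeout=10.0, interval=0.25):
        """Poll `check` until it returns truthy or `timeout` seconds elapse."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if check():
                return
            time.sleep(interval)
        raise TimeoutError("worker did not become ready in time")

    def api_is_reachable(hatchet) -> bool:
        # Hypothetical readiness probe: swallow errors until the API answers.
        try:
            hatchet.client.rest().workflow_list()
            return True
        except Exception:
            return False

The fixture could then call wait_until(lambda: api_is_reachable(hatchet)) in place of the unconditional sleep.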
From 9f15fa27a8789510af7921f7e1b2a0f165a57ed4 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Tue, 23 Jul 2024 14:48:57 -0400
Subject: [PATCH 02/19] poetry run

---
 pyproject.toml       | 3 +++
 tests/test_fanout.py | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 5b3b7328..6edb853d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,3 +40,6 @@ known_third_party = [
     "pyyaml",
     "urllib3"
 ]
+
+[tool.poetry.scripts]
+worker = "examples.fanout.worker:main"
\ No newline at end of file
diff --git a/tests/test_fanout.py b/tests/test_fanout.py
index 8770ebff..3544b4a9 100644
--- a/tests/test_fanout.py
+++ b/tests/test_fanout.py
@@ -10,7 +10,7 @@ def hatchet():
 
 @pytest.fixture(scope="session", autouse=True)
 def fixture_background_hatchet_worker():
-    proc = subprocess.Popen(["poetry", "run", "python3", "./examples/fanout/worker.py"])
+    proc = subprocess.Popen(["poetry", "run", "worker"])
 
     # sleep long enough to make sure we are up and running
     # it would be nice to NOT do this, but we need to ensure the worker is running before we trigger any events
     time.sleep(5)

From 16200a3f922e81b437d9891d72591835d95bb768 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 07:05:08 -0400
Subject: [PATCH 03/19] chore: move contracts to dir

---
 hatchet_sdk/__init__.py                            | 2 +-
 hatchet_sdk/clients/admin.py                       | 4 ++--
 hatchet_sdk/clients/dispatcher.py                  | 4 ++--
 hatchet_sdk/clients/events.py                      | 4 ++--
 hatchet_sdk/clients/run_event_listener.py          | 4 ++--
 hatchet_sdk/clients/workflow_listener.py           | 4 ++--
 hatchet_sdk/context.py                             | 2 +-
 hatchet_sdk/{ => contracts}/dispatcher_pb2.py      | 0
 hatchet_sdk/{ => contracts}/dispatcher_pb2.pyi     | 0
 hatchet_sdk/{ => contracts}/dispatcher_pb2_grpc.py | 0
 hatchet_sdk/{ => contracts}/events_pb2.py          | 0
 hatchet_sdk/{ => contracts}/events_pb2.pyi         | 0
 hatchet_sdk/{ => contracts}/events_pb2_grpc.py     | 0
 hatchet_sdk/{ => contracts}/workflows_pb2.py       | 0
 hatchet_sdk/{ => contracts}/workflows_pb2.pyi      | 0
 hatchet_sdk/{ => contracts}/workflows_pb2_grpc.py  | 0
 hatchet_sdk/hatchet.py                             | 2 +-
 hatchet_sdk/worker.py                              | 2 +-
 hatchet_sdk/workflow.py                            | 2 +-
 19 files changed, 15 insertions(+), 15 deletions(-)
 rename hatchet_sdk/{ => contracts}/dispatcher_pb2.py (100%)
 rename hatchet_sdk/{ => contracts}/dispatcher_pb2.pyi (100%)
 rename hatchet_sdk/{ => contracts}/dispatcher_pb2_grpc.py (100%)
 rename hatchet_sdk/{ => contracts}/events_pb2.py (100%)
 rename hatchet_sdk/{ => contracts}/events_pb2.pyi (100%)
 rename hatchet_sdk/{ => contracts}/events_pb2_grpc.py (100%)
 rename hatchet_sdk/{ => contracts}/workflows_pb2.py (100%)
 rename hatchet_sdk/{ => contracts}/workflows_pb2.pyi (100%)
 rename hatchet_sdk/{ => contracts}/workflows_pb2_grpc.py (100%)

diff --git a/hatchet_sdk/__init__.py b/hatchet_sdk/__init__.py
index 52fab2de..e7d33c25 100644
--- a/hatchet_sdk/__init__.py
+++ b/hatchet_sdk/__init__.py
@@ -132,7 +132,7 @@
 from .context import Context
 from .hatchet import ClientConfig, Hatchet, concurrency, on_failure_step, step, workflow
 from .worker import Worker, WorkerStatus
-from .workflows_pb2 import (
+from hatchet_sdk.contracts.workflows_pb2 import (
     ConcurrencyLimitStrategy,
     CreateWorkflowVersionOpts,
     RateLimitDuration,
diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py
index d7965532..677e8c00 100644
--- a/hatchet_sdk/clients/admin.py
+++ b/hatchet_sdk/clients/admin.py
@@ -14,7 +14,7 @@
 from ..loader import ClientConfig
 from ..metadata import get_metadata
 from ..workflow import WorkflowMeta
-from ..workflows_pb2 import (
+from hatchet_sdk.contracts.workflows_pb2 import (
     CreateWorkflowVersionOpts,
     PutRateLimitRequest,
     PutWorkflowRequest,
@@ -24,7 +24,7 @@
     TriggerWorkflowResponse,
     WorkflowVersion,
 )
-from ..workflows_pb2_grpc import WorkflowServiceStub
+from hatchet_sdk.contracts.workflows_pb2_grpc import WorkflowServiceStub
 
 
 def new_admin(config: ClientConfig):
diff --git a/hatchet_sdk/clients/dispatcher.py b/hatchet_sdk/clients/dispatcher.py
index 4866b075..ee7bb905 100644
--- a/hatchet_sdk/clients/dispatcher.py
+++ b/hatchet_sdk/clients/dispatcher.py
@@ -12,7 +12,7 @@
 from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt
 from hatchet_sdk.connection import new_conn
 
-from ..dispatcher_pb2 import (
+from hatchet_sdk.contracts.dispatcher_pb2 import (
     ActionEventResponse,
     ActionType,
     AssignedAction,
@@ -27,7 +27,7 @@
     WorkerRegisterResponse,
     WorkerUnsubscribeRequest,
 )
-from ..dispatcher_pb2_grpc import DispatcherStub
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
 from ..loader import ClientConfig
 from ..logger import logger
 from ..metadata import get_metadata
diff --git a/hatchet_sdk/clients/events.py b/hatchet_sdk/clients/events.py
index 1804ca44..5ecbcc77 100644
--- a/hatchet_sdk/clients/events.py
+++ b/hatchet_sdk/clients/events.py
@@ -6,8 +6,8 @@
 import grpc
 from google.protobuf import timestamp_pb2
 
-from ..events_pb2 import Event, PushEventRequest, PutLogRequest, PutStreamEventRequest
-from ..events_pb2_grpc import EventsServiceStub
+from hatchet_sdk.contracts.events_pb2 import Event, PushEventRequest, PutLogRequest, PutStreamEventRequest
+from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub
 from ..loader import ClientConfig
 from ..metadata import get_metadata
diff --git a/hatchet_sdk/clients/run_event_listener.py b/hatchet_sdk/clients/run_event_listener.py
index c3f888fc..9f280e44 100644
--- a/hatchet_sdk/clients/run_event_listener.py
+++ b/hatchet_sdk/clients/run_event_listener.py
@@ -6,14 +6,14 @@
 
 from hatchet_sdk.connection import new_conn
 
-from ..dispatcher_pb2 import (
+from hatchet_sdk.contracts.dispatcher_pb2 import (
     RESOURCE_TYPE_STEP_RUN,
     RESOURCE_TYPE_WORKFLOW_RUN,
     ResourceEventType,
     SubscribeToWorkflowEventsRequest,
     WorkflowEvent,
 )
-from ..dispatcher_pb2_grpc import DispatcherStub
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
 from ..loader import ClientConfig
 from ..metadata import get_metadata
diff --git a/hatchet_sdk/clients/workflow_listener.py b/hatchet_sdk/clients/workflow_listener.py
index 36124604..54fec01f 100644
--- a/hatchet_sdk/clients/workflow_listener.py
+++ b/hatchet_sdk/clients/workflow_listener.py
@@ -10,8 +10,8 @@
 from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt
 from hatchet_sdk.connection import new_conn
 
-from ..dispatcher_pb2 import SubscribeToWorkflowRunsRequest, WorkflowRunEvent
-from ..dispatcher_pb2_grpc import DispatcherStub
+from hatchet_sdk.contracts.dispatcher_pb2 import SubscribeToWorkflowRunsRequest, WorkflowRunEvent
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
 from ..loader import ClientConfig
 from ..logger import logger
 from ..metadata import get_metadata
diff --git a/hatchet_sdk/context.py b/hatchet_sdk/context.py
index cf5bb6ff..c3791584 100644
--- a/hatchet_sdk/context.py
+++ b/hatchet_sdk/context.py
@@ -18,7 +18,7 @@
     TriggerWorkflowOptions,
 )
 from .clients.dispatcher import Action, DispatcherClientImpl
-from .dispatcher_pb2 import OverridesData
+from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData
 from .logger import logger
 
 DEFAULT_WORKFLOW_POLLING_INTERVAL = 5  # Seconds
diff --git a/hatchet_sdk/dispatcher_pb2.py b/hatchet_sdk/contracts/dispatcher_pb2.py
similarity index 100%
rename from hatchet_sdk/dispatcher_pb2.py
rename to hatchet_sdk/contracts/dispatcher_pb2.py
diff --git a/hatchet_sdk/dispatcher_pb2.pyi b/hatchet_sdk/contracts/dispatcher_pb2.pyi
similarity index 100%
rename from hatchet_sdk/dispatcher_pb2.pyi
rename to hatchet_sdk/contracts/dispatcher_pb2.pyi
diff --git a/hatchet_sdk/dispatcher_pb2_grpc.py b/hatchet_sdk/contracts/dispatcher_pb2_grpc.py
similarity index 100%
rename from hatchet_sdk/dispatcher_pb2_grpc.py
rename to hatchet_sdk/contracts/dispatcher_pb2_grpc.py
diff --git a/hatchet_sdk/events_pb2.py b/hatchet_sdk/contracts/events_pb2.py
similarity index 100%
rename from hatchet_sdk/events_pb2.py
rename to hatchet_sdk/contracts/events_pb2.py
diff --git a/hatchet_sdk/events_pb2.pyi b/hatchet_sdk/contracts/events_pb2.pyi
similarity index 100%
rename from hatchet_sdk/events_pb2.pyi
rename to hatchet_sdk/contracts/events_pb2.pyi
diff --git a/hatchet_sdk/events_pb2_grpc.py b/hatchet_sdk/contracts/events_pb2_grpc.py
similarity index 100%
rename from hatchet_sdk/events_pb2_grpc.py
rename to hatchet_sdk/contracts/events_pb2_grpc.py
diff --git a/hatchet_sdk/workflows_pb2.py b/hatchet_sdk/contracts/workflows_pb2.py
similarity index 100%
rename from hatchet_sdk/workflows_pb2.py
rename to hatchet_sdk/contracts/workflows_pb2.py
diff --git a/hatchet_sdk/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi
similarity index 100%
rename from hatchet_sdk/workflows_pb2.pyi
rename to hatchet_sdk/contracts/workflows_pb2.pyi
diff --git a/hatchet_sdk/workflows_pb2_grpc.py b/hatchet_sdk/contracts/workflows_pb2_grpc.py
similarity index 100%
rename from hatchet_sdk/workflows_pb2_grpc.py
rename to hatchet_sdk/contracts/workflows_pb2_grpc.py
diff --git a/hatchet_sdk/hatchet.py b/hatchet_sdk/hatchet.py
index dea651b7..6b61f5e8 100644
--- a/hatchet_sdk/hatchet.py
+++ b/hatchet_sdk/hatchet.py
@@ -8,7 +8,7 @@
 from .logger import logger
 from .worker import Worker
 from .workflow import WorkflowMeta
-from .workflows_pb2 import ConcurrencyLimitStrategy, CreateStepRateLimit
+from hatchet_sdk.contracts.workflows_pb2 import ConcurrencyLimitStrategy, CreateStepRateLimit
 
 
 def workflow(
diff --git a/hatchet_sdk/worker.py b/hatchet_sdk/worker.py
index 4a72077e..c606f182 100644
--- a/hatchet_sdk/worker.py
+++ b/hatchet_sdk/worker.py
@@ -35,7 +35,7 @@
     new_dispatcher,
 )
 from .context import Context
-from .dispatcher_pb2 import (
+from hatchet_sdk.contracts.dispatcher_pb2 import (
     GROUP_KEY_EVENT_TYPE_COMPLETED,
     GROUP_KEY_EVENT_TYPE_FAILED,
     GROUP_KEY_EVENT_TYPE_STARTED,
diff --git a/hatchet_sdk/workflow.py b/hatchet_sdk/workflow.py
index 290e14da..2a82846e 100644
--- a/hatchet_sdk/workflow.py
+++ b/hatchet_sdk/workflow.py
@@ -1,7 +1,7 @@
 import functools
 from typing import Any, Callable, List, Tuple
 
-from .workflows_pb2 import (
+from hatchet_sdk.contracts.workflows_pb2 import (
     CreateWorkflowJobOpts,
     CreateWorkflowStepOpts,
     CreateWorkflowVersionOpts,
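Note: patch 03 only moves the generated *_pb2* modules under hatchet_sdk/contracts and rewrites imports; nothing is regenerated. If the stubs ever need to be rebuilt into the new package, a grpc_tools invocation along these lines would do it. This is a hypothetical sketch: the -I path and .proto file names are assumptions (the contract sources are expected to live in the hatchet submodule pinned in patch 01), not something this series specifies:

    # Hypothetical regeneration sketch; adjust -I and the .proto paths to
    # wherever the contract sources actually live.
    from grpc_tools import protoc

    protoc.main([
        "grpc_tools.protoc",
        "-I", "hatchet/api-contracts",
        "--python_out=hatchet_sdk/contracts",
        "--pyi_out=hatchet_sdk/contracts",
        "--grpc_python_out=hatchet_sdk/contracts",
        "dispatcher/dispatcher.proto",
        "events/events.proto",
        "workflows/workflows.proto",
    ])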
From 19d10a5ccc72361f21edec82b7112d6b95c9790d Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 07:15:18 -0400
Subject: [PATCH 04/19] chore: simplify clients

---
 hatchet_sdk/__init__.py       |  2 +-
 hatchet_sdk/client.py         | 41 ++++++++++++++---------------
 hatchet_sdk/clients/admin.py  |  4 ++--
 hatchet_sdk/clients/events.py |  4 ++--
 hatchet_sdk/context.py        | 12 +++++-----
 hatchet_sdk/hatchet.py        |  6 ++---
 hatchet_sdk/worker.py         |  6 ++---
 7 files changed, 33 insertions(+), 42 deletions(-)

diff --git a/hatchet_sdk/__init__.py b/hatchet_sdk/__init__.py
index e7d33c25..dde703ed 100644
--- a/hatchet_sdk/__init__.py
+++ b/hatchet_sdk/__init__.py
@@ -121,7 +121,7 @@
 )
 from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta
 
-from .client import ClientImpl, new_client
+from .client import new_client
 from .clients.admin import (
     ChildTriggerWorkflowOptions,
     ScheduleTriggerWorkflowOptions,
diff --git a/hatchet_sdk/client.py b/hatchet_sdk/client.py
index a3a24204..64ee345d 100644
--- a/hatchet_sdk/client.py
+++ b/hatchet_sdk/client.py
@@ -1,7 +1,5 @@
-# relative imports
-import os
 from logging import Logger
-from typing import Any
+from typing import Callable
 
 import grpc
 
@@ -9,30 +7,23 @@
 from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
 from hatchet_sdk.connection import new_conn
 
-from .clients.admin import AdminClientImpl, new_admin
-from .clients.dispatcher import DispatcherClientImpl, new_dispatcher
-from .clients.events import EventClientImpl, new_event
-from .clients.rest.api.workflow_api import WorkflowApi
-from .clients.rest.api.workflow_run_api import WorkflowRunApi
-from .clients.rest.api_client import ApiClient
-from .clients.rest.configuration import Configuration
+from .clients.admin import AdminClient, new_admin
+from .clients.dispatcher import DispatcherClient, new_dispatcher
+from .clients.events import EventClient, new_event
 from .clients.rest_client import RestApi
 from .loader import ClientConfig, ConfigLoader
 
 
 class Client:
-    admin: AdminClientImpl
-    dispatcher: DispatcherClientImpl
-    event: EventClientImpl
+    admin: AdminClient
+    dispatcher: DispatcherClient
+    event: EventClient
     rest: RestApi
     workflow_listener: PooledWorkflowRunListener
     logger: Logger
 
-
-class ClientImpl(Client):
-    @classmethod
-    def from_environment(cls, defaults: ClientConfig = ClientConfig(), *opts_functions):
+    @classmethod
+    def from_environment(cls, defaults: ClientConfig = ClientConfig(), *opts_functions: Callable[[ClientConfig], None]):
         config: ClientConfig = ConfigLoader(".").load_client_config(defaults)
         for opt_function in opts_functions:
             opt_function(config)
@@ -49,27 +40,27 @@ def from_config(cls, config: ClientConfig = ClientConfig()):
 
         conn: grpc.Channel = new_conn(config)
 
-        # Instantiate client implementations
+        # Instantiate clients
         event_client = new_event(conn, config)
         admin_client = new_admin(config)
         dispatcher_client = new_dispatcher(config)
         rest_client = RestApi(config.server_url, config.token, config.tenant_id)
-        workflow_listener_client = None
+        workflow_listener = None  # Initialize this if needed
 
         return cls(
             event_client,
             admin_client,
             dispatcher_client,
-            workflow_listener_client,
+            workflow_listener,
             rest_client,
             config,
         )
 
     def __init__(
         self,
-        event_client: EventClientImpl,
-        admin_client: AdminClientImpl,
-        dispatcher_client: DispatcherClientImpl,
+        event_client: EventClient,
+        admin_client: AdminClient,
+        dispatcher_client: DispatcherClient,
         workflow_listener: PooledWorkflowRunListener,
         rest_client: RestApi,
         config: ClientConfig,
@@ -92,5 +83,5 @@ def with_host_port_impl(config: ClientConfig):
         return with_host_port_impl
 
 
-new_client = ClientImpl.from_environment
-new_client_raw = ClientImpl.from_config
+new_client = Client.from_environment
+new_client_raw = Client.from_config
\ No newline at end of file
diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py
index 677e8c00..4bd6e66c 100644
--- a/hatchet_sdk/clients/admin.py
+++ b/hatchet_sdk/clients/admin.py
@@ -28,7 +28,7 @@
 
 def new_admin(config: ClientConfig):
-    return AdminClientImpl(config)
+    return AdminClient(config)
 
 
 class ScheduleTriggerWorkflowOptions(TypedDict):
@@ -210,7 +210,7 @@ async def schedule_workflow(
             raise ValueError(f"gRPC error: {e}")
 
 
-class AdminClientImpl(AdminClientBase):
+class AdminClient(AdminClientBase):
     def __init__(self, config: ClientConfig):
         conn = new_conn(config)
         self.config = config
diff --git a/hatchet_sdk/clients/events.py b/hatchet_sdk/clients/events.py
index 5ecbcc77..9ee286dc 100644
--- a/hatchet_sdk/clients/events.py
+++ b/hatchet_sdk/clients/events.py
@@ -13,7 +13,7 @@
 
 def new_event(conn, config: ClientConfig):
-    return EventClientImpl(
+    return EventClient(
         client=EventsServiceStub(conn),
         config=config,
     )
@@ -31,7 +31,7 @@ class PushEventOptions(TypedDict):
     additional_metadata: Dict[str, str] | None = None
 
 
-class EventClientImpl:
+class EventClient:
     def __init__(self, client: EventsServiceStub, config: ClientConfig):
         self.client = client
         self.token = config.token
diff --git a/hatchet_sdk/context.py b/hatchet_sdk/context.py
index c3791584..70ae0790 100644
--- a/hatchet_sdk/context.py
+++ b/hatchet_sdk/context.py
@@ -3,7 +3,7 @@
 import traceback
 from concurrent.futures import Future, ThreadPoolExecutor
 
-from hatchet_sdk.clients.events import EventClientImpl
+from hatchet_sdk.clients.events import EventClient
 from hatchet_sdk.clients.run_event_listener import (
     RunEventListener,
     RunEventListenerClient,
@@ -12,7 +12,7 @@
 from hatchet_sdk.workflow_run import WorkflowRunRef
 
 from .clients.admin import (
-    AdminClientImpl,
+    AdminClient,
     ChildTriggerWorkflowOptions,
     ScheduleTriggerWorkflowOptions,
     TriggerWorkflowOptions,
@@ -54,8 +54,8 @@
         self,
         action: Action,
         dispatcher_client: DispatcherClientImpl,
-        admin_client: AdminClientImpl,
-        event_client: EventClientImpl,
+        admin_client: AdminClient,
+        event_client: EventClient,
         workflow_listener: PooledWorkflowRunListener,
         workflow_run_event_listener: RunEventListenerClient,
         namespace: str = "",
@@ -90,8 +90,8 @@
         self,
         action: Action,
         dispatcher_client: DispatcherClientImpl,
-        admin_client: AdminClientImpl,
-        event_client: EventClientImpl,
+        admin_client: AdminClient,
+        event_client: EventClient,
         workflow_listener: PooledWorkflowRunListener,
         workflow_run_event_listener: RunEventListenerClient,
         namespace: str = "",
diff --git a/hatchet_sdk/hatchet.py b/hatchet_sdk/hatchet.py
index 6b61f5e8..3dfc3d0e 100644
--- a/hatchet_sdk/hatchet.py
+++ b/hatchet_sdk/hatchet.py
@@ -4,7 +4,7 @@
 from hatchet_sdk.loader import ClientConfig
 from hatchet_sdk.rate_limit import RateLimit
 
-from .client import ClientImpl, new_client, new_client_raw
+from .client import Client, new_client, new_client_raw
 from .logger import logger
 from .worker import Worker
 from .workflow import WorkflowMeta
@@ -103,7 +103,7 @@
 
 
 class Hatchet:
-    client: ClientImpl
+    client: Client
 
     @classmethod
     def from_environment(cls, defaults: ClientConfig = ClientConfig(), **kwargs):
@@ -116,7 +116,7 @@ def from_config(cls, config: ClientConfig, **kwargs):
     def __init__(
         self,
         debug: bool = False,
-        client: Optional[ClientImpl] = None,
+        client: Optional[Client] = None,
         config: ClientConfig = ClientConfig(),
     ):
         if client is not None:
diff --git a/hatchet_sdk/worker.py b/hatchet_sdk/worker.py
index c606f182..903bba42 100644
--- a/hatchet_sdk/worker.py
+++ b/hatchet_sdk/worker.py
@@ -22,7 +22,7 @@
 from google.protobuf.timestamp_pb2 import Timestamp
 
 from hatchet_sdk.clients.admin import new_admin
-from hatchet_sdk.clients.events import EventClientImpl
+from hatchet_sdk.clients.events import EventClient
 from hatchet_sdk.clients.run_event_listener import new_listener
 from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
 from hatchet_sdk.loader import ClientConfig
@@ -76,7 +76,7 @@ def filter(self, record):
 
 # Custom log handler to process log lines
 class CustomLogHandler(StreamHandler):
-    def __init__(self, event_client: EventClientImpl, stream=None):
+    def __init__(self, event_client: EventClient, stream=None):
         super().__init__(stream)
         self.logger_thread_pool = ThreadPoolExecutor(max_workers=1)
         self.event_client = event_client
@@ -99,7 +99,7 @@ def emit(self, record):
 
 def capture_logs(
     logger: logging.Logger,
-    event_client: EventClientImpl,
+    event_client: EventClient,
     func: Coroutine[Any, Any, Any],
 ):
     @functools.wraps(func)

From 901d1ecf5a5960118af1fd40520da0ce3c7f6d8b Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 07:40:51 -0400
Subject: [PATCH 05/19] chore: reusable bg fixture

---
 examples/fanout/test_fanout.py | 27 +++++++++++++++++++++
 tests/test_fanout.py           | 37 ----------------------------------
 tests/utils/__init__.py        |  1 +
 tests/utils/bg_worker.py       | 18 +++++++++++++++
 4 files changed, 46 insertions(+), 37 deletions(-)
 create mode 100644 examples/fanout/test_fanout.py
 delete mode 100644 tests/test_fanout.py
 create mode 100644 tests/utils/__init__.py
 create mode 100644 tests/utils/bg_worker.py

diff --git a/examples/fanout/test_fanout.py b/examples/fanout/test_fanout.py
new file mode 100644
index 00000000..83996462
--- /dev/null
+++ b/examples/fanout/test_fanout.py
@@ -0,0 +1,27 @@
+from hatchet_sdk import Hatchet
+import pytest
+
+from tests.utils import background_hatchet_worker
+
+
+
+@pytest.fixture
+def hatchet():
+    return Hatchet(debug=True)
+
+fixture_background_hatchet_worker = background_hatchet_worker(["poetry", "run", "worker"])
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet):
+    run = hatchet.admin.run_workflow("Parent", {"n": 2})
+    result = await run.result()
+    assert len(result["spawn"]["results"]) == 2
+
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run2(hatchet):
+    run = hatchet.admin.run_workflow("Parent", {"n": 2})
+    result = await run.result()
+    assert len(result["spawn"]["results"]) == 2
diff --git a/tests/test_fanout.py b/tests/test_fanout.py
deleted file mode 100644
index 3544b4a9..00000000
--- a/tests/test_fanout.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import subprocess
-import time
-from hatchet_sdk import Hatchet
-import pytest
-
-
-@pytest.fixture
-def hatchet():
-    return Hatchet(debug=True)
-
-@pytest.fixture(scope="session", autouse=True)
-def fixture_background_hatchet_worker():
-    proc = subprocess.Popen(["poetry", "run", "worker"])
-
-    # sleep long enough to make sure we are up and running
-    # it would be nice to NOT do this, but we need to ensure the worker is running before we trigger any events
-    time.sleep(5)
-
-    yield
-
-    proc.terminate()
-    proc.wait()
-
-# requires scope module or higher for shared event loop
-@pytest.mark.asyncio(scope="session")
-async def test_run(hatchet):
-    run = hatchet.client.admin.run_workflow("Parent", {"n": 2})
-    result = await run.result()
-    assert len(result["spawn"]["results"]) == 2
-
-
-# requires scope module or higher for shared event loop
-@pytest.mark.asyncio(scope="session")
-async def test_run2(hatchet):
-    run = hatchet.client.admin.run_workflow("Parent", {"n": 2})
-    result = await run.result()
-    assert len(result["spawn"]["results"]) == 2
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 00000000..d23252eb
--- /dev/null
+++ b/tests/utils/__init__.py
@@ -0,0 +1 @@
+from .bg_worker import background_hatchet_worker
\ No newline at end of file
diff --git a/tests/utils/bg_worker.py b/tests/utils/bg_worker.py
new file mode 100644
index 00000000..a462a42a
--- /dev/null
+++ b/tests/utils/bg_worker.py
@@ -0,0 +1,18 @@
+import subprocess
+import time
+import pytest
+
+def background_hatchet_worker(command, startup_time=5):
+    @pytest.fixture(scope="session", autouse=True)
+    def fixture_background_hatchet_worker():
+        proc = subprocess.Popen(command)
+
+        # sleep long enough to make sure we are up and running
+        time.sleep(startup_time)
+
+        yield
+
+        proc.terminate()
+        proc.wait()
+
+    return fixture_background_hatchet_worker
\ No newline at end of file
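Note: with the factory above in place, any example can get a session-scoped background worker in one line. A sketch of how another test module would reuse it, assuming a matching [tool.poetry.scripts] entry exists for the command (the "worker" script comes from patch 02; the workflow name and assertion mirror the fanout test in this series):

    import pytest

    from hatchet_sdk import Hatchet
    from tests.utils import background_hatchet_worker

    # The returned fixture is autouse and session-scoped, so binding it to a
    # module-level name is all that is needed to start (and later terminate)
    # the worker process.
    fixture_background_hatchet_worker = background_hatchet_worker(["poetry", "run", "worker"])

    @pytest.fixture
    def hatchet():
        return Hatchet(debug=True)

    @pytest.mark.asyncio(scope="session")
    async def test_parent_spawns_children(hatchet):
        run = hatchet.client.admin.run_workflow("Parent", {"n": 1})
        result = await run.result()
        assert len(result["spawn"]["results"]) == 1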
From c84168d1bfb59e671539f065e2f7e2693e489d77 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 07:42:45 -0400
Subject: [PATCH 06/19] chore: expose clients on hatchet

---
 examples/api/api.py                   |  2 +-
 examples/dag/event.py                 |  7 ++--
 examples/delayed/worker.py            |  2 +-
 examples/programatic_replay/script.py |  8 ++---
 examples/rate_limit/event.py          |  6 ++--
 examples/rate_limit/worker.py         |  2 +-
 hatchet_sdk/hatchet.py                | 49 +++++++++++++++++++++++++--
 7 files changed, 60 insertions(+), 16 deletions(-)

diff --git a/examples/api/api.py b/examples/api/api.py
index b7dc1a06..e203734f 100644
--- a/examples/api/api.py
+++ b/examples/api/api.py
@@ -6,7 +6,7 @@
 hatchet = Hatchet(debug=True)
 
-list: WorkflowList = hatchet.client.rest().workflow_list()
+list: WorkflowList = hatchet.rest.workflow_list()
 
 for workflow in list.rows:
     print(workflow.name)
diff --git a/examples/dag/event.py b/examples/dag/event.py
index d6a149f0..a474f93a 100644
--- a/examples/dag/event.py
+++ b/examples/dag/event.py
@@ -1,10 +1,9 @@
 from dotenv import load_dotenv
-
-from hatchet_sdk import new_client
+from hatchet_sdk import Context, Hatchet
 
 load_dotenv()
 
-client = new_client()
+hatchet = Hatchet(debug=True)
 
 for i in range(10):
-    client.event.push("user:create", {"test": "test"})
+    hatchet.event.push("user:create", {"test": "test"})
diff --git a/examples/delayed/worker.py b/examples/delayed/worker.py
index c6e5ded8..83976177 100644
--- a/examples/delayed/worker.py
+++ b/examples/delayed/worker.py
@@ -18,7 +18,7 @@ def schedule(self, context: Context):
         future_time = now + timedelta(seconds=15)
         print(f"scheduling for \t {future_time.strftime('%H:%M:%S')}")
 
-        hatchet.client.admin.schedule_workflow(
+        hatchet.admin.schedule_workflow(
             "PrintPrinter", [future_time], context.workflow_input()
         )
diff --git a/examples/programatic_replay/script.py b/examples/programatic_replay/script.py
index 02f9c1b1..fd888544 100644
--- a/examples/programatic_replay/script.py
+++ b/examples/programatic_replay/script.py
@@ -1,7 +1,6 @@
 from dotenv import load_dotenv
 
-from hatchet_sdk import Hatchet
-from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk import Hatchet, WorkflowRunStatus
 
 load_dotenv()
 
@@ -9,9 +8,10 @@
 if __name__ == "__main__":
     # Look up the failed workflow runs
-    failed = hatchet.client.rest.events_list(
+    failed = hatchet.rest.events_list(
         statuses=[WorkflowRunStatus.FAILED], limit=3
     )
 
     # Replay the failed workflow runs
-    retried = hatchet.client.rest.events_replay(failed)
+    retried = hatchet.rest.events_replay(failed)
+
diff --git a/examples/rate_limit/event.py b/examples/rate_limit/event.py
index ae8776fd..ed077fea 100644
--- a/examples/rate_limit/event.py
+++ b/examples/rate_limit/event.py
@@ -6,6 +6,6 @@
 hatchet = Hatchet(debug=True)
 
-hatchet.client.event.push("rate_limit:create", {"test": "1"})
-hatchet.client.event.push("rate_limit:create", {"test": "2"})
-hatchet.client.event.push("rate_limit:create", {"test": "3"})
+hatchet.event.push("rate_limit:create", {"test": "1"})
+hatchet.event.push("rate_limit:create", {"test": "2"})
+hatchet.event.push("rate_limit:create", {"test": "3"})
diff --git a/examples/rate_limit/worker.py b/examples/rate_limit/worker.py
index 48637cef..60844f45 100644
--- a/examples/rate_limit/worker.py
+++ b/examples/rate_limit/worker.py
@@ -19,7 +19,7 @@ def step1(self, context: Context):
         pass
 
 
-hatchet.client.admin.put_rate_limit("test-limit", 2, RateLimitDuration.MINUTE)
+hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.MINUTE)
 
 worker = hatchet.worker("test-worker", max_runs=4)
 worker.register_workflow(RateLimitWorkflow())
diff --git a/hatchet_sdk/hatchet.py b/hatchet_sdk/hatchet.py
index 3dfc3d0e..93b41462 100644
--- a/hatchet_sdk/hatchet.py
+++ b/hatchet_sdk/hatchet.py
@@ -1,5 +1,6 @@
 import logging
 from typing import List, Optional
+from typing_extensions import deprecated
 
 from hatchet_sdk.loader import ClientConfig
 from hatchet_sdk.rate_limit import RateLimit
@@ -103,7 +104,20 @@
 
 
 class Hatchet:
-    client: Client
+    """
+    Main client for interacting with the Hatchet SDK.
+
+    This class provides access to various client interfaces and utility methods
+    for working with Hatchet workers, workflows, and steps.
+
+    Attributes:
+        admin (AdminClient): Interface for administrative operations.
+        dispatcher (DispatcherClient): Interface for dispatching operations.
+        event (EventClient): Interface for event-related operations.
+        rest (RestApi): Interface for REST API operations.
+    """
+
+    _client: Client
 
     @classmethod
     def from_environment(cls, defaults: ClientConfig = ClientConfig(), **kwargs):
@@ -119,14 +133,45 @@
         client: Optional[Client] = None,
         config: ClientConfig = ClientConfig(),
     ):
+        """
+        Initialize a new Hatchet instance.
+
+        Args:
+            debug (bool, optional): Enable debug logging. Defaults to False.
+            client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
+            config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
+        """
         if client is not None:
-            self.client = client
+            self._client = client
         else:
-            self.client = new_client(config)
+            self._client = new_client(config)
 
         if debug:
             logger.setLevel(logging.DEBUG)
 
+    @property
+    @deprecated(
[0.32.0]", + ) + def client(self) -> Client: + return self._client + + @property + def admin(self): + return self._client.admin + + @property + def dispatcher(self): + return self._client.dispatcher + + @property + def event(self): + return self._client.event + + @property + def rest(self): + return self._client.rest + concurrency = staticmethod(concurrency) workflow = staticmethod(workflow) From a77085a2342b7b609b989ab0c86a3d562d53c613 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 07:47:48 -0400 Subject: [PATCH 07/19] chore: enable vs code pytest --- .vscode/settings.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..3e99ede3 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "." + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} \ No newline at end of file From 10df19e0fb627fe266b9cc5b6fcee595d0753873 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:32:07 -0400 Subject: [PATCH 08/19] tests: initial tests --- .vscode/settings.json | 6 +- examples/_deprecated/README.md | 1 + examples/_deprecated/test_event_client.py | 21 +++++ examples/api/api.py | 16 ++-- examples/api/test_api.py | 14 ++++ examples/async/event.py | 8 ++ examples/async/event_test.py | 9 --- examples/async/test_async.py | 22 ++++++ examples/async/worker.py | 18 ++--- examples/cancellation/test_cancellation.py | 17 +++++ examples/cancellation/worker.py | 9 +-- .../test_concurrency_limit_rr.py | 55 ++++++++++++++ examples/concurrency-limit-rr/worker.py | 9 +-- .../test_concurrency_limit.py | 41 ++++++++++ examples/concurrency-limit/worker.py | 26 +++---- examples/dag/event.py | 2 +- examples/dag/test_dag.py | 21 +++++ examples/dag/worker.py | 55 +++++--------- examples/delayed/test_delayed.py | 14 ++++ examples/delayed/worker.py | 2 +- examples/events/event.py | 8 ++ examples/events/test_event.py | 13 ++++ examples/fanout/test_fanout.py | 15 ++-- examples/logger/test_logger.py | 16 ++++ examples/logger/worker.py | 13 +++- examples/logger/workflow.py | 5 +- .../manual_trigger/test_manual_trigger.py | 14 ++++ examples/on_failure/test_on_failure.py | 14 ++++ examples/on_failure/worker.py | 12 ++- examples/overrides/test_overrides.py | 17 +++++ examples/overrides/worker.py | 76 +++++++++++++++++++ .../test_programatic_replay.py | 17 +++++ examples/rate_limit/test_rate_limit.py | 31 ++++++++ examples/rate_limit/worker.py | 12 ++- examples/timeout/test_timeout.py | 28 +++++++ examples/timeout/worker.py | 41 ++++++---- pyproject.toml | 17 ++++- tests/utils/__init__.py | 3 +- tests/utils/bg_worker.py | 2 +- tests/utils/hatchet_client.py | 13 ++++ 40 files changed, 599 insertions(+), 134 deletions(-) create mode 100644 examples/_deprecated/README.md create mode 100644 examples/_deprecated/test_event_client.py create mode 100644 examples/api/test_api.py create mode 100644 examples/async/event.py delete mode 100644 examples/async/event_test.py create mode 100644 examples/async/test_async.py create mode 100644 examples/cancellation/test_cancellation.py create mode 100644 examples/concurrency-limit-rr/test_concurrency_limit_rr.py create mode 100644 examples/concurrency-limit/test_concurrency_limit.py create mode 100644 examples/dag/test_dag.py create mode 100644 examples/delayed/test_delayed.py create mode 100644 examples/events/event.py create mode 100644 
From a77085a2342b7b609b989ab0c86a3d562d53c613 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 07:47:48 -0400
Subject: [PATCH 07/19] chore: enable vs code pytest

---
 .vscode/settings.json | 7 +++++++
 1 file changed, 7 insertions(+)
 create mode 100644 .vscode/settings.json

diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..3e99ede3
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,7 @@
+{
+    "python.testing.pytestArgs": [
+        "."
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
\ No newline at end of file

From 10df19e0fb627fe266b9cc5b6fcee595d0753873 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 10:32:07 -0400
Subject: [PATCH 08/19] tests: initial tests

---
 .vscode/settings.json                       |  6 +-
 examples/_deprecated/README.md              |  1 +
 examples/_deprecated/test_event_client.py   | 21 +++++
 examples/api/api.py                         | 16 ++--
 examples/api/test_api.py                    | 14 ++++
 examples/async/event.py                     |  8 ++
 examples/async/event_test.py                |  9 ---
 examples/async/test_async.py                | 22 ++++++
 examples/async/worker.py                    | 18 ++---
 examples/cancellation/test_cancellation.py  | 17 +++++
 examples/cancellation/worker.py             |  9 +--
 .../test_concurrency_limit_rr.py            | 55 ++++++++++++++
 examples/concurrency-limit-rr/worker.py     |  9 +--
 .../test_concurrency_limit.py               | 41 ++++++++++
 examples/concurrency-limit/worker.py        | 26 +++----
 examples/dag/event.py                       |  2 +-
 examples/dag/test_dag.py                    | 21 +++++
 examples/dag/worker.py                      | 55 +++++---------
 examples/delayed/test_delayed.py            | 14 ++++
 examples/delayed/worker.py                  |  2 +-
 examples/events/event.py                    |  8 ++
 examples/events/test_event.py               | 13 ++++
 examples/fanout/test_fanout.py              | 15 ++--
 examples/logger/test_logger.py              | 16 ++++
 examples/logger/worker.py                   | 13 +++-
 examples/logger/workflow.py                 |  5 +-
 .../manual_trigger/test_manual_trigger.py   | 14 ++++
 examples/on_failure/test_on_failure.py      | 14 ++++
 examples/on_failure/worker.py               | 12 ++-
 examples/overrides/test_overrides.py        | 17 +++++
 examples/overrides/worker.py                | 76 +++++++++++++++++++
 .../test_programatic_replay.py              | 17 +++++
 examples/rate_limit/test_rate_limit.py      | 31 ++++++++
 examples/rate_limit/worker.py               | 12 ++-
 examples/timeout/test_timeout.py            | 28 +++++++
 examples/timeout/worker.py                  | 41 ++++++----
 pyproject.toml                              | 17 ++++-
 tests/utils/__init__.py                     |  3 +-
 tests/utils/bg_worker.py                    |  2 +-
 tests/utils/hatchet_client.py               | 13 ++++
 40 files changed, 599 insertions(+), 134 deletions(-)
 create mode 100644 examples/_deprecated/README.md
 create mode 100644 examples/_deprecated/test_event_client.py
 create mode 100644 examples/api/test_api.py
 create mode 100644 examples/async/event.py
 delete mode 100644 examples/async/event_test.py
 create mode 100644 examples/async/test_async.py
 create mode 100644 examples/cancellation/test_cancellation.py
 create mode 100644 examples/concurrency-limit-rr/test_concurrency_limit_rr.py
 create mode 100644 examples/concurrency-limit/test_concurrency_limit.py
 create mode 100644 examples/dag/test_dag.py
 create mode 100644 examples/delayed/test_delayed.py
 create mode 100644 examples/events/event.py
 create mode 100644 examples/events/test_event.py
 create mode 100644 examples/logger/test_logger.py
 create mode 100644 examples/manual_trigger/test_manual_trigger.py
 create mode 100644 examples/on_failure/test_on_failure.py
 create mode 100644 examples/overrides/test_overrides.py
 create mode 100644 examples/overrides/worker.py
 create mode 100644 examples/programatic_replay/test_programatic_replay.py
 create mode 100644 examples/rate_limit/test_rate_limit.py
 create mode 100644 examples/timeout/test_timeout.py
 create mode 100644 tests/utils/hatchet_client.py

diff --git a/.vscode/settings.json b/.vscode/settings.json
index 3e99ede3..58aab7f8 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -3,5 +3,9 @@
         "."
     ],
     "python.testing.unittestEnabled": false,
-    "python.testing.pytestEnabled": true
+    "python.testing.pytestEnabled": true,
+    "cSpell.words": [
+        "dotenv",
+        "reqs"
+    ]
 }
\ No newline at end of file
diff --git a/examples/_deprecated/README.md b/examples/_deprecated/README.md
new file mode 100644
index 00000000..7ba8135a
--- /dev/null
+++ b/examples/_deprecated/README.md
@@ -0,0 +1 @@
+The examples and tests in this directory are deprecated, but we're maintaining them to ensure backwards compatibility.
\ No newline at end of file
diff --git a/examples/_deprecated/test_event_client.py b/examples/_deprecated/test_event_client.py
new file mode 100644
index 00000000..6d26e30b
--- /dev/null
+++ b/examples/_deprecated/test_event_client.py
@@ -0,0 +1,21 @@
+from dotenv import load_dotenv
+import pytest
+
+from hatchet_sdk import new_client
+from hatchet_sdk.hatchet import Hatchet
+
+load_dotenv()
+
+@pytest.mark.asyncio(scope="session")
+async def test_direct_client_event():
+    client = new_client()
+    e = client.event.push("user:create", {"test": "test"})
+
+    assert e.eventId is not None
+
+@pytest.mark.asyncio(scope="session")
+async def test_hatchet_client_event():
+    hatchet = Hatchet()
+    e = hatchet.client.event.push("user:create", {"test": "test"})
+
+    assert e.eventId is not None
diff --git a/examples/api/api.py b/examples/api/api.py
index e203734f..55e19380 100644
--- a/examples/api/api.py
+++ b/examples/api/api.py
@@ -6,10 +6,14 @@
 hatchet = Hatchet(debug=True)
 
-list: WorkflowList = hatchet.rest.workflow_list()
+def main():
+    list: WorkflowList = hatchet.rest.workflow_list()
 
-for workflow in list.rows:
-    print(workflow.name)
-    print(workflow.metadata.id)
-    print(workflow.metadata.created_at)
-    print(workflow.metadata.updated_at)
+    for workflow in list.rows:
+        print(workflow.name)
+        print(workflow.metadata.id)
+        print(workflow.metadata.created_at)
+        print(workflow.metadata.updated_at)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/examples/api/test_api.py b/examples/api/test_api.py
new file mode 100644
index 00000000..54aa9d55
--- /dev/null
+++ b/examples/api/test_api.py
@@ -0,0 +1,14 @@
+from hatchet_sdk import Hatchet
+import pytest
+
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+hatchet = hatchet_client_fixture()
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_list_workflows(hatchet: Hatchet):
+    list = hatchet.rest.workflow_list()
+
+    assert len(list.rows) != 0
diff --git a/examples/async/event.py b/examples/async/event.py
new file mode 100644
index 00000000..a0fc8cf7
--- /dev/null
+++ b/examples/async/event.py
@@ -0,0 +1,8 @@
+from dotenv import load_dotenv
+
+from hatchet_sdk import Hatchet
+
+load_dotenv()
+
+hatchet = Hatchet()
+hatchet.event.push("async:create", {"test": "test"})
"test"}) diff --git a/examples/async/event_test.py b/examples/async/event_test.py deleted file mode 100644 index 53fe9473..00000000 --- a/examples/async/event_test.py +++ /dev/null @@ -1,9 +0,0 @@ -from dotenv import load_dotenv - -from hatchet_sdk import new_client - -load_dotenv() - -client = new_client() - -client.event.push("user:create", {"test": "test"}) diff --git a/examples/async/test_async.py b/examples/async/test_async.py new file mode 100644 index 00000000..c56c7045 --- /dev/null +++ b/examples/async/test_async.py @@ -0,0 +1,22 @@ +from hatchet_sdk import Hatchet +import pytest + +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "async"]) + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("AsyncWorkflow", {}) + result = await run.result() + assert result["step1"]["test"] == "test" + +@pytest.mark.asyncio(scope="session") +async def test_run_async(hatchet: Hatchet): + run = await hatchet.admin.aio.run_workflow("AsyncWorkflow", {}) + result = await run.result() + assert result["step1"]["test"] == "test" diff --git a/examples/async/worker.py b/examples/async/worker.py index 32f8485d..495d0358 100644 --- a/examples/async/worker.py +++ b/examples/async/worker.py @@ -9,33 +9,29 @@ hatchet = Hatchet(debug=True) -@hatchet.workflow(on_events=["user:create"]) +@hatchet.workflow(on_events=["async:create"]) class AsyncWorkflow: def __init__(self): self.my_value = "test" @hatchet.step(timeout="2s") async def step1(self, context: Context): - context.refresh_timeout("5s") - print("started step1") - await asyncio.sleep(3) - print("finished step1") - return {"test": "test"} @hatchet.step(parents=["step1"], timeout="4s") async def step2(self, context): - print("started async step2") - await asyncio.sleep(2) print("finished step2") -async def main(): +async def _main(): workflow = AsyncWorkflow() - worker = hatchet.worker("test-worker", max_runs=4) + worker = hatchet.worker("async-worker", max_runs=4) worker.register_workflow(workflow) await worker.async_start() +def main(): + asyncio.run(_main()) -asyncio.run(main()) +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/cancellation/test_cancellation.py b/examples/cancellation/test_cancellation.py new file mode 100644 index 00000000..9f819fc4 --- /dev/null +++ b/examples/cancellation/test_cancellation.py @@ -0,0 +1,17 @@ +from hatchet_sdk import Hatchet +import pytest + +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "cancellation"]) + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("CancelWorkflow", {}) + result = await run.result() + # TODO is this the expected result for a timed out run... 
+    assert result == {}
diff --git a/examples/cancellation/worker.py b/examples/cancellation/worker.py
index 26426e64..b495e05d 100644
--- a/examples/cancellation/worker.py
+++ b/examples/cancellation/worker.py
@@ -11,23 +11,20 @@
 
 @hatchet.workflow(on_events=["user:create"])
 class CancelWorkflow:
-    def __init__(self):
-        self.my_value = "test"
-
     @hatchet.step(timeout="10s", retries=1)
     async def step1(self, context: Context):
         i = 0
-        while not context.exit_flag.is_set() and i < 20:
+        while not context.exit_flag and i < 20:
             print(f"Waiting for cancellation {i}")
             await asyncio.sleep(1)
             i += 1
 
-        if context.exit_flag.is_set():
+        if context.exit_flag:
             print("Cancelled")
 
 
 workflow = CancelWorkflow()
-worker = hatchet.worker("test-worker", max_runs=4)
+worker = hatchet.worker("cancellation-worker", max_runs=4)
 worker.register_workflow(workflow)
 
 worker.start()
diff --git a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py
new file mode 100644
index 00000000..f51680cd
--- /dev/null
+++ b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py
@@ -0,0 +1,55 @@
+import asyncio
+import time
+from hatchet_sdk import Hatchet
+import pytest
+
+from hatchet_sdk.workflow_run import WorkflowRunRef
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "concurrency_limit_rr"])
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet: Hatchet):
+    num_groups = 2
+    runs: list[WorkflowRunRef] = []
+
+
+    # Start all runs
+    for i in range(1, num_groups + 1):
+        run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i})
+        runs.append(run)
+        run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i})
+        runs.append(run)
+
+    # Wait for all results
+    successful_runs = []
+    cancelled_runs = []
+
+    start_time = time.time()
+
+    # Process each run individually
+    for i, run in enumerate(runs, start=1):
+        try:
+            result = await run.result()
+            successful_runs.append((i, result))
+        except Exception as e:
+            if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e):
+                cancelled_runs.append((i, str(e)))
+            else:
+                raise  # Re-raise if it's an unexpected error
+
+    end_time = time.time()
+    total_time = end_time - start_time
+
+    # Check that we have the correct number of successful and cancelled runs
+    assert len(successful_runs) == 4, f"Expected 4 successful runs, got {len(successful_runs)}"
+    assert len(cancelled_runs) == 0, f"Expected 0 cancelled run, got {len(cancelled_runs)}"
+
+    # Check that the total time is close to 4 seconds
+    assert 3.8 <= total_time <= 5, f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds"
+
+    print(f"Total execution time: {total_time:.2f} seconds")
\ No newline at end of file
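Note: the 3.8-5 second window asserted above falls out of the round-robin semantics rather than being arbitrary; a sketch of the arithmetic, with constants mirroring the test and the worker diff that follows:

    # Expected timing under GROUP_ROUND_ROBIN with max_runs=1 per group key.
    groups = 2          # two distinct concurrency keys
    runs_per_group = 2  # each group is started twice
    step_duration = 2   # step1 sleeps for 2 seconds (see worker below)

    # Runs in different groups proceed in parallel; runs that share a group
    # key queue behind one another, so the wall clock is bounded by a single
    # group's serialized work:
    expected_total = runs_per_group * step_duration  # ~4s, hence 3.8 <= t <= 5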
diff --git a/examples/concurrency-limit-rr/worker.py b/examples/concurrency-limit-rr/worker.py
index 52c9724c..5032a629 100644
--- a/examples/concurrency-limit-rr/worker.py
+++ b/examples/concurrency-limit-rr/worker.py
@@ -12,19 +12,18 @@
 @hatchet.workflow(on_events=["concurrency-test"], schedule_timeout="10m")
 class ConcurrencyDemoWorkflowRR:
     @hatchet.concurrency(
-        max_runs=1, limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN
+        max_runs=1,
+        limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN
     )
     def concurrency(self, context: Context) -> str:
         input = context.workflow_input()
-        print(input)
-
-        return input.get("group")
+        return f'group-{input["group"]}'
 
     @hatchet.step()
     def step1(self, context):
         print("starting step1")
-        time.sleep(0.2)
+        time.sleep(2)
         print("finished step1")
         pass
diff --git a/examples/concurrency-limit/test_concurrency_limit.py b/examples/concurrency-limit/test_concurrency_limit.py
new file mode 100644
index 00000000..ac8af4a6
--- /dev/null
+++ b/examples/concurrency-limit/test_concurrency_limit.py
@@ -0,0 +1,41 @@
+import asyncio
+from hatchet_sdk import Hatchet
+import pytest
+
+from hatchet_sdk.workflow_run import WorkflowRunRef
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "concurrency_limit"])
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet: Hatchet):
+    num_runs = 6
+    runs: list[WorkflowRunRef] = []
+
+    # Start all runs
+    for i in range(1, num_runs + 1):
+        run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflow", {"run": i})
+        runs.append(run)
+
+    # Wait for all results
+    successful_runs = []
+    cancelled_runs = []
+
+    # Process each run individually
+    for i, run in enumerate(runs, start=1):
+        try:
+            result = await run.result()
+            successful_runs.append((i, result))
+        except Exception as e:
+            if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e):
+                cancelled_runs.append((i, str(e)))
+            else:
+                raise  # Re-raise if it's an unexpected error
+
+    # Check that we have the correct number of successful and cancelled runs
+    assert len(successful_runs) == 5, f"Expected 5 successful runs, got {len(successful_runs)}"
+    assert len(cancelled_runs) == 1, f"Expected 1 cancelled run, got {len(cancelled_runs)}"
diff --git a/examples/concurrency-limit/worker.py b/examples/concurrency-limit/worker.py
index ca01b7cc..2915143d 100644
--- a/examples/concurrency-limit/worker.py
+++ b/examples/concurrency-limit/worker.py
@@ -3,6 +3,7 @@
 from dotenv import load_dotenv
 
 from hatchet_sdk import Hatchet
+from hatchet_sdk.context import Context
 
 load_dotenv()
 
@@ -11,27 +12,24 @@
 
 @hatchet.workflow(on_events=["concurrency-test"])
 class ConcurrencyDemoWorkflow:
-    def __init__(self):
-        self.my_value = "test"
 
     @hatchet.concurrency(max_runs=5)
     def concurrency(self, context) -> str:
         return "concurrency-key"
 
     @hatchet.step()
-    def step1(self, context):
+    def step1(self, context: Context):
+        input = context.workflow_input()
+        time.sleep(3)
         print("executed step1")
-        pass
+        return {"run": input["run"]}
 
-    @hatchet.step(parents=["step1"], timeout="4s")
-    def step2(self, context):
-        print("started step2")
-        time.sleep(1)
-        print("finished step2")
 
+def main():
+    workflow = ConcurrencyDemoWorkflow()
+    worker = hatchet.worker("concurrency-demo-worker", max_runs=10)
+    worker.register_workflow(workflow)
+    worker.start()
 
-workflow = ConcurrencyDemoWorkflow()
-worker = hatchet.worker("concurrency-demo-worker", max_runs=4)
-worker.register_workflow(workflow)
-
-worker.start()
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/examples/dag/event.py b/examples/dag/event.py
index a474f93a..4090a6a9 100644
--- a/examples/dag/event.py
+++ b/examples/dag/event.py
@@ -6,4 +6,4 @@
 hatchet = Hatchet(debug=True)
 
 for i in range(10):
-    hatchet.event.push("user:create", {"test": "test"})
+    hatchet.event.push("dag:create", {"test": "test"})
diff --git a/examples/dag/test_dag.py b/examples/dag/test_dag.py
new file mode 100644
index 00000000..5cf7dfae
--- /dev/null
+++ b/examples/dag/test_dag.py
@@ -0,0 +1,21 @@
+from hatchet_sdk import Hatchet
+import pytest
+
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "dag"])
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet: Hatchet):
+    run = hatchet.admin.run_workflow("DagWorkflow", {})
+    result = await run.result()
+
+    one = result["step1"]["rando"]
+    two = result["step2"]["rando"]
+    assert result["step3"]["sum"] == one + two
+    assert result["step4"]["step4"] == "step4"
+
diff --git a/examples/dag/worker.py b/examples/dag/worker.py
index a29af287..acf3247f 100644
--- a/examples/dag/worker.py
+++ b/examples/dag/worker.py
@@ -3,56 +3,37 @@
 from dotenv import load_dotenv
 
 from hatchet_sdk import Context, Hatchet
+import random
 
 load_dotenv()
 
 hatchet = Hatchet(debug=True)
 
 
-@hatchet.workflow(on_events=["user:create"], schedule_timeout="10m")
-class MyWorkflow:
-    def __init__(self):
-        self.my_value = "test"
+@hatchet.workflow(on_events=["dag:create"], schedule_timeout="10m")
+class DagWorkflow:
 
     @hatchet.step(timeout="5s")
     def step1(self, context: Context):
+        rando = random.randint(1, 100)  # Generate a random number between 1 and 100
         return {
-            "step1": overrideValue,
+            "rando": rando,
         }
 
-    @hatchet.step()
+    @hatchet.step(timeout="5s")
     def step2(self, context: Context):
+        rando = random.randint(1, 100)  # Generate a random number between 1 and 100
        return {
-            "step2": "step2",
+            "rando": rando,
         }
 
     @hatchet.step(parents=["step1", "step2"])
     def step3(self, context: Context):
+        one = context.step_output("step1")['rando']
+        two = context.step_output("step2")['rando']
+
         return {
-            "step3": "step3",
+            "sum": one + two,
         }
 
     @hatchet.step(parents=["step1", "step3"])
@@ -68,9 +49,13 @@ def step4(self, context: Context):
             "step4": "step4",
         }
 
+def main():
+
+    workflow = DagWorkflow()
+    worker = hatchet.worker("dag-worker")
+    worker.register_workflow(workflow)
 
-workflow = MyWorkflow()
-worker = hatchet.worker("test-worker")
-worker.register_workflow(workflow)
+    worker.start()
 
-worker.start()
+if __name__ == "__main__":
+    main()
diff --git a/examples/delayed/test_delayed.py b/examples/delayed/test_delayed.py
new file mode 100644
index 00000000..b626a3bb
--- /dev/null
+++ b/examples/delayed/test_delayed.py
@@ -0,0 +1,14 @@
+# from hatchet_sdk import Hatchet
+# import pytest
+
+# from tests.utils import fixture_bg_worker
+# from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+# hatchet = hatchet_client_fixture()
+# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"])
+
+# # requires scope module or higher for shared event loop
+# @pytest.mark.asyncio(scope="session")
+# async def test_run(hatchet: Hatchet):
+#     # TODO
\ No newline at end of file
diff --git a/examples/delayed/worker.py b/examples/delayed/worker.py
index 83976177..9e9197e2 100644
--- a/examples/delayed/worker.py
+++ b/examples/delayed/worker.py
@@ -32,7 +32,7 @@ def step1(self, context: Context):
         print(f"message \t {context.workflow_input()['message']}")
 
 
-worker = hatchet.worker("test-worker", max_runs=4)
+worker = hatchet.worker("delayed-worker", max_runs=4)
 worker.register_workflow(PrintSchedule())
 worker.register_workflow(PrintPrinter())
 
diff --git a/examples/events/event.py b/examples/events/event.py
new file mode 100644
index 00000000..21b83cdd
--- /dev/null
+++ b/examples/events/event.py
@@ -0,0 +1,8 @@
+from dotenv import load_dotenv
+
+from hatchet_sdk import Hatchet
+
+load_dotenv()
+
+hatchet = Hatchet()
+hatchet.event.push("user:create", {"test": "test"})
diff --git a/examples/events/test_event.py b/examples/events/test_event.py
new file mode 100644
index 00000000..0acd14a5
--- /dev/null
+++ b/examples/events/test_event.py
@@ -0,0 +1,13 @@
+import pytest
+
+from hatchet_sdk.hatchet import Hatchet
+from tests.utils import hatchet_client_fixture
+
+hatchet = hatchet_client_fixture()
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_event_push(hatchet: Hatchet):
+    e = hatchet.event.push("user:create", {"test": "test"})
+
+    assert e.eventId is not None
diff --git a/examples/fanout/test_fanout.py b/examples/fanout/test_fanout.py
index 83996462..9195bad4 100644
--- a/examples/fanout/test_fanout.py
+++ b/examples/fanout/test_fanout.py
@@ -1,19 +1,16 @@
 from hatchet_sdk import Hatchet
 import pytest
 
-from tests.utils import background_hatchet_worker
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
 
 
-
-@pytest.fixture
-def hatchet():
-    return Hatchet(debug=True)
-
-fixture_background_hatchet_worker = background_hatchet_worker(["poetry", "run", "worker"])
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "fanout"])
 
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
-async def test_run(hatchet):
+async def test_run(hatchet: Hatchet):
     run = hatchet.admin.run_workflow("Parent", {"n": 2})
     result = await run.result()
     assert len(result["spawn"]["results"]) == 2
@@ -21,7 +18,7 @@
 
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
-async def test_run2(hatchet):
+async def test_run2(hatchet: Hatchet):
     run = hatchet.admin.run_workflow("Parent", {"n": 2})
     result = await run.result()
     assert len(result["spawn"]["results"]) == 2
diff --git a/examples/logger/test_logger.py b/examples/logger/test_logger.py
new file mode 100644
index 00000000..ffe48729
--- /dev/null
+++ b/examples/logger/test_logger.py
@@ -0,0 +1,16 @@
+from hatchet_sdk import Hatchet
+import pytest
+
+from tests.utils import fixture_bg_worker
+from tests.utils.hatchet_client import hatchet_client_fixture
+
+
+hatchet = hatchet_client_fixture()
+worker = fixture_bg_worker(["poetry", "run", "logger"])
+
+# requires scope module or higher for shared event loop
+@pytest.mark.asyncio(scope="session")
+async def test_run(hatchet: Hatchet):
+    run = hatchet.admin.run_workflow("LoggingWorkflow", {})
+    result = await run.result()
+    assert result["step1"]["status"] == "success"
diff --git a/examples/logger/worker.py b/examples/logger/worker.py
index ce846cdc..d9a34e22 100644
--- a/examples/logger/worker.py
+++ b/examples/logger/worker.py
@@ -5,9 +5,14 @@
examples.logger.client import hatchet from examples.logger.workflow import LoggingWorkflow -worker = hatchet.worker("test-worker", max_runs=5) -workflow = LoggingWorkflow() -worker.register_workflow(workflow) +def main(): + worker = hatchet.worker("logger-worker", max_runs=5) -worker.start() + workflow = LoggingWorkflow() + worker.register_workflow(workflow) + + worker.start() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/logger/workflow.py b/examples/logger/workflow.py index cc51c14d..6eb1d277 100644 --- a/examples/logger/workflow.py +++ b/examples/logger/workflow.py @@ -7,11 +7,12 @@ logger = logging.getLogger(__name__) -@hatchet.workflow(on_crons=["* * * * *"]) +@hatchet.workflow() class LoggingWorkflow: @hatchet.step() def step1(self, context: Context): for i in range(12): logger.info("executed step1 - {}".format(i)) - time.sleep(1) + logger.info({"step1": "step1"}) + time.sleep(.1) return {"status": "success"} diff --git a/examples/manual_trigger/test_manual_trigger.py b/examples/manual_trigger/test_manual_trigger.py new file mode 100644 index 00000000..b626a3bb --- /dev/null +++ b/examples/manual_trigger/test_manual_trigger.py @@ -0,0 +1,14 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# # TODO \ No newline at end of file diff --git a/examples/on_failure/test_on_failure.py b/examples/on_failure/test_on_failure.py new file mode 100644 index 00000000..b626a3bb --- /dev/null +++ b/examples/on_failure/test_on_failure.py @@ -0,0 +1,14 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# # TODO \ No newline at end of file diff --git a/examples/on_failure/worker.py b/examples/on_failure/worker.py index afc35c0e..d2bdf94b 100644 --- a/examples/on_failure/worker.py +++ b/examples/on_failure/worker.py @@ -20,9 +20,13 @@ def on_failure(self, context): print("executed on_failure") print(context) +def main(): + workflow = OnFailureWorkflow() + worker = hatchet.worker("on-failure-worker", max_runs=4) + worker.register_workflow(workflow) -workflow = OnFailureWorkflow() -worker = hatchet.worker("test-worker", max_runs=4) -worker.register_workflow(workflow) + worker.start() -worker.start() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/overrides/test_overrides.py b/examples/overrides/test_overrides.py new file mode 100644 index 00000000..7becc991 --- /dev/null +++ b/examples/overrides/test_overrides.py @@ -0,0 +1,17 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "async"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async 
def test_run(hatchet: Hatchet): +# run = hatchet.admin.run_workflow("DagWorkflow", {}) +# result = await run.result() +# assert result["step1"]["test"] == "test" + diff --git a/examples/overrides/worker.py b/examples/overrides/worker.py new file mode 100644 index 00000000..86609df3 --- /dev/null +++ b/examples/overrides/worker.py @@ -0,0 +1,76 @@ +import time + +from dotenv import load_dotenv + +from hatchet_sdk import Context, Hatchet + +load_dotenv() + +hatchet = Hatchet(debug=True) + + +@hatchet.workflow(on_events=["overrides:create"], schedule_timeout="10m") +class OverridesWorkflow: + def __init__(self): + self.my_value = "test" + + @hatchet.step(timeout="5s") + def step1(self, context: Context): + print( + "starting step1", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + ) + overrideValue = context.playground("prompt", "You are an AI assistant...") + time.sleep(3) + # pretty-print time + print("executed step1", time.strftime("%H:%M:%S", time.localtime())) + return { + "step1": overrideValue, + } + + @hatchet.step() + def step2(self, context: Context): + print( + "starting step2", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + ) + time.sleep(5) + print("executed step2", time.strftime("%H:%M:%S", time.localtime())) + return { + "step2": "step2", + } + + @hatchet.step(parents=["step1", "step2"]) + def step3(self, context: Context): + print( + "executed step3", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + context.step_output("step1"), + context.step_output("step2"), + ) + return { + "step3": "step3", + } + + @hatchet.step(parents=["step1", "step3"]) + def step4(self, context: Context): + print( + "executed step4", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + context.step_output("step1"), + context.step_output("step3"), + ) + return { + "step4": "step4", + } + + +workflow = OverridesWorkflow() +worker = hatchet.worker("overrides-worker") +worker.register_workflow(workflow) + +worker.start() diff --git a/examples/programatic_replay/test_programatic_replay.py b/examples/programatic_replay/test_programatic_replay.py new file mode 100644 index 00000000..7becc991 --- /dev/null +++ b/examples/programatic_replay/test_programatic_replay.py @@ -0,0 +1,17 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "async"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# run = hatchet.admin.run_workflow("DagWorkflow", {}) +# result = await run.result() +# assert result["step1"]["test"] == "test" + diff --git a/examples/rate_limit/test_rate_limit.py b/examples/rate_limit/test_rate_limit.py new file mode 100644 index 00000000..16121ebf --- /dev/null +++ b/examples/rate_limit/test_rate_limit.py @@ -0,0 +1,31 @@ +import asyncio +import time +from hatchet_sdk import Hatchet +import pytest + +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "rate_limit"]) + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + + run1 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + 
run2 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + run3 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + + start_time = time.time() + + await asyncio.gather(run1.result(), run2.result(), run3.result()) + + end_time = time.time() + + total_time = end_time - start_time + + assert 1 <= total_time <= 2, f"Expected runtime to be a bit more than 1 seconds, but it took {total_time:.2f} seconds" + + diff --git a/examples/rate_limit/worker.py b/examples/rate_limit/worker.py index 60844f45..a13f4b06 100644 --- a/examples/rate_limit/worker.py +++ b/examples/rate_limit/worker.py @@ -10,18 +10,16 @@ @hatchet.workflow(on_events=["rate_limit:create"]) class RateLimitWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.step(rate_limits=[RateLimit(key="test-limit", units=1)]) def step1(self, context: Context): print("executed step1") pass +def main(): + hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.SECOND) -hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.MINUTE) + worker = hatchet.worker("rate-limit-worker", max_runs=10) + worker.register_workflow(RateLimitWorkflow()) -worker = hatchet.worker("test-worker", max_runs=4) -worker.register_workflow(RateLimitWorkflow()) - -worker.start() + worker.start() diff --git a/examples/timeout/test_timeout.py b/examples/timeout/test_timeout.py new file mode 100644 index 00000000..1c9d0d89 --- /dev/null +++ b/examples/timeout/test_timeout.py @@ -0,0 +1,28 @@ +from hatchet_sdk import Hatchet +import pytest + +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "timeout"]) + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run_timeout(hatchet: Hatchet): + run = hatchet.admin.run_workflow("TimeoutWorkflow", {}) + try: + await run.result() + assert False, "Expected workflow to timeout" + except Exception as e: + assert str(e) == "Workflow Errors: ['TIMED_OUT']" + +@pytest.mark.asyncio(scope="session") +async def test_run_refresh_timeout(hatchet: Hatchet): + run = hatchet.admin.run_workflow("RefreshTimeoutWorkflow", {}) + result = await run.result() + assert result["step1"]["status"] == "success" + + + diff --git a/examples/timeout/worker.py b/examples/timeout/worker.py index 6d55f0e4..409fd0fa 100644 --- a/examples/timeout/worker.py +++ b/examples/timeout/worker.py @@ -2,31 +2,40 @@ from dotenv import load_dotenv -from hatchet_sdk import Hatchet +from hatchet_sdk import Hatchet, Context load_dotenv() hatchet = Hatchet(debug=True) -@hatchet.workflow(on_events=["user:create"]) +@hatchet.workflow(on_events=["timeout:create"]) class TimeoutWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.step(timeout="4s") - def timeout(self, context): - try: - print("started step2") - time.sleep(5) - print("finished step2") - except Exception as e: - print("caught an exception: " + str(e)) - raise e + def step1(self, context: Context): + time.sleep(5) + return {"status": "success"} -workflow = TimeoutWorkflow() -worker = hatchet.worker("timeout-worker", max_runs=4) -worker.register_workflow(workflow) +@hatchet.workflow(on_events=["refresh:create"]) +class RefreshTimeoutWorkflow: + + @hatchet.step(timeout="4s") + def step1(self, context: Context): + + context.refresh_timeout("10s") + time.sleep(5) + + return {"status": "success"} + + +def main(): + worker = hatchet.worker("timeout-worker", max_runs=4) + 
worker.register_workflow(TimeoutWorkflow()) + worker.register_workflow(RefreshTimeoutWorkflow()) + + worker.start() -worker.start() +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 6edb853d..40b838ae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,4 +42,19 @@ known_third_party = [ ] [tool.poetry.scripts] -worker = "examples.fanout.worker:main" \ No newline at end of file +api = "examples.api.api:main" +async = "examples.async.worker:main" +fanout = "examples.fanout.worker:main" +cancellation = "examples.cancellation.worker:main" +concurrency_limit = "examples.concurrency-limit.worker:main" +concurrency_limit_rr = "examples.concurrency-limit-rr.worker:main" +dag = "examples.dag.worker:main" +delayed = "examples.delayed.worker:main" +events = "examples.events.worker:main" +logger = "examples.logger.worker:main" +manual_trigger = "examples.manual_trigger.worker:main" +on_failure = "examples.on_failure.worker:main" +programatic_replay = "examples.programatic_replay.worker:main" +rate_limit = "examples.rate_limit.worker:main" +simple = "examples.simple.worker:main" +timeout = "examples.timeout.worker:main" \ No newline at end of file diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index d23252eb..a2e54df8 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1 +1,2 @@ -from .bg_worker import background_hatchet_worker \ No newline at end of file +from .hatchet_client import hatchet_client_fixture +from .bg_worker import fixture_bg_worker \ No newline at end of file diff --git a/tests/utils/bg_worker.py b/tests/utils/bg_worker.py index a462a42a..11478a2f 100644 --- a/tests/utils/bg_worker.py +++ b/tests/utils/bg_worker.py @@ -2,7 +2,7 @@ import time import pytest -def background_hatchet_worker(command, startup_time=5): +def fixture_bg_worker(command, startup_time=5): @pytest.fixture(scope="session", autouse=True) def fixture_background_hatchet_worker(): proc = subprocess.Popen(command) diff --git a/tests/utils/hatchet_client.py b/tests/utils/hatchet_client.py new file mode 100644 index 00000000..7ed8312b --- /dev/null +++ b/tests/utils/hatchet_client.py @@ -0,0 +1,13 @@ +from dotenv import load_dotenv +import pytest + +from hatchet_sdk.hatchet import Hatchet + +load_dotenv() + +def hatchet_client_fixture(): + @pytest.fixture + def hatchet(): + return Hatchet(debug=True) + + return hatchet \ No newline at end of file From 73d82021d64f9993277f63699750baefa4486374 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:34:19 -0400 Subject: [PATCH 09/19] chore: lint --- .vscode/settings.json | 2 +- examples/_deprecated/README.md | 2 +- examples/_deprecated/test_event_client.py | 8 ++++--- examples/api/api.py | 4 +++- examples/api/test_api.py | 4 ++-- examples/async/test_async.py | 5 +++-- examples/async/worker.py | 4 +++- examples/cancellation/test_cancellation.py | 4 ++-- .../test_concurrency_limit_rr.py | 22 ++++++++++++------- examples/concurrency-limit-rr/worker.py | 3 +-- .../test_concurrency_limit.py | 17 +++++++++----- examples/concurrency-limit/worker.py | 4 +++- examples/dag/event.py | 1 + examples/dag/test_dag.py | 5 ++--- examples/dag/worker.py | 16 +++++++++----- examples/delayed/test_delayed.py | 2 +- examples/events/test_event.py | 1 + examples/fanout/test_fanout.py | 4 ++-- examples/logger/test_logger.py | 4 ++-- examples/logger/worker.py | 3 ++- examples/logger/workflow.py | 2 +- .../manual_trigger/test_manual_trigger.py | 2 +- 
examples/on_failure/test_on_failure.py | 2 +- examples/on_failure/worker.py | 3 ++- examples/overrides/test_overrides.py | 1 - examples/programatic_replay/script.py | 5 +---- .../test_programatic_replay.py | 1 - examples/rate_limit/test_rate_limit.py | 11 +++++----- examples/rate_limit/worker.py | 1 + examples/timeout/test_timeout.py | 8 +++---- examples/timeout/worker.py | 9 ++++---- hatchet_sdk/__init__.py | 10 ++++----- hatchet_sdk/client.py | 8 +++++-- hatchet_sdk/clients/admin.py | 10 ++++----- hatchet_sdk/clients/dispatcher.py | 2 +- hatchet_sdk/clients/events.py | 8 ++++++- hatchet_sdk/clients/run_event_listener.py | 2 +- hatchet_sdk/clients/workflow_listener.py | 7 ++++-- hatchet_sdk/context.py | 2 +- hatchet_sdk/hatchet.py | 6 ++++- hatchet_sdk/worker.py | 20 ++++++++--------- pyproject.toml | 2 +- tests/utils/__init__.py | 2 +- tests/utils/bg_worker.py | 4 +++- tests/utils/hatchet_client.py | 7 +++--- 45 files changed, 147 insertions(+), 103 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 58aab7f8..f290ce12 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,4 +8,4 @@ "dotenv", "reqs" ] -} \ No newline at end of file +} diff --git a/examples/_deprecated/README.md b/examples/_deprecated/README.md index 7ba8135a..ee47c61b 100644 --- a/examples/_deprecated/README.md +++ b/examples/_deprecated/README.md @@ -1 +1 @@ -The examples and tests in this directory are deprecated, but we're maintaining them to ensure backwards compatibility. \ No newline at end of file +The examples and tests in this directory are deprecated, but we're maintaining them to ensure backwards compatibility. diff --git a/examples/_deprecated/test_event_client.py b/examples/_deprecated/test_event_client.py index 6d26e30b..9669c7d4 100644 --- a/examples/_deprecated/test_event_client.py +++ b/examples/_deprecated/test_event_client.py @@ -1,21 +1,23 @@ -from dotenv import load_dotenv import pytest +from dotenv import load_dotenv from hatchet_sdk import new_client from hatchet_sdk.hatchet import Hatchet load_dotenv() + @pytest.mark.asyncio(scope="session") async def test_direct_client_event(): client = new_client() e = client.event.push("user:create", {"test": "test"}) - + assert e.eventId is not None + @pytest.mark.asyncio(scope="session") async def test_hatchet_client_event(): hatchet = Hatchet() e = hatchet.client.event.push("user:create", {"test": "test"}) - + assert e.eventId is not None diff --git a/examples/api/api.py b/examples/api/api.py index 55e19380..0b091fee 100644 --- a/examples/api/api.py +++ b/examples/api/api.py @@ -6,6 +6,7 @@ hatchet = Hatchet(debug=True) + def main(): list: WorkflowList = hatchet.rest.workflow_list() @@ -15,5 +16,6 @@ def main(): print(workflow.metadata.created_at) print(workflow.metadata.updated_at) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/api/test_api.py b/examples/api/test_api.py index 54aa9d55..86e82ee1 100644 --- a/examples/api/test_api.py +++ b/examples/api/test_api.py @@ -1,11 +1,11 @@ -from hatchet_sdk import Hatchet import pytest +from hatchet_sdk import Hatchet from tests.utils.hatchet_client import hatchet_client_fixture - hatchet = hatchet_client_fixture() + # requires scope module or higher for shared event loop @pytest.mark.asyncio(scope="session") async def test_list_workflows(hatchet: Hatchet): diff --git a/examples/async/test_async.py b/examples/async/test_async.py index c56c7045..47387bd8 100644 --- a/examples/async/test_async.py +++ b/examples/async/test_async.py 
@@ -1,13 +1,13 @@
-from hatchet_sdk import Hatchet
 import pytest
 
+from hatchet_sdk import Hatchet
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "async"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run(hatchet: Hatchet):
@@ -15,6 +15,7 @@ async def test_run(hatchet: Hatchet):
     result = await run.result()
     assert result["step1"]["test"] == "test"
 
+
 @pytest.mark.asyncio(scope="session")
 async def test_run_async(hatchet: Hatchet):
     run = await hatchet.admin.aio.run_workflow("AsyncWorkflow", {})
diff --git a/examples/async/worker.py b/examples/async/worker.py
index 495d0358..0c766dc0 100644
--- a/examples/async/worker.py
+++ b/examples/async/worker.py
@@ -30,8 +30,10 @@ async def _main():
     worker.register_workflow(workflow)
     await worker.async_start()
 
+
 def main():
     asyncio.run(_main())
 
+
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
diff --git a/examples/cancellation/test_cancellation.py b/examples/cancellation/test_cancellation.py
index 9f819fc4..88f531df 100644
--- a/examples/cancellation/test_cancellation.py
+++ b/examples/cancellation/test_cancellation.py
@@ -1,13 +1,13 @@
-from hatchet_sdk import Hatchet
 import pytest
 
+from hatchet_sdk import Hatchet
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "cancellation"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run(hatchet: Hatchet):
diff --git a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py
index f51680cd..b9cab0bc 100644
--- a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py
+++ b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py
@@ -1,22 +1,22 @@
 import asyncio
 import time
-from hatchet_sdk import Hatchet
+
 import pytest
+from hatchet_sdk import Hatchet
 from hatchet_sdk.workflow_run import WorkflowRunRef
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "concurrency_limit_rr"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run(hatchet: Hatchet):
     num_groups = 2
     runs: list[WorkflowRunRef] = []
-
     # Start all runs
     for i in range(1, num_groups + 1):
@@ -24,7 +24,7 @@ async def test_run(hatchet: Hatchet):
         runs.append(run)
         run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i})
         runs.append(run)
-    
+
     # Wait for all results
     successful_runs = []
     cancelled_runs = []
@@ -46,10 +46,16 @@ async def test_run(hatchet: Hatchet):
     total_time = end_time - start_time
 
     # Check that we have the correct number of successful and cancelled runs
-    assert len(successful_runs) == 4, f"Expected 4 successful runs, got {len(successful_runs)}"
-    assert len(cancelled_runs) == 0, f"Expected 0 cancelled run, got {len(cancelled_runs)}"
+    assert (
+        len(successful_runs) == 4
+    ), f"Expected 4 successful runs, got {len(successful_runs)}"
+    assert (
+        len(cancelled_runs) == 0
+    ), f"Expected 0 cancelled runs, got {len(cancelled_runs)}"
 
     # Check that the total time is close to 2 seconds
-    assert 3.8 <= total_time <= 5, f"Expected runtime to be about 
4 seconds, but it took {total_time:.2f} seconds" + assert ( + 3.8 <= total_time <= 5 + ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds" - print(f"Total execution time: {total_time:.2f} seconds") \ No newline at end of file + print(f"Total execution time: {total_time:.2f} seconds") diff --git a/examples/concurrency-limit-rr/worker.py b/examples/concurrency-limit-rr/worker.py index 5032a629..be58ed81 100644 --- a/examples/concurrency-limit-rr/worker.py +++ b/examples/concurrency-limit-rr/worker.py @@ -12,8 +12,7 @@ @hatchet.workflow(on_events=["concurrency-test"], schedule_timeout="10m") class ConcurrencyDemoWorkflowRR: @hatchet.concurrency( - max_runs=1, - limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN + max_runs=1, limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN ) def concurrency(self, context: Context) -> str: input = context.workflow_input() diff --git a/examples/concurrency-limit/test_concurrency_limit.py b/examples/concurrency-limit/test_concurrency_limit.py index ac8af4a6..f1b35c38 100644 --- a/examples/concurrency-limit/test_concurrency_limit.py +++ b/examples/concurrency-limit/test_concurrency_limit.py @@ -1,26 +1,27 @@ import asyncio -from hatchet_sdk import Hatchet + import pytest +from hatchet_sdk import Hatchet from hatchet_sdk.workflow_run import WorkflowRunRef from tests.utils import fixture_bg_worker from tests.utils.hatchet_client import hatchet_client_fixture - hatchet = hatchet_client_fixture() worker = fixture_bg_worker(["poetry", "run", "concurrency_limit"]) + # requires scope module or higher for shared event loop @pytest.mark.asyncio(scope="session") async def test_run(hatchet: Hatchet): num_runs = 6 runs: list[WorkflowRunRef] = [] - + # Start all runs for i in range(1, num_runs + 1): run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflow", {"run": i}) runs.append(run) - + # Wait for all results successful_runs = [] cancelled_runs = [] @@ -37,5 +38,9 @@ async def test_run(hatchet: Hatchet): raise # Re-raise if it's an unexpected error # Check that we have the correct number of successful and cancelled runs - assert len(successful_runs) == 5, f"Expected 5 successful runs, got {len(successful_runs)}" - assert len(cancelled_runs) == 1, f"Expected 1 cancelled run, got {len(cancelled_runs)}" + assert ( + len(successful_runs) == 5 + ), f"Expected 5 successful runs, got {len(successful_runs)}" + assert ( + len(cancelled_runs) == 1 + ), f"Expected 1 cancelled run, got {len(cancelled_runs)}" diff --git a/examples/concurrency-limit/worker.py b/examples/concurrency-limit/worker.py index 2915143d..b4820628 100644 --- a/examples/concurrency-limit/worker.py +++ b/examples/concurrency-limit/worker.py @@ -24,6 +24,7 @@ def step1(self, context: Context): print("executed step1") return {"run": input["run"]} + def main(): workflow = ConcurrencyDemoWorkflow() worker = hatchet.worker("concurrency-demo-worker", max_runs=10) @@ -31,5 +32,6 @@ def main(): worker.start() + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/dag/event.py b/examples/dag/event.py index 4090a6a9..96953908 100644 --- a/examples/dag/event.py +++ b/examples/dag/event.py @@ -1,4 +1,5 @@ from dotenv import load_dotenv + from hatchet_sdk import Context, Hatchet load_dotenv() diff --git a/examples/dag/test_dag.py b/examples/dag/test_dag.py index 5cf7dfae..89e2f22e 100644 --- a/examples/dag/test_dag.py +++ b/examples/dag/test_dag.py @@ -1,13 +1,13 @@ -from hatchet_sdk import Hatchet import pytest +from hatchet_sdk import 
Hatchet
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "dag"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run(hatchet: Hatchet):
@@ -18,4 +18,3 @@ async def test_run(hatchet: Hatchet):
     two = result["step2"]["rando"]
     assert result["step3"]["sum"] == one + two
     assert result["step4"]["step4"] == "step4"
-
diff --git a/examples/dag/worker.py b/examples/dag/worker.py
index acf3247f..7b14e302 100644
--- a/examples/dag/worker.py
+++ b/examples/dag/worker.py
@@ -1,9 +1,9 @@
+import random
 import time
 
 from dotenv import load_dotenv
 
 from hatchet_sdk import Context, Hatchet
-import random
 
 load_dotenv()
@@ -15,22 +15,26 @@ class DagWorkflow:
 
     @hatchet.step(timeout="5s")
     def step1(self, context: Context):
-        rando = random.randint(1, 100)  # Generate a random number between 1 and 100return {
+        rando = random.randint(
+            1, 100
+        )  # Generate a random number between 1 and 100
         return {
             "rando": rando,
         }
 
     @hatchet.step(timeout="5s")
     def step2(self, context: Context):
-        rando = random.randint(1, 100)  # Generate a random number between 1 and 100return {
+        rando = random.randint(
+            1, 100
+        )  # Generate a random number between 1 and 100
         return {
             "rando": rando,
         }
 
     @hatchet.step(parents=["step1", "step2"])
     def step3(self, context: Context):
-        one = context.step_output("step1")['rando']
-        two = context.step_output("step2")['rando']
+        one = context.step_output("step1")["rando"]
+        two = context.step_output("step2")["rando"]
 
         return {
             "sum": one + two,
@@ -49,6 +53,7 @@ def step4(self, context: Context):
             "step4": "step4",
         }
 
+
 def main():
     workflow = DagWorkflow()
@@ -57,5 +62,6 @@ def main():
 
     worker.start()
 
+
 if __name__ == "__main__":
     main()
diff --git a/examples/delayed/test_delayed.py b/examples/delayed/test_delayed.py
index b626a3bb..55103aa6 100644
--- a/examples/delayed/test_delayed.py
+++ b/examples/delayed/test_delayed.py
@@ -11,4 +11,4 @@
 # # requires scope module or higher for shared event loop
 # @pytest.mark.asyncio(scope="session")
 # async def test_run(hatchet: Hatchet):
-# # TODO
\ No newline at end of file
+# # TODO
diff --git a/examples/events/test_event.py b/examples/events/test_event.py
index 0acd14a5..f67be3f8 100644
--- a/examples/events/test_event.py
+++ b/examples/events/test_event.py
@@ -5,6 +5,7 @@
 hatchet = hatchet_client_fixture()
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_event_push(hatchet: Hatchet):
diff --git a/examples/fanout/test_fanout.py b/examples/fanout/test_fanout.py
index 9195bad4..85ed28dd 100644
--- a/examples/fanout/test_fanout.py
+++ b/examples/fanout/test_fanout.py
@@ -1,13 +1,13 @@
-from hatchet_sdk import Hatchet
 import pytest
 
+from hatchet_sdk import Hatchet
 from tests.utils import 
fixture_bg_worker from tests.utils.hatchet_client import hatchet_client_fixture - hatchet = hatchet_client_fixture() worker = fixture_bg_worker(["poetry", "run", "logger"]) + # requires scope module or higher for shared event loop @pytest.mark.asyncio(scope="session") async def test_run(hatchet: Hatchet): diff --git a/examples/logger/worker.py b/examples/logger/worker.py index d9a34e22..9c51bdcf 100644 --- a/examples/logger/worker.py +++ b/examples/logger/worker.py @@ -14,5 +14,6 @@ def main(): worker.start() + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/logger/workflow.py b/examples/logger/workflow.py index 6eb1d277..a3afa0aa 100644 --- a/examples/logger/workflow.py +++ b/examples/logger/workflow.py @@ -14,5 +14,5 @@ def step1(self, context: Context): for i in range(12): logger.info("executed step1 - {}".format(i)) logger.info({"step1": "step1"}) - time.sleep(.1) + time.sleep(0.1) return {"status": "success"} diff --git a/examples/manual_trigger/test_manual_trigger.py b/examples/manual_trigger/test_manual_trigger.py index b626a3bb..55103aa6 100644 --- a/examples/manual_trigger/test_manual_trigger.py +++ b/examples/manual_trigger/test_manual_trigger.py @@ -11,4 +11,4 @@ # # requires scope module or higher for shared event loop # @pytest.mark.asyncio(scope="session") # async def test_run(hatchet: Hatchet): -# # TODO \ No newline at end of file +# # TODO diff --git a/examples/on_failure/test_on_failure.py b/examples/on_failure/test_on_failure.py index b626a3bb..55103aa6 100644 --- a/examples/on_failure/test_on_failure.py +++ b/examples/on_failure/test_on_failure.py @@ -11,4 +11,4 @@ # # requires scope module or higher for shared event loop # @pytest.mark.asyncio(scope="session") # async def test_run(hatchet: Hatchet): -# # TODO \ No newline at end of file +# # TODO diff --git a/examples/on_failure/worker.py b/examples/on_failure/worker.py index d2bdf94b..e1b36e3e 100644 --- a/examples/on_failure/worker.py +++ b/examples/on_failure/worker.py @@ -20,6 +20,7 @@ def on_failure(self, context): print("executed on_failure") print(context) + def main(): workflow = OnFailureWorkflow() worker = hatchet.worker("on-failure-worker", max_runs=4) @@ -29,4 +30,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/overrides/test_overrides.py b/examples/overrides/test_overrides.py index 7becc991..a3a232d0 100644 --- a/examples/overrides/test_overrides.py +++ b/examples/overrides/test_overrides.py @@ -14,4 +14,3 @@ # run = hatchet.admin.run_workflow("DagWorkflow", {}) # result = await run.result() # assert result["step1"]["test"] == "test" - diff --git a/examples/programatic_replay/script.py b/examples/programatic_replay/script.py index fd888544..edb5d685 100644 --- a/examples/programatic_replay/script.py +++ b/examples/programatic_replay/script.py @@ -8,10 +8,7 @@ if __name__ == "__main__": # Look up the failed workflow runs - failed = hatchet.rest.events_list( - statuses=[WorkflowRunStatus.FAILED], limit=3 - ) + failed = hatchet.rest.events_list(statuses=[WorkflowRunStatus.FAILED], limit=3) # Replay the failed workflow runs retried = hatchet.rest.events_replay(failed) - diff --git a/examples/programatic_replay/test_programatic_replay.py b/examples/programatic_replay/test_programatic_replay.py index 7becc991..a3a232d0 100644 --- a/examples/programatic_replay/test_programatic_replay.py +++ b/examples/programatic_replay/test_programatic_replay.py @@ -14,4 +14,3 @@ # run = 
hatchet.admin.run_workflow("DagWorkflow", {})
 # result = await run.result()
 # assert result["step1"]["test"] == "test"
-
diff --git a/examples/rate_limit/test_rate_limit.py b/examples/rate_limit/test_rate_limit.py
index 16121ebf..ca3d3f96 100644
--- a/examples/rate_limit/test_rate_limit.py
+++ b/examples/rate_limit/test_rate_limit.py
@@ -1,15 +1,16 @@
 import asyncio
 import time
-from hatchet_sdk import Hatchet
+
 import pytest
+from hatchet_sdk import Hatchet
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "rate_limit"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run(hatchet: Hatchet):
@@ -25,7 +26,7 @@ async def test_run(hatchet: Hatchet):
     end_time = time.time()
 
     total_time = end_time - start_time
-
-    assert 1 <= total_time <= 2, f"Expected runtime to be a bit more than 1 seconds, but it took {total_time:.2f} seconds"
-
+    assert (
+        1 <= total_time <= 2
+    ), f"Expected runtime to be a bit more than 1 second, but it took {total_time:.2f} seconds"
diff --git a/examples/rate_limit/worker.py b/examples/rate_limit/worker.py
index a13f4b06..d773b606 100644
--- a/examples/rate_limit/worker.py
+++ b/examples/rate_limit/worker.py
@@ -16,6 +16,7 @@ def step1(self, context: Context):
         print("executed step1")
         pass
 
+
 def main():
     hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.SECOND)
diff --git a/examples/timeout/test_timeout.py b/examples/timeout/test_timeout.py
index 1c9d0d89..d702fd04 100644
--- a/examples/timeout/test_timeout.py
+++ b/examples/timeout/test_timeout.py
@@ -1,13 +1,13 @@
-from hatchet_sdk import Hatchet
 import pytest
 
+from hatchet_sdk import Hatchet
 from tests.utils import fixture_bg_worker
 from tests.utils.hatchet_client import hatchet_client_fixture
 
-
 hatchet = hatchet_client_fixture()
 worker = fixture_bg_worker(["poetry", "run", "timeout"])
 
+
 # requires scope module or higher for shared event loop
 @pytest.mark.asyncio(scope="session")
 async def test_run_timeout(hatchet: Hatchet):
@@ -18,11 +18,9 @@ async def test_run_timeout(hatchet: Hatchet):
     except Exception as e:
         assert str(e) == "Workflow Errors: ['TIMED_OUT']"
 
+
 @pytest.mark.asyncio(scope="session")
 async def test_run_refresh_timeout(hatchet: Hatchet):
     run = hatchet.admin.run_workflow("RefreshTimeoutWorkflow", {})
     result = await run.result()
     assert result["step1"]["status"] == "success"
-
-
-
diff --git a/examples/timeout/worker.py b/examples/timeout/worker.py
index 409fd0fa..07885ba0 100644
--- a/examples/timeout/worker.py
+++ b/examples/timeout/worker.py
@@ -2,7 +2,7 @@
 
 from dotenv import load_dotenv
 
-from hatchet_sdk import Hatchet, Context
+from hatchet_sdk import Context, Hatchet
 
 load_dotenv()
 
@@ -20,7 +20,7 @@ def step1(self, context: Context):
 
 @hatchet.workflow(on_events=["refresh:create"])
 class RefreshTimeoutWorkflow:
-    
+
     @hatchet.step(timeout="4s")
     def step1(self, context: Context):
 
@@ -28,7 +28,7 @@ def step1(self, context: Context):
         time.sleep(5)
 
         return {"status": "success"}
-    
+
 
 def main():
     worker = hatchet.worker("timeout-worker", max_runs=4)
@@ -37,5 +37,6 @@ def main():
 
     worker.start()
 
+
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
diff --git a/hatchet_sdk/__init__.py b/hatchet_sdk/__init__.py
index dde703ed..eb4a33d2 100644
--- a/hatchet_sdk/__init__.py
+++ b/hatchet_sdk/__init__.py
@@ -120,6 +120,11 @@
     WorkflowVersionDefinition,
 )
 from 
hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta +from hatchet_sdk.contracts.workflows_pb2 import ( + ConcurrencyLimitStrategy, + CreateWorkflowVersionOpts, + RateLimitDuration, +) from .client import new_client from .clients.admin import ( @@ -132,8 +137,3 @@ from .context import Context from .hatchet import ClientConfig, Hatchet, concurrency, on_failure_step, step, workflow from .worker import Worker, WorkerStatus -from hatchet_sdk.contracts.workflows_pb2 import ( - ConcurrencyLimitStrategy, - CreateWorkflowVersionOpts, - RateLimitDuration, -) diff --git a/hatchet_sdk/client.py b/hatchet_sdk/client.py index 64ee345d..bfbadd7b 100644 --- a/hatchet_sdk/client.py +++ b/hatchet_sdk/client.py @@ -23,7 +23,11 @@ class Client: logger: Logger @classmethod - def from_environment(cls, defaults: ClientConfig = ClientConfig(), *opts_functions: Callable[[ClientConfig], None]): + def from_environment( + cls, + defaults: ClientConfig = ClientConfig(), + *opts_functions: Callable[[ClientConfig], None] + ): config: ClientConfig = ConfigLoader(".").load_client_config(defaults) for opt_function in opts_functions: opt_function(config) @@ -84,4 +88,4 @@ def with_host_port_impl(config: ClientConfig): new_client = Client.from_environment -new_client_raw = Client.from_config \ No newline at end of file +new_client_raw = Client.from_config diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py index 4bd6e66c..be96b859 100644 --- a/hatchet_sdk/clients/admin.py +++ b/hatchet_sdk/clients/admin.py @@ -9,11 +9,6 @@ from hatchet_sdk.clients.run_event_listener import new_listener from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.connection import new_conn -from hatchet_sdk.workflow_run import WorkflowRunRef - -from ..loader import ClientConfig -from ..metadata import get_metadata -from ..workflow import WorkflowMeta from hatchet_sdk.contracts.workflows_pb2 import ( CreateWorkflowVersionOpts, PutRateLimitRequest, @@ -25,6 +20,11 @@ WorkflowVersion, ) from hatchet_sdk.contracts.workflows_pb2_grpc import WorkflowServiceStub +from hatchet_sdk.workflow_run import WorkflowRunRef + +from ..loader import ClientConfig +from ..metadata import get_metadata +from ..workflow import WorkflowMeta def new_admin(config: ClientConfig): diff --git a/hatchet_sdk/clients/dispatcher.py b/hatchet_sdk/clients/dispatcher.py index ee7bb905..d456021c 100644 --- a/hatchet_sdk/clients/dispatcher.py +++ b/hatchet_sdk/clients/dispatcher.py @@ -11,7 +11,6 @@ from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt from hatchet_sdk.connection import new_conn - from hatchet_sdk.contracts.dispatcher_pb2 import ( ActionEventResponse, ActionType, @@ -28,6 +27,7 @@ WorkerUnsubscribeRequest, ) from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub + from ..loader import ClientConfig from ..logger import logger from ..metadata import get_metadata diff --git a/hatchet_sdk/clients/events.py b/hatchet_sdk/clients/events.py index 9ee286dc..780c54a5 100644 --- a/hatchet_sdk/clients/events.py +++ b/hatchet_sdk/clients/events.py @@ -6,8 +6,14 @@ import grpc from google.protobuf import timestamp_pb2 -from hatchet_sdk.contracts.events_pb2 import Event, PushEventRequest, PutLogRequest, PutStreamEventRequest +from hatchet_sdk.contracts.events_pb2 import ( + Event, + PushEventRequest, + PutLogRequest, + PutStreamEventRequest, +) from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub + from ..loader import ClientConfig from ..metadata 
import get_metadata diff --git a/hatchet_sdk/clients/run_event_listener.py b/hatchet_sdk/clients/run_event_listener.py index 9f280e44..c8e3d839 100644 --- a/hatchet_sdk/clients/run_event_listener.py +++ b/hatchet_sdk/clients/run_event_listener.py @@ -5,7 +5,6 @@ import grpc from hatchet_sdk.connection import new_conn - from hatchet_sdk.contracts.dispatcher_pb2 import ( RESOURCE_TYPE_STEP_RUN, RESOURCE_TYPE_WORKFLOW_RUN, @@ -14,6 +13,7 @@ WorkflowEvent, ) from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub + from ..loader import ClientConfig from ..metadata import get_metadata diff --git a/hatchet_sdk/clients/workflow_listener.py b/hatchet_sdk/clients/workflow_listener.py index 54fec01f..89a6f7c5 100644 --- a/hatchet_sdk/clients/workflow_listener.py +++ b/hatchet_sdk/clients/workflow_listener.py @@ -9,9 +9,12 @@ from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt from hatchet_sdk.connection import new_conn - -from hatchet_sdk.contracts.dispatcher_pb2 import SubscribeToWorkflowRunsRequest, WorkflowRunEvent +from hatchet_sdk.contracts.dispatcher_pb2 import ( + SubscribeToWorkflowRunsRequest, + WorkflowRunEvent, +) from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub + from ..loader import ClientConfig from ..logger import logger from ..metadata import get_metadata diff --git a/hatchet_sdk/context.py b/hatchet_sdk/context.py index 70ae0790..0cfccd04 100644 --- a/hatchet_sdk/context.py +++ b/hatchet_sdk/context.py @@ -9,6 +9,7 @@ RunEventListenerClient, ) from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener +from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData from hatchet_sdk.workflow_run import WorkflowRunRef from .clients.admin import ( @@ -18,7 +19,6 @@ TriggerWorkflowOptions, ) from .clients.dispatcher import Action, DispatcherClientImpl -from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData from .logger import logger DEFAULT_WORKFLOW_POLLING_INTERVAL = 5 # Seconds diff --git a/hatchet_sdk/hatchet.py b/hatchet_sdk/hatchet.py index 93b41462..365173ab 100644 --- a/hatchet_sdk/hatchet.py +++ b/hatchet_sdk/hatchet.py @@ -1,7 +1,12 @@ import logging from typing import List, Optional + from typing_extensions import deprecated +from hatchet_sdk.contracts.workflows_pb2 import ( + ConcurrencyLimitStrategy, + CreateStepRateLimit, +) from hatchet_sdk.loader import ClientConfig from hatchet_sdk.rate_limit import RateLimit @@ -9,7 +14,6 @@ from .logger import logger from .worker import Worker from .workflow import WorkflowMeta -from hatchet_sdk.contracts.workflows_pb2 import ConcurrencyLimitStrategy, CreateStepRateLimit def workflow( diff --git a/hatchet_sdk/worker.py b/hatchet_sdk/worker.py index 903bba42..abedfc56 100644 --- a/hatchet_sdk/worker.py +++ b/hatchet_sdk/worker.py @@ -25,16 +25,6 @@ from hatchet_sdk.clients.events import EventClient from hatchet_sdk.clients.run_event_listener import new_listener from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener -from hatchet_sdk.loader import ClientConfig - -from .client import new_client, new_client_raw -from .clients.dispatcher import ( - Action, - ActionListenerImpl, - GetActionListenerRequest, - new_dispatcher, -) -from .context import Context from hatchet_sdk.contracts.dispatcher_pb2 import ( GROUP_KEY_EVENT_TYPE_COMPLETED, GROUP_KEY_EVENT_TYPE_FAILED, @@ -48,6 +38,16 @@ StepActionEvent, StepActionEventType, ) +from hatchet_sdk.loader import ClientConfig + +from .client import new_client, new_client_raw +from .clients.dispatcher 
import ( + Action, + ActionListenerImpl, + GetActionListenerRequest, + new_dispatcher, +) +from .context import Context from .logger import logger from .workflow import WorkflowMeta diff --git a/pyproject.toml b/pyproject.toml index 40b838ae..272eba62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,4 +57,4 @@ on_failure = "examples.on_failure.worker:main" programatic_replay = "examples.programatic_replay.worker:main" rate_limit = "examples.rate_limit.worker:main" simple = "examples.simple.worker:main" -timeout = "examples.timeout.worker:main" \ No newline at end of file +timeout = "examples.timeout.worker:main" diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index a2e54df8..220e788f 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,2 +1,2 @@ +from .bg_worker import fixture_bg_worker from .hatchet_client import hatchet_client_fixture -from .bg_worker import fixture_bg_worker \ No newline at end of file diff --git a/tests/utils/bg_worker.py b/tests/utils/bg_worker.py index 11478a2f..f99372eb 100644 --- a/tests/utils/bg_worker.py +++ b/tests/utils/bg_worker.py @@ -1,7 +1,9 @@ import subprocess import time + import pytest + def fixture_bg_worker(command, startup_time=5): @pytest.fixture(scope="session", autouse=True) def fixture_background_hatchet_worker(): @@ -15,4 +17,4 @@ def fixture_background_hatchet_worker(): proc.terminate() proc.wait() - return fixture_background_hatchet_worker \ No newline at end of file + return fixture_background_hatchet_worker diff --git a/tests/utils/hatchet_client.py b/tests/utils/hatchet_client.py index 7ed8312b..797cfa08 100644 --- a/tests/utils/hatchet_client.py +++ b/tests/utils/hatchet_client.py @@ -1,13 +1,14 @@ -from dotenv import load_dotenv import pytest +from dotenv import load_dotenv from hatchet_sdk.hatchet import Hatchet load_dotenv() + def hatchet_client_fixture(): @pytest.fixture def hatchet(): return Hatchet(debug=True) - - return hatchet \ No newline at end of file + + return hatchet From b7b950d24948cae3f82d4b80009694459b861bf7 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:42:11 -0400 Subject: [PATCH 10/19] chore: e2e action --- .github/workflows/e2e.yaml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/e2e.yaml diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml new file mode 100644 index 00000000..c25af13d --- /dev/null +++ b/.github/workflows/e2e.yaml @@ -0,0 +1,32 @@ +name: e2e + +on: [pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["pypy3.9", "pypy3.10", "3.9", "3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.5.1 + virtualenvs-create: true + virtualenvs-in-project: true + - name: Install dependencies + run: poetry install --no-interaction --no-root + - name: Run pytest + env: + HATCHET_CLIENT_TOKEN: ${{ secrets.HATCHET_CLIENT_TOKEN }} + run: poetry run pytest \ No newline at end of file From 69e78119005e8d5fd112fbbd3f2a59e75b7df33b Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:44:34 -0400 Subject: [PATCH 11/19] fix: install --- .github/workflows/e2e.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
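Note: "poetry install --no-root" installs only the project's dependencies and skips installing the project package itself, so the console scripts declared under [tool.poetry.scripts] (fanout, logger, timeout, and the rest) are never registered in the virtualenv, and the background-worker fixtures cannot launch them. A rough sketch of the failure mode this one-liner fixes (the exact Poetry error text may differ):

    poetry install --no-interaction --no-root   # dependencies only; no entry points created
    poetry run fanout                           # fails: the "fanout" script is not installed
    poetry install --no-interaction             # also installs the project, registering its scripts
    poetry run fanout                           # the fanout worker starts as expected
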
diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml index c25af13d..ffefbd8e 100644 --- a/.github/workflows/e2e.yaml +++ b/.github/workflows/e2e.yaml @@ -25,7 +25,7 @@ jobs: virtualenvs-create: true virtualenvs-in-project: true - name: Install dependencies - run: poetry install --no-interaction --no-root + run: poetry install --no-interaction - name: Run pytest env: HATCHET_CLIENT_TOKEN: ${{ secrets.HATCHET_CLIENT_TOKEN }} From 1e5565c1425f49bde05d7fe8a46f9ff30e50bf20 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:49:03 -0400 Subject: [PATCH 12/19] fix: with namespace --- .github/workflows/e2e.yaml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml index ffefbd8e..a252573f 100644 --- a/.github/workflows/e2e.yaml +++ b/.github/workflows/e2e.yaml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["pypy3.9", "pypy3.10", "3.9", "3.10", "3.11", "3.12"] + python-version: ["pypy3.10","3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 @@ -26,7 +26,14 @@ jobs: virtualenvs-in-project: true - name: Install dependencies run: poetry install --no-interaction + - name: Set HATCHET_CLIENT_NAMESPACE + run: | + PYTHON_VERSION=$(python -c "import sys; print(f'py{sys.version_info.major}{sys.version_info.minor}')") + SHORT_SHA=$(git rev-parse --short HEAD) + echo "HATCHET_CLIENT_NAMESPACE=${PYTHON_VERSION}-${SHORT_SHA}" >> $GITHUB_ENV - name: Run pytest env: HATCHET_CLIENT_TOKEN: ${{ secrets.HATCHET_CLIENT_TOKEN }} - run: poetry run pytest \ No newline at end of file + run: | + echo "Using HATCHET_CLIENT_NAMESPACE: $HATCHET_CLIENT_NAMESPACE" + poetry run pytest \ No newline at end of file From e91d879b3065f64b7f340e192792aea3c3c25fa0 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:52:36 -0400 Subject: [PATCH 13/19] chore: skip unreliable timing --- examples/concurrency-limit-rr/test_concurrency_limit_rr.py | 1 + examples/concurrency-limit/test_concurrency_limit.py | 1 + examples/rate_limit/test_rate_limit.py | 1 + 3 files changed, 3 insertions(+) diff --git a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py index b9cab0bc..e135d10c 100644 --- a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py +++ b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py @@ -13,6 +13,7 @@ # requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") @pytest.mark.asyncio(scope="session") async def test_run(hatchet: Hatchet): num_groups = 2 diff --git a/examples/concurrency-limit/test_concurrency_limit.py b/examples/concurrency-limit/test_concurrency_limit.py index f1b35c38..ba89f21c 100644 --- a/examples/concurrency-limit/test_concurrency_limit.py +++ b/examples/concurrency-limit/test_concurrency_limit.py @@ -12,6 +12,7 @@ # requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") @pytest.mark.asyncio(scope="session") async def test_run(hatchet: Hatchet): num_runs = 6 diff --git a/examples/rate_limit/test_rate_limit.py b/examples/rate_limit/test_rate_limit.py index ca3d3f96..6f23da01 100644 --- a/examples/rate_limit/test_rate_limit.py +++ b/examples/rate_limit/test_rate_limit.py @@ -12,6 +12,7 @@ # requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") 
@pytest.mark.asyncio(scope="session") async def test_run(hatchet: Hatchet): From 92267451a82f8a64065db5939c831a312f2456ef Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:53:32 -0400 Subject: [PATCH 14/19] chore: lint --- .github/workflows/e2e.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml index a252573f..70ef8dee 100644 --- a/.github/workflows/e2e.yaml +++ b/.github/workflows/e2e.yaml @@ -36,4 +36,4 @@ jobs: HATCHET_CLIENT_TOKEN: ${{ secrets.HATCHET_CLIENT_TOKEN }} run: | echo "Using HATCHET_CLIENT_NAMESPACE: $HATCHET_CLIENT_NAMESPACE" - poetry run pytest \ No newline at end of file + poetry run pytest From 58cc75f6140257eccffb6da458459e56de770971 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 10:56:53 -0400 Subject: [PATCH 15/19] fix: drop pypy --- .github/workflows/e2e.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml index 70ef8dee..1fefc43f 100644 --- a/.github/workflows/e2e.yaml +++ b/.github/workflows/e2e.yaml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["pypy3.10","3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 From 7726b8e7e8e39e835e264b2c2d5636ff51c71e3b Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 11:12:52 -0400 Subject: [PATCH 16/19] fix: ignore warning --- examples/_deprecated/test_event_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/_deprecated/test_event_client.py b/examples/_deprecated/test_event_client.py index 9669c7d4..a7cb8417 100644 --- a/examples/_deprecated/test_event_client.py +++ b/examples/_deprecated/test_event_client.py @@ -14,7 +14,7 @@ async def test_direct_client_event(): assert e.eventId is not None - +@pytest.mark.filterwarnings("ignore:Direct access to client is deprecated:DeprecationWarning") @pytest.mark.asyncio(scope="session") async def test_hatchet_client_event(): hatchet = Hatchet() From b82fa9b285d2c5a2a7ea4e0cb310b65746772191 Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 11:14:39 -0400 Subject: [PATCH 17/19] chore: lint --- examples/_deprecated/test_event_client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/examples/_deprecated/test_event_client.py b/examples/_deprecated/test_event_client.py index a7cb8417..978c33f3 100644 --- a/examples/_deprecated/test_event_client.py +++ b/examples/_deprecated/test_event_client.py @@ -14,7 +14,10 @@ async def test_direct_client_event(): assert e.eventId is not None -@pytest.mark.filterwarnings("ignore:Direct access to client is deprecated:DeprecationWarning") + +@pytest.mark.filterwarnings( + "ignore:Direct access to client is deprecated:DeprecationWarning" +) @pytest.mark.asyncio(scope="session") async def test_hatchet_client_event(): hatchet = Hatchet() From 7a3708dea54f72345a56c0526124ae93722c7c4d Mon Sep 17 00:00:00 2001 From: gabriel ruttner Date: Wed, 24 Jul 2024 11:39:11 -0400 Subject: [PATCH 18/19] chore: fix generate command --- .gitmodules | 4 + generate.sh | 15 +- hatchet_sdk/clients/rest/__init__.py | 8 + hatchet_sdk/clients/rest/api/__init__.py | 1 + .../clients/rest/api/workflow_runs_api.py | 335 ++++++++++++++++++ hatchet_sdk/clients/rest/models/__init__.py | 7 + .../clients/rest/models/api_resource_meta.py | 2 +- .../models/replay_workflow_runs_request.py | 88 +++++ 
.../models/replay_workflow_runs_response.py | 95 +++++ hatchet_sdk/clients/rest/models/worker.py | 17 + .../clients/rest/models/worker_label.py | 95 +++++ hatchet_sdk/contracts/dispatcher_pb2.py | 132 ++++--- hatchet_sdk/contracts/dispatcher_pb2.pyi | 44 ++- hatchet_sdk/contracts/dispatcher_pb2_grpc.py | 33 ++ hatchet_sdk/contracts/workflows_pb2.py | 78 ++-- hatchet_sdk/contracts/workflows_pb2.pyi | 61 +++- 16 files changed, 910 insertions(+), 105 deletions(-) create mode 100644 hatchet_sdk/clients/rest/api/workflow_runs_api.py create mode 100644 hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py create mode 100644 hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py create mode 100644 hatchet_sdk/clients/rest/models/worker_label.py diff --git a/.gitmodules b/.gitmodules index e69de29b..2e2e6198 100644 --- a/.gitmodules +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "hatchet"] + path = hatchet + url = git@github.com:hatchet-dev/hatchet.git + branch = main diff --git a/generate.sh b/generate.sh index 7fbbad13..5962832b 100755 --- a/generate.sh +++ b/generate.sh @@ -56,9 +56,18 @@ cp $tmp_dir/hatchet_sdk/clients/rest/api/__init__.py $dst_dir/api/__init__.py # remove tmp folder rm -rf $tmp_dir -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/dispatcher --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk dispatcher.proto -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/events --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk events.proto -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/workflows --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk workflows.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/dispatcher --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts --grpc_python_out=./hatchet_sdk/contracts dispatcher.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/events --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts --grpc_python_out=./hatchet_sdk/contracts events.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/workflows --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts --grpc_python_out=./hatchet_sdk/contracts workflows.proto + +# Fix relative imports in _grpc.py files +if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS + find ./hatchet_sdk/contracts -type f -name '*_grpc.py' -print0 | xargs -0 sed -i '' 's/^import \([^ ]*\)_pb2/from . import \1_pb2/' +else + # Linux and others + find ./hatchet_sdk/contracts -type f -name '*_grpc.py' -print0 | xargs -0 sed -i 's/^import \([^ ]*\)_pb2/from . 
import \1_pb2/' +fi # ensure that pre-commit is applied without errors pre-commit run --all-files || pre-commit run --all-files diff --git a/hatchet_sdk/clients/rest/__init__.py b/hatchet_sdk/clients/rest/__init__.py index e8d10b51..32022b15 100644 --- a/hatchet_sdk/clients/rest/__init__.py +++ b/hatchet_sdk/clients/rest/__init__.py @@ -32,6 +32,7 @@ from hatchet_sdk.clients.rest.api.worker_api import WorkerApi from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi +from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi from hatchet_sdk.clients.rest.api_client import ApiClient # import ApiClient @@ -114,6 +115,12 @@ from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ( + ReplayWorkflowRunsRequest, +) +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ( + ReplayWorkflowRunsResponse, +) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration @@ -180,6 +187,7 @@ WebhookWorkerListResponse, ) from hatchet_sdk.clients.rest.models.worker import Worker +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel from hatchet_sdk.clients.rest.models.worker_list import WorkerList from hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/hatchet_sdk/clients/rest/api/__init__.py b/hatchet_sdk/clients/rest/api/__init__.py index bc8c788d..718a6534 100644 --- a/hatchet_sdk/clients/rest/api/__init__.py +++ b/hatchet_sdk/clients/rest/api/__init__.py @@ -16,3 +16,4 @@ from hatchet_sdk.clients.rest.api.worker_api import WorkerApi from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi +from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi diff --git a/hatchet_sdk/clients/rest/api/workflow_runs_api.py b/hatchet_sdk/clients/rest/api/workflow_runs_api.py new file mode 100644 index 00000000..6bfe222b --- /dev/null +++ b/hatchet_sdk/clients/rest/api/workflow_runs_api.py @@ -0,0 +1,335 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field +from typing_extensions import Annotated +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ReplayWorkflowRunsRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ReplayWorkflowRunsResponse + +from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized +from hatchet_sdk.clients.rest.api_response import ApiResponse +from hatchet_sdk.clients.rest.rest import RESTResponseType + + +class WorkflowRunsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def workflow_run_update_replay( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ReplayWorkflowRunsResponse: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def workflow_run_update_replay_with_http_info( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ReplayWorkflowRunsResponse]: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def workflow_run_update_replay_without_preload_content( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _workflow_run_update_replay_serialize( + self, + tenant, + replay_workflow_runs_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params['tenant'] = tenant + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if replay_workflow_runs_request is not None: + _body_params = replay_workflow_runs_request + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'cookieAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/tenants/{tenant}/workflow-runs/replay', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/hatchet_sdk/clients/rest/models/__init__.py b/hatchet_sdk/clients/rest/models/__init__.py index b9c0ece4..94f1b247 100644 --- a/hatchet_sdk/clients/rest/models/__init__.py +++ b/hatchet_sdk/clients/rest/models/__init__.py @@ -82,6 +82,12 @@ from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ( + ReplayWorkflowRunsRequest, +) +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ( + ReplayWorkflowRunsResponse, +) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration @@ -148,6 +154,7 @@ WebhookWorkerListResponse, ) from hatchet_sdk.clients.rest.models.worker import Worker +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel from hatchet_sdk.clients.rest.models.worker_list import WorkerList from 
hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/hatchet_sdk/clients/rest/models/api_resource_meta.py b/hatchet_sdk/clients/rest/models/api_resource_meta.py index bb8ad457..8c353248 100644 --- a/hatchet_sdk/clients/rest/models/api_resource_meta.py +++ b/hatchet_sdk/clients/rest/models/api_resource_meta.py @@ -29,7 +29,7 @@ class APIResourceMeta(BaseModel): APIResourceMeta """ # noqa: E501 - id: Annotated[str, Field(min_length=36, strict=True, max_length=36)] = Field( + id: Annotated[str, Field(min_length=0, strict=True, max_length=36)] = Field( description="the id of this resource, in UUID format" ) created_at: datetime = Field( diff --git a/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py b/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py new file mode 100644 index 00000000..f30b784c --- /dev/null +++ b/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class ReplayWorkflowRunsRequest(BaseModel): + """ + ReplayWorkflowRunsRequest + """ # noqa: E501 + workflow_run_ids: List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]] = Field(alias="workflowRunIds") + __properties: ClassVar[List[str]] = ["workflowRunIds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "workflowRunIds": obj.get("workflowRunIds") + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py new file mode 100644 index 00000000..1b84adcc --- /dev/null +++ b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun +from typing import Optional, Set +from typing_extensions import Self + +class ReplayWorkflowRunsResponse(BaseModel): + """ + ReplayWorkflowRunsResponse + """ # noqa: E501 + workflow_runs: List[WorkflowRun] = Field(alias="workflowRuns") + __properties: ClassVar[List[str]] = ["workflowRuns"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in workflow_runs (list) + _items = [] + if self.workflow_runs: + for _item in self.workflow_runs: + if _item: + _items.append(_item.to_dict()) + _dict['workflowRuns'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "workflowRuns": [WorkflowRun.from_dict(_item) for _item in obj["workflowRuns"]] if obj.get("workflowRuns") is not None else None + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/worker.py b/hatchet_sdk/clients/rest/models/worker.py index b70ae839..710e0e8f 100644 --- a/hatchet_sdk/clients/rest/models/worker.py +++ b/hatchet_sdk/clients/rest/models/worker.py @@ -25,6 +25,7 @@ from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta from hatchet_sdk.clients.rest.models.step_run import StepRun +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel class Worker(BaseModel): @@ -72,6 +73,9 @@ class Worker(BaseModel): description="the id of the assigned dispatcher, in UUID format", alias="dispatcherId", ) + labels: Optional[List[WorkerLabel]] = Field( + default=None, description="The current label state of the worker." + ) __properties: ClassVar[List[str]] = [ "metadata", "name", @@ -83,6 +87,7 @@ class Worker(BaseModel): "maxRuns", "availableRuns", "dispatcherId", + "labels", ] @field_validator("status") @@ -144,6 +149,13 @@ def to_dict(self) -> Dict[str, Any]: if _item: _items.append(_item.to_dict()) _dict["recentStepRuns"] = _items + # override the default output from pydantic by calling `to_dict()` of each item in labels (list) + _items = [] + if self.labels: + for _item in self.labels: + if _item: + _items.append(_item.to_dict()) + _dict["labels"] = _items return _dict @classmethod @@ -175,6 +187,11 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "maxRuns": obj.get("maxRuns"), "availableRuns": obj.get("availableRuns"), "dispatcherId": obj.get("dispatcherId"), + "labels": ( + [WorkerLabel.from_dict(_item) for _item in obj["labels"]] + if obj.get("labels") is not None + else None + ), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/worker_label.py b/hatchet_sdk/clients/rest/models/worker_label.py new file mode 100644 index 00000000..75f6b49b --- /dev/null +++ b/hatchet_sdk/clients/rest/models/worker_label.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta +from typing import Optional, Set +from typing_extensions import Self + +class WorkerLabel(BaseModel): + """ + WorkerLabel + """ # noqa: E501 + metadata: APIResourceMeta + key: StrictStr = Field(description="The key of the label.") + value: Optional[StrictStr] = Field(default=None, description="The value of the label.") + __properties: ClassVar[List[str]] = ["metadata", "key", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkerLabel from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkerLabel from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "key": obj.get("key"), + "value": obj.get("value") + }) + return _obj + + diff --git a/hatchet_sdk/contracts/dispatcher_pb2.py b/hatchet_sdk/contracts/dispatcher_pb2.py index c7a6f882..bc2d3978 100644 --- a/hatchet_sdk/contracts/dispatcher_pb2.py +++ b/hatchet_sdk/contracts/dispatcher_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"p\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\x98\x02\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 
\x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 
\x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xa7\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\xe2\x01\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\x98\x02\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -23,62 +23,76 @@ if _descriptor._USE_C_DESCRIPTORS == False: _globals['DESCRIPTOR']._options = None _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' - _globals['_ACTIONTYPE']._serialized_start=2378 - _globals['_ACTIONTYPE']._serialized_end=2456 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=2459 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=2621 - _globals['_STEPACTIONEVENTTYPE']._serialized_start=2624 - _globals['_STEPACTIONEVENTTYPE']._serialized_end=2762 - 
_globals['_RESOURCETYPE']._serialized_start=2764 - _globals['_RESOURCETYPE']._serialized_end=2865 - _globals['_RESOURCEEVENTTYPE']._serialized_start=2868 - _globals['_RESOURCEEVENTTYPE']._serialized_end=3122 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=3124 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=3184 - _globals['_WORKERREGISTERREQUEST']._serialized_start=53 - _globals['_WORKERREGISTERREQUEST']._serialized_end=165 - _globals['_WORKERREGISTERRESPONSE']._serialized_start=167 - _globals['_WORKERREGISTERRESPONSE']._serialized_end=247 - _globals['_ASSIGNEDACTION']._serialized_start=250 - _globals['_ASSIGNEDACTION']._serialized_end=530 - _globals['_WORKERLISTENREQUEST']._serialized_start=532 - _globals['_WORKERLISTENREQUEST']._serialized_end=571 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=573 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=617 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=619 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=682 - _globals['_GROUPKEYACTIONEVENT']._serialized_start=685 - _globals['_GROUPKEYACTIONEVENT']._serialized_end=910 - _globals['_STEPACTIONEVENT']._serialized_start=913 - _globals['_STEPACTIONEVENT']._serialized_end=1149 - _globals['_ACTIONEVENTRESPONSE']._serialized_start=1151 - _globals['_ACTIONEVENTRESPONSE']._serialized_end=1208 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1210 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1267 - _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=1269 - _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=1324 - _globals['_WORKFLOWEVENT']._serialized_start=1327 - _globals['_WORKFLOWEVENT']._serialized_end=1633 - _globals['_WORKFLOWRUNEVENT']._serialized_start=1636 - _globals['_WORKFLOWRUNEVENT']._serialized_end=1804 - _globals['_STEPRUNRESULT']._serialized_start=1807 - _globals['_STEPRUNRESULT']._serialized_end=1945 - _globals['_OVERRIDESDATA']._serialized_start=1947 - _globals['_OVERRIDESDATA']._serialized_end=2034 - _globals['_OVERRIDESDATARESPONSE']._serialized_start=2036 - _globals['_OVERRIDESDATARESPONSE']._serialized_end=2059 - _globals['_HEARTBEATREQUEST']._serialized_start=2061 - _globals['_HEARTBEATREQUEST']._serialized_end=2146 - _globals['_HEARTBEATRESPONSE']._serialized_start=2148 - _globals['_HEARTBEATRESPONSE']._serialized_end=2167 - _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=2169 - _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=2239 - _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=2241 - _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=2312 - _globals['_RELEASESLOTREQUEST']._serialized_start=2314 - _globals['_RELEASESLOTREQUEST']._serialized_end=2353 - _globals['_RELEASESLOTRESPONSE']._serialized_start=2355 - _globals['_RELEASESLOTRESPONSE']._serialized_end=2376 - _globals['_DISPATCHER']._serialized_start=3187 - _globals['_DISPATCHER']._serialized_end=3994 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._options = None + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_options = b'8\001' + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._options = None + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_options = b'8\001' + _globals['_ACTIONTYPE']._serialized_start=2813 + _globals['_ACTIONTYPE']._serialized_end=2891 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=2894 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=3056 + _globals['_STEPACTIONEVENTTYPE']._serialized_start=3059 + 
_globals['_STEPACTIONEVENTTYPE']._serialized_end=3197 + _globals['_RESOURCETYPE']._serialized_start=3199 + _globals['_RESOURCETYPE']._serialized_end=3300 + _globals['_RESOURCEEVENTTYPE']._serialized_start=3303 + _globals['_RESOURCEEVENTTYPE']._serialized_end=3557 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=3559 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=3619 + _globals['_WORKERLABELS']._serialized_start=53 + _globals['_WORKERLABELS']._serialized_end=139 + _globals['_WORKERREGISTERREQUEST']._serialized_start=142 + _globals['_WORKERREGISTERREQUEST']._serialized_end=368 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_start=296 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_end=356 + _globals['_WORKERREGISTERRESPONSE']._serialized_start=370 + _globals['_WORKERREGISTERRESPONSE']._serialized_end=450 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_start=453 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_end=616 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_start=296 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_end=356 + _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_start=618 + _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_end=682 + _globals['_ASSIGNEDACTION']._serialized_start=685 + _globals['_ASSIGNEDACTION']._serialized_end=965 + _globals['_WORKERLISTENREQUEST']._serialized_start=967 + _globals['_WORKERLISTENREQUEST']._serialized_end=1006 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=1008 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=1052 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=1054 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=1117 + _globals['_GROUPKEYACTIONEVENT']._serialized_start=1120 + _globals['_GROUPKEYACTIONEVENT']._serialized_end=1345 + _globals['_STEPACTIONEVENT']._serialized_start=1348 + _globals['_STEPACTIONEVENT']._serialized_end=1584 + _globals['_ACTIONEVENTRESPONSE']._serialized_start=1586 + _globals['_ACTIONEVENTRESPONSE']._serialized_end=1643 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1645 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1702 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=1704 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=1759 + _globals['_WORKFLOWEVENT']._serialized_start=1762 + _globals['_WORKFLOWEVENT']._serialized_end=2068 + _globals['_WORKFLOWRUNEVENT']._serialized_start=2071 + _globals['_WORKFLOWRUNEVENT']._serialized_end=2239 + _globals['_STEPRUNRESULT']._serialized_start=2242 + _globals['_STEPRUNRESULT']._serialized_end=2380 + _globals['_OVERRIDESDATA']._serialized_start=2382 + _globals['_OVERRIDESDATA']._serialized_end=2469 + _globals['_OVERRIDESDATARESPONSE']._serialized_start=2471 + _globals['_OVERRIDESDATARESPONSE']._serialized_end=2494 + _globals['_HEARTBEATREQUEST']._serialized_start=2496 + _globals['_HEARTBEATREQUEST']._serialized_end=2581 + _globals['_HEARTBEATRESPONSE']._serialized_start=2583 + _globals['_HEARTBEATRESPONSE']._serialized_end=2602 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=2604 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=2674 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=2676 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=2747 + _globals['_RELEASESLOTREQUEST']._serialized_start=2749 + _globals['_RELEASESLOTREQUEST']._serialized_end=2788 + _globals['_RELEASESLOTRESPONSE']._serialized_start=2790 + _globals['_RELEASESLOTRESPONSE']._serialized_end=2811 + 
_globals['_DISPATCHER']._serialized_start=3622 + _globals['_DISPATCHER']._serialized_end=4510 # @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/contracts/dispatcher_pb2.pyi b/hatchet_sdk/contracts/dispatcher_pb2.pyi index b6e81754..18f26a29 100644 --- a/hatchet_sdk/contracts/dispatcher_pb2.pyi +++ b/hatchet_sdk/contracts/dispatcher_pb2.pyi @@ -69,17 +69,34 @@ RESOURCE_EVENT_TYPE_TIMED_OUT: ResourceEventType RESOURCE_EVENT_TYPE_STREAM: ResourceEventType WORKFLOW_RUN_EVENT_TYPE_FINISHED: WorkflowRunEventType +class WorkerLabels(_message.Message): + __slots__ = ("strValue", "intValue") + STRVALUE_FIELD_NUMBER: _ClassVar[int] + INTVALUE_FIELD_NUMBER: _ClassVar[int] + strValue: str + intValue: int + def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ...) -> None: ... + class WorkerRegisterRequest(_message.Message): - __slots__ = ("workerName", "actions", "services", "maxRuns") + __slots__ = ("workerName", "actions", "services", "maxRuns", "labels") + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: WorkerLabels + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... WORKERNAME_FIELD_NUMBER: _ClassVar[int] ACTIONS_FIELD_NUMBER: _ClassVar[int] SERVICES_FIELD_NUMBER: _ClassVar[int] MAXRUNS_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] workerName: str actions: _containers.RepeatedScalarFieldContainer[str] services: _containers.RepeatedScalarFieldContainer[str] maxRuns: int - def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ...) -> None: ... + labels: _containers.MessageMap[str, WorkerLabels] + def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... class WorkerRegisterResponse(_message.Message): __slots__ = ("tenantId", "workerId", "workerName") @@ -91,6 +108,29 @@ class WorkerRegisterResponse(_message.Message): workerName: str def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ..., workerName: _Optional[str] = ...) -> None: ... +class UpsertWorkerLabelsRequest(_message.Message): + __slots__ = ("workerId", "labels") + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: WorkerLabels + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... + WORKERID_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] + workerId: str + labels: _containers.MessageMap[str, WorkerLabels] + def __init__(self, workerId: _Optional[str] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... + +class UpsertWorkerLabelsResponse(_message.Message): + __slots__ = ("tenantId", "workerId") + TENANTID_FIELD_NUMBER: _ClassVar[int] + WORKERID_FIELD_NUMBER: _ClassVar[int] + tenantId: str + workerId: str + def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ... 
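The dispatcher contract now carries a WorkerLabels message (optional strValue/intValue) plus an UpsertWorkerLabels RPC, so a worker can report or update key/value labels after registration. Below is a short sketch of exercising the new RPC through the generated stub; the endpoint address, the insecure channel, and the worker id are illustrative assumptions, and a real client would attach the SDK's auth metadata to the call.

import grpc

from hatchet_sdk.contracts import dispatcher_pb2, dispatcher_pb2_grpc

# Assumption: a local dispatcher reachable without TLS; production clients
# use a secure channel and attach bearer-token metadata.
channel = grpc.insecure_channel("localhost:7070")
stub = dispatcher_pb2_grpc.DispatcherStub(channel)

response = stub.UpsertWorkerLabels(
    dispatcher_pb2.UpsertWorkerLabelsRequest(
        workerId="00000000-0000-0000-0000-000000000000",  # placeholder worker id
        labels={
            "model": dispatcher_pb2.WorkerLabels(strValue="fancy-vision-model"),
            "memory_gb": dispatcher_pb2.WorkerLabels(intValue=16),
        },
    )
)
print(response.tenantId, response.workerId)

Both value fields are optional on WorkerLabels, matching the proto3 optional strValue/intValue declared above, so a label carries either a string or an int.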
+ class AssignedAction(_message.Message): __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload", "stepName", "retryCount") TENANTID_FIELD_NUMBER: _ClassVar[int] diff --git a/hatchet_sdk/contracts/dispatcher_pb2_grpc.py b/hatchet_sdk/contracts/dispatcher_pb2_grpc.py index 2b6d2630..f10f1693 100644 --- a/hatchet_sdk/contracts/dispatcher_pb2_grpc.py +++ b/hatchet_sdk/contracts/dispatcher_pb2_grpc.py @@ -74,6 +74,11 @@ def __init__(self, channel): request_serializer=dispatcher__pb2.ReleaseSlotRequest.SerializeToString, response_deserializer=dispatcher__pb2.ReleaseSlotResponse.FromString, ) + self.UpsertWorkerLabels = channel.unary_unary( + '/Dispatcher/UpsertWorkerLabels', + request_serializer=dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString, + response_deserializer=dispatcher__pb2.UpsertWorkerLabelsResponse.FromString, + ) class DispatcherServicer(object): @@ -154,6 +159,12 @@ def ReleaseSlot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpsertWorkerLabels(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_DispatcherServicer_to_server(servicer, server): rpc_method_handlers = { @@ -217,6 +228,11 @@ def add_DispatcherServicer_to_server(servicer, server): request_deserializer=dispatcher__pb2.ReleaseSlotRequest.FromString, response_serializer=dispatcher__pb2.ReleaseSlotResponse.SerializeToString, ), + 'UpsertWorkerLabels': grpc.unary_unary_rpc_method_handler( + servicer.UpsertWorkerLabels, + request_deserializer=dispatcher__pb2.UpsertWorkerLabelsRequest.FromString, + response_serializer=dispatcher__pb2.UpsertWorkerLabelsResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'Dispatcher', rpc_method_handlers) @@ -430,3 +446,20 @@ def ReleaseSlot(request, dispatcher__pb2.ReleaseSlotResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpsertWorkerLabels(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/Dispatcher/UpsertWorkerLabels', + dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString, + dispatcher__pb2.UpsertWorkerLabelsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/hatchet_sdk/contracts/workflows_pb2.py b/hatchet_sdk/contracts/workflows_pb2.py index 4049443b..9c397ec1 100644 --- a/hatchet_sdk/contracts/workflows_pb2.py +++ b/hatchet_sdk/contracts/workflows_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xaf\x03\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 
\x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_job\"n\n\x17WorkflowConcurrencyOpts\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x10\n\x08max_runs\x18\x02 \x01(\x05\x12\x31\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategy\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xbe\x01\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\"1\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x05\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"\x9d\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 
\x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\x8a\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xe0\x03\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_sticky\"n\n\x17WorkflowConcurrencyOpts\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x10\n\x08max_runs\x18\x02 \x01(\x05\x12\x31\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategy\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"1\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 
\x01(\x05\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"\xd3\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_id\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\x8a\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -23,40 +23,50 @@ if _descriptor._USE_C_DESCRIPTORS == False: _globals['DESCRIPTOR']._options = None _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=2072 - 
-  _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=2180
-  _globals['_RATELIMITDURATION']._serialized_start=2182
-  _globals['_RATELIMITDURATION']._serialized_end=2275
+  _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._options = None
+  _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001'
+  _globals['_STICKYSTRATEGY']._serialized_start=2544
+  _globals['_STICKYSTRATEGY']._serialized_end=2580
+  _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=2582
+  _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=2690
+  _globals['_WORKERLABELCOMPARATOR']._serialized_start=2693
+  _globals['_WORKERLABELCOMPARATOR']._serialized_end=2826
+  _globals['_RATELIMITDURATION']._serialized_start=2828
+  _globals['_RATELIMITDURATION']._serialized_end=2921
   _globals['_PUTWORKFLOWREQUEST']._serialized_start=52
   _globals['_PUTWORKFLOWREQUEST']._serialized_end=114
   _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117
-  _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_end=548
-  _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_start=550
-  _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_end=660
-  _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=662
-  _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=766
-  _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=769
-  _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=959
-  _globals['_CREATESTEPRATELIMIT']._serialized_start=961
-  _globals['_CREATESTEPRATELIMIT']._serialized_end=1010
-  _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1012
-  _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1034
-  _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1037
-  _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=1312
-  _globals['_WORKFLOWVERSION']._serialized_start=1315
-  _globals['_WORKFLOWVERSION']._serialized_end=1493
-  _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=1495
-  _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=1558
-  _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=1560
-  _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=1617
-  _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=1620
-  _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=1905
-  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=1907
-  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=1957
-  _globals['_PUTRATELIMITREQUEST']._serialized_start=1959
-  _globals['_PUTRATELIMITREQUEST']._serialized_end=2046
-  _globals['_PUTRATELIMITRESPONSE']._serialized_start=2048
-  _globals['_PUTRATELIMITRESPONSE']._serialized_end=2070
-  _globals['_WORKFLOWSERVICE']._serialized_start=2278
-  _globals['_WORKFLOWSERVICE']._serialized_end=2544
+  _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_end=597
+  _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_start=599
+  _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_end=709
+  _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=711
+  _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=815
+  _globals['_DESIREDWORKERLABELS']._serialized_start=818
+  _globals['_DESIREDWORKERLABELS']._serialized_end=1043
+  _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=1046
+  _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1377
+  _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1304
+  _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1377
+  _globals['_CREATESTEPRATELIMIT']._serialized_start=1379
+  _globals['_CREATESTEPRATELIMIT']._serialized_end=1428
+  _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1430
+  _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1452
+  _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1455
+  _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=1730
+  _globals['_WORKFLOWVERSION']._serialized_start=1733
+  _globals['_WORKFLOWVERSION']._serialized_end=1911
+  _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=1913
+  _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=1976
+  _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=1978
+  _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2035
+  _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2038
+  _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2377
+  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=2379
+  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=2429
+  _globals['_PUTRATELIMITREQUEST']._serialized_start=2431
+  _globals['_PUTRATELIMITREQUEST']._serialized_end=2518
+  _globals['_PUTRATELIMITRESPONSE']._serialized_start=2520
+  _globals['_PUTRATELIMITRESPONSE']._serialized_end=2542
+  _globals['_WORKFLOWSERVICE']._serialized_start=2924
+  _globals['_WORKFLOWSERVICE']._serialized_end=3190
 # @@protoc_insertion_point(module_scope)
diff --git a/hatchet_sdk/contracts/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi
index 38eb8a6e..3bd3a45b 100644
--- a/hatchet_sdk/contracts/workflows_pb2.pyi
+++ b/hatchet_sdk/contracts/workflows_pb2.pyi
@@ -7,6 +7,11 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
 
 DESCRIPTOR: _descriptor.FileDescriptor
 
+class StickyStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+    __slots__ = ()
+    SOFT: _ClassVar[StickyStrategy]
+    HARD: _ClassVar[StickyStrategy]
+
 class ConcurrencyLimitStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
     __slots__ = ()
     CANCEL_IN_PROGRESS: _ClassVar[ConcurrencyLimitStrategy]
@@ -14,6 +19,15 @@ class ConcurrencyLimitStrategy(int, metaclass=_enum_type_wrapper
     QUEUE_NEWEST: _ClassVar[ConcurrencyLimitStrategy]
     GROUP_ROUND_ROBIN: _ClassVar[ConcurrencyLimitStrategy]
 
+class WorkerLabelComparator(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+    __slots__ = ()
+    EQUAL: _ClassVar[WorkerLabelComparator]
+    NOT_EQUAL: _ClassVar[WorkerLabelComparator]
+    GREATER_THAN: _ClassVar[WorkerLabelComparator]
+    GREATER_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator]
+    LESS_THAN: _ClassVar[WorkerLabelComparator]
+    LESS_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator]
+
 class RateLimitDuration(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
     __slots__ = ()
     SECOND: _ClassVar[RateLimitDuration]
@@ -23,10 +37,18 @@ class RateLimitDuration(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
     WEEK: _ClassVar[RateLimitDuration]
     MONTH: _ClassVar[RateLimitDuration]
     YEAR: _ClassVar[RateLimitDuration]
+SOFT: StickyStrategy
+HARD: StickyStrategy
 CANCEL_IN_PROGRESS: ConcurrencyLimitStrategy
 DROP_NEWEST: ConcurrencyLimitStrategy
 QUEUE_NEWEST: ConcurrencyLimitStrategy
 GROUP_ROUND_ROBIN: ConcurrencyLimitStrategy
+EQUAL: WorkerLabelComparator
+NOT_EQUAL: WorkerLabelComparator
+GREATER_THAN: WorkerLabelComparator
+GREATER_THAN_OR_EQUAL: WorkerLabelComparator
+LESS_THAN: WorkerLabelComparator
+LESS_THAN_OR_EQUAL: WorkerLabelComparator
 SECOND: RateLimitDuration
 MINUTE: RateLimitDuration
 HOUR: RateLimitDuration
@@ -42,7 +64,7 @@ class PutWorkflowRequest(_message.Message):
     def __init__(self, opts: _Optional[_Union[CreateWorkflowVersionOpts, _Mapping]] = ...) -> None: ...
 
 class CreateWorkflowVersionOpts(_message.Message):
-    __slots__ = ("name", "description", "version", "event_triggers", "cron_triggers", "scheduled_triggers", "jobs", "concurrency", "schedule_timeout", "cron_input", "on_failure_job")
+    __slots__ = ("name", "description", "version", "event_triggers", "cron_triggers", "scheduled_triggers", "jobs", "concurrency", "schedule_timeout", "cron_input", "on_failure_job", "sticky")
     NAME_FIELD_NUMBER: _ClassVar[int]
     DESCRIPTION_FIELD_NUMBER: _ClassVar[int]
     VERSION_FIELD_NUMBER: _ClassVar[int]
@@ -54,6 +76,7 @@ class CreateWorkflowVersionOpts(_message.Message):
     SCHEDULE_TIMEOUT_FIELD_NUMBER: _ClassVar[int]
     CRON_INPUT_FIELD_NUMBER: _ClassVar[int]
     ON_FAILURE_JOB_FIELD_NUMBER: _ClassVar[int]
+    STICKY_FIELD_NUMBER: _ClassVar[int]
     name: str
     description: str
     version: str
@@ -65,7 +88,8 @@ class CreateWorkflowVersionOpts(_message.Message):
     schedule_timeout: str
     cron_input: str
     on_failure_job: CreateWorkflowJobOpts
-    def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., version: _Optional[str] = ..., event_triggers: _Optional[_Iterable[str]] = ..., cron_triggers: _Optional[_Iterable[str]] = ..., scheduled_triggers: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[CreateWorkflowJobOpts, _Mapping]]] = ..., concurrency: _Optional[_Union[WorkflowConcurrencyOpts, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., cron_input: _Optional[str] = ..., on_failure_job: _Optional[_Union[CreateWorkflowJobOpts, _Mapping]] = ...) -> None: ...
+    sticky: StickyStrategy
+    def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., version: _Optional[str] = ..., event_triggers: _Optional[_Iterable[str]] = ..., cron_triggers: _Optional[_Iterable[str]] = ..., scheduled_triggers: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[CreateWorkflowJobOpts, _Mapping]]] = ..., concurrency: _Optional[_Union[WorkflowConcurrencyOpts, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., cron_input: _Optional[str] = ..., on_failure_job: _Optional[_Union[CreateWorkflowJobOpts, _Mapping]] = ..., sticky: _Optional[_Union[StickyStrategy, str]] = ...) -> None: ...
 
 class WorkflowConcurrencyOpts(_message.Message):
     __slots__ = ("action", "max_runs", "limit_strategy")
@@ -87,8 +111,29 @@ class CreateWorkflowJobOpts(_message.Message):
     steps: _containers.RepeatedCompositeFieldContainer[CreateWorkflowStepOpts]
     def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., steps: _Optional[_Iterable[_Union[CreateWorkflowStepOpts, _Mapping]]] = ...) -> None: ...
 
+class DesiredWorkerLabels(_message.Message):
+    __slots__ = ("strValue", "intValue", "required", "comparator", "weight")
+    STRVALUE_FIELD_NUMBER: _ClassVar[int]
+    INTVALUE_FIELD_NUMBER: _ClassVar[int]
+    REQUIRED_FIELD_NUMBER: _ClassVar[int]
+    COMPARATOR_FIELD_NUMBER: _ClassVar[int]
+    WEIGHT_FIELD_NUMBER: _ClassVar[int]
+    strValue: str
+    intValue: int
+    required: bool
+    comparator: WorkerLabelComparator
+    weight: int
+    def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ...
+
 class CreateWorkflowStepOpts(_message.Message):
-    __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits")
+    __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits", "worker_labels")
+    class WorkerLabelsEntry(_message.Message):
+        __slots__ = ("key", "value")
+        KEY_FIELD_NUMBER: _ClassVar[int]
+        VALUE_FIELD_NUMBER: _ClassVar[int]
+        key: str
+        value: DesiredWorkerLabels
+        def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[DesiredWorkerLabels, _Mapping]] = ...) -> None: ...
     READABLE_ID_FIELD_NUMBER: _ClassVar[int]
     ACTION_FIELD_NUMBER: _ClassVar[int]
     TIMEOUT_FIELD_NUMBER: _ClassVar[int]
@@ -97,6 +142,7 @@ class CreateWorkflowStepOpts(_message.Message):
     USER_DATA_FIELD_NUMBER: _ClassVar[int]
     RETRIES_FIELD_NUMBER: _ClassVar[int]
     RATE_LIMITS_FIELD_NUMBER: _ClassVar[int]
+    WORKER_LABELS_FIELD_NUMBER: _ClassVar[int]
     readable_id: str
     action: str
     timeout: str
@@ -105,7 +151,8 @@ class CreateWorkflowStepOpts(_message.Message):
     user_data: str
     retries: int
     rate_limits: _containers.RepeatedCompositeFieldContainer[CreateStepRateLimit]
-    def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ...) -> None: ...
+    worker_labels: _containers.MessageMap[str, DesiredWorkerLabels]
+    def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ...) -> None: ...
 
 class CreateStepRateLimit(_message.Message):
     __slots__ = ("key", "units")
@@ -170,7 +217,7 @@ class WorkflowTriggerCronRef(_message.Message):
     def __init__(self, parent_id: _Optional[str] = ..., cron: _Optional[str] = ...) -> None: ...
 
 class TriggerWorkflowRequest(_message.Message):
-    __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata")
+    __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "desired_worker_id")
     NAME_FIELD_NUMBER: _ClassVar[int]
     INPUT_FIELD_NUMBER: _ClassVar[int]
     PARENT_ID_FIELD_NUMBER: _ClassVar[int]
@@ -178,6 +225,7 @@ class TriggerWorkflowRequest(_message.Message):
     CHILD_INDEX_FIELD_NUMBER: _ClassVar[int]
     CHILD_KEY_FIELD_NUMBER: _ClassVar[int]
     ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int]
+    DESIRED_WORKER_ID_FIELD_NUMBER: _ClassVar[int]
     name: str
     input: str
     parent_id: str
@@ -185,7 +233,8 @@ class TriggerWorkflowRequest(_message.Message):
     child_index: int
     child_key: str
     additional_metadata: str
-    def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ...) -> None: ...
+    desired_worker_id: str
+    def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., desired_worker_id: _Optional[str] = ...) -> None: ...
 
 class TriggerWorkflowResponse(_message.Message):
     __slots__ = ("workflow_run_id",)

From 7637ceba940290a6922c436ac73a83c383544b64 Mon Sep 17 00:00:00 2001
From: gabriel ruttner
Date: Wed, 24 Jul 2024 11:52:16 -0400
Subject: [PATCH 19/19] chore: move worker

---
 hatchet_sdk/{ => worker}/worker.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
 rename hatchet_sdk/{ => worker}/worker.py (99%)

diff --git a/hatchet_sdk/worker.py b/hatchet_sdk/worker/worker.py
similarity index 99%
rename from hatchet_sdk/worker.py
rename to hatchet_sdk/worker/worker.py
index abedfc56..60f858d0 100644
--- a/hatchet_sdk/worker.py
+++ b/hatchet_sdk/worker/worker.py
@@ -40,16 +40,16 @@
 )
 from hatchet_sdk.loader import ClientConfig
 
-from .client import new_client, new_client_raw
-from .clients.dispatcher import (
+from ..client import new_client, new_client_raw
+from ..clients.dispatcher import (
     Action,
     ActionListenerImpl,
     GetActionListenerRequest,
     new_dispatcher,
 )
-from .context import Context
-from .logger import logger
-from .workflow import WorkflowMeta
+from ..context import Context
+from ..logger import logger
+from ..workflow import WorkflowMeta
 
 wr: contextvars.ContextVar[str | None] = contextvars.ContextVar(
     "workflow_run_id", default=None
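Taken together, the regenerated contracts above add a worker-affinity surface: an optional sticky strategy on CreateWorkflowVersionOpts, a per-step worker_labels map of DesiredWorkerLabels, and an optional desired_worker_id on TriggerWorkflowRequest. Below is a minimal sketch of constructing these messages, using only the fields and enums declared in the workflows_pb2.pyi stubs in this patch; the workflow name, action string, label key/value, and worker id are illustrative placeholders, not values defined anywhere in the series.

# Sketch only: message and enum names come from the regenerated
# hatchet_sdk.contracts.workflows_pb2; concrete values are made up.
from hatchet_sdk.contracts.workflows_pb2 import (
    CreateWorkflowStepOpts,
    CreateWorkflowVersionOpts,
    DesiredWorkerLabels,
    StickyStrategy,
    TriggerWorkflowRequest,
    WorkerLabelComparator,
)

# A step that requires workers advertising the label gpu == "a100"
# (hypothetical label; the map value is a DesiredWorkerLabels message).
step = CreateWorkflowStepOpts(
    readable_id="process",
    action="parent:process",  # illustrative action string
    timeout="60s",
    worker_labels={
        "gpu": DesiredWorkerLabels(
            strValue="a100",
            required=True,
            comparator=WorkerLabelComparator.EQUAL,
        )
    },
)

# A workflow version opting into soft sticky assignment.
opts = CreateWorkflowVersionOpts(name="Parent", sticky=StickyStrategy.SOFT)

# A trigger request that asks the engine to route the run to one worker.
req = TriggerWorkflowRequest(
    name="Parent", input='{"n": 2}', desired_worker_id="worker-1234"
)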