diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml new file mode 100644 index 00000000..1fefc43f --- /dev/null +++ b/.github/workflows/e2e.yaml @@ -0,0 +1,39 @@ +name: e2e + +on: [pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.5.1 + virtualenvs-create: true + virtualenvs-in-project: true + - name: Install dependencies + run: poetry install --no-interaction + - name: Set HATCHET_CLIENT_NAMESPACE + run: | + PYTHON_VERSION=$(python -c "import sys; print(f'py{sys.version_info.major}{sys.version_info.minor}')") + SHORT_SHA=$(git rev-parse --short HEAD) + echo "HATCHET_CLIENT_NAMESPACE=${PYTHON_VERSION}-${SHORT_SHA}" >> $GITHUB_ENV + - name: Run pytest + env: + HATCHET_CLIENT_TOKEN: ${{ secrets.HATCHET_CLIENT_TOKEN }} + run: | + echo "Using HATCHET_CLIENT_NAMESPACE: $HATCHET_CLIENT_NAMESPACE" + poetry run pytest diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..f290ce12 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,11 @@ +{ + "python.testing.pytestArgs": [ + "." + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "cSpell.words": [ + "dotenv", + "reqs" + ] +} diff --git a/examples/_deprecated/README.md b/examples/_deprecated/README.md new file mode 100644 index 00000000..ee47c61b --- /dev/null +++ b/examples/_deprecated/README.md @@ -0,0 +1 @@ +The examples and tests in this directory are deprecated, but we're maintaining them to ensure backwards compatibility. 
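The `HATCHET_CLIENT_NAMESPACE` step above is what keeps the three matrix jobs from consuming each other's events: the SDK uses the namespace to prefix workflow and event names, so each interpreter/commit pair gets its own isolated set. A minimal sketch of reproducing the same value locally, assuming a git checkout (the helper name is illustrative, not part of the workflow):

```python
import subprocess
import sys


def client_namespace() -> str:
    # Mirrors the CI step, e.g. "py312-1fefc43f".
    py = f"py{sys.version_info.major}{sys.version_info.minor}"
    sha = subprocess.check_output(
        ["git", "rev-parse", "--short", "HEAD"], text=True
    ).strip()
    return f"{py}-{sha}"


if __name__ == "__main__":
    print(client_namespace())
```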
diff --git a/examples/_deprecated/test_event_client.py b/examples/_deprecated/test_event_client.py new file mode 100644 index 00000000..978c33f3 --- /dev/null +++ b/examples/_deprecated/test_event_client.py @@ -0,0 +1,26 @@ +import pytest +from dotenv import load_dotenv + +from hatchet_sdk import new_client +from hatchet_sdk.hatchet import Hatchet + +load_dotenv() + + +@pytest.mark.asyncio(scope="session") +async def test_direct_client_event(): + client = new_client() + e = client.event.push("user:create", {"test": "test"}) + + assert e.eventId is not None + + +@pytest.mark.filterwarnings( + "ignore:Direct access to client is deprecated:DeprecationWarning" +) +@pytest.mark.asyncio(scope="session") +async def test_hatchet_client_event(): + hatchet = Hatchet() + e = hatchet.client.event.push("user:create", {"test": "test"}) + + assert e.eventId is not None diff --git a/examples/api/api.py b/examples/api/api.py index b7dc1a06..0b091fee 100644 --- a/examples/api/api.py +++ b/examples/api/api.py @@ -6,10 +6,16 @@ hatchet = Hatchet(debug=True) -list: WorkflowList = hatchet.client.rest().workflow_list() -for workflow in list.rows: - print(workflow.name) - print(workflow.metadata.id) - print(workflow.metadata.created_at) - print(workflow.metadata.updated_at) +def main(): + list: WorkflowList = hatchet.rest.workflow_list() + + for workflow in list.rows: + print(workflow.name) + print(workflow.metadata.id) + print(workflow.metadata.created_at) + print(workflow.metadata.updated_at) + + +if __name__ == "__main__": + main() diff --git a/examples/api/test_api.py b/examples/api/test_api.py new file mode 100644 index 00000000..86e82ee1 --- /dev/null +++ b/examples/api/test_api.py @@ -0,0 +1,14 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_list_workflows(hatchet: Hatchet): + list = hatchet.rest.workflow_list() + + assert len(list.rows) != 0 diff --git a/examples/async/event.py b/examples/async/event.py new file mode 100644 index 00000000..a0fc8cf7 --- /dev/null +++ b/examples/async/event.py @@ -0,0 +1,8 @@ +from dotenv import load_dotenv + +from hatchet_sdk import Hatchet + +load_dotenv() + +hatchet = Hatchet() +hatchet.event.push("async:create", {"test": "test"}) diff --git a/examples/async/event_test.py b/examples/async/event_test.py deleted file mode 100644 index 53fe9473..00000000 --- a/examples/async/event_test.py +++ /dev/null @@ -1,9 +0,0 @@ -from dotenv import load_dotenv - -from hatchet_sdk import new_client - -load_dotenv() - -client = new_client() - -client.event.push("user:create", {"test": "test"}) diff --git a/examples/async/test_async.py b/examples/async/test_async.py new file mode 100644 index 00000000..47387bd8 --- /dev/null +++ b/examples/async/test_async.py @@ -0,0 +1,23 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "async"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("AsyncWorkflow", {}) + result = await run.result() + assert result["step1"]["test"] == "test" + + +@pytest.mark.asyncio(scope="session") +async def test_run_async(hatchet: Hatchet): 
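+    # Same workflow as test_run above, but started through the async variant
+    # of the admin client (hatchet.admin.aio); the result API is identical.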
+ run = await hatchet.admin.aio.run_workflow("AsyncWorkflow", {}) + result = await run.result() + assert result["step1"]["test"] == "test" diff --git a/examples/async/worker.py b/examples/async/worker.py index 32f8485d..0c766dc0 100644 --- a/examples/async/worker.py +++ b/examples/async/worker.py @@ -9,33 +9,31 @@ hatchet = Hatchet(debug=True) -@hatchet.workflow(on_events=["user:create"]) +@hatchet.workflow(on_events=["async:create"]) class AsyncWorkflow: def __init__(self): self.my_value = "test" @hatchet.step(timeout="2s") async def step1(self, context: Context): - context.refresh_timeout("5s") - print("started step1") - await asyncio.sleep(3) - print("finished step1") - return {"test": "test"} @hatchet.step(parents=["step1"], timeout="4s") async def step2(self, context): - print("started async step2") - await asyncio.sleep(2) print("finished step2") -async def main(): +async def _main(): workflow = AsyncWorkflow() - worker = hatchet.worker("test-worker", max_runs=4) + worker = hatchet.worker("async-worker", max_runs=4) worker.register_workflow(workflow) await worker.async_start() -asyncio.run(main()) +def main(): + asyncio.run(_main()) + + +if __name__ == "__main__": + main() diff --git a/examples/cancellation/test_cancellation.py b/examples/cancellation/test_cancellation.py new file mode 100644 index 00000000..88f531df --- /dev/null +++ b/examples/cancellation/test_cancellation.py @@ -0,0 +1,17 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "cancellation"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("CancelWorkflow", {}) + result = await run.result() + # TODO is this the expected result for a timed out run... 
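+    # Presumably empty because the only step was cancelled before returning,
+    # so the run completes with no recorded step outputs.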
+ assert result == {} diff --git a/examples/cancellation/worker.py b/examples/cancellation/worker.py index 26426e64..b495e05d 100644 --- a/examples/cancellation/worker.py +++ b/examples/cancellation/worker.py @@ -11,23 +11,20 @@ @hatchet.workflow(on_events=["user:create"]) class CancelWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.step(timeout="10s", retries=1) async def step1(self, context: Context): i = 0 - while not context.exit_flag.is_set() and i < 20: + while not context.exit_flag and i < 20: print(f"Waiting for cancellation {i}") await asyncio.sleep(1) i += 1 - if context.exit_flag.is_set(): + if context.exit_flag: print("Cancelled") workflow = CancelWorkflow() -worker = hatchet.worker("test-worker", max_runs=4) +worker = hatchet.worker("cancellation-worker", max_runs=4) worker.register_workflow(workflow) worker.start() diff --git a/examples/concurrency-limit-rr/test_concurrency_limit_rr.py b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py new file mode 100644 index 00000000..e135d10c --- /dev/null +++ b/examples/concurrency-limit-rr/test_concurrency_limit_rr.py @@ -0,0 +1,62 @@ +import asyncio +import time + +import pytest + +from hatchet_sdk import Hatchet +from hatchet_sdk.workflow_run import WorkflowRunRef +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "concurrency_limit_rr"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + num_groups = 2 + runs: list[WorkflowRunRef] = [] + + # Start all runs + for i in range(1, num_groups + 1): + run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) + runs.append(run) + run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) + runs.append(run) + + # Wait for all results + successful_runs = [] + cancelled_runs = [] + + start_time = time.time() + + # Process each run individually + for i, run in enumerate(runs, start=1): + try: + result = await run.result() + successful_runs.append((i, result)) + except Exception as e: + if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e): + cancelled_runs.append((i, str(e))) + else: + raise # Re-raise if it's an unexpected error + + end_time = time.time() + total_time = end_time - start_time + + # Check that we have the correct number of successful and cancelled runs + assert ( + len(successful_runs) == 4 + ), f"Expected 4 successful runs, got {len(successful_runs)}" + assert ( + len(cancelled_runs) == 0 + ), f"Expected 0 cancelled runs, got {len(cancelled_runs)}" + + # Check that the total time is close to 4 seconds + assert ( + 3.8 <= total_time <= 5 + ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds" + + print(f"Total execution time: {total_time:.2f} seconds") diff --git a/examples/concurrency-limit-rr/worker.py b/examples/concurrency-limit-rr/worker.py index 52c9724c..be58ed81 100644 --- a/examples/concurrency-limit-rr/worker.py +++ b/examples/concurrency-limit-rr/worker.py @@ -16,15 +16,13 @@ class ConcurrencyDemoWorkflowRR: ) def concurrency(self, context: Context) -> str: input = context.workflow_input() - print(input) - - return input.get("group") + return f'group-{input["group"]}' @hatchet.step() def step1(self, context): print("starting step1") - time.sleep(0.2) + time.sleep(2) print("finished step1")
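        # With 2s of work per run and a per-group concurrency key, the four
        # runs above interleave round-robin to roughly 4s total, which is what
        # the (skipped) timing assertion in the test checks.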
pass diff --git a/examples/concurrency-limit/test_concurrency_limit.py b/examples/concurrency-limit/test_concurrency_limit.py new file mode 100644 index 00000000..ba89f21c --- /dev/null +++ b/examples/concurrency-limit/test_concurrency_limit.py @@ -0,0 +1,47 @@ +import asyncio + +import pytest + +from hatchet_sdk import Hatchet +from hatchet_sdk.workflow_run import WorkflowRunRef +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "concurrency_limit"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + num_runs = 6 + runs: list[WorkflowRunRef] = [] + + # Start all runs + for i in range(1, num_runs + 1): + run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflow", {"run": i}) + runs.append(run) + + # Wait for all results + successful_runs = [] + cancelled_runs = [] + + # Process each run individually + for i, run in enumerate(runs, start=1): + try: + result = await run.result() + successful_runs.append((i, result)) + except Exception as e: + if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e): + cancelled_runs.append((i, str(e))) + else: + raise # Re-raise if it's an unexpected error + + # Check that we have the correct number of successful and cancelled runs + assert ( + len(successful_runs) == 5 + ), f"Expected 5 successful runs, got {len(successful_runs)}" + assert ( + len(cancelled_runs) == 1 + ), f"Expected 1 cancelled run, got {len(cancelled_runs)}" diff --git a/examples/concurrency-limit/worker.py b/examples/concurrency-limit/worker.py index ca01b7cc..b4820628 100644 --- a/examples/concurrency-limit/worker.py +++ b/examples/concurrency-limit/worker.py @@ -3,6 +3,7 @@ from dotenv import load_dotenv from hatchet_sdk import Hatchet +from hatchet_sdk.context import Context load_dotenv() @@ -11,27 +12,26 @@ @hatchet.workflow(on_events=["concurrency-test"]) class ConcurrencyDemoWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.concurrency(max_runs=5) def concurrency(self, context) -> str: return "concurrency-key" @hatchet.step() - def step1(self, context): + def step1(self, context: Context): + input = context.workflow_input() + time.sleep(3) print("executed step1") - pass + return {"run": input["run"]} - @hatchet.step(parents=["step1"], timeout="4s") - def step2(self, context): - print("started step2") - time.sleep(1) - print("finished step2") +def main(): + workflow = ConcurrencyDemoWorkflow() + worker = hatchet.worker("concurrency-demo-worker", max_runs=10) + worker.register_workflow(workflow) -workflow = ConcurrencyDemoWorkflow() -worker = hatchet.worker("concurrency-demo-worker", max_runs=4) -worker.register_workflow(workflow) + worker.start() -worker.start() + +if __name__ == "__main__": + main() diff --git a/examples/dag/event.py b/examples/dag/event.py index d6a149f0..96953908 100644 --- a/examples/dag/event.py +++ b/examples/dag/event.py @@ -1,10 +1,10 @@ from dotenv import load_dotenv -from hatchet_sdk import new_client +from hatchet_sdk import Context, Hatchet load_dotenv() -client = new_client() +hatchet = Hatchet(debug=True) for i in range(10): - client.event.push("user:create", {"test": "test"}) + hatchet.event.push("dag:create", {"test": "test"}) diff --git a/examples/dag/test_dag.py b/examples/dag/test_dag.py new file mode 100644 index 00000000..89e2f22e --- /dev/null 
+++ b/examples/dag/test_dag.py @@ -0,0 +1,20 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "dag"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("DagWorkflow", {}) + result = await run.result() + + one = result["step1"]["rando"] + two = result["step2"]["rando"] + assert result["step3"]["sum"] == one + two + assert result["step4"]["step4"] == "step4" diff --git a/examples/dag/worker.py b/examples/dag/worker.py index a29af287..7b14e302 100644 --- a/examples/dag/worker.py +++ b/examples/dag/worker.py @@ -1,3 +1,4 @@ +import random import time from dotenv import load_dotenv @@ -9,50 +10,34 @@ hatchet = Hatchet(debug=True) -@hatchet.workflow(on_events=["user:create"], schedule_timeout="10m") -class MyWorkflow: - def __init__(self): - self.my_value = "test" +@hatchet.workflow(on_events=["dag:create"], schedule_timeout="10m") +class DagWorkflow: @hatchet.step(timeout="5s") def step1(self, context: Context): - print( - "starting step1", - time.strftime("%H:%M:%S", time.localtime()), - context.workflow_input(), - ) - overrideValue = context.playground("prompt", "You are an AI assistant...") - time.sleep(3) - # pretty-print time - print("executed step1", time.strftime("%H:%M:%S", time.localtime())) + rando = random.randint( + 1, 100 + ) # Generate a random number between 1 and 100 return { - "step1": overrideValue, + "rando": rando, } - @hatchet.step() + @hatchet.step(timeout="5s") def step2(self, context: Context): - print( - "starting step2", - time.strftime("%H:%M:%S", time.localtime()), - context.workflow_input(), - ) - time.sleep(5) - print("executed step2", time.strftime("%H:%M:%S", time.localtime())) + rando = random.randint( + 1, 100 + ) # Generate a random number between 1 and 100 return { - "step2": "step2", + "rando": rando, } @hatchet.step(parents=["step1", "step2"]) def step3(self, context: Context): - print( - "executed step3", - time.strftime("%H:%M:%S", time.localtime()), - context.workflow_input(), - context.step_output("step1"), - context.step_output("step2"), - ) + one = context.step_output("step1")["rando"] + two = context.step_output("step2")["rando"] + return { - "step3": "step3", + "sum": one + two, } @hatchet.step(parents=["step1", "step3"]) @@ -69,8 +54,14 @@ def step4(self, context: Context): } -workflow = MyWorkflow() -worker = hatchet.worker("test-worker") -worker.register_workflow(workflow) +def main(): + + workflow = DagWorkflow() + worker = hatchet.worker("dag-worker") + worker.register_workflow(workflow) + + worker.start() + -worker.start() +if __name__ == "__main__": + main() diff --git a/examples/delayed/test_delayed.py b/examples/delayed/test_delayed.py new file mode 100644 index 00000000..55103aa6 --- /dev/null +++ b/examples/delayed/test_delayed.py @@ -0,0 +1,14 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# # TODO diff --git a/examples/delayed/worker.py
b/examples/delayed/worker.py index c6e5ded8..9e9197e2 100644 --- a/examples/delayed/worker.py +++ b/examples/delayed/worker.py @@ -18,7 +18,7 @@ def schedule(self, context: Context): future_time = now + timedelta(seconds=15) print(f"scheduling for \t {future_time.strftime('%H:%M:%S')}") - hatchet.client.admin.schedule_workflow( + hatchet.admin.schedule_workflow( "PrintPrinter", [future_time], context.workflow_input() ) @@ -32,7 +32,7 @@ def step1(self, context: Context): print(f"message \t {context.workflow_input()['message']}") -worker = hatchet.worker("test-worker", max_runs=4) +worker = hatchet.worker("delayed-worker", max_runs=4) worker.register_workflow(PrintSchedule()) worker.register_workflow(PrintPrinter()) diff --git a/examples/events/event.py b/examples/events/event.py new file mode 100644 index 00000000..21b83cdd --- /dev/null +++ b/examples/events/event.py @@ -0,0 +1,8 @@ +from dotenv import load_dotenv + +from hatchet_sdk import Hatchet + +load_dotenv() + +hatchet = Hatchet() +hatchet.event.push("user:create", {"test": "test"}) diff --git a/examples/events/test_event.py b/examples/events/test_event.py new file mode 100644 index 00000000..f67be3f8 --- /dev/null +++ b/examples/events/test_event.py @@ -0,0 +1,14 @@ +import pytest + +from hatchet_sdk.hatchet import Hatchet +from tests.utils import hatchet_client_fixture + +hatchet = hatchet_client_fixture() + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_event_push(hatchet: Hatchet): + e = hatchet.event.push("user:create", {"test": "test"}) + + assert e.eventId is not None diff --git a/examples/fanout/test_fanout.py b/examples/fanout/test_fanout.py new file mode 100644 index 00000000..85ed28dd --- /dev/null +++ b/examples/fanout/test_fanout.py @@ -0,0 +1,24 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "fanout"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("Parent", {"n": 2}) + result = await run.result() + assert len(result["spawn"]["results"]) == 2 + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run2(hatchet: Hatchet): + run = hatchet.admin.run_workflow("Parent", {"n": 2}) + result = await run.result() + assert len(result["spawn"]["results"]) == 2 diff --git a/examples/fanout/worker.py b/examples/fanout/worker.py index cda2f6bb..9b0b3c30 100644 --- a/examples/fanout/worker.py +++ b/examples/fanout/worker.py @@ -17,7 +17,9 @@ async def spawn(self, context: Context): results = [] - for i in range(100): + n = context.workflow_input().get("n", 100) + + for i in range(n): results.append( ( await context.aio.spawn_workflow( @@ -38,13 +40,13 @@ async def spawn(self, context: Context): @hatchet.workflow(on_events=["child:create"]) class Child: @hatchet.step() - async def process(self, context: Context): + def process(self, context: Context): a = context.workflow_input()["a"] print(f"child process {a}") return {"status": "success " + a} @hatchet.step() - async def process2(self, context: Context): + def process2(self, context: Context): print("child process2") return {"status2": "success"} diff --git a/examples/logger/test_logger.py b/examples/logger/test_logger.py new file mode 
100644 index 00000000..f666526c --- /dev/null +++ b/examples/logger/test_logger.py @@ -0,0 +1,16 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "logger"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + run = hatchet.admin.run_workflow("LoggingWorkflow", {}) + result = await run.result() + assert result["step1"]["status"] == "success" diff --git a/examples/logger/worker.py b/examples/logger/worker.py index ce846cdc..9c51bdcf 100644 --- a/examples/logger/worker.py +++ b/examples/logger/worker.py @@ -5,9 +5,15 @@ from examples.logger.client import hatchet from examples.logger.workflow import LoggingWorkflow -worker = hatchet.worker("test-worker", max_runs=5) -workflow = LoggingWorkflow() -worker.register_workflow(workflow) +def main(): + worker = hatchet.worker("logger-worker", max_runs=5) -worker.start() + workflow = LoggingWorkflow() + worker.register_workflow(workflow) + + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/logger/workflow.py b/examples/logger/workflow.py index cc51c14d..a3afa0aa 100644 --- a/examples/logger/workflow.py +++ b/examples/logger/workflow.py @@ -7,11 +7,12 @@ logger = logging.getLogger(__name__) -@hatchet.workflow(on_crons=["* * * * *"]) +@hatchet.workflow() class LoggingWorkflow: @hatchet.step() def step1(self, context: Context): for i in range(12): logger.info("executed step1 - {}".format(i)) - time.sleep(1) + logger.info({"step1": "step1"}) + time.sleep(0.1) return {"status": "success"} diff --git a/examples/manual_trigger/test_manual_trigger.py b/examples/manual_trigger/test_manual_trigger.py new file mode 100644 index 00000000..55103aa6 --- /dev/null +++ b/examples/manual_trigger/test_manual_trigger.py @@ -0,0 +1,14 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# # TODO diff --git a/examples/on_failure/test_on_failure.py b/examples/on_failure/test_on_failure.py new file mode 100644 index 00000000..55103aa6 --- /dev/null +++ b/examples/on_failure/test_on_failure.py @@ -0,0 +1,14 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# # TODO diff --git a/examples/on_failure/worker.py b/examples/on_failure/worker.py index afc35c0e..e1b36e3e 100644 --- a/examples/on_failure/worker.py +++ b/examples/on_failure/worker.py @@ -21,8 +21,13 @@ def on_failure(self, context): print(context) -workflow = OnFailureWorkflow() -worker = hatchet.worker("test-worker", max_runs=4) -worker.register_workflow(workflow) +def main(): + workflow = OnFailureWorkflow() + worker = hatchet.worker("on-failure-worker", max_runs=4) + 
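    # Registering the workflow registers its on_failure step as well; Hatchet
    # runs that step only after the workflow's other steps have failed.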
worker.register_workflow(workflow) -worker.start() + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/overrides/test_overrides.py b/examples/overrides/test_overrides.py new file mode 100644 index 00000000..a3a232d0 --- /dev/null +++ b/examples/overrides/test_overrides.py @@ -0,0 +1,16 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "async"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# run = hatchet.admin.run_workflow("DagWorkflow", {}) +# result = await run.result() +# assert result["step1"]["test"] == "test" diff --git a/examples/overrides/worker.py b/examples/overrides/worker.py new file mode 100644 index 00000000..86609df3 --- /dev/null +++ b/examples/overrides/worker.py @@ -0,0 +1,76 @@ +import time + +from dotenv import load_dotenv + +from hatchet_sdk import Context, Hatchet + +load_dotenv() + +hatchet = Hatchet(debug=True) + + +@hatchet.workflow(on_events=["overrides:create"], schedule_timeout="10m") +class OverridesWorkflow: + def __init__(self): + self.my_value = "test" + + @hatchet.step(timeout="5s") + def step1(self, context: Context): + print( + "starting step1", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + ) + overrideValue = context.playground("prompt", "You are an AI assistant...") + time.sleep(3) + # pretty-print time + print("executed step1", time.strftime("%H:%M:%S", time.localtime())) + return { + "step1": overrideValue, + } + + @hatchet.step() + def step2(self, context: Context): + print( + "starting step2", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + ) + time.sleep(5) + print("executed step2", time.strftime("%H:%M:%S", time.localtime())) + return { + "step2": "step2", + } + + @hatchet.step(parents=["step1", "step2"]) + def step3(self, context: Context): + print( + "executed step3", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + context.step_output("step1"), + context.step_output("step2"), + ) + return { + "step3": "step3", + } + + @hatchet.step(parents=["step1", "step3"]) + def step4(self, context: Context): + print( + "executed step4", + time.strftime("%H:%M:%S", time.localtime()), + context.workflow_input(), + context.step_output("step1"), + context.step_output("step3"), + ) + return { + "step4": "step4", + } + + +workflow = OverridesWorkflow() +worker = hatchet.worker("overrides-worker") +worker.register_workflow(workflow) + +worker.start() diff --git a/examples/programatic_replay/script.py b/examples/programatic_replay/script.py index 02f9c1b1..edb5d685 100644 --- a/examples/programatic_replay/script.py +++ b/examples/programatic_replay/script.py @@ -1,7 +1,6 @@ from dotenv import load_dotenv -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus +from hatchet_sdk import Hatchet, WorkflowRunStatus load_dotenv() @@ -9,9 +8,7 @@ if __name__ == "__main__": # Look up the failed workflow runs - failed = hatchet.client.rest.events_list( - statuses=[WorkflowRunStatus.FAILED], limit=3 - ) + failed = hatchet.rest.events_list(statuses=[WorkflowRunStatus.FAILED], limit=3) # Replay the failed workflow runs - retried = hatchet.client.rest.events_replay(failed) + retried = 
hatchet.rest.events_replay(failed) diff --git a/examples/programatic_replay/test_programatic_replay.py b/examples/programatic_replay/test_programatic_replay.py new file mode 100644 index 00000000..a3a232d0 --- /dev/null +++ b/examples/programatic_replay/test_programatic_replay.py @@ -0,0 +1,16 @@ +# from hatchet_sdk import Hatchet +# import pytest + +# from tests.utils import fixture_bg_worker +# from tests.utils.hatchet_client import hatchet_client_fixture + + +# hatchet = hatchet_client_fixture() +# worker = fixture_bg_worker(["poetry", "run", "async"]) + +# # requires scope module or higher for shared event loop +# @pytest.mark.asyncio(scope="session") +# async def test_run(hatchet: Hatchet): +# run = hatchet.admin.run_workflow("DagWorkflow", {}) +# result = await run.result() +# assert result["step1"]["test"] == "test" diff --git a/examples/rate_limit/event.py b/examples/rate_limit/event.py index ae8776fd..ed077fea 100644 --- a/examples/rate_limit/event.py +++ b/examples/rate_limit/event.py @@ -6,6 +6,6 @@ hatchet = Hatchet(debug=True) -hatchet.client.event.push("rate_limit:create", {"test": "1"}) -hatchet.client.event.push("rate_limit:create", {"test": "2"}) -hatchet.client.event.push("rate_limit:create", {"test": "3"}) +hatchet.event.push("rate_limit:create", {"test": "1"}) +hatchet.event.push("rate_limit:create", {"test": "2"}) +hatchet.event.push("rate_limit:create", {"test": "3"}) diff --git a/examples/rate_limit/test_rate_limit.py b/examples/rate_limit/test_rate_limit.py new file mode 100644 index 00000000..6f23da01 --- /dev/null +++ b/examples/rate_limit/test_rate_limit.py @@ -0,0 +1,33 @@ +import asyncio +import time + +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "rate_limit"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.skip(reason="The timing for this test is not reliable") +@pytest.mark.asyncio(scope="session") +async def test_run(hatchet: Hatchet): + + run1 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + run2 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + run3 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) + + start_time = time.time() + + await asyncio.gather(run1.result(), run2.result(), run3.result()) + + end_time = time.time() + + total_time = end_time - start_time + + assert ( + 1 <= total_time <= 2 + ), f"Expected runtime to be a bit more than 1 second, but it took {total_time:.2f} seconds" diff --git a/examples/rate_limit/worker.py b/examples/rate_limit/worker.py index 48637cef..d773b606 100644 --- a/examples/rate_limit/worker.py +++ b/examples/rate_limit/worker.py @@ -10,8 +10,6 @@ @hatchet.workflow(on_events=["rate_limit:create"]) class RateLimitWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.step(rate_limits=[RateLimit(key="test-limit", units=1)]) def step1(self, context: Context): @@ -19,9 +17,10 @@ def step1(self, context: Context): pass -hatchet.client.admin.put_rate_limit("test-limit", 2, RateLimitDuration.MINUTE) +def main(): + hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.SECOND) -worker = hatchet.worker("test-worker", max_runs=4) -worker.register_workflow(RateLimitWorkflow()) + worker = hatchet.worker("rate-limit-worker", max_runs=10) + worker.register_workflow(RateLimitWorkflow()) -worker.start() + worker.start() diff --git a/examples/timeout/test_timeout.py
b/examples/timeout/test_timeout.py new file mode 100644 index 00000000..d702fd04 --- /dev/null +++ b/examples/timeout/test_timeout.py @@ -0,0 +1,26 @@ +import pytest + +from hatchet_sdk import Hatchet +from tests.utils import fixture_bg_worker +from tests.utils.hatchet_client import hatchet_client_fixture + +hatchet = hatchet_client_fixture() +worker = fixture_bg_worker(["poetry", "run", "timeout"]) + + +# requires scope module or higher for shared event loop +@pytest.mark.asyncio(scope="session") +async def test_run_timeout(hatchet: Hatchet): + run = hatchet.admin.run_workflow("TimeoutWorkflow", {}) + try: + await run.result() + assert False, "Expected workflow to timeout" + except Exception as e: + assert str(e) == "Workflow Errors: ['TIMED_OUT']" + + +@pytest.mark.asyncio(scope="session") +async def test_run_refresh_timeout(hatchet: Hatchet): + run = hatchet.admin.run_workflow("RefreshTimeoutWorkflow", {}) + result = await run.result() + assert result["step1"]["status"] == "success" diff --git a/examples/timeout/worker.py b/examples/timeout/worker.py index 6d55f0e4..07885ba0 100644 --- a/examples/timeout/worker.py +++ b/examples/timeout/worker.py @@ -2,31 +2,41 @@ from dotenv import load_dotenv -from hatchet_sdk import Hatchet +from hatchet_sdk import Context, Hatchet load_dotenv() hatchet = Hatchet(debug=True) -@hatchet.workflow(on_events=["user:create"]) +@hatchet.workflow(on_events=["timeout:create"]) class TimeoutWorkflow: - def __init__(self): - self.my_value = "test" @hatchet.step(timeout="4s") - def timeout(self, context): - try: - print("started step2") - time.sleep(5) - print("finished step2") - except Exception as e: - print("caught an exception: " + str(e)) - raise e + def step1(self, context: Context): + time.sleep(5) + return {"status": "success"} -workflow = TimeoutWorkflow() -worker = hatchet.worker("timeout-worker", max_runs=4) -worker.register_workflow(workflow) +@hatchet.workflow(on_events=["refresh:create"]) +class RefreshTimeoutWorkflow: -worker.start() + @hatchet.step(timeout="4s") + def step1(self, context: Context): + + context.refresh_timeout("10s") + time.sleep(5) + + return {"status": "success"} + + +def main(): + worker = hatchet.worker("timeout-worker", max_runs=4) + worker.register_workflow(TimeoutWorkflow()) + worker.register_workflow(RefreshTimeoutWorkflow()) + + worker.start() + + +if __name__ == "__main__": + main() diff --git a/generate.sh b/generate.sh index 7fbbad13..5962832b 100755 --- a/generate.sh +++ b/generate.sh @@ -56,9 +56,18 @@ cp $tmp_dir/hatchet_sdk/clients/rest/api/__init__.py $dst_dir/api/__init__.py # remove tmp folder rm -rf $tmp_dir -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/dispatcher --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk dispatcher.proto -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/events --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk events.proto -poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/workflows --python_out=./hatchet_sdk --pyi_out=./hatchet_sdk --grpc_python_out=./hatchet_sdk workflows.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/dispatcher --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts --grpc_python_out=./hatchet_sdk/contracts dispatcher.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/events --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts 
--grpc_python_out=./hatchet_sdk/contracts events.proto +poetry run python -m grpc_tools.protoc --proto_path=hatchet/api-contracts/workflows --python_out=./hatchet_sdk/contracts --pyi_out=./hatchet_sdk/contracts --grpc_python_out=./hatchet_sdk/contracts workflows.proto + +# Fix relative imports in _grpc.py files +if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS + find ./hatchet_sdk/contracts -type f -name '*_grpc.py' -print0 | xargs -0 sed -i '' 's/^import \([^ ]*\)_pb2/from . import \1_pb2/' +else + # Linux and others + find ./hatchet_sdk/contracts -type f -name '*_grpc.py' -print0 | xargs -0 sed -i 's/^import \([^ ]*\)_pb2/from . import \1_pb2/' +fi # ensure that pre-commit is applied without errors pre-commit run --all-files || pre-commit run --all-files diff --git a/hatchet_sdk/__init__.py b/hatchet_sdk/__init__.py index 52fab2de..eb4a33d2 100644 --- a/hatchet_sdk/__init__.py +++ b/hatchet_sdk/__init__.py @@ -120,8 +120,13 @@ WorkflowVersionDefinition, ) from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta +from hatchet_sdk.contracts.workflows_pb2 import ( + ConcurrencyLimitStrategy, + CreateWorkflowVersionOpts, + RateLimitDuration, +) -from .client import ClientImpl, new_client +from .client import new_client from .clients.admin import ( ChildTriggerWorkflowOptions, ScheduleTriggerWorkflowOptions, @@ -132,8 +137,3 @@ from .context import Context from .hatchet import ClientConfig, Hatchet, concurrency, on_failure_step, step, workflow from .worker import Worker, WorkerStatus -from .workflows_pb2 import ( - ConcurrencyLimitStrategy, - CreateWorkflowVersionOpts, - RateLimitDuration, -) diff --git a/hatchet_sdk/client.py b/hatchet_sdk/client.py index a3a24204..bfbadd7b 100644 --- a/hatchet_sdk/client.py +++ b/hatchet_sdk/client.py @@ -1,7 +1,5 @@ -# relative imports -import os from logging import Logger -from typing import Any +from typing import Callable import grpc @@ -9,30 +7,27 @@ from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.connection import new_conn -from .clients.admin import AdminClientImpl, new_admin -from .clients.dispatcher import DispatcherClientImpl, new_dispatcher -from .clients.events import EventClientImpl, new_event -from .clients.rest.api.workflow_api import WorkflowApi -from .clients.rest.api.workflow_run_api import WorkflowRunApi -from .clients.rest.api_client import ApiClient -from .clients.rest.configuration import Configuration +from .clients.admin import AdminClient, new_admin +from .clients.dispatcher import DispatcherClient, new_dispatcher +from .clients.events import EventClient, new_event from .clients.rest_client import RestApi from .loader import ClientConfig, ConfigLoader class Client: - admin: AdminClientImpl - dispatcher: DispatcherClientImpl - event: EventClientImpl + admin: AdminClient + dispatcher: DispatcherClient + event: EventClient rest: RestApi workflow_listener: PooledWorkflowRunListener logger: Logger - -class ClientImpl(Client): - @classmethod - def from_environment(cls, defaults: ClientConfig = ClientConfig(), *opts_functions): + def from_environment( + cls, + defaults: ClientConfig = ClientConfig(), + *opts_functions: Callable[[ClientConfig], None] + ): config: ClientConfig = ConfigLoader(".").load_client_config(defaults) for opt_function in opts_functions: opt_function(config) @@ -49,27 +44,27 @@ def from_config(cls, config: ClientConfig = ClientConfig()): conn: grpc.Channel = new_conn(config) - # Instantiate client implementations + # Instantiate clients 
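    # Note that only the event client reuses the gRPC channel created above;
    # the admin and dispatcher clients open their own connections from the
    # config, and the REST client speaks HTTP to the API server.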
event_client = new_event(conn, config) admin_client = new_admin(config) dispatcher_client = new_dispatcher(config) rest_client = RestApi(config.server_url, config.token, config.tenant_id) - workflow_listener_client = None + workflow_listener = None # Initialize this if needed return cls( event_client, admin_client, dispatcher_client, - workflow_listener_client, + workflow_listener, rest_client, config, ) def __init__( self, - event_client: EventClientImpl, - admin_client: AdminClientImpl, - dispatcher_client: DispatcherClientImpl, + event_client: EventClient, + admin_client: AdminClient, + dispatcher_client: DispatcherClient, workflow_listener: PooledWorkflowRunListener, rest_client: RestApi, config: ClientConfig, @@ -92,5 +87,5 @@ def with_host_port_impl(config: ClientConfig): return with_host_port_impl -new_client = ClientImpl.from_environment -new_client_raw = ClientImpl.from_config +new_client = Client.from_environment +new_client_raw = Client.from_config diff --git a/hatchet_sdk/clients/admin.py b/hatchet_sdk/clients/admin.py index d7965532..be96b859 100644 --- a/hatchet_sdk/clients/admin.py +++ b/hatchet_sdk/clients/admin.py @@ -9,12 +9,7 @@ from hatchet_sdk.clients.run_event_listener import new_listener from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.connection import new_conn -from hatchet_sdk.workflow_run import WorkflowRunRef - -from ..loader import ClientConfig -from ..metadata import get_metadata -from ..workflow import WorkflowMeta -from ..workflows_pb2 import ( +from hatchet_sdk.contracts.workflows_pb2 import ( CreateWorkflowVersionOpts, PutRateLimitRequest, PutWorkflowRequest, @@ -24,11 +19,16 @@ TriggerWorkflowResponse, WorkflowVersion, ) -from ..workflows_pb2_grpc import WorkflowServiceStub +from hatchet_sdk.contracts.workflows_pb2_grpc import WorkflowServiceStub +from hatchet_sdk.workflow_run import WorkflowRunRef + +from ..loader import ClientConfig +from ..metadata import get_metadata +from ..workflow import WorkflowMeta def new_admin(config: ClientConfig): - return AdminClientImpl(config) + return AdminClient(config) class ScheduleTriggerWorkflowOptions(TypedDict): @@ -210,7 +210,7 @@ async def schedule_workflow( raise ValueError(f"gRPC error: {e}") -class AdminClientImpl(AdminClientBase): +class AdminClient(AdminClientBase): def __init__(self, config: ClientConfig): conn = new_conn(config) self.config = config diff --git a/hatchet_sdk/clients/dispatcher.py b/hatchet_sdk/clients/dispatcher.py index 4866b075..d456021c 100644 --- a/hatchet_sdk/clients/dispatcher.py +++ b/hatchet_sdk/clients/dispatcher.py @@ -11,8 +11,7 @@ from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt from hatchet_sdk.connection import new_conn - -from ..dispatcher_pb2 import ( +from hatchet_sdk.contracts.dispatcher_pb2 import ( ActionEventResponse, ActionType, AssignedAction, @@ -27,7 +26,8 @@ WorkerRegisterResponse, WorkerUnsubscribeRequest, ) -from ..dispatcher_pb2_grpc import DispatcherStub +from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub + from ..loader import ClientConfig from ..logger import logger from ..metadata import get_metadata diff --git a/hatchet_sdk/clients/events.py b/hatchet_sdk/clients/events.py index 1804ca44..780c54a5 100644 --- a/hatchet_sdk/clients/events.py +++ b/hatchet_sdk/clients/events.py @@ -6,14 +6,20 @@ import grpc from google.protobuf import timestamp_pb2 -from ..events_pb2 import Event, PushEventRequest, PutLogRequest, PutStreamEventRequest -from ..events_pb2_grpc import 
EventsServiceStub +from hatchet_sdk.contracts.events_pb2 import ( + Event, + PushEventRequest, + PutLogRequest, + PutStreamEventRequest, +) +from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub + from ..loader import ClientConfig from ..metadata import get_metadata def new_event(conn, config: ClientConfig): - return EventClientImpl( + return EventClient( client=EventsServiceStub(conn), config=config, ) @@ -31,7 +37,7 @@ class PushEventOptions(TypedDict): additional_metadata: Dict[str, str] | None = None -class EventClientImpl: +class EventClient: def __init__(self, client: EventsServiceStub, config: ClientConfig): self.client = client self.token = config.token diff --git a/hatchet_sdk/clients/rest/__init__.py b/hatchet_sdk/clients/rest/__init__.py index e8d10b51..32022b15 100644 --- a/hatchet_sdk/clients/rest/__init__.py +++ b/hatchet_sdk/clients/rest/__init__.py @@ -32,6 +32,7 @@ from hatchet_sdk.clients.rest.api.worker_api import WorkerApi from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi +from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi from hatchet_sdk.clients.rest.api_client import ApiClient # import ApiClient @@ -114,6 +115,12 @@ from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ( + ReplayWorkflowRunsRequest, +) +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ( + ReplayWorkflowRunsResponse, +) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration @@ -180,6 +187,7 @@ WebhookWorkerListResponse, ) from hatchet_sdk.clients.rest.models.worker import Worker +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel from hatchet_sdk.clients.rest.models.worker_list import WorkerList from hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/hatchet_sdk/clients/rest/api/__init__.py b/hatchet_sdk/clients/rest/api/__init__.py index bc8c788d..718a6534 100644 --- a/hatchet_sdk/clients/rest/api/__init__.py +++ b/hatchet_sdk/clients/rest/api/__init__.py @@ -16,3 +16,4 @@ from hatchet_sdk.clients.rest.api.worker_api import WorkerApi from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi +from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi diff --git a/hatchet_sdk/clients/rest/api/workflow_runs_api.py b/hatchet_sdk/clients/rest/api/workflow_runs_api.py new file mode 100644 index 00000000..6bfe222b --- /dev/null +++ b/hatchet_sdk/clients/rest/api/workflow_runs_api.py @@ -0,0 +1,335 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field +from typing_extensions import Annotated +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ReplayWorkflowRunsRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ReplayWorkflowRunsResponse + +from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized +from hatchet_sdk.clients.rest.api_response import ApiResponse +from hatchet_sdk.clients.rest.rest import RESTResponseType + + +class WorkflowRunsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def workflow_run_update_replay( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ReplayWorkflowRunsResponse: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def workflow_run_update_replay_with_http_info( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ReplayWorkflowRunsResponse]: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def workflow_run_update_replay_without_preload_content( + self, + tenant: Annotated[str, Field(min_length=36, strict=True, max_length=36, description="The tenant id")], + replay_workflow_runs_request: Annotated[ReplayWorkflowRunsRequest, Field(description="The workflow run ids to replay")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Replay workflow runs + + Replays a list of workflow runs. + + :param tenant: The tenant id (required) + :type tenant: str + :param replay_workflow_runs_request: The workflow run ids to replay (required) + :type replay_workflow_runs_request: ReplayWorkflowRunsRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_run_update_replay_serialize( + tenant=tenant, + replay_workflow_runs_request=replay_workflow_runs_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReplayWorkflowRunsResponse", + '400': "APIErrors", + '403': "APIErrors", + '429': "APIErrors", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _workflow_run_update_replay_serialize( + self, + tenant, + replay_workflow_runs_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params['tenant'] = tenant + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if replay_workflow_runs_request is not None: + _body_params = replay_workflow_runs_request + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'cookieAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/tenants/{tenant}/workflow-runs/replay', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/hatchet_sdk/clients/rest/models/__init__.py b/hatchet_sdk/clients/rest/models/__init__.py index b9c0ece4..94f1b247 100644 --- a/hatchet_sdk/clients/rest/models/__init__.py +++ b/hatchet_sdk/clients/rest/models/__init__.py @@ -82,6 +82,12 @@ from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest +from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import ( + ReplayWorkflowRunsRequest, +) +from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import ( + ReplayWorkflowRunsResponse, +) from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration @@ -148,6 +154,7 @@ WebhookWorkerListResponse, ) from hatchet_sdk.clients.rest.models.worker import Worker +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel from hatchet_sdk.clients.rest.models.worker_list import WorkerList from 
hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/hatchet_sdk/clients/rest/models/api_resource_meta.py b/hatchet_sdk/clients/rest/models/api_resource_meta.py index bb8ad457..8c353248 100644 --- a/hatchet_sdk/clients/rest/models/api_resource_meta.py +++ b/hatchet_sdk/clients/rest/models/api_resource_meta.py @@ -29,7 +29,7 @@ class APIResourceMeta(BaseModel): APIResourceMeta """ # noqa: E501 - id: Annotated[str, Field(min_length=36, strict=True, max_length=36)] = Field( + id: Annotated[str, Field(min_length=0, strict=True, max_length=36)] = Field( description="the id of this resource, in UUID format" ) created_at: datetime = Field( diff --git a/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py b/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py new file mode 100644 index 00000000..f30b784c --- /dev/null +++ b/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class ReplayWorkflowRunsRequest(BaseModel): + """ + ReplayWorkflowRunsRequest + """ # noqa: E501 + workflow_run_ids: List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]] = Field(alias="workflowRunIds") + __properties: ClassVar[List[str]] = ["workflowRunIds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "workflowRunIds": obj.get("workflowRunIds") + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py new file mode 100644 index 00000000..1b84adcc --- /dev/null +++ b/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun +from typing import Optional, Set +from typing_extensions import Self + +class ReplayWorkflowRunsResponse(BaseModel): + """ + ReplayWorkflowRunsResponse + """ # noqa: E501 + workflow_runs: List[WorkflowRun] = Field(alias="workflowRuns") + __properties: ClassVar[List[str]] = ["workflowRuns"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in workflow_runs (list) + _items = [] + if self.workflow_runs: + for _item in self.workflow_runs: + if _item: + _items.append(_item.to_dict()) + _dict['workflowRuns'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ReplayWorkflowRunsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "workflowRuns": [WorkflowRun.from_dict(_item) for _item in obj["workflowRuns"]] if obj.get("workflowRuns") is not None else None + }) + return _obj + + diff --git a/hatchet_sdk/clients/rest/models/worker.py b/hatchet_sdk/clients/rest/models/worker.py index b70ae839..710e0e8f 100644 --- a/hatchet_sdk/clients/rest/models/worker.py +++ b/hatchet_sdk/clients/rest/models/worker.py @@ -25,6 +25,7 @@ from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta from hatchet_sdk.clients.rest.models.step_run import StepRun +from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel class Worker(BaseModel): @@ -72,6 +73,9 @@ class Worker(BaseModel): description="the id of the assigned dispatcher, in UUID format", alias="dispatcherId", ) + labels: Optional[List[WorkerLabel]] = Field( + default=None, description="The current label state of the worker." + ) __properties: ClassVar[List[str]] = [ "metadata", "name", @@ -83,6 +87,7 @@ class Worker(BaseModel): "maxRuns", "availableRuns", "dispatcherId", + "labels", ] @field_validator("status") @@ -144,6 +149,13 @@ def to_dict(self) -> Dict[str, Any]: if _item: _items.append(_item.to_dict()) _dict["recentStepRuns"] = _items + # override the default output from pydantic by calling `to_dict()` of each item in labels (list) + _items = [] + if self.labels: + for _item in self.labels: + if _item: + _items.append(_item.to_dict()) + _dict["labels"] = _items return _dict @classmethod @@ -175,6 +187,11 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "maxRuns": obj.get("maxRuns"), "availableRuns": obj.get("availableRuns"), "dispatcherId": obj.get("dispatcherId"), + "labels": ( + [WorkerLabel.from_dict(_item) for _item in obj["labels"]] + if obj.get("labels") is not None + else None + ), } ) return _obj diff --git a/hatchet_sdk/clients/rest/models/worker_label.py b/hatchet_sdk/clients/rest/models/worker_label.py new file mode 100644 index 00000000..75f6b49b --- /dev/null +++ b/hatchet_sdk/clients/rest/models/worker_label.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta +from typing import Optional, Set +from typing_extensions import Self + +class WorkerLabel(BaseModel): + """ + WorkerLabel + """ # noqa: E501 + metadata: APIResourceMeta + key: StrictStr = Field(description="The key of the label.") + value: Optional[StrictStr] = Field(default=None, description="The value of the label.") + __properties: ClassVar[List[str]] = ["metadata", "key", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkerLabel from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkerLabel from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": APIResourceMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "key": obj.get("key"), + "value": obj.get("value") + }) + return _obj + + diff --git a/hatchet_sdk/clients/run_event_listener.py b/hatchet_sdk/clients/run_event_listener.py index c3f888fc..c8e3d839 100644 --- a/hatchet_sdk/clients/run_event_listener.py +++ b/hatchet_sdk/clients/run_event_listener.py @@ -5,15 +5,15 @@ import grpc from hatchet_sdk.connection import new_conn - -from ..dispatcher_pb2 import ( +from hatchet_sdk.contracts.dispatcher_pb2 import ( RESOURCE_TYPE_STEP_RUN, RESOURCE_TYPE_WORKFLOW_RUN, ResourceEventType, SubscribeToWorkflowEventsRequest, WorkflowEvent, ) -from ..dispatcher_pb2_grpc import DispatcherStub +from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub + from ..loader import ClientConfig from ..metadata import get_metadata diff --git a/hatchet_sdk/clients/workflow_listener.py b/hatchet_sdk/clients/workflow_listener.py index 36124604..89a6f7c5 100644 --- a/hatchet_sdk/clients/workflow_listener.py +++ b/hatchet_sdk/clients/workflow_listener.py @@ -9,9 +9,12 @@ from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt from hatchet_sdk.connection import new_conn +from 
hatchet_sdk.contracts.dispatcher_pb2 import ( + SubscribeToWorkflowRunsRequest, + WorkflowRunEvent, +) +from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub -from ..dispatcher_pb2 import SubscribeToWorkflowRunsRequest, WorkflowRunEvent -from ..dispatcher_pb2_grpc import DispatcherStub from ..loader import ClientConfig from ..logger import logger from ..metadata import get_metadata diff --git a/hatchet_sdk/context.py b/hatchet_sdk/context.py index cf5bb6ff..0cfccd04 100644 --- a/hatchet_sdk/context.py +++ b/hatchet_sdk/context.py @@ -3,22 +3,22 @@ import traceback from concurrent.futures import Future, ThreadPoolExecutor -from hatchet_sdk.clients.events import EventClientImpl +from hatchet_sdk.clients.events import EventClient from hatchet_sdk.clients.run_event_listener import ( RunEventListener, RunEventListenerClient, ) from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener +from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData from hatchet_sdk.workflow_run import WorkflowRunRef from .clients.admin import ( - AdminClientImpl, + AdminClient, ChildTriggerWorkflowOptions, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions, ) from .clients.dispatcher import Action, DispatcherClientImpl -from .dispatcher_pb2 import OverridesData from .logger import logger DEFAULT_WORKFLOW_POLLING_INTERVAL = 5 # Seconds @@ -54,8 +54,8 @@ def __init__( self, action: Action, dispatcher_client: DispatcherClientImpl, - admin_client: AdminClientImpl, - event_client: EventClientImpl, + admin_client: AdminClient, + event_client: EventClient, workflow_listener: PooledWorkflowRunListener, workflow_run_event_listener: RunEventListenerClient, namespace: str = "", @@ -90,8 +90,8 @@ def __init__( self, action: Action, dispatcher_client: DispatcherClientImpl, - admin_client: AdminClientImpl, - event_client: EventClientImpl, + admin_client: AdminClient, + event_client: EventClient, workflow_listener: PooledWorkflowRunListener, workflow_run_event_listener: RunEventListenerClient, namespace: str = "", diff --git a/hatchet_sdk/contracts/dispatcher_pb2.py b/hatchet_sdk/contracts/dispatcher_pb2.py new file mode 100644 index 00000000..bc2d3978 --- /dev/null +++ b/hatchet_sdk/contracts/dispatcher_pb2.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: dispatcher.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\xe2\x01\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\x98\x02\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n 
SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n 
WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dispatcher_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._options = None + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_options = b'8\001' + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._options = None + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_options = b'8\001' + _globals['_ACTIONTYPE']._serialized_start=2813 + _globals['_ACTIONTYPE']._serialized_end=2891 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=2894 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=3056 + _globals['_STEPACTIONEVENTTYPE']._serialized_start=3059 + _globals['_STEPACTIONEVENTTYPE']._serialized_end=3197 + _globals['_RESOURCETYPE']._serialized_start=3199 + _globals['_RESOURCETYPE']._serialized_end=3300 + _globals['_RESOURCEEVENTTYPE']._serialized_start=3303 + _globals['_RESOURCEEVENTTYPE']._serialized_end=3557 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=3559 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=3619 + _globals['_WORKERLABELS']._serialized_start=53 + _globals['_WORKERLABELS']._serialized_end=139 + _globals['_WORKERREGISTERREQUEST']._serialized_start=142 + _globals['_WORKERREGISTERREQUEST']._serialized_end=368 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_start=296 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_end=356 + _globals['_WORKERREGISTERRESPONSE']._serialized_start=370 + _globals['_WORKERREGISTERRESPONSE']._serialized_end=450 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_start=453 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_end=616 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_start=296 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_end=356 + _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_start=618 + 
_globals['_UPSERTWORKERLABELSRESPONSE']._serialized_end=682 + _globals['_ASSIGNEDACTION']._serialized_start=685 + _globals['_ASSIGNEDACTION']._serialized_end=965 + _globals['_WORKERLISTENREQUEST']._serialized_start=967 + _globals['_WORKERLISTENREQUEST']._serialized_end=1006 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=1008 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=1052 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=1054 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=1117 + _globals['_GROUPKEYACTIONEVENT']._serialized_start=1120 + _globals['_GROUPKEYACTIONEVENT']._serialized_end=1345 + _globals['_STEPACTIONEVENT']._serialized_start=1348 + _globals['_STEPACTIONEVENT']._serialized_end=1584 + _globals['_ACTIONEVENTRESPONSE']._serialized_start=1586 + _globals['_ACTIONEVENTRESPONSE']._serialized_end=1643 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1645 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1702 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=1704 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=1759 + _globals['_WORKFLOWEVENT']._serialized_start=1762 + _globals['_WORKFLOWEVENT']._serialized_end=2068 + _globals['_WORKFLOWRUNEVENT']._serialized_start=2071 + _globals['_WORKFLOWRUNEVENT']._serialized_end=2239 + _globals['_STEPRUNRESULT']._serialized_start=2242 + _globals['_STEPRUNRESULT']._serialized_end=2380 + _globals['_OVERRIDESDATA']._serialized_start=2382 + _globals['_OVERRIDESDATA']._serialized_end=2469 + _globals['_OVERRIDESDATARESPONSE']._serialized_start=2471 + _globals['_OVERRIDESDATARESPONSE']._serialized_end=2494 + _globals['_HEARTBEATREQUEST']._serialized_start=2496 + _globals['_HEARTBEATREQUEST']._serialized_end=2581 + _globals['_HEARTBEATRESPONSE']._serialized_start=2583 + _globals['_HEARTBEATRESPONSE']._serialized_end=2602 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=2604 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=2674 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=2676 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=2747 + _globals['_RELEASESLOTREQUEST']._serialized_start=2749 + _globals['_RELEASESLOTREQUEST']._serialized_end=2788 + _globals['_RELEASESLOTRESPONSE']._serialized_start=2790 + _globals['_RELEASESLOTRESPONSE']._serialized_end=2811 + _globals['_DISPATCHER']._serialized_start=3622 + _globals['_DISPATCHER']._serialized_end=4510 +# @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/dispatcher_pb2.pyi b/hatchet_sdk/contracts/dispatcher_pb2.pyi similarity index 88% rename from hatchet_sdk/dispatcher_pb2.pyi rename to hatchet_sdk/contracts/dispatcher_pb2.pyi index b6e81754..18f26a29 100644 --- a/hatchet_sdk/dispatcher_pb2.pyi +++ b/hatchet_sdk/contracts/dispatcher_pb2.pyi @@ -69,17 +69,34 @@ RESOURCE_EVENT_TYPE_TIMED_OUT: ResourceEventType RESOURCE_EVENT_TYPE_STREAM: ResourceEventType WORKFLOW_RUN_EVENT_TYPE_FINISHED: WorkflowRunEventType +class WorkerLabels(_message.Message): + __slots__ = ("strValue", "intValue") + STRVALUE_FIELD_NUMBER: _ClassVar[int] + INTVALUE_FIELD_NUMBER: _ClassVar[int] + strValue: str + intValue: int + def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ...) -> None: ... 
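
The `WorkerLabels` stub above is the new wire type for a worker's label state, and the updated `WorkerRegisterRequest` below carries a `labels` map of them. A minimal sketch (not part of the diff) of populating these messages directly, assuming the generated `hatchet_sdk.contracts.dispatcher_pb2` module from this PR; the worker name, action id, and label keys are hypothetical placeholders, and the real SDK would normally build this request inside its worker class rather than by hand:

from hatchet_sdk.contracts.dispatcher_pb2 import WorkerLabels, WorkerRegisterRequest

# Each map value is a WorkerLabels message carrying either a string or an int;
# protobuf map fields accept a plain dict of key -> message in the constructor.
request = WorkerRegisterRequest(
    workerName="example-worker",   # hypothetical worker name
    actions=["default:step1"],     # hypothetical action id
    maxRuns=5,
    labels={
        "region": WorkerLabels(strValue="us-east-1"),  # string-valued label
        "memory_gb": WorkerLabels(intValue=16),        # int-valued label
    },
)
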
+ class WorkerRegisterRequest(_message.Message): - __slots__ = ("workerName", "actions", "services", "maxRuns") + __slots__ = ("workerName", "actions", "services", "maxRuns", "labels") + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: WorkerLabels + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... WORKERNAME_FIELD_NUMBER: _ClassVar[int] ACTIONS_FIELD_NUMBER: _ClassVar[int] SERVICES_FIELD_NUMBER: _ClassVar[int] MAXRUNS_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] workerName: str actions: _containers.RepeatedScalarFieldContainer[str] services: _containers.RepeatedScalarFieldContainer[str] maxRuns: int - def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ...) -> None: ... + labels: _containers.MessageMap[str, WorkerLabels] + def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... class WorkerRegisterResponse(_message.Message): __slots__ = ("tenantId", "workerId", "workerName") @@ -91,6 +108,29 @@ class WorkerRegisterResponse(_message.Message): workerName: str def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ..., workerName: _Optional[str] = ...) -> None: ... +class UpsertWorkerLabelsRequest(_message.Message): + __slots__ = ("workerId", "labels") + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: WorkerLabels + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... + WORKERID_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] + workerId: str + labels: _containers.MessageMap[str, WorkerLabels] + def __init__(self, workerId: _Optional[str] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... + +class UpsertWorkerLabelsResponse(_message.Message): + __slots__ = ("tenantId", "workerId") + TENANTID_FIELD_NUMBER: _ClassVar[int] + WORKERID_FIELD_NUMBER: _ClassVar[int] + tenantId: str + workerId: str + def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ... 
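
These request/response stubs pair with the `UpsertWorkerLabels` RPC that the `dispatcher_pb2_grpc.py` hunk below registers on the `Dispatcher` service. A minimal sketch (not part of the diff) of invoking it through the generated `DispatcherStub`; the channel target and worker id are hypothetical placeholders, and a real call would attach the tenant auth metadata that the SDK's `new_conn`/`get_metadata` helpers normally supply:

import grpc

from hatchet_sdk.contracts.dispatcher_pb2 import UpsertWorkerLabelsRequest, WorkerLabels
from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub

channel = grpc.insecure_channel("localhost:7070")  # hypothetical target
stub = DispatcherStub(channel)

# Upsert (create or update) the label state of an already-registered worker.
response = stub.UpsertWorkerLabels(
    UpsertWorkerLabelsRequest(
        workerId="worker-uuid",  # hypothetical worker id
        labels={"model": WorkerLabels(strValue="fancy-ai-model-v2")},
    )
)
print(response.tenantId, response.workerId)
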
+ class AssignedAction(_message.Message): __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload", "stepName", "retryCount") TENANTID_FIELD_NUMBER: _ClassVar[int] diff --git a/hatchet_sdk/dispatcher_pb2_grpc.py b/hatchet_sdk/contracts/dispatcher_pb2_grpc.py similarity index 92% rename from hatchet_sdk/dispatcher_pb2_grpc.py rename to hatchet_sdk/contracts/dispatcher_pb2_grpc.py index 2b6d2630..f10f1693 100644 --- a/hatchet_sdk/dispatcher_pb2_grpc.py +++ b/hatchet_sdk/contracts/dispatcher_pb2_grpc.py @@ -74,6 +74,11 @@ def __init__(self, channel): request_serializer=dispatcher__pb2.ReleaseSlotRequest.SerializeToString, response_deserializer=dispatcher__pb2.ReleaseSlotResponse.FromString, ) + self.UpsertWorkerLabels = channel.unary_unary( + '/Dispatcher/UpsertWorkerLabels', + request_serializer=dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString, + response_deserializer=dispatcher__pb2.UpsertWorkerLabelsResponse.FromString, + ) class DispatcherServicer(object): @@ -154,6 +159,12 @@ def ReleaseSlot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpsertWorkerLabels(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_DispatcherServicer_to_server(servicer, server): rpc_method_handlers = { @@ -217,6 +228,11 @@ def add_DispatcherServicer_to_server(servicer, server): request_deserializer=dispatcher__pb2.ReleaseSlotRequest.FromString, response_serializer=dispatcher__pb2.ReleaseSlotResponse.SerializeToString, ), + 'UpsertWorkerLabels': grpc.unary_unary_rpc_method_handler( + servicer.UpsertWorkerLabels, + request_deserializer=dispatcher__pb2.UpsertWorkerLabelsRequest.FromString, + response_serializer=dispatcher__pb2.UpsertWorkerLabelsResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'Dispatcher', rpc_method_handlers) @@ -430,3 +446,20 @@ def ReleaseSlot(request, dispatcher__pb2.ReleaseSlotResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpsertWorkerLabels(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/Dispatcher/UpsertWorkerLabels', + dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString, + dispatcher__pb2.UpsertWorkerLabelsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/hatchet_sdk/events_pb2.py b/hatchet_sdk/contracts/events_pb2.py similarity index 100% rename from hatchet_sdk/events_pb2.py rename to hatchet_sdk/contracts/events_pb2.py diff --git a/hatchet_sdk/events_pb2.pyi b/hatchet_sdk/contracts/events_pb2.pyi similarity index 100% rename from hatchet_sdk/events_pb2.pyi rename to hatchet_sdk/contracts/events_pb2.pyi diff --git a/hatchet_sdk/events_pb2_grpc.py b/hatchet_sdk/contracts/events_pb2_grpc.py similarity index 100% rename from hatchet_sdk/events_pb2_grpc.py rename to hatchet_sdk/contracts/events_pb2_grpc.py diff --git a/hatchet_sdk/contracts/workflows_pb2.py 
b/hatchet_sdk/contracts/workflows_pb2.py new file mode 100644 index 00000000..9c397ec1 --- /dev/null +++ b/hatchet_sdk/contracts/workflows_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: workflows.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xe0\x03\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_sticky\"n\n\x17WorkflowConcurrencyOpts\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x10\n\x08max_runs\x18\x02 \x01(\x05\x12\x31\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategy\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xcb\x02\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\"1\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x05\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 
\x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"\xd3\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_id\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\x8a\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._options = None + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' + 
_globals['_STICKYSTRATEGY']._serialized_start=2544 + _globals['_STICKYSTRATEGY']._serialized_end=2580 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=2582 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=2690 + _globals['_WORKERLABELCOMPARATOR']._serialized_start=2693 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=2826 + _globals['_RATELIMITDURATION']._serialized_start=2828 + _globals['_RATELIMITDURATION']._serialized_end=2921 + _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 + _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 + _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 + _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_end=597 + _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_start=599 + _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_end=709 + _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=711 + _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=815 + _globals['_DESIREDWORKERLABELS']._serialized_start=818 + _globals['_DESIREDWORKERLABELS']._serialized_end=1043 + _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=1046 + _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1377 + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1304 + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1377 + _globals['_CREATESTEPRATELIMIT']._serialized_start=1379 + _globals['_CREATESTEPRATELIMIT']._serialized_end=1428 + _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1430 + _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1452 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1455 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=1730 + _globals['_WORKFLOWVERSION']._serialized_start=1733 + _globals['_WORKFLOWVERSION']._serialized_end=1911 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=1913 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=1976 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=1978 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2035 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2038 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=2377 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=2379 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=2429 + _globals['_PUTRATELIMITREQUEST']._serialized_start=2431 + _globals['_PUTRATELIMITREQUEST']._serialized_end=2518 + _globals['_PUTRATELIMITRESPONSE']._serialized_start=2520 + _globals['_PUTRATELIMITRESPONSE']._serialized_end=2542 + _globals['_WORKFLOWSERVICE']._serialized_start=2924 + _globals['_WORKFLOWSERVICE']._serialized_end=3190 +# @@protoc_insertion_point(module_scope) diff --git a/hatchet_sdk/workflows_pb2.pyi b/hatchet_sdk/contracts/workflows_pb2.pyi similarity index 79% rename from hatchet_sdk/workflows_pb2.pyi rename to hatchet_sdk/contracts/workflows_pb2.pyi index 38eb8a6e..3bd3a45b 100644 --- a/hatchet_sdk/workflows_pb2.pyi +++ b/hatchet_sdk/contracts/workflows_pb2.pyi @@ -7,6 +7,11 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor +class StickyStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SOFT: _ClassVar[StickyStrategy] + HARD: _ClassVar[StickyStrategy] + class ConcurrencyLimitStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () CANCEL_IN_PROGRESS: _ClassVar[ConcurrencyLimitStrategy] @@ -14,6 +19,15 @@ class ConcurrencyLimitStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper QUEUE_NEWEST: _ClassVar[ConcurrencyLimitStrategy] 
GROUP_ROUND_ROBIN: _ClassVar[ConcurrencyLimitStrategy] +class WorkerLabelComparator(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + EQUAL: _ClassVar[WorkerLabelComparator] + NOT_EQUAL: _ClassVar[WorkerLabelComparator] + GREATER_THAN: _ClassVar[WorkerLabelComparator] + GREATER_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator] + LESS_THAN: _ClassVar[WorkerLabelComparator] + LESS_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator] + class RateLimitDuration(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () SECOND: _ClassVar[RateLimitDuration] @@ -23,10 +37,18 @@ class RateLimitDuration(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): WEEK: _ClassVar[RateLimitDuration] MONTH: _ClassVar[RateLimitDuration] YEAR: _ClassVar[RateLimitDuration] +SOFT: StickyStrategy +HARD: StickyStrategy CANCEL_IN_PROGRESS: ConcurrencyLimitStrategy DROP_NEWEST: ConcurrencyLimitStrategy QUEUE_NEWEST: ConcurrencyLimitStrategy GROUP_ROUND_ROBIN: ConcurrencyLimitStrategy +EQUAL: WorkerLabelComparator +NOT_EQUAL: WorkerLabelComparator +GREATER_THAN: WorkerLabelComparator +GREATER_THAN_OR_EQUAL: WorkerLabelComparator +LESS_THAN: WorkerLabelComparator +LESS_THAN_OR_EQUAL: WorkerLabelComparator SECOND: RateLimitDuration MINUTE: RateLimitDuration HOUR: RateLimitDuration @@ -42,7 +64,7 @@ class PutWorkflowRequest(_message.Message): def __init__(self, opts: _Optional[_Union[CreateWorkflowVersionOpts, _Mapping]] = ...) -> None: ... class CreateWorkflowVersionOpts(_message.Message): - __slots__ = ("name", "description", "version", "event_triggers", "cron_triggers", "scheduled_triggers", "jobs", "concurrency", "schedule_timeout", "cron_input", "on_failure_job") + __slots__ = ("name", "description", "version", "event_triggers", "cron_triggers", "scheduled_triggers", "jobs", "concurrency", "schedule_timeout", "cron_input", "on_failure_job", "sticky") NAME_FIELD_NUMBER: _ClassVar[int] DESCRIPTION_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] @@ -54,6 +76,7 @@ class CreateWorkflowVersionOpts(_message.Message): SCHEDULE_TIMEOUT_FIELD_NUMBER: _ClassVar[int] CRON_INPUT_FIELD_NUMBER: _ClassVar[int] ON_FAILURE_JOB_FIELD_NUMBER: _ClassVar[int] + STICKY_FIELD_NUMBER: _ClassVar[int] name: str description: str version: str @@ -65,7 +88,8 @@ class CreateWorkflowVersionOpts(_message.Message): schedule_timeout: str cron_input: str on_failure_job: CreateWorkflowJobOpts - def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., version: _Optional[str] = ..., event_triggers: _Optional[_Iterable[str]] = ..., cron_triggers: _Optional[_Iterable[str]] = ..., scheduled_triggers: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[CreateWorkflowJobOpts, _Mapping]]] = ..., concurrency: _Optional[_Union[WorkflowConcurrencyOpts, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., cron_input: _Optional[str] = ..., on_failure_job: _Optional[_Union[CreateWorkflowJobOpts, _Mapping]] = ...) -> None: ... 
+ sticky: StickyStrategy + def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., version: _Optional[str] = ..., event_triggers: _Optional[_Iterable[str]] = ..., cron_triggers: _Optional[_Iterable[str]] = ..., scheduled_triggers: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[CreateWorkflowJobOpts, _Mapping]]] = ..., concurrency: _Optional[_Union[WorkflowConcurrencyOpts, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., cron_input: _Optional[str] = ..., on_failure_job: _Optional[_Union[CreateWorkflowJobOpts, _Mapping]] = ..., sticky: _Optional[_Union[StickyStrategy, str]] = ...) -> None: ... class WorkflowConcurrencyOpts(_message.Message): __slots__ = ("action", "max_runs", "limit_strategy") @@ -87,8 +111,29 @@ class CreateWorkflowJobOpts(_message.Message): steps: _containers.RepeatedCompositeFieldContainer[CreateWorkflowStepOpts] def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., steps: _Optional[_Iterable[_Union[CreateWorkflowStepOpts, _Mapping]]] = ...) -> None: ... +class DesiredWorkerLabels(_message.Message): + __slots__ = ("strValue", "intValue", "required", "comparator", "weight") + STRVALUE_FIELD_NUMBER: _ClassVar[int] + INTVALUE_FIELD_NUMBER: _ClassVar[int] + REQUIRED_FIELD_NUMBER: _ClassVar[int] + COMPARATOR_FIELD_NUMBER: _ClassVar[int] + WEIGHT_FIELD_NUMBER: _ClassVar[int] + strValue: str + intValue: int + required: bool + comparator: WorkerLabelComparator + weight: int + def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ... + class CreateWorkflowStepOpts(_message.Message): - __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits") + __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits", "worker_labels") + class WorkerLabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: DesiredWorkerLabels + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[DesiredWorkerLabels, _Mapping]] = ...) -> None: ... READABLE_ID_FIELD_NUMBER: _ClassVar[int] ACTION_FIELD_NUMBER: _ClassVar[int] TIMEOUT_FIELD_NUMBER: _ClassVar[int] @@ -97,6 +142,7 @@ class CreateWorkflowStepOpts(_message.Message): USER_DATA_FIELD_NUMBER: _ClassVar[int] RETRIES_FIELD_NUMBER: _ClassVar[int] RATE_LIMITS_FIELD_NUMBER: _ClassVar[int] + WORKER_LABELS_FIELD_NUMBER: _ClassVar[int] readable_id: str action: str timeout: str @@ -105,7 +151,8 @@ class CreateWorkflowStepOpts(_message.Message): user_data: str retries: int rate_limits: _containers.RepeatedCompositeFieldContainer[CreateStepRateLimit] - def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ...) -> None: ... 
+ worker_labels: _containers.MessageMap[str, DesiredWorkerLabels] + def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ...) -> None: ... class CreateStepRateLimit(_message.Message): __slots__ = ("key", "units") @@ -170,7 +217,7 @@ class WorkflowTriggerCronRef(_message.Message): def __init__(self, parent_id: _Optional[str] = ..., cron: _Optional[str] = ...) -> None: ... class TriggerWorkflowRequest(_message.Message): - __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata") + __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "desired_worker_id") NAME_FIELD_NUMBER: _ClassVar[int] INPUT_FIELD_NUMBER: _ClassVar[int] PARENT_ID_FIELD_NUMBER: _ClassVar[int] @@ -178,6 +225,7 @@ class TriggerWorkflowRequest(_message.Message): CHILD_INDEX_FIELD_NUMBER: _ClassVar[int] CHILD_KEY_FIELD_NUMBER: _ClassVar[int] ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] + DESIRED_WORKER_ID_FIELD_NUMBER: _ClassVar[int] name: str input: str parent_id: str @@ -185,7 +233,8 @@ class TriggerWorkflowRequest(_message.Message): child_index: int child_key: str additional_metadata: str - def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ...) -> None: ... + desired_worker_id: str + def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., desired_worker_id: _Optional[str] = ...) -> None: ... class TriggerWorkflowResponse(_message.Message): __slots__ = ("workflow_run_id",) diff --git a/hatchet_sdk/workflows_pb2_grpc.py b/hatchet_sdk/contracts/workflows_pb2_grpc.py similarity index 100% rename from hatchet_sdk/workflows_pb2_grpc.py rename to hatchet_sdk/contracts/workflows_pb2_grpc.py diff --git a/hatchet_sdk/dispatcher_pb2.py b/hatchet_sdk/dispatcher_pb2.py deleted file mode 100644 index c7a6f882..00000000 --- a/hatchet_sdk/dispatcher_pb2.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: dispatcher.proto -# Protobuf Python Version: 4.25.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"p\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\x98\x02\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 
\x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n 
WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xa7\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dispatcher_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' - _globals['_ACTIONTYPE']._serialized_start=2378 - _globals['_ACTIONTYPE']._serialized_end=2456 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=2459 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=2621 - _globals['_STEPACTIONEVENTTYPE']._serialized_start=2624 - _globals['_STEPACTIONEVENTTYPE']._serialized_end=2762 - _globals['_RESOURCETYPE']._serialized_start=2764 - _globals['_RESOURCETYPE']._serialized_end=2865 - _globals['_RESOURCEEVENTTYPE']._serialized_start=2868 - _globals['_RESOURCEEVENTTYPE']._serialized_end=3122 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=3124 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=3184 - _globals['_WORKERREGISTERREQUEST']._serialized_start=53 - _globals['_WORKERREGISTERREQUEST']._serialized_end=165 - _globals['_WORKERREGISTERRESPONSE']._serialized_start=167 - _globals['_WORKERREGISTERRESPONSE']._serialized_end=247 - _globals['_ASSIGNEDACTION']._serialized_start=250 - _globals['_ASSIGNEDACTION']._serialized_end=530 - _globals['_WORKERLISTENREQUEST']._serialized_start=532 - _globals['_WORKERLISTENREQUEST']._serialized_end=571 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=573 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=617 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=619 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=682 - _globals['_GROUPKEYACTIONEVENT']._serialized_start=685 - _globals['_GROUPKEYACTIONEVENT']._serialized_end=910 - _globals['_STEPACTIONEVENT']._serialized_start=913 - _globals['_STEPACTIONEVENT']._serialized_end=1149 - _globals['_ACTIONEVENTRESPONSE']._serialized_start=1151 - _globals['_ACTIONEVENTRESPONSE']._serialized_end=1208 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1210 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1267 - _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=1269 - 
_globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=1324
-  _globals['_WORKFLOWEVENT']._serialized_start=1327
-  _globals['_WORKFLOWEVENT']._serialized_end=1633
-  _globals['_WORKFLOWRUNEVENT']._serialized_start=1636
-  _globals['_WORKFLOWRUNEVENT']._serialized_end=1804
-  _globals['_STEPRUNRESULT']._serialized_start=1807
-  _globals['_STEPRUNRESULT']._serialized_end=1945
-  _globals['_OVERRIDESDATA']._serialized_start=1947
-  _globals['_OVERRIDESDATA']._serialized_end=2034
-  _globals['_OVERRIDESDATARESPONSE']._serialized_start=2036
-  _globals['_OVERRIDESDATARESPONSE']._serialized_end=2059
-  _globals['_HEARTBEATREQUEST']._serialized_start=2061
-  _globals['_HEARTBEATREQUEST']._serialized_end=2146
-  _globals['_HEARTBEATRESPONSE']._serialized_start=2148
-  _globals['_HEARTBEATRESPONSE']._serialized_end=2167
-  _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=2169
-  _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=2239
-  _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=2241
-  _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=2312
-  _globals['_RELEASESLOTREQUEST']._serialized_start=2314
-  _globals['_RELEASESLOTREQUEST']._serialized_end=2353
-  _globals['_RELEASESLOTRESPONSE']._serialized_start=2355
-  _globals['_RELEASESLOTRESPONSE']._serialized_end=2376
-  _globals['_DISPATCHER']._serialized_start=3187
-  _globals['_DISPATCHER']._serialized_end=3994
-# @@protoc_insertion_point(module_scope)
diff --git a/hatchet_sdk/hatchet.py b/hatchet_sdk/hatchet.py
index dea651b7..365173ab 100644
--- a/hatchet_sdk/hatchet.py
+++ b/hatchet_sdk/hatchet.py
@@ -1,14 +1,19 @@
 import logging
 from typing import List, Optional
 
+from typing_extensions import deprecated
+
+from hatchet_sdk.contracts.workflows_pb2 import (
+    ConcurrencyLimitStrategy,
+    CreateStepRateLimit,
+)
 from hatchet_sdk.loader import ClientConfig
 from hatchet_sdk.rate_limit import RateLimit
 
-from .client import ClientImpl, new_client, new_client_raw
+from .client import Client, new_client, new_client_raw
 from .logger import logger
 from .worker import Worker
 from .workflow import WorkflowMeta
-from .workflows_pb2 import ConcurrencyLimitStrategy, CreateStepRateLimit
 
 
 def workflow(
@@ -103,7 +108,20 @@ def inner(func):
 
 
 class Hatchet:
-    client: ClientImpl
+    """
+    Main client for interacting with the Hatchet SDK.
+
+    This class provides access to various client interfaces and utility methods
+    for working with Hatchet workers, workflows, and steps.
+
+    Attributes:
+        admin (AdminClient): Interface for administrative operations.
+        dispatcher (DispatcherClient): Interface for dispatching operations.
+        event (EventClient): Interface for event-related operations.
+        rest (RestApi): Interface for REST API operations.
+    """
+
+    _client: Client
 
     @classmethod
     def from_environment(cls, defaults: ClientConfig = ClientConfig(), **kwargs):
@@ -116,17 +134,48 @@ def from_config(cls, config: ClientConfig, **kwargs):
     def __init__(
         self,
         debug: bool = False,
-        client: Optional[ClientImpl] = None,
+        client: Optional[Client] = None,
        config: ClientConfig = ClientConfig(),
     ):
+        """
+        Initialize a new Hatchet instance.
+
+        Args:
+            debug (bool, optional): Enable debug logging. Defaults to False.
+            client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
+            config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
+ """ if client is not None: self.client = client else: - self.client = new_client(config) + self._client = new_client(config) if debug: logger.setLevel(logging.DEBUG) + @property + @deprecated( + "Direct access to client is deprecated and will be removed in a future version. Use specific client properties (Hatchet.admin, Hatchet.dispatcher, Hatchet.event, Hatchet.rest) instead. [0.32.0]", + ) + def client(self) -> Client: + return self._client + + @property + def admin(self): + return self._client.admin + + @property + def dispatcher(self): + return self._client.dispatcher + + @property + def event(self): + return self._client.event + + @property + def rest(self): + return self._client.rest + concurrency = staticmethod(concurrency) workflow = staticmethod(workflow) diff --git a/hatchet_sdk/worker.py b/hatchet_sdk/worker/worker.py similarity index 98% rename from hatchet_sdk/worker.py rename to hatchet_sdk/worker/worker.py index 4a72077e..60f858d0 100644 --- a/hatchet_sdk/worker.py +++ b/hatchet_sdk/worker/worker.py @@ -22,20 +22,10 @@ from google.protobuf.timestamp_pb2 import Timestamp from hatchet_sdk.clients.admin import new_admin -from hatchet_sdk.clients.events import EventClientImpl +from hatchet_sdk.clients.events import EventClient from hatchet_sdk.clients.run_event_listener import new_listener from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener -from hatchet_sdk.loader import ClientConfig - -from .client import new_client, new_client_raw -from .clients.dispatcher import ( - Action, - ActionListenerImpl, - GetActionListenerRequest, - new_dispatcher, -) -from .context import Context -from .dispatcher_pb2 import ( +from hatchet_sdk.contracts.dispatcher_pb2 import ( GROUP_KEY_EVENT_TYPE_COMPLETED, GROUP_KEY_EVENT_TYPE_FAILED, GROUP_KEY_EVENT_TYPE_STARTED, @@ -48,8 +38,18 @@ StepActionEvent, StepActionEventType, ) -from .logger import logger -from .workflow import WorkflowMeta +from hatchet_sdk.loader import ClientConfig + +from ..client import new_client, new_client_raw +from ..clients.dispatcher import ( + Action, + ActionListenerImpl, + GetActionListenerRequest, + new_dispatcher, +) +from ..context import Context +from ..logger import logger +from ..workflow import WorkflowMeta wr: contextvars.ContextVar[str | None] = contextvars.ContextVar( "workflow_run_id", default=None @@ -76,7 +76,7 @@ def filter(self, record): # Custom log handler to process log lines class CustomLogHandler(StreamHandler): - def __init__(self, event_client: EventClientImpl, stream=None): + def __init__(self, event_client: EventClient, stream=None): super().__init__(stream) self.logger_thread_pool = ThreadPoolExecutor(max_workers=1) self.event_client = event_client @@ -99,7 +99,7 @@ def emit(self, record): def capture_logs( logger: logging.Logger, - event_client: EventClientImpl, + event_client: EventClient, func: Coroutine[Any, Any, Any], ): @functools.wraps(func) diff --git a/hatchet_sdk/workflow.py b/hatchet_sdk/workflow.py index 290e14da..2a82846e 100644 --- a/hatchet_sdk/workflow.py +++ b/hatchet_sdk/workflow.py @@ -1,7 +1,7 @@ import functools from typing import Any, Callable, List, Tuple -from .workflows_pb2 import ( +from hatchet_sdk.contracts.workflows_pb2 import ( CreateWorkflowJobOpts, CreateWorkflowStepOpts, CreateWorkflowVersionOpts, diff --git a/hatchet_sdk/workflows_pb2.py b/hatchet_sdk/workflows_pb2.py deleted file mode 100644 index 4049443b..00000000 --- a/hatchet_sdk/workflows_pb2.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol 
buffer compiler. DO NOT EDIT! -# source: workflows.proto -# Protobuf Python Version: 4.25.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xaf\x03\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_job\"n\n\x17WorkflowConcurrencyOpts\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x10\n\x08max_runs\x18\x02 \x01(\x05\x12\x31\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategy\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xbe\x01\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\"1\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x05\"\x16\n\x14ListWorkflowsRequest\"\x93\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_key\"\xb2\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x05\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"\x9d\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 
\x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*l\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\x8a\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=2072 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=2180 - _globals['_RATELIMITDURATION']._serialized_start=2182 - _globals['_RATELIMITDURATION']._serialized_end=2275 - _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 - _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 - _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 - _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_end=548 - _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_start=550 - _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_end=660 - _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=662 - _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=766 - _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=769 - _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=959 - _globals['_CREATESTEPRATELIMIT']._serialized_start=961 - _globals['_CREATESTEPRATELIMIT']._serialized_end=1010 - _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1012 - _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1034 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1037 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=1312 - _globals['_WORKFLOWVERSION']._serialized_start=1315 - _globals['_WORKFLOWVERSION']._serialized_end=1493 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=1495 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=1558 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=1560 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=1617 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=1620 - 
_globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=1905
-  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=1907
-  _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=1957
-  _globals['_PUTRATELIMITREQUEST']._serialized_start=1959
-  _globals['_PUTRATELIMITREQUEST']._serialized_end=2046
-  _globals['_PUTRATELIMITRESPONSE']._serialized_start=2048
-  _globals['_PUTRATELIMITRESPONSE']._serialized_end=2070
-  _globals['_WORKFLOWSERVICE']._serialized_start=2278
-  _globals['_WORKFLOWSERVICE']._serialized_end=2544
-# @@protoc_insertion_point(module_scope)
diff --git a/pyproject.toml b/pyproject.toml
index 5b3b7328..272eba62 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,3 +40,21 @@ known_third_party = [
   "pyyaml",
   "urllib3"
 ]
+
+[tool.poetry.scripts]
+api = "examples.api.api:main"
+async = "examples.async.worker:main"
+fanout = "examples.fanout.worker:main"
+cancellation = "examples.cancellation.worker:main"
+concurrency_limit = "examples.concurrency-limit.worker:main"
+concurrency_limit_rr = "examples.concurrency-limit-rr.worker:main"
+dag = "examples.dag.worker:main"
+delayed = "examples.delayed.worker:main"
+events = "examples.events.worker:main"
+logger = "examples.logger.worker:main"
+manual_trigger = "examples.manual_trigger.worker:main"
+on_failure = "examples.on_failure.worker:main"
+programatic_replay = "examples.programatic_replay.worker:main"
+rate_limit = "examples.rate_limit.worker:main"
+simple = "examples.simple.worker:main"
+timeout = "examples.timeout.worker:main"
diff --git a/tests/test_client.py b/tests/test_client.py
index 2bd0fb87..9aaf0b26 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -12,20 +12,3 @@ def hatchet():
 
 def test_client(hatchet):
     assert hatchet
-
-
-# requires scope module or higher for shared event loop
-@pytest.mark.asyncio(scope="session")
-async def test_listen(hatchet):
-    run = hatchet.client.admin.get_workflow_run("839e089c-6708-4708-ae79-bad47c832580")
-    result = await run.result()
-    print(result)
-    assert result
-
-
-@pytest.mark.asyncio(scope="session")
-async def test_listen2(hatchet):
-    run = hatchet.client.admin.get_workflow_run("839e089c-6708-4708-ae79-bad47c832580")
-    result = await run.result()
-    print(result)
-    assert result
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 00000000..220e788f
--- /dev/null
+++ b/tests/utils/__init__.py
@@ -0,0 +1,2 @@
+from .bg_worker import fixture_bg_worker
+from .hatchet_client import hatchet_client_fixture
diff --git a/tests/utils/bg_worker.py b/tests/utils/bg_worker.py
new file mode 100644
index 00000000..f99372eb
--- /dev/null
+++ b/tests/utils/bg_worker.py
@@ -0,0 +1,20 @@
+import subprocess
+import time
+
+import pytest
+
+
+def fixture_bg_worker(command, startup_time=5):
+    @pytest.fixture(scope="session", autouse=True)
+    def fixture_background_hatchet_worker():
+        proc = subprocess.Popen(command)
+
+        # sleep long enough to make sure we are up and running
+        time.sleep(startup_time)
+
+        yield
+
+        proc.terminate()
+        proc.wait()
+
+    return fixture_background_hatchet_worker
diff --git a/tests/utils/hatchet_client.py b/tests/utils/hatchet_client.py
new file mode 100644
index 00000000..797cfa08
--- /dev/null
+++ b/tests/utils/hatchet_client.py
@@ -0,0 +1,14 @@
+import pytest
+from dotenv import load_dotenv
+
+from hatchet_sdk.hatchet import Hatchet
+
+load_dotenv()
+
+
+def hatchet_client_fixture():
+    @pytest.fixture
+    def hatchet():
+        return Hatchet(debug=True)
+
+    return hatchet
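
With `Hatchet.client` deprecated in favor of the new `admin`, `dispatcher`, `event`, and `rest` properties, call sites migrate by dropping the intermediate attribute. A minimal sketch of the before/after, assuming a configured environment; the event key and payload here are placeholders, and `push` is assumed to keep its existing `(key, payload)` signature:

from hatchet_sdk import Hatchet

hatchet = Hatchet(debug=True)

# Before (now emits a DeprecationWarning via the `client` property):
# hatchet.client.event.push("example:created", {"test": "test"})

# After: go through the top-level property instead.
hatchet.event.push("example:created", {"test": "test"})

The same shape applies to the other surfaces, e.g. `hatchet.admin` and `hatchet.rest` in place of `hatchet.client.admin` and `hatchet.client.rest()`.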
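
The `desired_worker_id` field added to `TriggerWorkflowRequest` lets a trigger target a specific worker. The diff only shows the contract change, not how (or whether) the higher-level admin client exposes it, so the sketch below builds the generated message directly; the workflow name, JSON input, and worker id are all hypothetical:

from hatchet_sdk.contracts.workflows_pb2 import TriggerWorkflowRequest

# `input` is a JSON-encoded string, matching how the SDK passes workflow
# inputs elsewhere; the worker id would come from a registered worker.
request = TriggerWorkflowRequest(
    name="MyWorkflow",                 # hypothetical workflow name
    input='{"test": "test"}',
    desired_worker_id="worker-uuid",   # hypothetical worker id
)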
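
Note that the new `tests/utils` helpers are fixture factories rather than fixtures: `hatchet_client_fixture()` and `fixture_bg_worker(...)` each return a pytest fixture that a test module binds at import time, which is what lets every example suite pick its own worker command. A sketch of the intended wiring, using the `simple` script registered in pyproject.toml; the test body is a placeholder:

import pytest

from hatchet_sdk import Hatchet
from tests.utils import fixture_bg_worker, hatchet_client_fixture

# Binding the factories at module scope registers the fixtures with pytest.
hatchet = hatchet_client_fixture()
worker = fixture_bg_worker(["poetry", "run", "simple"])


@pytest.mark.asyncio(scope="session")
async def test_client_is_configured(hatchet: Hatchet):
    assert hatchet is not None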