diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index 18799aa..13a2530 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -7,9 +7,15 @@ on: jobs: pypi: runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/${{ github.event.repository.name }} + permissions: + contents: read # add default back in + id-token: write steps: - name: Check out repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.1.5 - name: Set up Python 3.x uses: actions/setup-python@v5.1.0 with: @@ -18,6 +24,5 @@ jobs: uses: snok/install-poetry@v1.3.4 - name: Build the package run: poetry build -vvv - - name: Publish to PyPI - run: poetry publish --username __token__ --password ${{ secrets.pypi_token }} - working-directory: ${{ github.workspace }} + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@v1.8.14 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa5a14a..07f0cc0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,4 @@ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: diff --git a/README.md b/README.md index 7e079dc..7839e69 100644 --- a/README.md +++ b/README.md @@ -10,3 +10,8 @@ [![GitHub issues](https://img.shields.io/github/issues/JoshKarpel/synthesize)](https://github.com/JoshKarpel/synthesize/issues) [![GitHub pull requests](https://img.shields.io/github/issues-pr/JoshKarpel/synthesize)](https://github.com/JoshKarpel/synthesize/pulls) + +Synthesize is a tool for managing long-lived development workflows that involve multiple tools executing concurrently, +each of which might have bespoke conditions around when and how it needs to be run or re-run. + +See [the documentation](https://www.synth.how) for more information. 
diff --git a/docs/changelog.md b/docs/changelog.md index a82ecd9..55d0d42 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -2,7 +2,7 @@ ## `0.0.3` -*Unreleased* +Released `2024-07-07` ### Added @@ -25,6 +25,8 @@ and flows (graphs of targets and triggers)." - [#41](https://github.com/JoshKarpel/synthesize/pull/41) Execution duration is printed in the completion message. +- [#49](https://github.com/JoshKarpel/synthesize/pull/49) + Flow nodes can now have multiple triggers. ### Fixed diff --git a/docs/config.md b/docs/config.md new file mode 100644 index 0000000..0c0b0d8 --- /dev/null +++ b/docs/config.md @@ -0,0 +1,3 @@ +# Config + +@schema(synthesize.config, Config) diff --git a/docs/examples/after.yaml b/docs/examples/after.yaml index a3d96e5..3e470fb 100644 --- a/docs/examples/after.yaml +++ b/docs/examples/after.yaml @@ -9,16 +9,16 @@ flows: target: sleep-and-echo D: target: sleep-and-echo - trigger: - after: ["A", "B"] + triggers: + - after: [A, B] E: target: sleep-and-echo - trigger: - after: ["C"] + triggers: + - after: [C] F: target: sleep-and-echo - trigger: - after: ["D", "E"] + triggers: + - after: [D, E] targets: sleep-and-echo: diff --git a/docs/examples/restart-after.yaml b/docs/examples/restart-after.yaml deleted file mode 100644 index cc096f9..0000000 --- a/docs/examples/restart-after.yaml +++ /dev/null @@ -1,22 +0,0 @@ -flows: - default: - nodes: - A: - target: sleep-and-echo - trigger: - type: restart - delay: 10 - B: - target: sleep-and-echo - trigger: - after: ["A"] - C: - target: sleep-and-echo - trigger: - after: ["B"] - -targets: - sleep-and-echo: - commands: | - sleep 2 - echo "Hi from {{ id }}!" 
diff --git a/docs/examples/restart-and-watch.yaml b/docs/examples/restart-and-watch.yaml new file mode 100644 index 0000000..46da8fc --- /dev/null +++ b/docs/examples/restart-and-watch.yaml @@ -0,0 +1,9 @@ +flows: + default: + nodes: + docs: + target: + commands: mkdocs serve --strict + triggers: + - delay: 1 + - watch: ["docs/hooks/"] diff --git a/docs/examples/restart.yaml b/docs/examples/restart.yaml index 5b75809..401cae2 100644 --- a/docs/examples/restart.yaml +++ b/docs/examples/restart.yaml @@ -3,14 +3,12 @@ flows: nodes: A: target: sleep-and-echo - trigger: - type: restart - delay: 3 + triggers: + - delay: 3 B: target: sleep-and-echo - trigger: - type: restart - delay: 1 + triggers: + - delay: 1 targets: sleep-and-echo: diff --git a/docs/examples/watch.yaml b/docs/examples/watch.yaml index 5048240..e143d9f 100644 --- a/docs/examples/watch.yaml +++ b/docs/examples/watch.yaml @@ -3,14 +3,12 @@ flows: nodes: A: target: sleep-and-echo - trigger: - type: watch - paths: ["synthesize/", "tests/"] + triggers: + - watch: ["synthesize/", "tests/"] B: target: sleep-and-echo - trigger: - type: watch - paths: [ "docs/" ] + triggers: + - watch: [ "docs/" ] targets: sleep-and-echo: diff --git a/docs/flows.md b/docs/flows.md new file mode 100644 index 0000000..269690c --- /dev/null +++ b/docs/flows.md @@ -0,0 +1,5 @@ +# Flows + +@schema(synthesize.config, Flow) + +@schema(synthesize.config, Node) diff --git a/docs/hooks/__init__.py b/docs/hooks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/docs/hooks/schemas.py b/docs/hooks/schemas.py new file mode 100644 index 0000000..3a449e9 --- /dev/null +++ b/docs/hooks/schemas.py @@ -0,0 +1,129 @@ +import importlib +import logging +import re +from collections.abc import Iterator, Mapping + +from mkdocs.config.defaults import MkDocsConfig +from mkdocs.structure.files import Files +from mkdocs.structure.pages import Page +from openapi_pydantic import DataType, Reference, Schema + +logger = 
logging.getLogger("mkdocs") + +INDENT = " " * 4 + + +def on_page_markdown( + markdown: str, + page: Page, + config: MkDocsConfig, + files: Files, +) -> str: + lines = [] + for line in markdown.splitlines(): + if match := re.match(r"@schema\(([\w\.]+)\s*\,\s*(\w*)\)", line): + mod = importlib.import_module(match.group(1)) + importlib.reload(mod) + model = getattr(mod, match.group(2)) + # lines.append(f"```json\n{json.dumps(model.model_json_schema(), indent=2)}\n```") + schema_raw = model.model_json_schema() + schema = Schema.model_validate(schema_raw) + defs = {k: Schema.model_validate(v) for k, v in schema_raw.get("$defs", {}).items()} + lines.append("") + lines.extend(schema_lines(schema, None, defs)) + lines.append("") + else: + lines.append(line) + + return "\n".join(lines) + + +def indent(lines: Iterator[str]) -> Iterator[str]: + return (INDENT + l for l in lines) + + +sep = "○" + + +def schema_lines( + schema_or_ref: Schema | Reference, key: str | None, defs: Mapping[str, Schema] +) -> Iterator[str]: + schema = ref_to_schema(schema_or_ref, defs) + + dt = italic(display_type(schema, defs)) + + st = schema.title + assert st is not None + + if schema.type in {DataType.STRING, DataType.NUMBER, DataType.BOOLEAN}: + t = mono(st.lower()) if st else "" + default = f" (Default: {mono(repr(schema.default))}) " if not schema.required else " " + yield f"- {t} {dt} {default} {sep if schema.description else ''} {schema.description}" + elif schema.type is DataType.ARRAY: + t = mono(st.lower()) if st else "" + yield f"- {t} {dt} {sep if schema.description else ''} {schema.description}" + elif schema.type is DataType.OBJECT: + default = ( + f" (Default: {mono(repr(schema.default))}) " if key and not schema.required else " " + ) + yield f"- {key or st.title()} {dt} {default} {sep if schema.description else ''} {schema.description or ''}" + if not schema.properties: + return + for k, prop in schema.properties.items(): + yield from indent(schema_lines(prop, mono(k), defs)) + elif 
schema.type is None: + if schema.anyOf: + yield f"- {mono(st.lower())} {dt} {sep if schema.description else ''} {schema.description}" + else: + raise NotImplementedError( + f"Type {schema.type} not implemented. Appeared in the schema for {st}: {schema!r}." + ) + else: + raise NotImplementedError( + f"Type {schema.type} not implemented. Appeared in the schema for {st}: {schema!r}." + ) + + +def ref_to_schema(schema_or_ref: Schema | Reference, defs: Mapping[str, Schema]) -> Schema: + if isinstance(schema_or_ref, Reference): + try: + return defs[schema_or_ref.ref.removeprefix("#/$defs/")] + except KeyError: + logger.error(f"Could not find reference {schema_or_ref.ref!r} in {defs.keys()!r}") + raise + else: + return schema_or_ref + + +def display_type(schema: Schema | Reference, defs: Mapping[str, Schema]) -> str: + schema = ref_to_schema(schema, defs) + + st = schema.type + + if isinstance(st, DataType) and st in { + DataType.STRING, + DataType.NUMBER, + DataType.BOOLEAN, + DataType.OBJECT, + }: + return str(st.value) + elif st is DataType.ARRAY: + assert schema.items is not None + return f"array[{display_type(schema.items, defs)}]" + elif st is None and (options := schema.anyOf) is not None: + schemas = [ref_to_schema(s, defs) for s in options] + return " | ".join(s.title or str(s.type.value) for s in schemas) # type: ignore[union-attr] + else: + raise NotImplementedError(f"Type {st} not implemented. Schema: {schema!r}.") + + +def italic(s: str) -> str: + return f"*{s}*" + + +def bold(s: str) -> str: + return f"**{s}**" + + +def mono(s: str) -> str: + return f"`{s}`" diff --git a/docs/index.md b/docs/index.md index b11d674..b8fd28c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,7 +1,60 @@ # Synthesize +Synthesize is a tool for managing long-lived development workflows that involve multiple tools executing concurrently, +each of which might have bespoke conditions around when and how it needs to be run or re-run. 
+ +In Synthesize, a **flow** is a graph (potentially disjoint) of **nodes**, +each of which runs a **target** whenever one of that node's **triggers** activates. +Synthesize has a wide variety of triggers: + +- Target `B` should run after target `A` runs. +- Target `W` should run every time file `F` changes. +- Target `R` should be restarted if it ever exits. +- Target `O` should run once when the flow starts. + +These can all coexist as part of the same flow, and can even be combined for a single target, +allowing for complex nodes like +["restart target `W` if it exits or if file `F` changes"](./triggers.md#example-restarting-on-completion-or-config-changes). + +## Features + +- Target and trigger definitions can be factored out and shared across multiple nodes and flows. +- Targets are just shell commands, so you can use any tools you'd like. Synthesize works with your existing tools; it doesn't replace them. +- Targets can be parameterized with arguments (each target is actually a [Jinja template](https://jinja.palletsprojects.com/)) and environment variables. + Arguments and environment variables can also be provided at the flow and target levels (most specific wins). +- Nodes can have multiple triggers, allowing you to express complex triggering conditions. +- All command output is combined in a single output stream, with each node's output prefixed with a timestamp and its name. +- The current time and the status of each node are displayed at the bottom of your terminal. +- You can generate [Mermaid](https://mermaid.js.org/) diagrams of your flows for debugging and documentation. + +## Examples + +As an example, here is Synthesize's own `synth.yaml` configuration file: + ```yaml --8<-- "synth.yaml" ``` @mermaid(synth.yaml) + +## Installation + +Synthesize is [available on PyPI](https://pypi.org/project/synthesize/). + +We recommend installing Synthesize via `pipx`: + +```bash +pipx install synthesize +``` + +Then run +``` +synth --help +``` +to get started. 
+ +## Inspirations + +- [`concurrently`](https://www.npmjs.com/package/concurrently) +- [`make`](https://www.gnu.org/software/make/) +- [`just`](https://github.com/casey/just) diff --git a/docs/targets.md b/docs/targets.md new file mode 100644 index 0000000..ebdfe38 --- /dev/null +++ b/docs/targets.md @@ -0,0 +1,3 @@ +# Targets + +@schema(synthesize.config, Target) diff --git a/docs/triggers.md b/docs/triggers.md index edd482e..624fc8b 100644 --- a/docs/triggers.md +++ b/docs/triggers.md @@ -5,7 +5,7 @@ ### Once "Once" triggers run the node just one time during the flow. -This is the default trigger. +This is the default trigger, so it does not need to be specified. Use this trigger when a command needs to run only one time during a flow. @@ -21,6 +21,8 @@ Use this trigger when a command needs to run only one time during a flow. Use this trigger when a node depends on the output of another node. +@schema(synthesize.config, After) + ```yaml --8<-- "docs/examples/after.yaml" ``` @@ -33,6 +35,8 @@ Use this trigger when a node depends on the output of another node. Use this trigger when you want to keep the node's command running. +@schema(synthesize.config, Restart) + ```yaml --8<-- "docs/examples/restart.yaml" ``` @@ -46,18 +50,45 @@ Use this trigger when you want to keep the node's command running. Use this trigger to run a node in reaction to changes in the filesystem. +@schema(synthesize.config, Watch) + ```yaml --8<-- "docs/examples/watch.yaml" ``` @mermaid(docs/examples/watch.yaml) -## Combining Triggers +## Using Multiple Triggers + +### Example: Restarting on Completion or Config Changes + +Synthesize uses `mkdocs` for documentation. +`mkdocs` comes with a built-in command `mkdocs serve` to watch for +configuration and documentation changes and rebuild the site in response, +but it doesn't automatically restart the whole process when +[*hooks*](https://www.mkdocs.org/user-guide/configuration/#hooks) +are changed. 
+Since hooks are imported Python code, the `mkdocs` process needs to +be restarted when they change in order to pick up changes to them. + +However, if the hooks (or any other configuration) are malformed, +`mkdocs` will exit with an error on startup. +If we were just running `mkdocs serve` by hand on the command line, +we would have to manually restart it every time we changed the hooks, +potentially multiple times if we are debugging. + +To get a hands-off developer flow to enable fast iteration cycles, +we want the following things to all happen: + +- If `mkdocs` exits (for any reason), restart it. +- If any of the hook files changes, restart `mkdocs`. +- If neither of those happen, let `mkdocs serve` keep running forever. -### Restart + After +This is straightforward to express with Synthesize by using both restart and watch triggers +for a target that run `mkdocs serve` (which blocks): ```yaml ---8<-- "docs/examples/restart-after.yaml" +--8<-- "docs/examples/restart-and-watch.yaml" ``` -@mermaid(docs/examples/restart-after.yaml) +@mermaid(docs/examples/restart-and-watch.yaml) diff --git a/mkdocs.yml b/mkdocs.yml index ae464e0..3ade4db 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -56,6 +56,7 @@ plugins: hooks: - docs/hooks/mermaid.py + - docs/hooks/schemas.py markdown_extensions: - admonition @@ -88,5 +89,8 @@ extra: nav: - Introduction: index.md + - config.md + - flows.md + - targets.md - triggers.md - changelog.md diff --git a/poetry.lock b/poetry.lock index c8099cb..939e037 100644 --- a/poetry.lock +++ b/poetry.lock @@ -353,13 +353,13 @@ colorama = ">=0.4" [[package]] name = "hypothesis" -version = "6.104.1" +version = "6.105.0" description = "A library for property-based testing" optional = false python-versions = ">=3.8" files = [ - {file = "hypothesis-6.104.1-py3-none-any.whl", hash = "sha256:a0a898fa78ecaefe76ad248901dc274e598f29198c6015b3053f7f7827670e0e"}, - {file = "hypothesis-6.104.1.tar.gz", hash = 
"sha256:4033898019a6149823d2feeb8d214921b4ac2d342a05d6b02e40a3ca4be07eea"}, + {file = "hypothesis-6.105.0-py3-none-any.whl", hash = "sha256:383bb2d8b37d8090e82847d7cb1130b9ef04bf8c71addd099f7ee1f7139f00d8"}, + {file = "hypothesis-6.105.0.tar.gz", hash = "sha256:45078be3168c06a0426afca6f139829b28dfea9d2ea79fa1bd498b9783b11c16"}, ] [package.dependencies] @@ -368,10 +368,10 @@ exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.55)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.58)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.6)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] -crosshair = ["crosshair-tool (>=0.0.55)", "hypothesis-crosshair (>=0.0.4)"] +crosshair = ["crosshair-tool (>=0.0.58)", "hypothesis-crosshair (>=0.0.6)"] dateutil = ["python-dateutil (>=1.4)"] django = ["django (>=3.2)"] dpcontracts = ["dpcontracts (>=0.4)"] @@ -631,13 +631,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.27" +version = "9.5.28" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.27-py3-none-any.whl", hash = "sha256:af8cc263fafa98bb79e9e15a8c966204abf15164987569bd1175fd66a7705182"}, - {file = 
"mkdocs_material-9.5.27.tar.gz", hash = "sha256:a7d4a35f6d4a62b0c43a0cfe7e987da0980c13587b5bc3c26e690ad494427ec0"}, + {file = "mkdocs_material-9.5.28-py3-none-any.whl", hash = "sha256:ff48b11b2a9f705dd210409ec3b418ab443dd36d96915bcba45a41f10ea27bfd"}, + {file = "mkdocs_material-9.5.28.tar.gz", hash = "sha256:9cba305283ad1600e3d0a67abe72d7a058b54793b47be39930911a588fe0336b"}, ] [package.dependencies] @@ -809,6 +809,20 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] +[[package]] +name = "openapi-pydantic" +version = "0.4.1" +description = "Pydantic OpenAPI schema implementation" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "openapi_pydantic-0.4.1-py3-none-any.whl", hash = "sha256:4038612afd80b846a5e30758565dceb6b98a6ba5b7e98241452b0a2bc29bd78a"}, + {file = "openapi_pydantic-0.4.1.tar.gz", hash = "sha256:88a8e93090250787723ecc196df201d8ab7d3576245a19840d28fc463fbca6f0"}, +] + +[package.dependencies] +pydantic = ">=1.8" + [[package]] name = "packaging" version = "24.1" @@ -892,109 +906,122 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = 
"python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = 
"pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - 
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = 
"pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", 
hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -1586,5 +1613,5 @@ anyio = ">=3.0.0" [metadata] lock-version = "2.0" -python-versions = ">=3.10" -content-hash = "766e4897e1ddefe89c187a03f1a8595d78228b455c1f50fb8412b790f7b3a671" +python-versions = ">=3.10,<4" +content-hash = "987aed62c5ee756763478c4237bb4720236d3701e1d92086162afee8416faaec" diff --git a/pyproject.toml b/pyproject.toml index c1bb050..c611b4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ license = "MIT" include = ["py.typed"] [tool.poetry.dependencies] -python = ">=3.10" +python = ">=3.10,<4" # required by openapi-pydantic pydantic = ">=2" rich = ">=13.3" typer = ">=0.7" @@ -55,6 +55,7 @@ types-pyyaml = ">=6" mkdocs = ">=1.4" mkdocs-material = ">=9" mkdocstrings = {extras = ["python"], version = ">=0.19.0"} +openapi-pydantic = ">=0.4" [tool.poetry.scripts] synth = 'synthesize.cli:cli' diff --git a/synth.yaml b/synth.yaml index 31583df..f3efcad 100644 --- a/synth.yaml +++ b/synth.yaml @@ -3,14 +3,17 @@ flows: nodes: tests: target: tests - trigger: code-changes + triggers: + - code-changes types: target: types - trigger: code-changes + triggers: + - code-changes docs: target: docs - trigger: - type: restart + triggers: + - delay: 1 + - watch: ["docs/hooks/"] targets: tests: @@ -27,10 +30,10 @@ targets: triggers: code-changes: - type: watch - paths: - - synthesize/ - - tests/ - - docs/examples/ - - pyproject.toml - - .coveragerc + watch: + - synthesize/ + - tests/ + - docs/examples/ + - docs/hooks/ + - pyproject.toml + - .coveragerc diff --git a/synthesize/config.py b/synthesize/config.py index 5f129e6..4c204f8 100644 --- a/synthesize/config.py +++ b/synthesize/config.py @@ -8,10 +8,11 @@ from pathlib import Path from random import random from textwrap import dedent -from typing import Annotated, Literal, Union +from typing import Annotated, Union from identify.identify import tags_from_path from jinja2 import Environment 
+from networkx import DiGraph from pydantic import Field, field_validator from rich.color import Color from typing_extensions import assert_never @@ -30,21 +31,8 @@ ], object, ] -Envs = dict[ - Annotated[ - str, - Field( - min_length=1, - ), - ], - str, -] -ID = Annotated[ - str, - Field( - pattern=r"\w+", - ), -] +Envs = dict[Annotated[str, Field(min_length=1)], str] +ID = Annotated[str, Field(pattern=r"\w+")] def random_color() -> str: @@ -60,11 +48,23 @@ def random_color() -> str: class Target(Model): - commands: str = "" - args: Args = {} - envs: Envs = {} + commands: Annotated[str, Field(description="The commands to run for this target.")] = "" + args: Annotated[ + Args, + Field( + description="Template arguments to apply to this target by default.", + ), + ] = {} + envs: Annotated[ + Envs, + Field( + description="Environment variables to apply to this target by default.", + ), + ] = {} - executable: str = "sh -eu" + executable: Annotated[str, Field(description="The executable to run this target with.")] = ( + "sh -eu" + ) @field_validator("commands") @classmethod @@ -91,18 +91,20 @@ def render(self, args: Args) -> str: class Once(Model): - type: Literal["once"] = "once" + pass class After(Model): - type: Literal["after"] = "after" - - after: Annotated[tuple[str, ...], Field(min_length=1)] + after: Annotated[ + tuple[str, ...], + Field( + min_length=1, + description="The IDs of the nodes to wait for.", + ), + ] class Restart(Model): - type: Literal["restart"] = "restart" - delay: Annotated[ float, Field( @@ -113,9 +115,12 @@ class Restart(Model): class Watch(Model): - type: Literal["watch"] = "watch" - - paths: tuple[str, ...] + watch: Annotated[ + tuple[str, ...], + Field( + description="The paths to watch for changes. 
Directories are watched recursively.", + ), + ] AnyTrigger = Union[ @@ -126,14 +131,24 @@ class Watch(Model): ] -class FlowNode(Model): +class ResolvedNode(Model): id: str target: Target - args: Args = {} - envs: Envs = {} + args: Annotated[ + Args, + Field( + description="Template arguments to apply to this node.", + ), + ] = {} + envs: Annotated[ + Envs, + Field( + description="Environment variables to apply to this node.", + ), + ] = {} - trigger: AnyTrigger = Once() + triggers: tuple[AnyTrigger, ...] = (Once(),) color: Annotated[str, Field(default_factory=random_color)] @@ -142,35 +157,84 @@ def uid(self) -> str: return md5(self.model_dump_json(exclude={"color"}).encode()) -class UnresolvedFlowNode(Model): - target: Target | ID - args: Args = {} - envs: Envs = {} +class Node(Model): + target: Annotated[ + Target | ID, + Field( + description="The target to run for this node. It may either be the name of a pre-defined target, or a full target definition.", + ), + ] + args: Annotated[ + Args, + Field( + description="Template arguments to apply to this node.", + ), + ] = {} + envs: Annotated[ + Envs, + Field( + description="Environment variables to apply to this node.", + ), + ] = {} - trigger: AnyTrigger | ID = Once() + triggers: Annotated[ + tuple[AnyTrigger | ID, ...], + Field( + description="The list of triggers for this node. 
Each trigger may be the name of a pre-defined trigger, or a full trigger definition.", + ), + ] = (Once(),) - color: Annotated[str, Field(default_factory=random_color)] + color: Annotated[ + str, + Field( + default_factory=random_color, + description="The color that will be used to help differentiate this node from others.", + ), + ] def resolve( self, id: str, targets: Mapping[str, Target], triggers: Mapping[str, AnyTrigger], - ) -> FlowNode: - return FlowNode( + ) -> ResolvedNode: + return ResolvedNode( id=id, target=targets[self.target] if isinstance(self.target, str) else self.target, args=self.args, envs=self.envs, - trigger=(triggers[self.trigger] if isinstance(self.trigger, str) else self.trigger), + triggers=tuple(triggers[t] if isinstance(t, str) else t for t in self.triggers), color=self.color, ) -class Flow(Model): - nodes: dict[ID, FlowNode] - args: Args = {} - envs: Envs = {} +class ResolvedFlow(Model): + nodes: dict[ID, ResolvedNode] + args: Annotated[ + Args, + Field( + description="Template arguments to apply to all nodes in this flow.", + ), + ] = {} + envs: Annotated[ + Envs, + Field( + description="Environment variables to apply to all nodes in this flow.", + ), + ] = {} + + @cached_property + def graph(self) -> DiGraph: + graph = DiGraph() + + for id, node in self.nodes.items(): + graph.add_node(id) + for t in node.triggers: + if isinstance(t, After): + for predecessor_id in t.after: + graph.add_edge(predecessor_id, id) + + return graph def mermaid(self) -> str: lines = ["flowchart TD"] @@ -179,38 +243,54 @@ def mermaid(self) -> str: for id, node in self.nodes.items(): lines.append(f"{node.id}({id})") - match node.trigger: - case Once(): - pass - case After(after=after): - for a in after: - lines.append(f"{self.nodes[a].id} --> {node.id}") - case Restart(delay=delay): - lines.append(f"{node.id} -->|∞ {delay:.3g}s| {node.id}") - case Watch(paths=paths): - text = "\n".join(paths) - h = md5("".join(paths)) - if h not in seen_watches: - 
seen_watches.add(h) - lines.append(f'w_{h}[("{text}")]') - lines.append(f"w_{h} -->|👁| {node.id}") - case never: - assert_never(never) + for t in node.triggers: + match t: + case Once(): + pass + case After(after=after): + for a in after: + lines.append(f"{self.nodes[a].id} --> {node.id}") + case Restart(delay=delay): + lines.append(f"{node.id} -->|∞ {delay:.3g}s| {node.id}") + case Watch(watch=paths): + text = "\n".join(paths) + h = md5("".join(paths)) + if h not in seen_watches: + seen_watches.add(h) + lines.append(f'w_{h}[("{text}")]') + lines.append(f"w_{h} -->|👁| {node.id}") + case never: + assert_never(never) return "\n ".join(lines).strip() -class UnresolvedFlow(Model): - nodes: dict[ID, UnresolvedFlowNode] - args: Args = {} - envs: Envs = {} +class Flow(Model): + nodes: Annotated[ + Mapping[ID, Node], + Field( + description="Mapping of IDs to nodes.", + ), + ] = {} + args: Annotated[ + Args, + Field( + description="Template arguments to apply to all nodes in this flow.", + ), + ] = {} + envs: Annotated[ + Envs, + Field( + description="Environment variables to apply to all nodes in this flow.", + ), + ] = {} def resolve( self, targets: Mapping[ID, Target], triggers: Mapping[ID, AnyTrigger], - ) -> Flow: - return Flow( + ) -> ResolvedFlow: + return ResolvedFlow( nodes={id: node.resolve(id, targets, triggers) for id, node in self.nodes.items()}, args=self.args, envs=self.envs, @@ -218,9 +298,24 @@ def resolve( class Config(Model): - flows: dict[ID, UnresolvedFlow] = {} - targets: dict[ID, Target] = {} - triggers: dict[ID, AnyTrigger] = {} + flows: Annotated[ + Mapping[ID, Flow], + Field( + description="A mapping of IDs to flows.", + ), + ] = {} + targets: Annotated[ + Mapping[ID, Target], + Field( + description="A mapping of IDs to targets.", + ), + ] = {} + triggers: Annotated[ + Mapping[ID, AnyTrigger], + Field( + description="A mapping of IDs to triggers.", + ), + ] = {} @classmethod def from_file(cls, file: Path) -> Config: @@ -231,5 +326,5 @@ def 
from_file(cls, file: Path) -> Config: else: raise NotImplementedError("Currently, only YAML files are supported.") - def resolve(self) -> Mapping[ID, Flow]: + def resolve(self) -> Mapping[ID, ResolvedFlow]: return {id: flow.resolve(self.targets, self.triggers) for id, flow in self.flows.items()} diff --git a/synthesize/execution.py b/synthesize/execution.py index a8ea05a..3d34768 100644 --- a/synthesize/execution.py +++ b/synthesize/execution.py @@ -10,11 +10,11 @@ from stat import S_IEXEC from time import monotonic -from synthesize.config import Args, Envs, FlowNode +from synthesize.config import Args, Envs, ResolvedNode from synthesize.messages import ExecutionCompleted, ExecutionOutput, ExecutionStarted, Message -def write_script(node: FlowNode, args: Args, tmp_dir: Path) -> Path: +def write_script(node: ResolvedNode, args: Args, tmp_dir: Path) -> Path: path = tmp_dir / f"{node.id}-{node.uid}" path.parent.mkdir(parents=True, exist_ok=True) @@ -35,7 +35,7 @@ def write_script(node: FlowNode, args: Args, tmp_dir: Path) -> Path: @dataclass(frozen=True) class Execution: - node: FlowNode + node: ResolvedNode events: Queue[Message] = field(repr=False) @@ -46,7 +46,7 @@ class Execution: @classmethod async def start( cls, - node: FlowNode, + node: ResolvedNode, args: Args, envs: Envs, tmp_dir: Path, @@ -136,7 +136,7 @@ async def wait(self) -> Execution: return self -async def read_output(node: FlowNode, process: Process, events: Queue[Message]) -> None: +async def read_output(node: ResolvedNode, process: Process, events: Queue[Message]) -> None: if process.stdout is None: # pragma: unreachable raise Exception(f"{process} does not have an associated stream reader") @@ -151,3 +151,8 @@ async def read_output(node: FlowNode, process: Process, events: Queue[Message]) text=line.decode("utf-8").rstrip(), ) ) + + +# need to track which trigger caused the node to run, +# because that changes the semantics of the restart +# the manager should protect itself from multiple restarts? 
diff --git a/synthesize/messages.py b/synthesize/messages.py index 23f2bbb..b4ae399 100644 --- a/synthesize/messages.py +++ b/synthesize/messages.py @@ -3,7 +3,7 @@ from pydantic import Field from watchfiles import Change -from synthesize.config import FlowNode +from synthesize.config import ResolvedNode from synthesize.model import Model @@ -12,24 +12,24 @@ class Message(Model): class ExecutionStarted(Message): - node: FlowNode + node: ResolvedNode pid: int class ExecutionCompleted(Message): - node: FlowNode + node: ResolvedNode pid: int exit_code: int duration: timedelta class ExecutionOutput(Message): - node: FlowNode + node: ResolvedNode text: str class WatchPathChanged(Message): - node: FlowNode + node: ResolvedNode changes: set[tuple[Change, str]] diff --git a/synthesize/orchestrator.py b/synthesize/orchestrator.py index 590b3ad..605eaf7 100644 --- a/synthesize/orchestrator.py +++ b/synthesize/orchestrator.py @@ -1,7 +1,7 @@ from __future__ import annotations import signal -from asyncio import Queue, Task, create_task, gather, sleep +from asyncio import Queue, Task, create_task, gather, get_running_loop, sleep from collections.abc import Iterable from pathlib import Path from tempfile import TemporaryDirectory @@ -9,7 +9,7 @@ from rich.console import Console from watchfiles import awatch -from synthesize.config import Flow, FlowNode, Restart, Watch +from synthesize.config import ResolvedFlow, ResolvedNode, Restart, Watch from synthesize.execution import Execution from synthesize.messages import ( ExecutionCompleted, @@ -21,11 +21,10 @@ ) from synthesize.renderer import Renderer from synthesize.state import FlowState, Status -from synthesize.utils import delay class Orchestrator: - def __init__(self, flow: Flow, console: Console): + def __init__(self, flow: ResolvedFlow, console: Console): self.flow = flow self.console = console @@ -79,22 +78,30 @@ async def handle_messages(self, tmp_dir: Path) -> None: self.state.mark_running(node) case 
ExecutionCompleted(node=node, exit_code=exit_code): - if isinstance(node.trigger, Restart): - self.state.mark_pending(node) - else: - if exit_code == 0: - self.state.mark_success(node) + if self.state.statuses[node.id] is not Status.Pending: + for t in node.triggers: + if isinstance(t, Restart): + if self.state.statuses[node.id] is not Status.Waiting: + self.state.mark(node, status=Status.Waiting) + + def waiting_to_pending() -> None: + if self.state.statuses[node.id] is Status.Waiting: + self.state.mark_pending(node) + + get_running_loop().call_later(t.delay, waiting_to_pending) + break else: - self.state.mark_failure(node) + if exit_code == 0: + self.state.mark_success(node) + else: + self.state.mark_failure(node) self.state.mark_pending(*self.state.children(node)) case WatchPathChanged(node=node): if e := self.executions.get(node.id): - self.waiters[node.id].add_done_callback( - lambda _: self.state.mark_pending(node) - ) e.terminate() + self.state.mark_pending(node) case Quit(): return @@ -120,38 +127,32 @@ async def start_ready_targets(self, tmp_dir: Path) -> None: if not e.has_exited: continue - async def start() -> None: - e = await Execution.start( - node=node, - args=self.flow.args, - envs=self.flow.envs, - tmp_dir=tmp_dir, - width=self.console.width - self.renderer.prefix_width, - events=self.inbox, - ) - self.executions[node.id] = e - self.waiters[node.id] = create_task(e.wait()) - self.state.mark_running(node) - - # When restarting after first execution, delay - if isinstance(node.trigger, Restart) and node.id in self.executions: - self.state.mark(node, status=Status.Waiting) - delay(node.trigger.delay, start) - else: - await start() + self.state.mark(node, status=Status.Waiting) + + e = await Execution.start( + node=node, + args=self.flow.args, + envs=self.flow.envs, + tmp_dir=tmp_dir, + width=self.console.width - self.renderer.prefix_width, + events=self.inbox, + ) + self.executions[node.id] = e + self.waiters[node.id] = create_task(e.wait()) async 
def start_watchers(self) -> None: for node in self.flow.nodes.values(): - if isinstance(node.trigger, Watch): - self.watchers[node.id] = create_task( - watch( - node=node, - paths=node.trigger.paths, - events=self.inbox, + for trigger in node.triggers: + if isinstance(trigger, Watch): + self.watchers[node.id] = create_task( + watch( + node=node, + paths=trigger.watch, + events=self.inbox, + ) ) - ) -async def watch(node: FlowNode, paths: Iterable[str | Path], events: Queue[Message]) -> None: +async def watch(node: ResolvedNode, paths: Iterable[str | Path], events: Queue[Message]) -> None: async for changes in awatch(*paths): await events.put(WatchPathChanged(node=node, changes=changes)) diff --git a/synthesize/state.py b/synthesize/state.py index 92cbece..d517349 100644 --- a/synthesize/state.py +++ b/synthesize/state.py @@ -7,32 +7,25 @@ from networkx import DiGraph, ancestors, descendants -from synthesize.config import After, Flow, FlowNode - - -class Status(Enum): - Pending = "pending" - Waiting = "waiting" - Running = "running" - Succeeded = "succeeded" - Failed = "failed" +from synthesize.config import After, ResolvedFlow, ResolvedNode @dataclass(frozen=True) class FlowState: graph: DiGraph - flow: Flow + flow: ResolvedFlow statuses: dict[str, Status] @classmethod - def from_flow(cls, flow: Flow) -> FlowState: + def from_flow(cls, flow: ResolvedFlow) -> FlowState: graph = DiGraph() for id, node in flow.nodes.items(): graph.add_node(id) - if isinstance(node.trigger, After): - for predecessor_id in node.trigger.after: - graph.add_edge(predecessor_id, id) + for t in node.triggers: + if isinstance(t, After): + for predecessor_id in t.after: + graph.add_edge(predecessor_id, id) return FlowState( graph=graph, @@ -40,13 +33,13 @@ def from_flow(cls, flow: Flow) -> FlowState: statuses={id: Status.Pending for id in graph.nodes}, ) - def nodes_by_status(self) -> Mapping[Status, Collection[FlowNode]]: + def nodes_by_status(self) -> Mapping[Status, 
Collection[ResolvedNode]]: d = defaultdict(list) for id, s in self.statuses.items(): d[s].append(self.flow.nodes[id]) return d - def ready_nodes(self) -> Collection[FlowNode]: + def ready_nodes(self) -> Collection[ResolvedNode]: return tuple( self.flow.nodes[id] for id in self.graph.nodes @@ -61,30 +54,39 @@ def ready_nodes(self) -> Collection[FlowNode]: ) ) - def mark_success(self, *nodes: FlowNode) -> None: + def mark_success(self, *nodes: ResolvedNode) -> None: self.mark(*nodes, status=Status.Succeeded) - def mark_failure(self, *nodes: FlowNode) -> None: + def mark_failure(self, *nodes: ResolvedNode) -> None: self.mark(*nodes, status=Status.Failed) - def mark_pending(self, *nodes: FlowNode) -> None: + def mark_pending(self, *nodes: ResolvedNode) -> None: self.mark(*nodes, status=Status.Pending) - def mark_running(self, *nodes: FlowNode) -> None: + def mark_running(self, *nodes: ResolvedNode) -> None: self.mark(*nodes, status=Status.Running) - def mark(self, *nodes: FlowNode, status: Status) -> None: + def mark(self, *nodes: ResolvedNode, status: Status) -> None: for node in nodes: self.statuses[node.id] = status - def children(self, node: FlowNode) -> Collection[FlowNode]: + def children(self, node: ResolvedNode) -> Collection[ResolvedNode]: return tuple(self.flow.nodes[id] for id in self.graph.successors(node.id)) - def descendants(self, node: FlowNode) -> Collection[FlowNode]: + def descendants(self, node: ResolvedNode) -> Collection[ResolvedNode]: return tuple(self.flow.nodes[id] for id in descendants(self.graph, node.id)) def all_done(self) -> bool: return all(status is Status.Succeeded for status in self.statuses.values()) - def nodes(self) -> Iterator[FlowNode]: + def nodes(self) -> Iterator[ResolvedNode]: yield from self.flow.nodes.values() + + +class Status(Enum): + Pending = "pending" + Waiting = "waiting" + Starting = "starting" + Running = "running" + Succeeded = "succeeded" + Failed = "failed" diff --git a/tests/test_config.py b/tests/test_config.py 
index ccbfb89..6104eab 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -9,12 +9,12 @@ Args, Config, Flow, - FlowNode, + Node, Once, + ResolvedFlow, + ResolvedNode, Restart, Target, - UnresolvedFlow, - UnresolvedFlowNode, random_color, ) @@ -121,77 +121,77 @@ def test_rendering_fails_for_bogus_executable() -> None: ("unresolved_node", "id", "targets", "triggers", "expected"), ( ( - UnresolvedFlowNode( + Node( target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ), "foo", {}, {}, - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ), ), ( - UnresolvedFlowNode( + Node( target="t", - trigger=Once(), + triggers=[Once()], color=color, ), "foo", {"t": Target(commands="echo")}, {}, - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ), ), ( - UnresolvedFlowNode( + Node( target=Target(commands="echo"), - trigger="r", + triggers=["r"], color=color, ), "foo", {}, {"r": Once()}, - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ), ), ( - UnresolvedFlowNode( + Node( target="t", - trigger="r", + triggers=["r"], color=color, ), "foo", {"t": Target(commands="echo")}, {"r": Once()}, - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ), ), ), ) def test_resolve_flow_node( - unresolved_node: UnresolvedFlowNode, + unresolved_node: Node, id: str, targets: dict[str, Target], triggers: dict[str, AnyTrigger], - expected: FlowNode, + expected: ResolvedNode, ) -> None: assert unresolved_node.resolve(id, targets, triggers) == expected @@ -200,36 +200,36 @@ def test_resolve_flow_node( ("unresolved_flow", "targets", "triggers", "expected"), ( ( - UnresolvedFlow( + Flow( nodes={ - "foo": UnresolvedFlowNode( + "foo": Node( target=Target(commands="echo"), - trigger=Once(), 
+ triggers=[Once()], color=color, ) } ), {}, {}, - Flow( + ResolvedFlow( nodes={ - "foo": FlowNode( + "foo": ResolvedNode( id="foo", target=Target(commands="echo"), - trigger=Once(), + triggers=[Once()], color=color, ) } ), ), ( - UnresolvedFlow( + Flow( nodes={ - "foo": UnresolvedFlowNode( + "foo": Node( target="t", args={"foo": "bar"}, envs={"FOO": "BAR"}, - trigger="r", + triggers=["r"], color=color, ) }, @@ -238,14 +238,14 @@ def test_resolve_flow_node( ), {"t": Target(commands="echo")}, {"r": Restart()}, - Flow( + ResolvedFlow( nodes={ - "foo": FlowNode( + "foo": ResolvedNode( id="foo", target=Target(commands="echo"), args={"foo": "bar"}, envs={"FOO": "BAR"}, - trigger=Restart(), + triggers=[Restart()], color=color, ) }, @@ -256,10 +256,10 @@ def test_resolve_flow_node( ), ) def test_resolve_flow( - unresolved_flow: UnresolvedFlow, + unresolved_flow: Flow, targets: dict[str, Target], triggers: dict[str, AnyTrigger], - expected: Flow, + expected: ResolvedFlow, ) -> None: assert unresolved_flow.resolve(targets, triggers) == expected @@ -270,13 +270,13 @@ def test_resolve_flow( ( Config( flows={ - "flow": UnresolvedFlow( + "flow": Flow( nodes={ - "foo": UnresolvedFlowNode( + "foo": Node( target="t", args={"foo": "bar"}, envs={"FOO": "BAR"}, - trigger="r", + triggers=["r"], color=color, ) }, @@ -288,14 +288,14 @@ def test_resolve_flow( triggers={"r": Restart()}, ), { - "flow": Flow( + "flow": ResolvedFlow( nodes={ - "foo": FlowNode( + "foo": ResolvedNode( id="foo", target=Target(commands="echo"), args={"foo": "bar"}, envs={"FOO": "BAR"}, - trigger=Restart(), + triggers=[Restart()], color=color, ) }, @@ -308,6 +308,6 @@ def test_resolve_flow( ) def test_resolve_config( config: Config, - expected: dict[str, Flow], + expected: dict[str, ResolvedFlow], ) -> None: assert config.resolve() == expected diff --git a/tests/test_execution.py b/tests/test_execution.py index 2574906..33dbb3b 100644 --- a/tests/test_execution.py +++ b/tests/test_execution.py @@ -3,7 +3,7 @@ 
import pytest -from synthesize.config import Envs, FlowNode, Target, random_color +from synthesize.config import Envs, ResolvedNode, Target, random_color from synthesize.execution import Execution from synthesize.messages import ExecutionCompleted, ExecutionOutput, ExecutionStarted, Message @@ -11,7 +11,7 @@ async def test_execution_lifecycle(tmp_path: Path) -> None: - node = FlowNode( + node = ResolvedNode( id="foo", target=Target(commands="echo 'hi'"), color=color, @@ -53,7 +53,7 @@ async def test_execution_lifecycle(tmp_path: Path) -> None: async def test_termination_before_completion(tmp_path: Path) -> None: - node = FlowNode( + node = ResolvedNode( id="foo", target=Target(commands="sleep 10 && echo 'hi'"), color=color, @@ -90,7 +90,7 @@ async def test_termination_before_completion(tmp_path: Path) -> None: async def test_termination_after_completion(tmp_path: Path) -> None: - node = FlowNode( + node = ResolvedNode( id="foo", target=Target(commands="echo 'hi'"), color=color, @@ -114,7 +114,7 @@ async def test_termination_after_completion(tmp_path: Path) -> None: async def test_execution_kill(tmp_path: Path) -> None: - node = FlowNode( + node = ResolvedNode( id="foo", target=Target(commands="sleep 10 && echo 'hi'"), color=color, @@ -151,7 +151,7 @@ async def test_execution_kill(tmp_path: Path) -> None: async def test_kill_after_completion(tmp_path: Path) -> None: - node = FlowNode( + node = ResolvedNode( id="foo", target=Target(commands="echo 'hi'"), color=color, @@ -178,7 +178,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: ("node", "envs", "expected"), ( ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $FORCE_COLOR"), color=color, @@ -187,7 +187,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "1", ), ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $COLUMNS"), color=color, @@ -196,7 +196,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "111", # set in test body below 
), ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $SYNTH_NODE_ID"), color=color, @@ -205,7 +205,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "foo", ), ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $FOO"), envs=Envs({"FOO": "bar"}), @@ -215,7 +215,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "bar", ), ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $FOO"), color=color, @@ -224,7 +224,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "baz", ), ( - FlowNode( + ResolvedNode( id="foo", target=Target(commands="echo $FOO", envs={"FOO": "bar"}), color=color, @@ -233,7 +233,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: "bar", ), ( - FlowNode( + ResolvedNode( id="foo", target=Target( commands="echo $A $B $C", @@ -260,7 +260,7 @@ async def test_kill_after_completion(tmp_path: Path) -> None: ) async def test_envs( tmp_path: Path, - node: FlowNode, + node: ResolvedNode, envs: Envs, expected: str, ) -> None: