diff --git a/.gitignore b/.gitignore index daccb40..295cac2 100644 --- a/.gitignore +++ b/.gitignore @@ -155,4 +155,5 @@ projectwithhyphenatend .aider* .lh -.vscode \ No newline at end of file +.vscode +codebase.md diff --git a/Dockerfile b/Dockerfile index 035fcd0..d0c3e16 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ RUN groupadd --gid $GID user && \ chown user /opt/ USER user -# Create and activate a virtual environment. +# Create and activate a virtual environments. ENV VIRTUAL_ENV /opt/dspygen-env ENV PATH $VIRTUAL_ENV/bin:$PATH RUN python -m venv $VIRTUAL_ENV @@ -44,7 +44,7 @@ RUN --mount=type=cache,target=/var/cache/apt/ \ USER user -# Install the run time Python dependencies in the virtual environment. +# Install the run time Python dependencies in the virtual environments. COPY --chown=user:user poetry.lock* pyproject.toml /workspaces/dspygen/ RUN mkdir -p /home/user/.cache/pypoetry/ && mkdir -p /home/user/.config/pypoetry/ && \ mkdir -p src/dspygen/ && touch src/dspygen/__init__.py && touch README.md @@ -66,7 +66,7 @@ RUN --mount=type=cache,target=/var/cache/apt/ \ echo 'user ALL=(root) NOPASSWD:ALL' > /etc/sudoers.d/user && chmod 0440 /etc/sudoers.d/user USER user -# Install the development Python dependencies in the virtual environment. +# Install the development Python dependencies in the virtual environments. RUN --mount=type=cache,uid=$UID,gid=$GID,target=/home/user/.cache/pypoetry/ \ poetry install --no-interaction @@ -97,7 +97,7 @@ RUN git clone --branch v$ANTIDOTE_VERSION --depth=1 https://github.com/mattmc3/a FROM base AS app -# Copy the virtual environment from the poetry stage. +# Copy the virtual environments from the poetry stage. COPY --from=poetry $VIRTUAL_ENV $VIRTUAL_ENV # Copy the package source code to the working directory. diff --git a/frontend/app/report/page.tsx b/frontend/app/report/page.tsx index 7301fd4..67e9657 100644 --- a/frontend/app/report/page.tsx +++ b/frontend/app/report/page.tsx @@ -141,7 +141,7 @@ export default function DSLPage() { {/**/} diff --git a/poetry.lock b/poetry.lock index e78030b..228cd4d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -404,6 +404,20 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "astroid" +version = "3.3.4" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "astroid-3.3.4-py3-none-any.whl", hash = "sha256:5eba185467253501b62a9f113c263524b4f5d55e1b30456370eed4cdbd6438fd"}, + {file = "astroid-3.3.4.tar.gz", hash = "sha256:e73d0b62dd680a7c07cb2cd0ce3c22570b044dd01bd994bc3a2dd16c6cbba162"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "asttokens" version = "2.4.1" @@ -681,6 +695,17 @@ files = [ {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] +[[package]] +name = "bracex" +version = "2.5.post1" +description = "Bash style brace expander." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.5.post1-py3-none-any.whl", hash = "sha256:13e5732fec27828d6af308628285ad358047cec36801598368cb28bc631dbaf6"}, + {file = "bracex-2.5.post1.tar.gz", hash = "sha256:12c50952415bfa773d2d9ccb8e79651b8cdb1f31a42f6091b804f6ba2b4a66b6"}, +] + [[package]] name = "build" version = "1.2.1" @@ -1634,6 +1659,39 @@ calendars = ["convertdate", "hijri-converter"] fasttext = ["fasttext"] langdetect = ["langdetect"] +[[package]] +name = "deap" +version = "1.4.1" +description = "Distributed Evolutionary Algorithms in Python" +optional = false +python-versions = "*" +files = [ + {file = "deap-1.4.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f63d783f61536ea277055c92f668da820297901a0a594f9a367814292278a891"}, + {file = "deap-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a81c3c3c7c429f1cab1266efc5ca34ad035899014f70c5b5d4cca927c2af0088"}, + {file = "deap-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96f8ba1a20d66361f9cc05a0727dd0858811ebcc64d85c063d12051e9f1c6c1"}, + {file = "deap-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:6a45dece5a7a8dbdb13040d182400e735ee2dd78049d02bfa7d8c30079adfb84"}, + {file = "deap-1.4.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9f759e3b056e55a2eb9e095b02fff4f7cf84c119ce90ea243673f3f666636179"}, + {file = "deap-1.4.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d1a16c4aefb940e6f97e92ff2facc7a6916cc2c8350593f9e37cb0b8a75d0"}, + {file = "deap-1.4.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e93e3a2e6c47a3053d0e8d26a226d788fc98d853beba19b8310003300028b826"}, + {file = "deap-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:379f6ce86da66c8692358db2e83bbfb97404be3daba1a3b860e5b78a97f5b6c3"}, + {file = "deap-1.4.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:493a409a56be1a45027f0d6feb1080fbab284a4d2b14156f482c9e4ad2f6784f"}, + {file = "deap-1.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d81640a8bf7c3f5ef85b9592d3c79ce4a467bdbb4e6c56aeb895637609b5958"}, + {file = "deap-1.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cd45c2080b364b8949649b012bff14f0543de48590aa2a43096762db85b099"}, + {file = "deap-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1c20f3e64240757a7f2b1e11fafa523dba26237869e19a2428baaf3c218cea2f"}, + {file = "deap-1.4.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:bae8aab5db32652d248f6ec51adccae4cf58b09a49d86279a195bbaeb489576a"}, + {file = "deap-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b86428ea2b09696e284bdb9b889e7c73b244a8d94b004e4a787df364f307101"}, + {file = "deap-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31647fcbc1736127eec8495a25313e823d230844265fdcd254e076e05574bb62"}, + {file = "deap-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0209328a0c0928653d4947dffcf747e8b7c7b87ab63495a84f8c5c3963fad08d"}, + {file = "deap-1.4.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:c7f4b158fe02bc768aab00abae0fe5b090903de010f805c25966dbb4f78b01bb"}, + {file = "deap-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64767d124eeac5223954df83ec5c7c2b7af8035d5a94cf388d1d8fd00e797e49"}, + {file = 
"deap-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f7963f90a4a716254db16c0ae75bbcf1fdf062d191c6f1488014d9a61c5c19"}, + {file = "deap-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:ccff7486cafe985815c74995d76749bae68d66e7cf666a4aa40eabc07d59e86e"}, + {file = "deap-1.4.1.tar.gz", hash = "sha256:cc01de9892dfa7d1bc9803dab28892fead177f0182c81db47360a240ead778ff"}, +] + +[package.dependencies] +numpy = "*" + [[package]] name = "debugpy" version = "1.8.5" @@ -1732,6 +1790,17 @@ files = [ [package.extras] graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "diskcache" +version = "5.6.3" +description = "Disk Cache -- Disk and file backed persistent cache." +optional = false +python-versions = ">=3" +files = [ + {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, + {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, +] + [[package]] name = "distlib" version = "0.3.8" @@ -1796,6 +1865,16 @@ docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +optional = false +python-versions = "*" +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + [[package]] name = "docutils" version = "0.21" @@ -1807,22 +1886,40 @@ files = [ {file = "docutils-0.21.post1.tar.gz", hash = "sha256:518e29081124e7d8159550958e6de240622562aa824f945f501ec3d3c5b67d19"}, ] +[[package]] +name = "docx" +version = "0.2.4" +description = "The docx module creates, reads and writes Microsoft Office Word 2007 docx files" +optional = false +python-versions = "*" +files = [ + {file = "docx-0.2.4.tar.gz", hash = "sha256:9d7595eac6e86cda0b7136a2995318d039c1f3eaa368a3300805abbbe5dc8877"}, +] + +[package.dependencies] +lxml = "*" +Pillow = ">=2.0" + [[package]] name = "dspy-ai" -version = "2.4.13" +version = "2.5.2" description = "DSPy" optional = false python-versions = ">=3.9" files = [ - {file = "dspy-ai-2.4.13.tar.gz", hash = "sha256:0ed5648d8267b6a4ebe5b72ec5dbcca9fa194d800885a0182cad93c312cd3166"}, - {file = "dspy_ai-2.4.13-py3-none-any.whl", hash = "sha256:b43aa117b4b6fcb009274f61adcfb0a1dbe1cbb4a370da3bd14cd4d230f17665"}, + {file = "dspy-ai-2.5.2.tar.gz", hash = "sha256:75529b450b3888b1a8ea26eb86dd549916f4a38288fab8ab8065ee8603301c75"}, + {file = "dspy_ai-2.5.2-py3-none-any.whl", hash = "sha256:aaf6bccab4f69b34ca557b42cb5e724fd88be28e5fe2e00142891bdc39e4218e"}, ] [package.dependencies] backoff = "*" datasets = "*" +diskcache = "*" +httpx = "*" joblib = ">=1.3,<2.0" -openai = ">=0.28.1,<2.0.0" +litellm = "*" +magicattr = ">=0.1.6,<0.2.0" +openai = "*" optuna = "*" pandas = "*" pydantic = ">=2.0,<3.0" @@ -1838,6 +1935,8 @@ faiss-cpu = ["faiss-cpu", "sentence-transformers"] fastembed = ["fastembed"] google-vertex-ai = ["google-cloud-aiplatform (==1.43.0)"] groq = ["groq (>=0.8.0,<0.9.0)"] +lancedb = ["lancedb (>=0.11.0,<0.12.0)"] +langfuse = ["langfuse (>=2.36.1,<2.37.0)"] marqo = ["marqo (>=3.1.0,<3.2.0)"] milvus = ["pymilvus (>=2.3.7,<2.4.0)"] mongodb = ["pymongo (>=3.12.0,<3.13.0)"] @@ -3033,17 +3132,6 @@ files = [ {file = "inject-5.2.1.tar.gz", hash = "sha256:f7c305a75cc4e3a331d248e996f25783ba784b88d5a9b9f73c53eacaa6d76985"}, ] -[[package]] -name = "install" 
-version = "1.3.5" -description = "Install packages from within code" -optional = false -python-versions = ">=2.7, >=3.5" -files = [ - {file = "install-1.3.5-py3-none-any.whl", hash = "sha256:0d3fadf4aa62c95efe8d34757c8507eb46177f86c016c21c6551eafc6a53d5a9"}, - {file = "install-1.3.5.tar.gz", hash = "sha256:e67c8a0be5ccf8cb4ffa17d090f3a61b6e820e6a7e21cd1d2c0f7bc59b18e647"}, -] - [[package]] name = "intervaltree" version = "3.1.0" @@ -3142,6 +3230,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.2.0" @@ -3846,6 +3948,34 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "litellm" +version = "1.44.27" +description = "Library to easily interface with LLM API providers" +optional = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +files = [ + {file = "litellm-1.44.27-py3-none-any.whl", hash = "sha256:88d514aba2041f7805632db343225f8a71230409a7ce3210911be5e6f6ec13b6"}, + {file = "litellm-1.44.27.tar.gz", hash = "sha256:34f11d802a373465648defe619ae6e0dd72d524c0fa9fd11096da2669b134048"}, +] + +[package.dependencies] +aiohttp = "*" +click = "*" +importlib-metadata = ">=6.8.0" +jinja2 = ">=3.1.2,<4.0.0" +jsonschema = ">=4.22.0,<5.0.0" +openai = ">=1.40.0" +pydantic = ">=2.0.0,<3.0.0" +python-dotenv = ">=0.2.0" +requests = ">=2.31.0,<3.0.0" +tiktoken = ">=0.7.0" +tokenizers = "*" + +[package.extras] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] +proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] + [[package]] name = "loguru" version = "0.7.2" @@ -4039,6 +4169,16 @@ files = [ [package.dependencies] lxml = "*" +[[package]] +name = "magicattr" +version = "0.1.6" +description = "A getattr and setattr that works on nested objects, lists, dicts, and any combination thereof without resorting to eval" +optional = false +python-versions = "*" +files = [ + {file = "magicattr-0.1.6-py2.py3-none-any.whl", hash = "sha256:d96b18ee45b5ee83b09c17e15d3459a64de62d538808c2f71182777dd9dbbbdf"}, +] + [[package]] name = "mako" version = "1.3.5" @@ -4232,6 +4372,17 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + 
[[package]] name = "mdurl" version = "0.1.2" @@ -4792,6 +4943,17 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] +[[package]] +name = "nvidia-nccl-cu12" +version = "2.23.4" +description = "NVIDIA Collective Communication Library (NCCL) Runtime" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nccl_cu12-2.23.4-py3-none-manylinux2014_aarch64.whl", hash = "sha256:aa946c8327e22ced28e7cef508a334673abc42064ec85f02d005ba1785ea4cec"}, + {file = "nvidia_nccl_cu12-2.23.4-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b097258d9aab2fa9f686e33c6fe40ae57b27df60cedbd15d139701bb5509e0c1"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -4808,6 +4970,20 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "ollama" +version = "0.3.3" +description = "The official Python client for Ollama." +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "ollama-0.3.3-py3-none-any.whl", hash = "sha256:ca6242ce78ab34758082b7392df3f9f6c2cb1d070a9dede1a4c545c929e16dba"}, + {file = "ollama-0.3.3.tar.gz", hash = "sha256:f90a6d61803117f40b0e8ff17465cab5e1eb24758a473cfe8101aff38bc13b51"}, +] + +[package.dependencies] +httpx = ">=0.27.0,<0.28.0" + [[package]] name = "onnxruntime" version = "1.18.1" @@ -6089,10 +6265,7 @@ files = [ annotated-types = ">=0.4.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} pydantic-core = "2.20.1" -typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] +typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} [package.extras] email = ["email-validator (>=2.0.0)"] @@ -6374,6 +6547,25 @@ files = [ {file = "pygame-2.6.0.tar.gz", hash = "sha256:722d33ae676aa8533c1f955eded966411298831346b8d51a77dad22e46ba3e35"}, ] +[[package]] +name = "pygithub" +version = "2.4.0" +description = "Use the full Github API v3" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyGithub-2.4.0-py3-none-any.whl", hash = "sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24"}, + {file = "pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051"}, +] + +[package.dependencies] +Deprecated = "*" +pyjwt = {version = ">=2.4.0", extras = ["crypto"]} +pynacl = ">=1.4.0" +requests = ">=2.14.0" +typing-extensions = ">=4.0.0" +urllib3 = ">=1.26.0" + [[package]] name = "pygments" version = "2.18.0" @@ -6388,6 +6580,26 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests 
= ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pykka" version = "4.0.2" @@ -6399,6 +6611,61 @@ files = [ {file = "pykka-4.0.2.tar.gz", hash = "sha256:05e687c426922b0084d79f22a6c1813e0c4e0c59d8f860aa32c18c5f6127e276"}, ] +[[package]] +name = "pylint" +version = "3.3.1" +description = "python code static checker" +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, +] + +[package.dependencies] +astroid = ">=3.3.4,<=3.4.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + [[package]] name = "pyobjc" version = "10.3.1" @@ -9426,6 +9693,22 @@ pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-watch" +version = "4.2.0" +description = "Local continuous test runner with pytest and watchdog." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-watch-4.2.0.tar.gz", hash = "sha256:06136f03d5b361718b8d0d234042f7b2f203910d8568f63df2f866b547b3d4b9"}, +] + +[package.dependencies] +colorama = ">=0.3.3" +docopt = ">=0.4.0" +pytest = ">=2.6.4" +watchdog = ">=0.6.0" + [[package]] name = "pytest-xdist" version = "3.6.1" @@ -10318,6 +10601,51 @@ files = [ [package.dependencies] psutil = "*" +[[package]] +name = "scikit-learn" +version = "1.5.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8"}, + {file = "scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1"}, + {file = "scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7"}, + {file = "scikit_learn-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe"}, + {file = "scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] + [[package]] name = "scipy" version = "1.14.0" @@ -10515,6 +10843,26 @@ files = [ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] +[[package]] +name = "speechrecognition" +version = "3.10.4" +description = "Library for performing speech recognition, with support for several engines and APIs, online and offline." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "SpeechRecognition-3.10.4-py2.py3-none-any.whl", hash = "sha256:723b8155692a8ed11a30013f15f89a3e57c5dc8bc73c8cb024bf9bd14c21fba5"}, + {file = "speechrecognition-3.10.4.tar.gz", hash = "sha256:986bafcf61f14625c2f3cea6a471838edd379ed68aeed7b8f3c0fb41e21f1125"}, +] + +[package.dependencies] +requests = ">=2.26.0" +typing-extensions = "*" + +[package.extras] +dev = ["flake8", "rstcheck"] +whisper-api = ["openai"] +whisper-local = ["openai-whisper", "soundfile"] + [[package]] name = "sqlalchemy" version = "2.0.32" @@ -10667,6 +11015,16 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "stopit" +version = "1.1.2" +description = "Timeout control decorator and context managers, raise any exception in another thread" +optional = false +python-versions = "*" +files = [ + {file = "stopit-1.1.2.tar.gz", hash = "sha256:f7f39c583fd92027bd9d06127b259aee7a5b7945c1f1fa56263811e1e766996d"}, +] + [[package]] name = "streamlit" version = "1.37.1" @@ -10721,39 +11079,52 @@ typing = ["mypy (>=1.4)", "rich", "twisted"] [[package]] name = "sungen" -version = "2024.7.4.2" +version = "2024.9.28" description = "TODO" optional = false -python-versions = "<4.0,>=3.10" +python-versions = "<3.13,>=3.10" files = [ - {file = "sungen-2024.7.4.2-py3-none-any.whl", hash = "sha256:350d0d6a6882d2ab898a2e3509234733231619c95bf8de90091bf0cb9d3bf647"}, - {file = "sungen-2024.7.4.2.tar.gz", hash = "sha256:62cc5c4a9b7d03b9ed5365ef1c0dc42e72c9dea6889af2c47e04fd863f00a02f"}, + {file = "sungen-2024.9.28-py3-none-any.whl", hash = "sha256:0efcb58354e483da1dd1c47964c0b943250e8b2d76620ba09ff4c5a8d37c6bcf"}, + {file = "sungen-2024.9.28.tar.gz", hash = "sha256:c42d30b14b23be41994a1c63133144d24fd67da301e2b0ce8c0e6e7ad7000e36"}, ] [package.dependencies] aiofiles = ">=24.1.0,<25.0.0" asyncer = ">=0.0.7,<0.0.8" +beautifulsoup4 = ">=4.12.3,<5.0.0" coloredlogs = ">=15.0.1" -dspy-ai = ">=2.4.10,<3.0.0" +confz = ">=2.0.1,<3.0.0" +docx = ">=0.2.4,<0.3.0" +dspy-ai = ">=2.5.0,<3.0.0" +ebooklib = ">=0.18,<0.19" factory-boy = ">=3.3.0,<4.0.0" faker = ">=26.0.0,<27.0.0" fastapi = {version = ">=0.110.1", extras = ["all"]} gunicorn = ">=21.2.0" +icontract = ">=2.6.6,<3.0.0" ijson = ">=3.3.0,<4.0.0" inflection = ">=0.5.1,<0.6.0" inject = ">=5.2.1,<6.0.0" -install = ">=1.3.5,<2.0.0" jinja2-ext = ">=0.1,<0.2" jinja2-time = ">=0.2.0,<0.3.0" jsonschema = ">=4.22.0,<5.0.0" loguru = ">=0.7.2,<0.8.0" munch = ">=4.0.0,<5.0.0" +pandasql = ">=0.7.3,<0.8.0" poethepoet = ">=0.25.0" +pygithub = ">=2.3.0,<3.0.0" +pylint = ">=3.2.7,<4.0.0" +pypdf = ">=4.3.1,<5.0.0" +pyperclip = ">=1.9.0,<2.0.0" pytest-mock = ">=3.14.0,<4.0.0" +pytest-watch = ">=4.2.0,<5.0.0" +python-dotenv = ">=1.0.1,<2.0.0" +speechrecognition = ">=3.10.4,<4.0.0" tiktoken = ">=0.7.0,<0.8.0" typer = {version = ">=0.12.0", extras = ["all"]} uuid-utils = ">=0.9.0,<0.10.0" uvicorn = {version = ">=0.29.0", extras = ["standard"]} +wcmatch = ">=9.0,<10.0" [[package]] name = "sympy" @@ -10819,6 +11190,17 @@ files = [ {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = 
"threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + [[package]] name = "tiktoken" version = "0.7.0" @@ -11039,6 +11421,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + [[package]] name = "toolz" version = "0.12.1" @@ -11070,6 +11463,36 @@ files = [ {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] +[[package]] +name = "tpot" +version = "0.12.2" +description = "Tree-based Pipeline Optimization Tool" +optional = false +python-versions = "*" +files = [ + {file = "TPOT-0.12.2-py3-none-any.whl", hash = "sha256:dc0d07b978d89d0086d8d32ceee3c8c3db273c7b2828a92c1ade211504f602e6"}, + {file = "TPOT-0.12.2.tar.gz", hash = "sha256:e92777cf41cbf01ee0439632264d21ef89a1c0b5ec532b73dcb679c3ce67bec1"}, +] + +[package.dependencies] +deap = ">=1.2" +joblib = ">=0.13.2" +numpy = ">=1.16.3" +pandas = ">=0.24.2" +scikit-learn = ">=1.4.1" +scipy = ">=1.3.1" +stopit = ">=1.1.1" +tqdm = ">=4.36.1" +update-checker = ">=0.16" +xgboost = ">=1.1.0" + +[package.extras] +dask = ["dask (>=0.18.2)", "dask-ml (>=1.0.0)", "distributed (>=1.22.1)"] +imblearn = ["imbalanced-learn (>=0.7.0)"] +mdr = ["scikit-mdr (>=0.4.4)"] +skrebate = ["skrebate (>=0.3.4)"] +torch = ["torch (==1.13.1)"] + [[package]] name = "tqdm" version = "4.66.5" @@ -11323,6 +11746,25 @@ files = [ {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, ] +[[package]] +name = "update-checker" +version = "0.18.0" +description = "A python module that will check for package updates." +optional = false +python-versions = "*" +files = [ + {file = "update_checker-0.18.0-py3-none-any.whl", hash = "sha256:cbba64760a36fe2640d80d85306e8fe82b6816659190993b7bdabadee4d4bbfd"}, + {file = "update_checker-0.18.0.tar.gz", hash = "sha256:6a2d45bb4ac585884a6b03f9eade9161cedd9e8111545141e9aa9058932acb13"}, +] + +[package.dependencies] +requests = ">=2.3.0" + +[package.extras] +dev = ["black", "flake8", "pytest (>=2.7.3)"] +lint = ["black", "flake8"] +test = ["pytest (>=2.7.3)"] + [[package]] name = "uri-template" version = "1.3.0" @@ -11672,6 +12114,20 @@ files = [ [package.dependencies] anyio = ">=3.0.0" +[[package]] +name = "wcmatch" +version = "9.0" +description = "Wildcard/glob file name matcher." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "wcmatch-9.0-py3-none-any.whl", hash = "sha256:af25922e2b6dbd1550fa37a4c8de7dd558d6c1bb330c641de9b907b9776cb3c4"}, + {file = "wcmatch-9.0.tar.gz", hash = "sha256:567d66b11ad74384954c8af86f607857c3bdf93682349ad32066231abd556c92"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + [[package]] name = "wcwidth" version = "0.2.13" @@ -11927,6 +12383,36 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "xgboost" +version = "2.1.1" +description = "XGBoost Python Package" +optional = false +python-versions = ">=3.8" +files = [ + {file = "xgboost-2.1.1-py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.macosx_12_0_x86_64.whl", hash = "sha256:4163ab55118628f605cfccf950e2d667150640f6fc746bb5a173bddfd935950f"}, + {file = "xgboost-2.1.1-py3-none-macosx_12_0_arm64.whl", hash = "sha256:40d1f647022f497c1b0f69073765baf50ff5802ca77c6bb1aca55a6bc65df00d"}, + {file = "xgboost-2.1.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4c534818aa08ab327ac2239ef211ef78db65a8573d069bc9898f824830fa2308"}, + {file = "xgboost-2.1.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:deef471e8d353afa99e5cc0e2af7d99ace7013f40684fcf3eed9124de033265d"}, + {file = "xgboost-2.1.1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8f3246a6d839dceb4553d3e5ea64ed718f9c692f072ee8275eeb895b58e283e6"}, + {file = "xgboost-2.1.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:6475ca35dede1f87d1dc485b362caba08f69f6020f4440e97b167676a533850e"}, + {file = "xgboost-2.1.1-py3-none-win_amd64.whl", hash = "sha256:fcf8413f3c621e97fdaaa45abb7ae808319c88eff5447328eff14c419c7c6ae0"}, + {file = "xgboost-2.1.1.tar.gz", hash = "sha256:4b1729837f9f1ba88a32ef1be3f8efb860fee6454a68719b196dc88032c23d97"}, +] + +[package.dependencies] +numpy = "*" +nvidia-nccl-cu12 = {version = "*", markers = "platform_system == \"Linux\" and platform_machine != \"aarch64\""} +scipy = "*" + +[package.extras] +dask = ["dask", "distributed", "pandas"] +datatable = ["datatable"] +pandas = ["pandas (>=1.2)"] +plotting = ["graphviz", "matplotlib"] +pyspark = ["cloudpickle", "pyspark", "scikit-learn"] +scikit-learn = ["scikit-learn"] + [[package]] name = "xlsxwriter" version = "3.2.0" @@ -12175,5 +12661,5 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" -python-versions = ">=3.10,<4.0" -content-hash = "793662c8a04d7164f4881109ec7b774035bd80ecefe1d1f0e88002ebfe6efc1d" +python-versions = ">=3.10,<3.13" +content-hash = "3c669e9f9db7ae27247087b1c6ac972bfa8ba514c01a7dea318b3a6b94891e71" diff --git a/pyproject.toml b/pyproject.toml index a2c0e4c..f4da919 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ fastapi = { extras = ["all"], version = ">=0.92.0" } gunicorn = ">=20.1.0" poethepoet = ">=0.20.0" pydantic = ">=1.10.7" -python = ">=3.10,<4.0" +python = ">=3.10,<3.13" streamlit = ">=1.19.0" typer = { extras = ["all"], version = ">=0.9.0" } uvicorn = { extras = ["standard"], version = ">=0.20.0" } @@ -27,7 +27,6 @@ jinja2 = "^3.1.3" inflection = "^0.5.1" jinja2-time = "^0.2.0" jinja2-ext = "^0.1" -dspy-ai = "^2.4.5" openai = "^1.12.0" pyperclip = "^1.8.2" asyncer = "0.0.7" @@ -67,7 +66,7 @@ httpx = "^0.27.0" inject = "^5.2.1" pddlpy = "^0.4.4" clingo = "^5.7.1" -sungen = "^2024.7.4.2" +sungen = "^2024.9.28" numpy = "<2.0.0" confz = "^2.0.1" python-pptx = "^0.6.23" @@ -84,6 +83,9 @@ pyobjc-framework-contacts = "^10.3.1" pytz = 
"^2024.1" apscheduler = "^3.10.4" prefect = "^2.20.4" +ollama = "^0.3.3" +dspy-ai = "^2.5.2" +tpot = "^0.12.2" [tool.poetry.group.test.dependencies] # https://python-poetry.org/docs/master/managing-dependencies/ coverage = { extras = ["toml"], version = ">=7.2.5" } diff --git a/src/dspygen/__init__.py b/src/dspygen/__init__.py index 080a23e..6d4e258 100644 --- a/src/dspygen/__init__.py +++ b/src/dspygen/__init__.py @@ -1,15 +1,15 @@ -import inject -import EventKit - # import inject -# from dspygen.di_configure import configure -# -# inject.configure(configure) - -# Configure the injector -def configure_injector(binder): - event_store = EventKit.EKEventStore.alloc().init() - binder.bind(EventKit.EKEventStore, event_store) - - -inject.configure(configure_injector) \ No newline at end of file +# import EventKit +# +# # import inject +# # from dspygen.di_configure import configure +# # +# # inject.configure(configure) +# +# # Configure the injector +# def configure_injector(binder): +# event_store = EventKit.EKEventStore.alloc().init() +# binder.bind(EventKit.EKEventStore, event_store) +# +# +# inject.configure(configure_injector) \ No newline at end of file diff --git a/src/dspygen/agents/coder_agent_v4.py b/src/dspygen/agents/coder_agent_v4.py index 1c35e53..61ee663 100644 --- a/src/dspygen/agents/coder_agent_v4.py +++ b/src/dspygen/agents/coder_agent_v4.py @@ -7,8 +7,10 @@ from dspygen.modules.function_invoke_module import function_invoke_call from dspygen.modules.python_source_code_module import python_source_code_call +from dspygen.rdddy.base_inhabitant import BaseInhabitant -class CoderAgent(FSMMixin): + +class CoderAgent(FSMMixin, BaseInhabitant): def __init__(self, requirements: str): super().setup_fsm(CoderAgentState, initial=CoderAgentState.ANALYZING_REQUIREMENTS) self.requirements = requirements diff --git a/src/dspygen/agents/pytest_agent.py b/src/dspygen/agents/pytest_agent.py index e305551..a9e2fb7 100644 --- a/src/dspygen/agents/pytest_agent.py +++ b/src/dspygen/agents/pytest_agent.py @@ -87,7 +87,7 @@ def errors_detected(self): """ def main(): - init_dspy(max_tokens=3000) # Initialize the dspy environment + init_dspy(max_tokens=3000) # Initialize the dspy environments code_agent = CoderAgent() agent = PytestAgent(code=example_code) print("Initial state:", agent.state) diff --git a/src/dspygen/api.py b/src/dspygen/api.py index 37b9e22..7ba28ad 100644 --- a/src/dspygen/api.py +++ b/src/dspygen/api.py @@ -18,7 +18,7 @@ from importlib import import_module import os -from dspygen.dsl.dsl_pipeline_executor import router as pipeline_router +from dspygen.llm_pipe.dsl_pipeline_executor import router as pipeline_router @app.on_event("startup") diff --git a/src/dspygen/bpel_models/activities.py b/src/dspygen/bpel_models/activities.py index f7b1805..17b61ed 100644 --- a/src/dspygen/bpel_models/activities.py +++ b/src/dspygen/bpel_models/activities.py @@ -217,7 +217,7 @@ class ScopeActivity(Activity): class ForLoopActivity(Activity): """ Represents a loop activity with a defined start, end, and increment, iterating over a block of activities. - Note: This is an extension for convenience and not part of the standard BPEL model. + Note: This is an extensions for convenience and not part of the standard BPEL model. 
""" id: str = Field(..., description="Unique identifier for the for loop activity.") start_expression: str = Field(..., description="Expression defining the loop's start value.") diff --git a/src/dspygen/chatgpt_integration.py b/src/dspygen/chatgpt_integration.py index 7216258..f94c795 100644 --- a/src/dspygen/chatgpt_integration.py +++ b/src/dspygen/chatgpt_integration.py @@ -2,7 +2,7 @@ import os from dotenv import load_dotenv -# Load environment variables from .env file +# Load environments variables from .env file load_dotenv() # Initialize OpenAI client diff --git a/src/dspygen/cli.py b/src/dspygen/cli.py index 4980307..259a546 100644 --- a/src/dspygen/cli.py +++ b/src/dspygen/cli.py @@ -83,18 +83,18 @@ def init(project_name: str = typer.Argument(...), "--extra-context", f'{json.dumps(extra_context)}', "--no-input"]) - # We need to install dspygen in the project's virtual environment - # It uses poetry to manage the virtual environment + # We need to install dspygen in the project's virtual environments + # It uses poetry to manage the virtual environments # Change to the project directory - # Run the command to initialize the virtual environment - # Run the command to install dspygen in the virtual environment + # Run the command to initialize the virtual environments + # Run the command to install dspygen in the virtual environments os.chdir(project_name) subprocess.check_call(["poetry", "install"]) - # Create the virtual environment + # Create the virtual environments subprocess.check_call(["poetry", "env", "use", "python"]) - # Install the project in the virtual environment + # Install the project in the virtual environments subprocess.check_call(["poetry", "add", "dspygen"]) subprocess.check_call(["poetry", "run", "pip", "install", "-e", "."]) @@ -110,7 +110,7 @@ def init(project_name: str = typer.Argument(...), Key Features: Quick Initialization: Rapidly configure your AI project, mirroring the simplicity of Ruby on Rails. -Modular Design: Generate and enhance DSPy dspy_modules with ease, promoting a scalable and flexible development environment. +Modular Design: Generate and enhance DSPy dspy_modules with ease, promoting a scalable and flexible development environments. User-Friendly Commands: Manage your AI projects effortlessly through an intuitive command structure. Chatbot Assistance: Embedded support to guide through the development process, enhancing user experience. Using DSPyGen Modules: diff --git a/src/dspygen/email_utils.py b/src/dspygen/email_utils.py index 29a4f9d..a1173f8 100644 --- a/src/dspygen/email_utils.py +++ b/src/dspygen/email_utils.py @@ -3,7 +3,7 @@ from email.mime.text import MIMEText from dotenv import load_dotenv -# Load environment variables from .env file +# Load environments variables from .env file load_dotenv() @@ -28,7 +28,7 @@ def send_email(server, msg, from_email, to_email): def login_to_smtp(): """ - Logs in to the SMTP server using credentials from environment variables. + Logs in to the SMTP server using credentials from environments variables. Returns an authenticated SMTP server object. 
from_email = os.getenv('EMAIL_ADDRESS') diff --git a/src/dspygen/dsl/__init__.py b/src/dspygen/experiments/agile_protocol/__init__.py similarity index 100% rename from src/dspygen/dsl/__init__.py rename to src/dspygen/experiments/agile_protocol/__init__.py diff --git a/src/dspygen/experiments/agile_protocol/aps_models.py b/src/dspygen/experiments/agile_protocol/aps_models.py new file mode 100644 index 0000000..cca5e08 --- /dev/null +++ b/src/dspygen/experiments/agile_protocol/aps_models.py @@ -0,0 +1,139 @@ +from typing import List, Optional, Dict, Union +from pydantic import Field +from datetime import date + +# Assuming DSLModel is defined in dspygen.utils.dsl_tools +from dspygen.utils.dsl_tools import DSLModel +from dspygen.utils.dspy_tools import init_versatile, init_instant + + +class BusinessRequirements(DSLModel): +    """Describes the business requirements for the project.""" +    key_features: List[str] = Field(..., description="Key features required by the business.") +    target_audience: str = Field(..., description="Primary audience for the business requirements.") +    success_metrics: List[str] = Field(..., description="Metrics to measure the success of business requirements.") + + +class Development(DSLModel): +    """Describes development setup, guidelines, and review processes.""" +    setup_steps: List[str] = Field(..., description="Steps to set up the development environment.") +    build_command: Optional[str] = Field(None, description="Command to build the project.") +    test_command: Optional[str] = Field(None, description="Command to run tests.") +    guidelines: Optional[List[str]] = Field(None, description="Guidelines to follow during development.") +    review_process: Optional[List[str]] = Field(None, description="Process for reviewing the development work.") + + +class Deployment(DSLModel): +    """Represents deployment configurations, platforms, and environments.""" +    platform: str = Field(..., description="Deployment platform used.") +    cicd_pipeline: Optional[str] = Field(None, description="CI/CD pipeline configuration.") +    staging_environment: Optional[str] = Field(None, description="Staging environment setup.") +    production_environment: Optional[str] = Field(None, description="Production environment setup.") +    review_cycle: Optional[str] = Field(None, description="Frequency of deployment reviews.") + + +class Interaction(DSLModel): +    """Defines an interaction between roles, specifying the type and involved roles.""" +    interaction_type: str = Field(..., description="Type of interaction between roles.") +    with_role: str = Field(..., alias='with', description="Role with which the interaction occurs.") +    description: Optional[str] = Field(None, description="Description of the interaction.") +    notifications: Optional[List[str]] = Field(None, description="Notifications triggered by the interaction.") + + +class Subtask(DSLModel): +    """Represents a subtask within a larger task, including its dependencies and interactions.""" +    subtask_id: str = Field(..., description="Unique identifier for the subtask.") +    name: str = Field(..., description="Name of the subtask.") +    assigned_to: List[str] = Field(..., description="Roles assigned to the subtask.") +    dependencies: Optional[List[str]] = Field(None, description="List of task IDs that this subtask depends on.") +    estimated_time: Optional[str] = Field(None, description="Estimated time to complete the subtask.") +    interactions: Optional[List[Interaction]] = Field(None, description="Interactions involved in the subtask.") +    status: Optional[str]
= Field(None, description="Current status of the subtask.") + start_date: Optional[date] = Field(None, description="Start date of the subtask.") + end_date: Optional[date] = Field(None, description="End date of the subtask.") + + +class Task(DSLModel): + """Represents a task, including its description, dependencies, and subtasks.""" + task_id: str = Field(..., description="Unique identifier for the task.") + name: str = Field(..., description="Name of the task.") + description: Optional[str] = Field(None, description="Detailed description of the task.") + assigned_to: List[str] = Field(..., description="Roles assigned to the task.") + dependencies: Optional[List[str]] = Field(None, description="List of task IDs that this task depends on.") + interactions: Optional[List[Interaction]] = Field(None, description="Interactions involved in the task.") + subtasks: Optional[List[Subtask]] = Field(None, description="List of subtasks under this task.") + estimated_time: Optional[str] = Field(None, description="Estimated time to complete the task.") + priority: Optional[str] = Field(None, description="Priority level of the task.") + status: Optional[str] = Field(None, description="Current status of the task.") + start_date: Optional[date] = Field(None, description="Start date of the task.") + end_date: Optional[date] = Field(None, description="End date of the task.") + results: Optional[List[str]] = Field(None, description="Results or outputs from the task.") + scheduled_date: Optional[date] = Field(None, description="Scheduled date for the task.") + + +class Workflow(DSLModel): + """Defines the workflow for the project, organizing tasks in a specific order.""" + workflow_type: str = Field(..., description="Type of workflow (Sequential or Parallel).") + tasks: List[str] = Field(..., description="List of task IDs in the workflow order.") + + +class Role(DSLModel): + """Represents a role in the project, with its responsibilities and type.""" + name: str = Field(..., description="Name of the role.") + role_type: str = Field(..., description="Type of the role (Human or AI).") + description: Optional[str] = Field(None, description="Description of the role.") + responsibilities: Optional[List[str]] = Field(None, description="List of responsibilities for the role.") + abbreviation: Optional[str] = Field(None, description="Abbreviation for the role.") + + +class Project(DSLModel): + """Represents a project, its roles, tasks, and overall workflow.""" + name: str = Field(..., description="Name of the project.") + description: Optional[str] = Field(None, description="Description of the project.") + timeframe: Optional[Dict[str, date]] = Field(None, description="Start and end dates of the project.") + roles: List[Role] = Field(..., description="List of roles involved in the project.") + tasks: List[Task] = Field(..., description="List of tasks within the project.") + workflow: Optional[Workflow] = Field(None, description="Workflow structure of the project.") + + +class Amendment(DSLModel): + """Represents an amendment made during a meeting, including the vote required to pass it.""" + amendment_id: str = Field(..., description="Unique identifier for the amendment.") + description: str = Field(..., description="Description of the amendment.") + made_by: str = Field(..., description="Participant who made the amendment.") + seconded_by: Optional[str] = Field(None, description="Participant who seconded the amendment.") + debate_allowed: bool = Field(..., description="Indicates if debate is allowed on the 
amendment.") + vote_required: str = Field(..., description="Type of vote required to pass the amendment.") + debate: Optional[Dict[str, Union[List[str], List[str]]]] = Field(None, description="Details of the debate if allowed.") + + +class Participant(DSLModel): + """Represents a participant in a meeting.""" + name: str = Field(..., description="Name of the participant.") + role: str = Field(..., description="Role of the participant.") + + +class Meeting(DSLModel): + """Represents a meeting, its participants, agenda, and other details.""" + name: str = Field(..., description="Name of the meeting.") + meeting_date: date = Field(..., description="Date of the meeting.") + location: Optional[str] = Field(None, description="Location where the meeting is held.") + chairperson: str = Field(..., description="Chairperson of the meeting.") + secretary: str = Field(..., description="Secretary responsible for taking minutes.") + participants: List[Participant] = Field(..., description="List of all participants in the meeting.") + agenda: List[str] = Field(..., description="Agenda items for the meeting.") + minutes: Optional[Dict[str, Union[str, bool, date]]] = Field(None, description="Minutes of the meeting.") + rules_of_order: Optional[Dict[str, Union[str, List[str]]]] = Field(None, description="Rules governing the meeting.") + + +def main(): + """Main function""" + init_instant() + + from sungen.dspy_modules.gen_pydantic_instance import GenPydanticInstance + instance = GenPydanticInstance(Meeting)("Fortune 10 Board Meeting. Example values for empty fields") + print(instance) + + +if __name__ == '__main__': + main() diff --git a/src/dspygen/experiments/auto_spider/spider_main.py b/src/dspygen/experiments/auto_spider/spider_main.py index 5c2587f..60a8465 100644 --- a/src/dspygen/experiments/auto_spider/spider_main.py +++ b/src/dspygen/experiments/auto_spider/spider_main.py @@ -252,14 +252,14 @@ def parse_linkedin_profile(linkedin_text: str) -> Optional[LinkedInProfile]: Method Studios · Full-timeMethod Studios · Full-time Feb 2018 - Jun 2020 · 2 yrs 5 mosFeb 2018 to Jun 2020 · 2 yrs 5 mos Greater Los Angeles AreaGreater Los Angeles Area -At Method Studios, as a Principal Engineer focusing on web technologies, I provided extensive expertise in the architectural design and development of sophisticated business applications. I was deeply involved in hands-on development and architectural decisions, utilizing major web frameworks like React, Angular, and Vue to deliver robust and scalable web solutions. My role extended to full-stack infrastructure development, where I demonstrated proficiency with technologies such as Node.js, MongoDB, MySQL, Docker, and Kubernetes. Beyond technical responsibilities, I played a pivotal role in mentoring junior colleagues, sharing knowledge and guiding their professional growth in a dynamic and fast-paced environment. My contributions at Method Studios were characterized by a blend of technical acumen, leadership, and a commitment to fostering a collaborative and learning-driven work culture. +At Method Studios, as a Principal Engineer focusing on web technologies, I provided extensive expertise in the architectural design and development of sophisticated business applications. I was deeply involved in hands-on development and architectural decisions, utilizing major web frameworks like React, Angular, and Vue to deliver robust and scalable web solutions. 
My role extended to full-stack infrastructure development, where I demonstrated proficiency with technologies such as Node.js, MongoDB, MySQL, Docker, and Kubernetes. Beyond technical responsibilities, I played a pivotal role in mentoring junior colleagues, sharing knowledge and guiding their professional growth in a dynamic and fast-paced environments. My contributions at Method Studios were characterized by a blend of technical acumen, leadership, and a commitment to fostering a collaborative and learning-driven work culture. Highlights: - Led the architectural design and development of advanced business applications using React, Angular, and Vue. - Managed full-stack infrastructure projects involving Node.js, MongoDB, MySQL, Docker, and Kubernetes. - Instrumental in introducing and implementing modern web technologies and frameworks. - Played a key role in mentoring and guiding junior engineers, enhancing team capabilities and knowledge. - - Contributed to building scalable and efficient web solutions, aligning with business goals and user needs.At Method Studios, as a Principal Engineer focusing on web technologies, I provided extensive expertise in the architectural design and development of sophisticated business applications. I was deeply involved in hands-on development and architectural decisions, utilizing major web frameworks like React, Angular, and Vue to deliver robust and scalable web solutions. My role extended to full-stack infrastructure development, where I demonstrated proficiency with technologies such as Node.js, MongoDB, MySQL, Docker, and Kubernetes. Beyond technical responsibilities, I played a pivotal role in mentoring junior colleagues, sharing knowledge and guiding their professional growth in a dynamic and fast-paced environment. My contributions at Method Studios were characterized by a blend of technical acumen, leadership, and a commitment to fostering a collaborative and learning-driven work culture. Highlights: - Led the architectural design and development of advanced business applications using React, Angular, and Vue. - Managed full-stack infrastructure projects involving Node.js, MongoDB, MySQL, Docker, and Kubernetes. - Instrumental in introducing and implementing modern web technologies and frameworks. - Played a key role in mentoring and guiding junior engineers, enhancing team capabilities and knowledge. - Contributed to building scalable and efficient web solutions, aligning with business goals and user needs. + - Contributed to building scalable and efficient web solutions, aligning with business goals and user needs.At Method Studios, as a Principal Engineer focusing on web technologies, I provided extensive expertise in the architectural design and development of sophisticated business applications. I was deeply involved in hands-on development and architectural decisions, utilizing major web frameworks like React, Angular, and Vue to deliver robust and scalable web solutions. My role extended to full-stack infrastructure development, where I demonstrated proficiency with technologies such as Node.js, MongoDB, MySQL, Docker, and Kubernetes. Beyond technical responsibilities, I played a pivotal role in mentoring junior colleagues, sharing knowledge and guiding their professional growth in a dynamic and fast-paced environments. My contributions at Method Studios were characterized by a blend of technical acumen, leadership, and a commitment to fostering a collaborative and learning-driven work culture. 
Highlights: - Led the architectural design and development of advanced business applications using React, Angular, and Vue. - Managed full-stack infrastructure projects involving Node.js, MongoDB, MySQL, Docker, and Kubernetes. - Instrumental in introducing and implementing modern web technologies and frameworks. - Played a key role in mentoring and guiding junior engineers, enhancing team capabilities and knowledge. - Contributed to building scalable and efficient web solutions, aligning with business goals and user needs. Docker, React.js and +3 skills AT&T logo Lead Software EngineerLead Software Engineer @@ -280,14 +280,14 @@ def parse_linkedin_profile(linkedin_text: str) -> Optional[LinkedInProfile]: PlaysinoPlaysino Jun 2012 - May 2015 · 3 yrsJun 2012 to May 2015 · 3 yrs Santa Monica,CASanta Monica,CA -As a Software Architect at Playsino, I was instrumental in the development of 'Bingo Around the World.' My role involved utilizing a diverse technology stack including Jenkins, Java, Intellij, JavaScript, JSON, REST, EC2, Python, XHTML, Facebook's Open Graph, and Linux. I led the Client Engineering team, fostering a collaborative environment and introducing new technologies. My approach to software architecture was marked by a keen focus on the 'why' behind every project, ensuring that each solution was not only technically sound but also purpose-driven and user-centric. My mentorship of junior team members, including interns like Clinton Jake VanSciver, was geared towards nurturing their potential and instilling a similar approach to technology and project execution. This period was marked by significant contributions to the foundational aspects of eSports and a constant engagement with emerging technologies to keep ahead of industry trends. +As a Software Architect at Playsino, I was instrumental in the development of 'Bingo Around the World.' My role involved utilizing a diverse technology stack including Jenkins, Java, Intellij, JavaScript, JSON, REST, EC2, Python, XHTML, Facebook's Open Graph, and Linux. I led the Client Engineering team, fostering a collaborative environments and introducing new technologies. My approach to software architecture was marked by a keen focus on the 'why' behind every project, ensuring that each solution was not only technically sound but also purpose-driven and user-centric. My mentorship of junior team members, including interns like Clinton Jake VanSciver, was geared towards nurturing their potential and instilling a similar approach to technology and project execution. This period was marked by significant contributions to the foundational aspects of eSports and a constant engagement with emerging technologies to keep ahead of industry trends. Highlights: - Led the architectural development of 'Bingo Around the World,' a key project at Playsino, leveraging advanced Java and web technologies. - Managed the Client Engineering team, introducing and integrating new technologies to enhance project outcomes. - Played a mentorship role, providing guidance and technical leadership to junior team members and interns. - Maintained a focus on purpose-driven development, ensuring that projects delivered both technical excellence and meaningful user experiences. - - Actively engaged with new and emerging technologies, contributing to the foundational growth of eSports and maintaining a forward-thinking approach in software architecture.As a Software Architect at Playsino, I was instrumental in the development of 'Bingo Around the World.' 
My role involved utilizing a diverse technology stack including Jenkins, Java, Intellij, JavaScript, JSON, REST, EC2, Python, XHTML, Facebook's Open Graph, and Linux. I led the Client Engineering team, fostering a collaborative environment and introducing new technologies. My approach to software architecture was marked by a keen focus on the 'why' behind every project, ensuring that each solution was not only technically sound but also purpose-driven and user-centric. My mentorship of junior team members, including interns like Clinton Jake VanSciver, was geared towards nurturing their potential and instilling a similar approach to technology and project execution. This period was marked by significant contributions to the foundational aspects of eSports and a constant engagement with emerging technologies to keep ahead of industry trends. Highlights: - Led the architectural development of 'Bingo Around the World,' a key project at Playsino, leveraging advanced Java and web technologies. - Managed the Client Engineering team, introducing and integrating new technologies to enhance project outcomes. - Played a mentorship role, providing guidance and technical leadership to junior team members and interns. - Maintained a focus on purpose-driven development, ensuring that projects delivered both technical excellence and meaningful user experiences. - Actively engaged with new and emerging technologies, contributing to the foundational growth of eSports and maintaining a forward-thinking approach in software architecture. + - Actively engaged with new and emerging technologies, contributing to the foundational growth of eSports and maintaining a forward-thinking approach in software architecture.As a Software Architect at Playsino, I was instrumental in the development of 'Bingo Around the World.' My role involved utilizing a diverse technology stack including Jenkins, Java, Intellij, JavaScript, JSON, REST, EC2, Python, XHTML, Facebook's Open Graph, and Linux. I led the Client Engineering team, fostering a collaborative environment and introducing new technologies. My approach to software architecture was marked by a keen focus on the 'why' behind every project, ensuring that each solution was not only technically sound but also purpose-driven and user-centric. My mentorship of junior team members, including interns like Clinton Jake VanSciver, was geared towards nurturing their potential and instilling a similar approach to technology and project execution. This period was marked by significant contributions to the foundational aspects of eSports and a constant engagement with emerging technologies to keep ahead of industry trends. Highlights: - Led the architectural development of 'Bingo Around the World,' a key project at Playsino, leveraging advanced Java and web technologies. - Managed the Client Engineering team, introducing and integrating new technologies to enhance project outcomes. - Played a mentorship role, providing guidance and technical leadership to junior team members and interns. - Maintained a focus on purpose-driven development, ensuring that projects delivered both technical excellence and meaningful user experiences. - Actively engaged with new and emerging technologies, contributing to the foundational growth of eSports and maintaining a forward-thinking approach in software architecture. 
Software Architecture, PHP and +1 skill Show all 19 experiences EducationEducation diff --git a/src/dspygen/dsl/utils/__init__.py b/src/dspygen/experiments/cliapi/__init__.py similarity index 100% rename from src/dspygen/dsl/utils/__init__.py rename to src/dspygen/experiments/cliapi/__init__.py diff --git a/src/dspygen/experiments/cliapi/cli_app.py b/src/dspygen/experiments/cliapi/cli_app.py new file mode 100644 index 0000000..ab1835b --- /dev/null +++ b/src/dspygen/experiments/cliapi/cli_app.py @@ -0,0 +1,165 @@
+
+__app_name__ = 'GitHub CLI'
+__version__ = '2.35.0'
+__description__ = '''A command-line tool for interacting with GitHub.'''
+__author__ = 'GitHub, Inc.'
+
+import typer
+
+app = typer.Typer(help=__description__)
+
+
+# Configurations
+configurations = {
+    'globals': {
+        'editor': {'default': 'vim', 'description': 'Default text editor for GitHub CLI operations.', 'type': 'string'},
+        'pager': {'default': 'less', 'description': 'Default pager for displaying command outputs.', 'type': 'string'},
+        'theme': {'default': 'auto', 'description': 'Color theme for GitHub CLI output.', 'type': 'string'},
+    },
+    'repository': {
+        'default_branch': {'default': 'main', 'description': 'Default branch name for new repositories.', 'type': 'string'},
+        'visibility': {'default': 'private', 'description': 'Default visibility for new repositories.', 'type': 'string'},
+    }
+}
+
+
+# Voice Settings
+voice_settings = {'version': '1.0.0', 'voice_format': 'wav', 'voice_language': 'en'}
+
+
+# Accessibility Features
+accessibility_features = {'version': '1.0.0', 'screen_reader_support': True, 'high_contrast_mode': False, 'keyboard_navigation': True}
+
+
+gh_app = typer.Typer(help='GitHub CLI main command for interacting with GitHub repositories and services.')
+
+@gh_app.callback()
+def gh_callback(version: bool = typer.Option(False, help='Display the version of GitHub CLI.'), help: bool = typer.Option(False, help='Show help information for GitHub CLI.')):
+    pass
+
+repo_app = typer.Typer(help='Manage GitHub repositories.')
+
+@repo_app.command(name='clone', help='Clone a repository to your local machine.')
+def clone(repository: str = typer.Argument(..., help='The repository to clone.'), depth: int = typer.Option(None, help='Create a shallow clone with a history truncated to the specified number of commits.')):
+    '''Clone a repository to your local machine.'''
+    typer.echo('Executing clone subcommand')
+    # Examples:
+    # gh repo clone owner/repo
+    # gh repo clone owner/repo --depth 1
+
+gh_app.add_typer(repo_app, name='repo')
+
+app.add_typer(gh_app, name='gh')
+
+
+octo_org_octo_plugin_app = typer.Typer(help='A plugin to enhance GitHub CLI with additional organizational tools.')
+
+octo_app = typer.Typer(help='Organizational tools provided by the octo-plugin.')
+
+@octo_app.command(name='sync', help='Synchronize organizational repositories.')
+def sync(organization: str = typer.Argument(..., help='Name of the organization to synchronize.'), force: bool = typer.Option(False, help='Force synchronization even if conflicts exist.')):
+    '''Synchronize organizational repositories.'''
+    typer.echo('Executing sync subcommand')
+    # Examples:
+    # gh octo sync octo-org --force
+
+octo_org_octo_plugin_app.add_typer(octo_app, name='octo')
+
+app.add_typer(octo_org_octo_plugin_app, name='octo-org/octo-plugin')
+
+
+gh_extras_app = typer.Typer(help='Additional commands and functionalities for GitHub CLI.')
+
+extras_app = typer.Typer(help='Extra tools and utilities.')
+
+@extras_app.command(name='deploy', help='Deploy the repository to a specified environment.')
+def deploy(repository: str = typer.Argument(..., help='Repository to deploy.'), env: str = typer.Option(..., help='Target environment for deployment.')):
+    '''Deploy the repository to a specified environment.'''
+    typer.echo('Executing deploy subcommand')
+    # Examples:
+    # gh extras deploy owner/repo --env production
+
+@extras_app.command(name='backup', help='Backup the repository data.')
+def backup(repository: str = typer.Argument(..., help='Repository to backup.'), destination: str = typer.Option(..., help='Backup destination path.')):
+    '''Backup the repository data.'''
+    typer.echo('Executing backup subcommand')
+    # Examples:
+    # gh extras backup owner/repo --destination /backups/repo-backup.tar.gz
+
+gh_extras_app.add_typer(extras_app, name='extras')
+
+app.add_typer(gh_extras_app, name='gh-extras')
+
+
+github_marketplace_app = typer.Typer(help='A marketplace for GitHub CLI plugins and extensions.')
+
+@github_marketplace_app.command(name='browse', help='Browse available plugins and extensions in the GitHub Marketplace.')
+def browse(query: str = typer.Argument(None, help='Search query term.'), category: str = typer.Option(None, help='Filter by category.'), sort: str = typer.Option(None, help='Sort results by criteria (e.g., popularity, date).')):
+    '''Browse available plugins and extensions in the GitHub Marketplace.'''
+    typer.echo('Executing browse marketplace command')
+    # Examples:
+    # gh marketplace browse --category productivity
+    # gh marketplace browse --sort popularity
+
+@github_marketplace_app.command(name='install', help='Install a plugin or extension from the GitHub Marketplace.')
+def install(item_name: str = typer.Argument(..., help='Name of the plugin or extension to install.'), source: str = typer.Option(..., help='Source repository of the plugin or extension.')):
+    '''Install a plugin or extension from the GitHub Marketplace.'''
+    typer.echo('Executing install marketplace command')
+    # Examples:
+    # gh marketplace install gh-extras/gh-deploy-plugin
+
+app.add_typer(github_marketplace_app, name='marketplace')
+
+
+hygen_app = typer.Typer(help='Integrate Hygen for code scaffolding.')
+
+@hygen_app.command(name='hygen', help='Code scaffolding using Hygen templates.')
+def hygen(template: str = typer.Option(..., help='Select template for scaffolding.'), component_name: str = typer.Argument(..., help='Name of the component to scaffold.')):
+    '''Code scaffolding using Hygen templates.'''
+    typer.echo('Executing hygen command')
+    # Examples:
+    # gh hygen --template react component Button
+    # gh hygen --template node service AuthService
+
+app.add_typer(hygen_app, name='hygen')
+
+assist_app = typer.Typer(help='Integrate LLM-powered code assistants for enhanced code generation and assistance.')
+
+@assist_app.command(name='assist', help='Interact with LLM-powered code assistants.')
+def assist(prompt: str = typer.Option(..., help='Provide a prompt for the assistant.'), model: str = typer.Option(None, help='Specify the LLM model to use.'), code_file: str = typer.Argument(None, help="File to apply the assistant's suggestions.")):
+    '''Interact with LLM-powered code assistants.'''
+    typer.echo('Executing assist command')
+    # Examples:
+    # gh assist --prompt 'Optimize this function' main.py
+    # gh assist --prompt 'Generate unit tests' src/utils.py --model gpt-4-turbo
+
+app.add_typer(assist_app, name='assist')
+
+
+if __name__ == "__main__":
+    app()
diff --git a/src/dspygen/experiments/cliapi/cliapi-history.txt b/src/dspygen/experiments/cliapi/cliapi-history.txt new file mode 100644 index 0000000..f8881c6 --- /dev/null +++ b/src/dspygen/experiments/cliapi/cliapi-history.txt @@ -0,0 +1,34133 @@ +Chat history + +ChatGPT o1-preview + + +You said: + +Get Started +Guide +API +Examples +Community +Introduction +Installation +Configuration +Views +Assets +Styling +Routing +SEO and Meta +Transitions +Data fetching +State Management +Error Handling +Server +Layers +Prerendering +Deployment +Testing + +Express +Express.js is arguably the most popular web framework for Node.js + +A typical app structure for express celebrates the notion of routes and handlers, while views and data are left for interpretation (probably because the rise of microservices and client-side apps). + +So an app structure may look like this: + +app/ + routes.js + handlers/ + health.js + shazam.js +While routes.js glues everything together: + +// ... some code ... +const health = require('./handlers/health') +const shazam = require('./handlers/shazam') +app.get('/health', health) +app.post('/shazam', shazam) + +module.exports = app +Unlike React Native, you could dynamically load modules here. However, there's still a need for judgement when constructing the routes (app.get/post part). + +Using hygen let's see how we could build something like this: + +$ hygen route new --method post --name auth +Since we've been through a few templates as with previous use cases, let's jump straight to the interesting part, the inject part. + +So let's say our generator is structured like this: + +_templates/ + route/ + new/ + handler.ejs.t + inject_handler.ejs.t +Then inject_handler looks like this: + +--- +inject: true +to: app/routes.js +skip_if: <%= name %> +before: "module.exports = app" +--- +app.<%= method %>('/<%= name %>', <%= name %>) +Note how we're anchoring this inject to before: "module.exports = app". If in previous occasions we appended content to a given line, we're now prepending it. + + +Redux +Redux is the posterchild of hygen. It doesn't matter what level of granularity you select for your Redux architecture, you're still going to end up with a lot of boilerplate. + +For my app architecture I mostly choose ducks, which helps remove a little bit of boilerplate and have modularity baked in. 
+ +My typical Redux architecture would look like this: + +app/ + components/ + icon.js + avatar.js + modules/ + boot.js <---- glues modules together, requires chat, app, and auth. + app/ + index.js + view.js + state.js + auth/ + index.js + view.js + state.js + chat/ + index.js <---- the 'connect' bit for Redux. + view.js <---- the view, separated, for testing. + state.js <---- reducer, actions, types, selectors. +Adding a Module# +Being a modular architecture, there's a huge plus for adding a module. It just means adding a folder, and index, view, state, perhaps a default component, a storybook story, and wiring every thing togather such as a reducer, exported actions and more in boot.js. Fun. + +Should be a breeze with hygen. Here's how your templates look like: + +_templates/ + module/ + new/ + index.ejs.t + view.ejs.t + state.ejs.t + inject_boot.ejs.t <--- adds a 'require' clause to boot.js +Here's how index looks like: + +--- +to: app/modules/<%= name %>/index.js +--- +// +// requires, mappings, etc.... +// +export default connect(...)(<%= Name %>) +A similar trick would do for view and state. + +How would we add a require line given that boot.js looks like this? + +// ... some bootstrapping code ... + +const modules = [ + // --- we want to inject "after" this + require('auth').default, + require('app').default +] + +// ... rest of bootstrapping code ... +Let's build inject_boot.ejs.t. We have to use inject: true because this is an inject template and to locate our injected line after const modules =...: + +--- +to: app/modules/boot.js +inject: true +skip_if: <%= name %> +after: "const modules = [" +--- +require('./<%= name %>).default, +After running it, we end up with this: + +Copy +// ... some bootstrapping code ... + +const modules = [ + require('chat').default + require('auth').default, + require('app').default +] + +// ... rest of bootstrapping code ... +And we're done! Generating a new module is saying this: + +$ hygen module new --name settings + + +Convert a set of notes, captured from images, into a highly structured and organized Obsidian page using **Sparse Priming Representation (SPR) decompression**. Integrate advanced note-taking methodologies such as **Zettelkasten**, **Ivy Lee Six-Item Lists**, **Atomic Notes**, **Progressive Summarization**, and **Backlinking**. Ensure each note is actionable, navigable, and optimized for both human use and AI interpretation, supporting a second-brain system.The output must strictly follow the Obsidian note format, leaving no room for external comments or text outside of the Obsidian note itself. +ChatGPT + +Thought for a couple of seconds + +markdown + +--- +title: Express.js and Redux with Hygen Integration +tags: [Express.js, Redux, Hygen, Node.js, Web Development, Automation] +created: 2024-04-27 +updated: 2024-04-27 +aliases: [] +--- + +# Express.js and Redux with Hygen Integration + +## Overview + +Integrating **Express.js**, **Redux**, and **Hygen** streamlines the development workflow by automating boilerplate code generation and maintaining a modular architecture. This integration leverages advanced methodologies to ensure scalability, maintainability, and efficiency in web application development. 
+ +## Table of Contents + +- [[Express.js Overview]] +- [[App Structure]] +- [[Route Management]] +- [[Hygen Generators]] +- [[Redux Architecture]] +- [[Module Management]] +- [[Automation with Hygen]] +- [[Best Practices]] +- [[References]] + +## Express.js Overview + +**Express.js** is the most popular web framework for Node.js, known for its minimalistic and flexible approach to building web applications and APIs. + +- **Key Features:** + - Simple routing + - Middleware support + - Template engines integration + - Robust API for handling requests and responses + +## App Structure + +A typical **Express.js** application emphasizes routes and handlers, leaving views and data management to be defined based on specific needs, such as microservices or client-side applications. + +### Example Structure + +app/ routes.js handlers/ health.js shazam.js + +vbnet + + +- **routes.js**: Centralizes route definitions. +- **handlers/**: Contains individual request handlers. + +## Route Management + +**routes.js** binds routes to their respective handlers. + +```javascript +const health = require('./handlers/health') +const shazam = require('./handlers/shazam') + +app.get('/health', health) +app.post('/shazam', shazam) + +module.exports = app +Dynamic Module Loading: Unlike some frameworks, Express.js allows dynamic loading of modules. +Route Construction: Careful judgment is required when defining app.get and app.post routes. +Hygen Generators +Hygen is a code generator that automates the creation of boilerplate code, enhancing productivity and consistency. + +Creating a New Route +bash + +$ hygen route new --method post --name auth +Template Structure +arduino + +_templates/ + route/ + new/ + handler.ejs.t + inject_handler.ejs.t +Injecting Handlers +inject_handler.ejs.t + +ejs + +--- +inject: true +to: app/routes.js +skip_if: <%= name %> +before: "module.exports = app" +--- +app.<%= method %>('/<%= name %>', <%= name %>) +Prepending Routes: Inserts the new route before the module exports. +Redux Architecture +Redux is a predictable state container for JavaScript apps, often used with React. Despite its boilerplate, using the ducks pattern can enhance modularity and reduce redundancy. + +Typical Structure +css + +app/ + components/ + icon.js + avatar.js + modules/ + boot.js + app/ + index.js + view.js + state.js + auth/ + index.js + view.js + state.js + chat/ + index.js + view.js + state.js +components/: Reusable UI components. +modules/: Contains feature-specific logic. +boot.js: Initializes and combines modules. +Module Management +Adding a new module involves creating a folder with necessary files and updating the bootstrapping process. + +Adding a Module +bash + +$ hygen module new --name settings +Template Structure +arduino + +_templates/ + module/ + new/ + index.ejs.t + view.ejs.t + state.ejs.t + inject_boot.ejs.t +inject_boot.ejs.t +ejs + +--- +to: app/modules/boot.js +inject: true +skip_if: <%= name %> +after: "const modules = [" +--- +require('./<%= name %>').default, +Boot.js Injection: Adds the new module to the modules array after const modules = [. +Resulting boot.js +javascript + +// ... some bootstrapping code ... + +const modules = [ + require('chat').default, + require('auth').default, + require('app').default +] + +// ... rest of bootstrapping code ... +Automation with Hygen +Hygen automates repetitive tasks, ensuring consistency and reducing errors. + +Generators: Define templates for routes, modules, components, etc. +Injection: Seamlessly integrate new code into existing files. 
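The template tree above lists a `handler.ejs.t` that this note never shows. As a rough, illustrative sketch only (the file body below is an assumption, not taken from the original note), such a template could generate a minimal Express handler alongside the injected route:

```ejs
---
to: app/handlers/<%= name %>.js
---
// Hypothetical handler skeleton for the '<%= name %>' route.
module.exports = (req, res) => {
  // TODO: implement the real <%= name %> logic
  res.json({ route: '<%= name %>', ok: true })
}
```

With something like this in place, `hygen route new --method post --name auth` would emit `app/handlers/auth.js`, and the inject template shown above would register the matching `app.post('/auth', auth)` line.
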
+Best Practices +Modular Architecture: Promote scalability and maintainability. +Automated Code Generation: Use tools like Hygen to minimize boilerplate. +Atomic Notes: Ensure each module or component serves a single purpose. +Backlinking: Connect related modules and components for easy navigation. +References +[[Express.js Documentation]] +[[Redux Documentation]] +[[Hygen Documentation]] +[[Zettelkasten Method]] +[[Ivy Lee Method]] +[[Progressive Summarization]] + + + + +You said: +Now create a note that is the combination of the two in a narrative with code samples format. Include all code and path references +ChatGPT + +Thought for a couple of seconds + +markdown + +--- +title: Building a Modular Express.js and Redux Application with Hygen +tags: [Express.js, Redux, Hygen, Node.js, Web Development, Automation] +created: 2024-04-27 +updated: 2024-04-27 +aliases: [] +--- + +# Building a Modular Express.js and Redux Application with Hygen + +Creating a scalable and maintainable web application often involves structuring your codebase thoughtfully and automating repetitive tasks. In this guide, we'll explore how to integrate **Express.js**, **Redux**, and **Hygen** to build a robust application architecture. We'll walk through setting up your Express server, managing state with Redux, and using Hygen to automate boilerplate code generation. + +## Setting Up Express.js + +**Express.js** is a minimalistic and flexible web framework for Node.js, widely recognized for its simplicity and powerful routing capabilities. A typical Express application focuses on defining routes and their corresponding handlers, leaving views and data management to be tailored based on specific requirements, such as microservices or client-side applications. + +### App Structure + +A well-organized Express application might have the following structure: + +app/ routes.js handlers/ health.js shazam.js + +vbnet + + +- **routes.js**: Centralizes all route definitions. +- **handlers/**: Contains individual request handlers for different endpoints. + +### Defining Routes + +In `app/routes.js`, you bind routes to their respective handlers: + +```javascript +const health = require('./handlers/health'); +const shazam = require('./handlers/shazam'); + +app.get('/health', health); +app.post('/shazam', shazam); + +module.exports = app; +This setup allows for easy scalability, enabling you to add more routes and handlers as your application grows. + +Automating with Hygen +Hygen is a powerful code generator that helps automate the creation of boilerplate code, ensuring consistency and reducing manual effort. By defining templates, Hygen can generate new routes, modules, and other components with ease. + +Creating a New Route +To generate a new route using Hygen, you can run: + +bash + +$ hygen route new --method post --name auth +This command utilizes predefined templates to create the necessary handler and inject the route into app/routes.js. + +Template Structure +Your Hygen templates for routes might look like this: + +arduino + +_templates/ + route/ + new/ + handler.ejs.t + inject_handler.ejs.t +inject_handler.ejs.t +ejs + +--- +inject: true +to: app/routes.js +skip_if: <%= name %> +before: "module.exports = app" +--- +app.<%= method %>('/<%= name %>', <%= name %>); +This template injects the new route definition into app/routes.js just before the module is exported, ensuring that your routes are organized and up-to-date. 
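For reference, here is roughly what `app/routes.js` could look like after running the command above with `--name auth`. This is an illustrative assumption: the inject template shown only adds the route line, so the corresponding `require` for the new handler is presumed to come from another template or a manual edit.

```javascript
// app/routes.js after `hygen route new --method post --name auth` (illustrative)
const health = require('./handlers/health');
const shazam = require('./handlers/shazam');
const auth = require('./handlers/auth'); // assumed: added manually or by another template

app.get('/health', health);
app.post('/shazam', shazam);
app.post('/auth', auth); // line injected before "module.exports = app"

module.exports = app;
```
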
+ +Managing State with Redux +Redux serves as a predictable state container for JavaScript applications, offering a centralized way to manage state across your app. Despite its reputation for boilerplate, adopting the ducks pattern can enhance modularity and reduce redundancy. + +Redux Architecture +A typical Redux setup within your application might follow this structure: + +css + +app/ + components/ + icon.js + avatar.js + modules/ + boot.js + app/ + index.js + view.js + state.js + auth/ + index.js + view.js + state.js + chat/ + index.js + view.js + state.js +components/: Houses reusable UI components. +modules/: Contains feature-specific logic, each with its own index.js, view.js, and state.js. +boot.js: Initializes and combines all modules. +Adding a New Module +With a modular architecture, adding a new module is straightforward. For example, to add a settings module, run: + +bash + +$ hygen module new --name settings +Template Structure +Hygen templates for modules might be organized as follows: + +arduino + +_templates/ + module/ + new/ + index.ejs.t + view.ejs.t + state.ejs.t + inject_boot.ejs.t +inject_boot.ejs.t +ejs + +--- +to: app/modules/boot.js +inject: true +skip_if: <%= name %> +after: "const modules = [" +--- +require('./<%= name %>').default, +This template injects the new module into app/modules/boot.js by appending a require statement after the existing modules array. + +Resulting boot.js +After running the generator, your boot.js will look like this: + +javascript + +// ... some bootstrapping code ... + +const modules = [ + require('chat').default, + require('auth').default, + require('app').default, + require('settings').default, +]; + +// ... rest of bootstrapping code ... +This ensures that your new settings module is seamlessly integrated into the application. + +Integrating Express.js, Redux, and Hygen +Combining Express.js, Redux, and Hygen creates a harmonious development environment where server-side routing, state management, and code generation work together seamlessly. + +Express.js handles HTTP requests and routes them to appropriate handlers. +Redux manages the application state, providing a predictable and centralized state container. +Hygen automates the creation of new routes and modules, reducing manual boilerplate and ensuring consistency. +Example Workflow +Define a New Route: + +bash + +$ hygen route new --method post --name login +This generates a new handler app/handlers/login.js and injects the route into app/routes.js. + +Create a New Redux Module: + +bash + +$ hygen module new --name user +This creates a new module under app/modules/user/ with index.js, view.js, and state.js, and updates boot.js to include the new module. 
+ +Implement Handler and State Logic: + +app/handlers/login.js: +javascript + +module.exports = (req, res) => { + // Handle login logic + res.send('Login successful'); +}; +app/modules/user/state.js: +javascript + +// Initial state +const initialState = { + isAuthenticated: false, + user: null, +}; + +// Actions +const LOGIN_SUCCESS = 'user/LOGIN_SUCCESS'; +const LOGOUT = 'user/LOGOUT'; + +// Reducer +export default function reducer(state = initialState, action) { + switch (action.type) { + case LOGIN_SUCCESS: + return { ...state, isAuthenticated: true, user: action.payload }; + case LOGOUT: + return { ...state, isAuthenticated: false, user: null }; + default: + return state; + } +} + +// Action Creators +export const loginSuccess = (user) => ({ + type: LOGIN_SUCCESS, + payload: user, +}); + +export const logout = () => ({ + type: LOGOUT, +}); +Best Practices +Modular Architecture: Organize your codebase into modules to promote scalability and maintainability. +Automated Code Generation: Utilize tools like Hygen to minimize boilerplate and maintain consistency. +Atomic Notes: Ensure each module or component has a single responsibility, enhancing clarity and testability. +Backlinking: Connect related modules and components within your documentation for easy navigation and reference. +References +[[Express.js Documentation]] +[[Redux Documentation]] +[[Hygen Documentation]] +[[Zettelkasten Method]] +[[Ivy Lee Method]] +[[Progressive Summarization]] + + + + +You said: +The are still not connected. I want one full CRUD with express, react, and redux. Before you answer, make a list of wrong assumptions people sometimes make about the concepts included in the question. Then create the full note +ChatGPT + +Thought for 20 seconds + +markdown + +--- +title: Building a Full CRUD Application with Express, React, Redux, and Hygen +tags: [Express.js, React, Redux, Hygen, Node.js, Web Development, CRUD, Automation] +created: 2024-09-28 +updated: 2024-09-28 +aliases: [] +--- + +# Building a Full CRUD Application with Express, React, Redux, and Hygen + +## Common Misconceptions + +Before diving into building a full CRUD application, it's essential to address some common misconceptions that developers might have about the involved technologies: + +1. **Express.js and React are interchangeable**: + - *Reality*: **Express.js** is a backend framework for Node.js, handling server-side operations, while **React** is a frontend library for building user interfaces. + +2. **Redux is mandatory for state management in React**: + - *Reality*: While **Redux** provides a robust solution for state management, smaller applications might benefit from React's built-in state management or other libraries like Context API. + +3. **Hygen is only for generating boilerplate code**: + - *Reality*: **Hygen** can also be used to automate various repetitive tasks, ensuring consistency and reducing the potential for human error. + +4. **Full CRUD applications require separate repositories for frontend and backend**: + - *Reality*: While separating concerns is beneficial, integrating both frontend and backend in a single repository can simplify development workflows. + +5. **Using Hygen eliminates the need for understanding the underlying code structure**: + - *Reality*: Code generators like **Hygen** assist in scaffolding, but a deep understanding of the codebase and architecture is crucial for effective development and maintenance. 
+ +## Overview + +Building a full CRUD (Create, Read, Update, Delete) application involves setting up both the backend and frontend, managing state, and ensuring smooth interactions between different parts of the application. This guide demonstrates how to integrate **Express.js** for the backend, **React** for the frontend, **Redux** for state management, and **Hygen** for automating boilerplate code generation. + +## Table of Contents + +- [[Common Misconceptions]] +- [[Project Structure]] +- [[Setting Up the Backend with Express.js]] +- [[Setting Up the Frontend with React]] +- [[State Management with Redux]] +- [[Automating with Hygen]] +- [[Implementing CRUD Operations]] + - [[Create]] + - [[Read]] + - [[Update]] + - [[Delete]] +- [[Best Practices]] +- [[References]] + +## Project Structure + +Organizing your project structure effectively is crucial for scalability and maintainability. Here's a combined structure for both backend and frontend: + +project/ backend/ app/ routes.js handlers/ createItem.js readItems.js updateItem.js deleteItem.js models/ item.js config/ db.js server.js frontend/ src/ components/ ItemList.js ItemForm.js modules/ boot.js items/ index.js view.js state.js App.js index.js _templates/ route/ new/ handler.ejs.t inject_handler.ejs.t module/ new/ index.ejs.t view.ejs.t state.ejs.t inject_boot.ejs.t + +arduino + + +- **backend/**: Contains the Express.js server and related backend code. +- **frontend/**: Houses the React application along with Redux state management. +- **_templates/**: Hygen templates for generating boilerplate code. + +## Setting Up the Backend with Express.js + +### Installing Dependencies + +Navigate to the `backend` directory and install necessary packages: + +```bash +cd project/backend +npm init -y +npm install express mongoose hygen +Connecting to the Database +In backend/config/db.js: + +javascript + +const mongoose = require('mongoose'); + +const connectDB = async () => { + try { + await mongoose.connect('mongodb://localhost:27017/yourdb', { + useNewUrlParser: true, + useUnifiedTopology: true, + }); + console.log('MongoDB Connected'); + } catch (err) { + console.error(err.message); + process.exit(1); + } +}; + +module.exports = connectDB; +Defining the Item Model +In backend/models/item.js: + +javascript + +const mongoose = require('mongoose'); + +const ItemSchema = new mongoose.Schema({ + name: { + type: String, + required: true, + }, + description: { + type: String, + }, + createdAt: { + type: Date, + default: Date.now, + }, +}); + +module.exports = mongoose.model('Item', ItemSchema); +Creating Handlers +Each CRUD operation has its own handler. 
+ +Create Item +backend/app/handlers/createItem.js: + +javascript + +const Item = require('../../models/item'); + +module.exports = async (req, res) => { + try { + const newItem = new Item(req.body); + const item = await newItem.save(); + res.status(201).json(item); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; +Read Items +backend/app/handlers/readItems.js: + +javascript + +const Item = require('../../models/item'); + +module.exports = async (req, res) => { + try { + const items = await Item.find(); + res.json(items); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; +Update Item +backend/app/handlers/updateItem.js: + +javascript + +const Item = require('../../models/item'); + +module.exports = async (req, res) => { + try { + const updatedItem = await Item.findByIdAndUpdate(req.params.id, req.body, { new: true }); + res.json(updatedItem); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; +Delete Item +backend/app/handlers/deleteItem.js: + +javascript + +const Item = require('../../models/item'); + +module.exports = async (req, res) => { + try { + await Item.findByIdAndDelete(req.params.id); + res.json({ message: 'Item deleted' }); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; +Defining Routes +In backend/app/routes.js: + +javascript + +const express = require('express'); +const createItem = require('./handlers/createItem'); +const readItems = require('./handlers/readItems'); +const updateItem = require('./handlers/updateItem'); +const deleteItem = require('./handlers/deleteItem'); + +const router = express.Router(); + +router.post('/items', createItem); +router.get('/items', readItems); +router.put('/items/:id', updateItem); +router.delete('/items/:id', deleteItem); + +module.exports = router; +Setting Up the Server +In backend/server.js: + +javascript + +const express = require('express'); +const connectDB = require('./config/db'); +const routes = require('./app/routes'); + +const app = express(); + +// Connect Database +connectDB(); + +// Middleware +app.use(express.json()); + +// Use Routes +app.use('/api', routes); + +const PORT = process.env.PORT || 5000; + +app.listen(PORT, () => console.log(`Server started on port ${PORT}`)); +Setting Up the Frontend with React +Installing Dependencies +Navigate to the frontend directory and set up a React application: + +bash + +cd project/frontend +npx create-react-app . 
+npm install redux react-redux hygen axios +Project Structure +css + +frontend/ + src/ + components/ + ItemList.js + ItemForm.js + modules/ + boot.js + items/ + index.js + view.js + state.js + App.js + index.js +Bootstrapping Redux +In frontend/src/modules/boot.js: + +javascript + +import { createStore, combineReducers, applyMiddleware, compose } from 'redux'; +import thunk from 'redux-thunk'; +import itemsReducer from './items/state'; + +const rootReducer = combineReducers({ + items: itemsReducer, + // Add more reducers here +}); + +const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; +const store = createStore( + rootReducer, + composeEnhancers(applyMiddleware(thunk)) +); + +export default store; +In frontend/src/index.js: + +javascript + +import React from 'react'; +import ReactDOM from 'react-dom'; +import { Provider } from 'react-redux'; +import store from './modules/boot'; +import App from './App'; + +ReactDOM.render( + + + , + document.getElementById('root') +); +Creating Components +ItemList.js +frontend/src/components/ItemList.js: + +javascript + +import React, { useEffect } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { fetchItems, deleteItem } from '../modules/items/state'; + +const ItemList = () => { + const dispatch = useDispatch(); + const { items, loading, error } = useSelector(state => state.items); + + useEffect(() => { + dispatch(fetchItems()); + }, [dispatch]); + + const handleDelete = (id) => { + dispatch(deleteItem(id)); + }; + + if (loading) return

<div>Loading...</div>;
  if (error) return <div>Error: {error}</div>;

  return (
    <ul>
      {items.map(item => (
        <li key={item._id}>
          {item.name}
          <button onClick={() => handleDelete(item._id)}>Delete</button>
        </li>
      ))}
    </ul>
  );
};

export default ItemList;
ItemForm.js
frontend/src/components/ItemForm.js:

javascript

import React, { useState } from 'react';
import { useDispatch } from 'react-redux';
import { createItem } from '../modules/items/state';

const ItemForm = () => {
  const [name, setName] = useState('');
  const [description, setDescription] = useState('');
  const dispatch = useDispatch();

  const onSubmit = (e) => {
    e.preventDefault();
    if (name.trim() === '') return;
    dispatch(createItem({ name, description }));
    setName('');
    setDescription('');
  };

  return (
    <form onSubmit={onSubmit}>
      <input
        type="text"
        placeholder="Name"
        value={name}
        onChange={(e) => setName(e.target.value)}
        required
      />
      <input
        type="text"
        placeholder="Description"
        value={description}
        onChange={(e) => setDescription(e.target.value)}
      />
      <button type="submit">Add Item</button>
    </form>
+ ); +}; + +export default ItemForm; +Managing State with Redux +In frontend/src/modules/items/state.js: + +javascript + +import axios from 'axios'; + +// Initial State +const initialState = { + items: [], + loading: false, + error: null, +}; + +// Action Types +const FETCH_ITEMS_REQUEST = 'items/FETCH_ITEMS_REQUEST'; +const FETCH_ITEMS_SUCCESS = 'items/FETCH_ITEMS_SUCCESS'; +const FETCH_ITEMS_FAILURE = 'items/FETCH_ITEMS_FAILURE'; +const CREATE_ITEM_SUCCESS = 'items/CREATE_ITEM_SUCCESS'; +const DELETE_ITEM_SUCCESS = 'items/DELETE_ITEM_SUCCESS'; + +// Reducer +export default function itemsReducer(state = initialState, action) { + switch (action.type) { + case FETCH_ITEMS_REQUEST: + return { ...state, loading: true, error: null }; + case FETCH_ITEMS_SUCCESS: + return { ...state, loading: false, items: action.payload }; + case FETCH_ITEMS_FAILURE: + return { ...state, loading: false, error: action.payload }; + case CREATE_ITEM_SUCCESS: + return { ...state, items: [...state.items, action.payload] }; + case DELETE_ITEM_SUCCESS: + return { ...state, items: state.items.filter(item => item._id !== action.payload) }; + default: + return state; + } +} + +// Action Creators +export const fetchItems = () => async dispatch => { + dispatch({ type: FETCH_ITEMS_REQUEST }); + try { + const response = await axios.get('/api/items'); + dispatch({ type: FETCH_ITEMS_SUCCESS, payload: response.data }); + } catch (error) { + dispatch({ type: FETCH_ITEMS_FAILURE, payload: error.message }); + } +}; + +export const createItem = (itemData) => async dispatch => { + try { + const response = await axios.post('/api/items', itemData); + dispatch({ type: CREATE_ITEM_SUCCESS, payload: response.data }); + } catch (error) { + // Handle error appropriately + console.error(error); + } +}; + +export const deleteItem = (id) => async dispatch => { + try { + await axios.delete(`/api/items/${id}`); + dispatch({ type: DELETE_ITEM_SUCCESS, payload: id }); + } catch (error) { + // Handle error appropriately + console.error(error); + } +}; +App Component +In frontend/src/App.js: + +javascript + +import React from 'react'; +import ItemList from './components/ItemList'; +import ItemForm from './components/ItemForm'; + +const App = () => ( +
  <div>
    <h1>CRUD Application</h1>
    <ItemForm />
    <ItemList />
  </div>
+); + +export default App; +Automating with Hygen +Hygen can be used to automate the creation of routes and Redux modules, minimizing boilerplate and ensuring consistency across the codebase. + +Setting Up Hygen Templates +Organize your Hygen templates as follows: + +arduino + +_templates/ + route/ + new/ + handler.ejs.t + inject_handler.ejs.t + module/ + new/ + index.ejs.t + view.ejs.t + state.ejs.t + inject_boot.ejs.t +Creating a New Route +To generate a new route, use the following command: + +bash + +$ hygen route new --method post --name auth +inject_handler.ejs.t +ejs + +--- +inject: true +to: backend/app/routes.js +skip_if: <%= name %> +before: "module.exports = router" +--- +router.<%= method %>('/<%= name %>', <%= name %>); +This template injects the new route into backend/app/routes.js before the module is exported. + +Creating a New Module +To generate a new Redux module, use: + +bash + +$ hygen module new --name settings +inject_boot.ejs.t +ejs + +--- +to: frontend/src/modules/boot.js +inject: true +skip_if: <%= name %> +after: "const rootReducer = combineReducers({" +--- + <%= name %>: require('./<%= name %>/state').default, +This injects the new module's reducer into boot.js after the combineReducers call. + +Implementing CRUD Operations +Let's walk through implementing a complete CRUD cycle: Create, Read, Update, and Delete. + +Create +Backend: The createItem handler in backend/app/handlers/createItem.js handles item creation. +Frontend: The ItemForm component in frontend/src/components/ItemForm.js allows users to input new items. +Redux: The createItem action in frontend/src/modules/items/state.js dispatches the creation action, updating the state upon successful creation. +Read +Backend: The readItems handler in backend/app/handlers/readItems.js fetches all items. +Frontend: The ItemList component in frontend/src/components/ItemList.js displays the list of items. +Redux: The fetchItems action in frontend/src/modules/items/state.js retrieves items from the backend and updates the state. +Update +Backend: The updateItem handler in backend/app/handlers/updateItem.js updates an existing item. +Frontend: You can create an ItemEdit component to handle editing items (similar to ItemForm). +Redux: Implement an updateItem action in the Redux state to handle updates. +Delete +Backend: The deleteItem handler in backend/app/handlers/deleteItem.js removes an item. +Frontend: Add a delete button in the ItemList component to allow users to delete items. +Redux: The deleteItem action in frontend/src/modules/items/state.js dispatches the deletion action and updates the state. +Best Practices +Modular Architecture: Separate concerns by organizing code into modules, enhancing scalability and maintainability. +Automated Code Generation: Utilize Hygen to generate boilerplate code, ensuring consistency and reducing manual errors. +Atomic Notes: Structure documentation so that each module or component has a single responsibility, facilitating clarity and testability. +Backlinking: Connect related concepts and modules within your documentation for easy navigation and reference. +Error Handling: Implement comprehensive error handling both on the backend and frontend to ensure a robust application. +Security: Ensure that your backend routes are secured and validate all incoming data to prevent vulnerabilities. 
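The Update step above mentions an `updateItem` action without showing it. A minimal sketch, consistent with the `frontend/src/modules/items/state.js` shown earlier (it reuses that file's existing `axios` import and its action-naming convention), could be:

```javascript
// Additions to frontend/src/modules/items/state.js (sketch)
const UPDATE_ITEM_SUCCESS = 'items/UPDATE_ITEM_SUCCESS';

// In the reducer, handle the new action type:
//   case UPDATE_ITEM_SUCCESS:
//     return {
//       ...state,
//       items: state.items.map(item =>
//         item._id === action.payload._id ? action.payload : item
//       ),
//     };

export const updateItem = (id, itemData) => async dispatch => {
  try {
    const response = await axios.put(`/api/items/${id}`, itemData);
    dispatch({ type: UPDATE_ITEM_SUCCESS, payload: response.data });
  } catch (error) {
    console.error(error);
  }
};
```

An `ItemEdit` form component would then dispatch `updateItem(item._id, values)` on submit, mirroring how `ItemForm` dispatches `createItem`.
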
+References +[[Express.js Documentation]] +[[React Documentation]] +[[Redux Documentation]] +[[Hygen Documentation]] +[[Zettelkasten Method]] +[[Ivy Lee Method]] +[[Progressive Summarization]] + + + + +You said: +Now redo the note but only from the template perspective. NO SOURCE FILES JUST TEMPLATES FOR THE PREVIOUS EXAMPLES +ChatGPT + +Thought for a few seconds + +markdown + +--- +title: Hygen Templates for Building a Full CRUD Application with Express, React, Redux +tags: [Hygen, Templates, Express.js, React, Redux, Automation, CRUD] +created: 2024-09-28 +updated: 2024-09-28 +aliases: [] +--- + +# Hygen Templates for Building a Full CRUD Application with Express, React, Redux + +Automating repetitive tasks is crucial for maintaining consistency and efficiency in a full-stack application. **Hygen** provides a robust solution for generating boilerplate code through customizable templates. This guide focuses exclusively on the Hygen templates used to scaffold a CRUD application integrated with **Express.js**, **React**, and **Redux**. + +## Common Misconceptions + +Before delving into the templates, it's important to clarify some common misconceptions about Hygen and the technologies involved: + +1. **Hygen Replaces Developer Understanding**: + - *Reality*: Hygen automates boilerplate code generation but does not replace the need for developers to understand the underlying architecture and code structure. + +2. **Hygen is Limited to Frontend Development**: + - *Reality*: Hygen can be effectively used for both frontend and backend code generation, enhancing productivity across the entire stack. + +3. **Templates are One-Size-Fits-All**: + - *Reality*: Hygen templates are highly customizable and can be tailored to fit the specific architectural patterns and coding standards of a project. + +4. **Hygen Automatically Handles Complex Logic**: + - *Reality*: While Hygen can generate the scaffolding, implementing complex business logic still requires manual coding and developer insight. + +5. **Using Hygen Means Less Code Maintenance**: + - *Reality*: Hygen helps in maintaining consistency but does not eliminate the need for regular code maintenance and refactoring. + +## Template Structure + +Organizing Hygen templates effectively is essential for seamless code generation. Below is the structure of the Hygen templates used for the CRUD application: + +_templates/ route/ new/ handler.ejs.t inject_handler.ejs.t module/ new/ index.ejs.t view.ejs.t state.ejs.t inject_boot.ejs.t + +vbnet + + +- **route/new/**: Templates for creating new Express.js routes. + - `handler.ejs.t`: Template for route handlers. + - `inject_handler.ejs.t`: Template to inject new routes into the routes file. + +- **module/new/**: Templates for creating new Redux modules. + - `index.ejs.t`: Template for the module's index file. + - `view.ejs.t`: Template for React components associated with the module. + - `state.ejs.t`: Template for Redux state management. + - `inject_boot.ejs.t`: Template to inject the new module into the Redux store. + +## Template Files + +### 1. Route Templates + +#### `handler.ejs.t` + +This template generates a new handler file for an Express.js route. + +```ejs +--- +to: backend/app/handlers/<%= capitalize(name) %>Handler.js +--- +const <%= capitalize(name) %> = require('../../models/<%= name %>'); + +module.exports = async (req, res) => { + try { + // Implement <%= capitalize(name) %> logic here + res.status(200).json({ message: '<%= capitalize(name) %> handler executed successfully.' 
}); + } catch (error) { + res.status(500).json({ error: error.message }); + } +}; +Purpose: Creates a new handler file for the specified route. +Path: backend/app/handlers/ +Usage: Automatically handles HTTP requests for a specific route. +inject_handler.ejs.t +This template injects the new route into the central routes file. + +ejs + +--- +inject: true +to: backend/app/routes.js +skip_if: <%= name %> +before: "module.exports = router" +--- +router.<%= method %>('/<%= name %>', require('./handlers/<%= capitalize(name) %>Handler')); +Purpose: Adds a new route definition to routes.js. +Injection Point: Before the module.exports = router line. +Usage: Ensures the new route is registered with the Express.js application. +2. Module Templates +index.ejs.t +This template generates the entry point for a new Redux module. + +ejs + +--- +to: frontend/src/modules/<%= name %>/index.js +--- +export { default as <%= capitalize(name) %>View } from './view'; +export { default as <%= capitalize(name) %>State } from './state'; +Purpose: Exports the view and state components of the module. +Path: frontend/src/modules// +Usage: Facilitates easy import of module components. +view.ejs.t +This template generates a React component for the module. + +ejs + +--- +to: frontend/src/modules/<%= name %>/view.js +--- +import React from 'react'; +import { useSelector, useDispatch } from 'react-redux'; +import { fetch<%= capitalize(name) %>s } from './state'; + +const <%= capitalize(name) %>View = () => { + const dispatch = useDispatch(); + const <%= name %>s = useSelector(state => state.<%= name %>.items); + + React.useEffect(() => { + dispatch(fetch<%= capitalize(name) %>s()); + }, [dispatch]); + + return ( +
    <div>
      <h2><%= capitalize(name) %> List</h2>
      <ul>
        {<%= name %>s.map(item => (
          <li key={item._id}>{item.name}</li>
        ))}
      </ul>
    </div>
+ ); +}; + +export default <%= capitalize(name) %>View; +Purpose: Creates a React component to display a list of items. +Path: frontend/src/modules// +Usage: Fetches and displays data from the Redux store. +state.ejs.t +This template generates Redux state management files for the module. + +ejs + +--- +to: frontend/src/modules/<%= name %>/state.js +--- +import axios from 'axios'; + +// Initial State +const initialState = { + items: [], + loading: false, + error: null, +}; + +// Action Types +const FETCH_<%= name.toUpperCase() %>S_REQUEST = 'FETCH_<%= name.toUpperCase() %>S_REQUEST'; +const FETCH_<%= name.toUpperCase() %>S_SUCCESS = 'FETCH_<%= name.toUpperCase() %>S_SUCCESS'; +const FETCH_<%= name.toUpperCase() %>S_FAILURE = 'FETCH_<%= name.toUpperCase() %>S_FAILURE'; + +// Reducer +export default function <%= name %>Reducer(state = initialState, action) { + switch (action.type) { + case FETCH_<%= name.toUpperCase() %>S_REQUEST: + return { ...state, loading: true, error: null }; + case FETCH_<%= name.toUpperCase() %>S_SUCCESS: + return { ...state, loading: false, items: action.payload }; + case FETCH_<%= name.toUpperCase() %>S_FAILURE: + return { ...state, loading: false, error: action.payload }; + default: + return state; + } +} + +// Action Creators +export const fetch<%= capitalize(name) %>s = () => async dispatch => { + dispatch({ type: FETCH_<%= name.toUpperCase() %>S_REQUEST }); + try { + const response = await axios.get('/api/<%= name %>s'); + dispatch({ type: FETCH_<%= name.toUpperCase() %>S_SUCCESS, payload: response.data }); + } catch (error) { + dispatch({ type: FETCH_<%= name.toUpperCase() %>S_FAILURE, payload: error.message }); + } +}; +Purpose: Manages the state and actions for the module using Redux. +Path: frontend/src/modules// +Usage: Handles data fetching and state updates. +inject_boot.ejs.t +This template injects the new module into the Redux store configuration. + +ejs + +--- +inject: true +to: frontend/src/modules/boot.js +skip_if: <%= name %> +after: "const rootReducer = combineReducers({" +--- + <%= name %>: require('./<%= name %>/state').default, +Purpose: Adds the new module's reducer to the root reducer. +Injection Point: After the combineReducers({ line. +Usage: Integrates the new module into the Redux store. +Using the Templates +Hygen templates streamline the process of adding new routes and modules. Here's how to utilize them effectively: + +1. Generating a New Route +To create a new Express.js route with its handler: + +bash + +$ hygen route new --method post --name auth +Parameters: + +--method: HTTP method (e.g., get, post, put, delete). +--name: Name of the route (e.g., auth, login, register). +What Happens: + +Generates a new handler file in backend/app/handlers/. +Injects the new route into backend/app/routes.js. +2. Generating a New Redux Module +To create a new Redux module with its components: + +bash + +$ hygen module new --name user +Parameters: + +--name: Name of the module (e.g., user, product, order). +What Happens: + +Generates index.js, view.js, and state.js in frontend/src/modules/user/. +Injects the new module's reducer into frontend/src/modules/boot.js. +Advantages of Using Hygen Templates +Consistency: Ensures that all routes and modules follow the same structure and conventions. +Efficiency: Reduces the time spent on writing repetitive boilerplate code. +Scalability: Simplifies the process of adding new features as the application grows. +Maintainability: Makes the codebase easier to manage by enforcing a standardized structure. 
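One practical note on these templates: they call a bare `capitalize(...)` helper inside EJS, which not every Hygen setup provides out of the box. Hygen supports project-level helpers via a `.hygen.js` file; the sketch below is an assumption about how such a helper could be wired up, and depending on your Hygen version the helper may need to be referenced as `h.capitalize(...)` in the templates instead.

```javascript
// .hygen.js at the project root (sketch; verify against your Hygen version)
module.exports = {
  helpers: {
    // Upper-cases the first character, e.g. capitalize('item') -> 'Item'
    capitalize: (s) => s.charAt(0).toUpperCase() + s.slice(1),
  },
};
```
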
+Best Practices +Customize Templates: Tailor Hygen templates to match your project's specific needs and coding standards. +Version Control Templates: Keep your templates under version control to track changes and collaborate with team members. +Regularly Update Templates: As your project evolves, update your templates to incorporate new patterns and practices. +Document Template Usage: Maintain clear documentation on how to use and modify templates to ensure team-wide understanding. +References +[[Hygen Documentation]] +[[Express.js Documentation]] +[[React Documentation]] +[[Redux Documentation]] +[[Zettelkasten Method]] +[[Ivy Lee Method]] +[[Progressive Summarization]] + + + + +You said: +This is missing elelments like forms. List all of the requirements for a full stack CRUD example and create templates for all of the missing files +ChatGPT + +Thought for 5 seconds + +markdown + +--- +title: Comprehensive Hygen Templates for a Full-Stack CRUD Application with Express, React, Redux +tags: [Hygen, Templates, Express.js, React, Redux, Automation, CRUD, Full-Stack] +created: 2024-04-27 +updated: 2024-09-28 +aliases: [] +--- + +# Comprehensive Hygen Templates for a Full-Stack CRUD Application with Express, React, Redux + +Building a full-stack CRUD (Create, Read, Update, Delete) application requires a well-structured approach to both the backend and frontend components. **Hygen** facilitates this process by automating the generation of boilerplate code through customizable templates. This guide outlines the necessary requirements for a full-stack CRUD application and provides comprehensive Hygen templates to cover all aspects, including forms and additional functionalities. + +## Requirements for a Full-Stack CRUD Application + +To ensure a complete and functional CRUD application, the following components and functionalities must be addressed: + +### Backend (Express.js) + +1. **Database Configuration** + - Connection setup (e.g., MongoDB with Mongoose). + +2. **Data Models** + - Define schemas and models for the data entities. + +3. **Route Handlers** + - Handlers for Create, Read, Update, and Delete operations. + +4. **Routes Definition** + - Define API endpoints and bind them to respective handlers. + +5. **Middleware** + - Error handling, authentication, validation, etc. + +6. **Server Initialization** + - Setup Express server and connect all components. + +### Frontend (React & Redux) + +1. **Project Structure** + - Organized directories for components, modules, and utilities. + +2. **State Management** + - Redux setup with actions, reducers, and store configuration. + +3. **React Components** + - Components for listing items, creating new items, editing existing items, and deleting items. + +4. **Forms** + - Reusable form components for Create and Update operations. + +5. **API Integration** + - Services or utilities for making HTTP requests to the backend. + +6. **Routing** + - Client-side routing to navigate between different views. + +7. **Error Handling** + - Displaying error messages and handling failed operations gracefully. + +8. **Styling** + - Consistent styling across components (CSS, SASS, styled-components, etc.). + +### Automation (Hygen) + +1. **Templates for Backend** + - Handlers, routes, and models. + +2. **Templates for Frontend** + - Redux modules, React components, and forms. + +3. **Injection Templates** + - Automatically inject new routes and Redux modules into existing configuration files. 
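The backend requirements above list middleware (error handling, authentication, validation), but the template tree that follows does not include a middleware generator. As a gap-filler sketch only (the path and generator name are assumptions), a hypothetical `middleware/new` template could emit a conventional Express error handler like this:

```javascript
// backend/app/middleware/errorHandler.js (illustrative output of a hypothetical template)
module.exports = (err, req, res, next) => {
  console.error(err.stack);
  res.status(err.status || 500).json({ error: err.message || 'Internal Server Error' });
};

// Registered last in server.js, after the routes:
// app.use(require('./app/middleware/errorHandler'));
```
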
+ +## Hygen Template Structure + +Organizing Hygen templates effectively is essential for seamless code generation. Below is the structure of the Hygen templates used for the full-stack CRUD application: + +_templates/ backend/ handler/ new/ handler.ejs.t inject_handler.ejs.t route/ new/ route.ejs.t inject_route.ejs.t model/ new/ model.ejs.t frontend/ redux/ module/ new/ index.ejs.t actions.ejs.t reducer.ejs.t inject_store.ejs.t component/ new/ list.ejs.t form.ejs.t editForm.ejs.t service/ new/ api.ejs.t + +markdown + + +- **backend/**: Templates for backend components. + - **handler/new/**: Templates for creating new route handlers. + - `handler.ejs.t`: Template for CRUD handlers. + - `inject_handler.ejs.t`: Template to inject handlers into the routes file. + - **route/new/**: Templates for defining new API routes. + - `route.ejs.t`: Template for route definitions. + - `inject_route.ejs.t`: Template to inject routes into the main routes file. + - **model/new/**: Templates for creating new data models. + - `model.ejs.t`: Template for Mongoose models. + +- **frontend/**: Templates for frontend components. + - **redux/module/new/**: Templates for Redux modules. + - `index.ejs.t`: Module entry point. + - `actions.ejs.t`: Action creators. + - `reducer.ejs.t`: Reducer logic. + - `inject_store.ejs.t`: Template to inject reducers into the Redux store. + - **component/new/**: Templates for React components. + - `list.ejs.t`: Component to list items. + - `form.ejs.t`: Reusable form component for Create. + - `editForm.ejs.t`: Form component for Update. + - **service/new/**: Templates for API services. + - `api.ejs.t`: API service for HTTP requests. + +## Template Files + +### 1. Backend Templates + +#### a. Handler Templates + +##### `backend/handler/new/handler.ejs.t` + +Generates a new handler file for CRUD operations in Express.js. + +```ejs +--- +to: backend/app/handlers/<%= capitalize(name) %>Handler.js +--- +const <%= capitalize(name) %> = require('../../models/<%= name %>'); + +module.exports = { + create<%= capitalize(name) %>: async (req, res) => { + try { + const new<%= capitalize(name) %> = new <%= capitalize(name) %>(req.body); + const <%= name %> = await new<%= capitalize(name) %>.save(); + res.status(201).json(<%= name %>); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }, + + get<%= capitalize(name) %>s: async (req, res) => { + try { + const <%= name %>s = await <%= capitalize(name) %>.find(); + res.json(<%= name %>s); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }, + + update<%= capitalize(name) %>: async (req, res) => { + try { + const updated<%= capitalize(name) %> = await <%= capitalize(name) %>.findByIdAndUpdate(req.params.id, req.body, { new: true }); + res.json(updated<%= capitalize(name) %>); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }, + + delete<%= capitalize(name) %>: async (req, res) => { + try { + await <%= capitalize(name) %>.findByIdAndDelete(req.params.id); + res.json({ message: '<%= capitalize(name) %> deleted successfully.' }); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }, +}; +Purpose: Creates a handler file with methods for Create, Read, Update, and Delete operations. +Path: backend/app/handlers/ +backend/handler/new/inject_handler.ejs.t +Injects the new handler into the routes file. 
+ +ejs + +--- +inject: true +to: backend/app/routes.js +skip_if: <%= name %> +before: "// Export routes" +--- +const { create<%= capitalize(name) %>, get<%= capitalize(name) %>s, update<%= capitalize(name) %>, delete<%= capitalize(name) %> } = require('./handlers/<%= capitalize(name) %>Handler'); +Purpose: Imports the handler methods into the routes file. +Injection Point: Before the // Export routes comment. +b. Route Templates +backend/route/new/route.ejs.t +Generates new route definitions for CRUD operations. + +ejs + +--- +to: backend/app/routes/<%= name %>Routes.js +--- +const express = require('express'); +const { create<%= capitalize(name) %>, get<%= capitalize(name) %>s, update<%= capitalize(name) %>, delete<%= capitalize(name) %> } = require('./handlers/<%= capitalize(name) %>Handler'); + +const router = express.Router(); + +// Create +router.post('/', create<%= capitalize(name) %>); + +// Read +router.get('/', get<%= capitalize(name) %>s); + +// Update +router.put('/:id', update<%= capitalize(name) %>); + +// Delete +router.delete('/:id', delete<%= capitalize(name) %>); + +module.exports = router; +Purpose: Defines Express.js routes for CRUD operations. +Path: backend/app/routes/ +backend/route/new/inject_route.ejs.t +Injects the new route into the main routes file. + +ejs + +--- +inject: true +to: backend/app/routes.js +skip_if: <%= name %> +after: "const express = require('express');" +--- +const <%= name %>Routes = require('./routes/<%= name %>Routes'); +Purpose: Imports the new route module into the main routes file. +Injection Point: After the const express = require('express'); line. +c. Model Templates +backend/model/new/model.ejs.t +Generates a new Mongoose model. + +ejs + +--- +to: backend/models/<%= name %>.js +--- +const mongoose = require('mongoose'); + +const <%= capitalize(name) %>Schema = new mongoose.Schema({ + name: { + type: String, + required: true, + }, + description: { + type: String, + }, + createdAt: { + type: Date, + default: Date.now, + }, +}); + +module.exports = mongoose.model('<%= capitalize(name) %>', <%= capitalize(name) %>Schema); +Purpose: Creates a Mongoose schema and model for the specified entity. +Path: backend/models/ +2. Frontend Templates +a. Redux Module Templates +frontend/redux/module/new/index.ejs.t +Generates the entry point for a Redux module. + +ejs + +--- +to: frontend/src/modules/<%= name %>/index.js +--- +export { default as <%= capitalize(name) %>Actions } from './actions'; +export { default as <%= capitalize(name) %>Reducer } from './reducer'; +Purpose: Exports actions and reducer for the module. +Path: frontend/src/modules// +frontend/redux/module/new/actions.ejs.t +Generates Redux action creators for CRUD operations. 
+ +ejs + +--- +to: frontend/src/modules/<%= name %>/actions.js +--- +import axios from 'axios'; + +// Action Types +export const CREATE_<%= name.toUpperCase() %> = 'CREATE_<%= name.toUpperCase() %>'; +export const FETCH_<%= name.toUpperCase() %>S = 'FETCH_<%= name.toUpperCase() %>S'; +export const UPDATE_<%= name.toUpperCase() %> = 'UPDATE_<%= name.toUpperCase() %>'; +export const DELETE_<%= name.toUpperCase() %> = 'DELETE_<%= name.toUpperCase() %>'; + +// Action Creators +export const create<%= capitalize(name) %> = (data) => async (dispatch) => { + try { + const response = await axios.post('/api/<%= name %>s', data); + dispatch({ type: CREATE_<%= name.toUpperCase() %>, payload: response.data }); + } catch (error) { + console.error(error); + } +}; + +export const fetch<%= capitalize(name) %>s = () => async (dispatch) => { + try { + const response = await axios.get('/api/<%= name %>s'); + dispatch({ type: FETCH_<%= name.toUpperCase() %>S, payload: response.data }); + } catch (error) { + console.error(error); + } +}; + +export const update<%= capitalize(name) %> = (id, data) => async (dispatch) => { + try { + const response = await axios.put(`/api/<%= name %>s/${id}`, data); + dispatch({ type: UPDATE_<%= name.toUpperCase() %>, payload: response.data }); + } catch (error) { + console.error(error); + } +}; + +export const delete<%= capitalize(name) %> = (id) => async (dispatch) => { + try { + await axios.delete(`/api/<%= name %>s/${id}`); + dispatch({ type: DELETE_<%= name.toUpperCase() %>, payload: id }); + } catch (error) { + console.error(error); + } +}; +Purpose: Defines action creators for CRUD operations using Redux Thunk. +Path: frontend/src/modules// +frontend/redux/module/new/reducer.ejs.t +Generates the Redux reducer for the module. + +ejs + +--- +to: frontend/src/modules/<%= name %>/reducer.js +--- +import { + CREATE_<%= name.toUpperCase() %>, + FETCH_<%= name.toUpperCase() %>S, + UPDATE_<%= name.toUpperCase() %>, + DELETE_<%= name.toUpperCase() %>, +} from './actions'; + +const initialState = { + items: [], + loading: false, + error: null, +}; + +const <%= name %>Reducer = (state = initialState, action) => { + switch (action.type) { + case FETCH_<%= name.toUpperCase() %>S: + return { ...state, items: action.payload, loading: false }; + case CREATE_<%= name.toUpperCase() %>: + return { ...state, items: [...state.items, action.payload] }; + case UPDATE_<%= name.toUpperCase() %>: + return { + ...state, + items: state.items.map(item => + item._id === action.payload._id ? action.payload : item + ), + }; + case DELETE_<%= name.toUpperCase() %>: + return { + ...state, + items: state.items.filter(item => item._id !== action.payload), + }; + default: + return state; + } +}; + +export default <%= name %>Reducer; +Purpose: Handles state changes based on dispatched actions. +Path: frontend/src/modules// +frontend/redux/module/new/inject_store.ejs.t +Injects the new reducer into the Redux store configuration. + +ejs + +--- +inject: true +to: frontend/src/modules/boot.js +skip_if: <%= name %> +after: "import rootReducer from './rootReducer';" +--- +import { <%= name %>Reducer } from './<%= name %>'; + +const rootReducer = combineReducers({ + <%= name %>: <%= name %>Reducer, + // Add more reducers here +}); + +export default rootReducer; +Purpose: Adds the new reducer to the root reducer. +Injection Point: After the import rootReducer from './rootReducer'; line. +b. React Component Templates +frontend/component/new/list.ejs.t +Generates a React component to list items. 
ejs

---
to: frontend/src/components/<%= capitalize(name) %>List.js
---
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { fetch<%= capitalize(name) %>s, delete<%= capitalize(name) %> } from '../modules/<%= name %>/actions';

const <%= capitalize(name) %>List = () => {
  const dispatch = useDispatch();
  const { items, loading, error } = useSelector(state => state.<%= name %>);

  useEffect(() => {
    dispatch(fetch<%= capitalize(name) %>s());
  }, [dispatch]);

  const handleDelete = (id) => {
    dispatch(delete<%= capitalize(name) %>(id));
  };

  if (loading) return <p>Loading...</p>;
  if (error) return <p>Error: {error}</p>;

  return (
    <div>
      <h2><%= capitalize(name) %> List</h2>
      <ul>
        {items.map(item => (
          <li key={item._id}>
            {item.name}: {item.description}
            <button onClick={() => handleDelete(item._id)}>Delete</button>
            {/* wire an edit action here, e.g. render <%= capitalize(name) %>EditForm */}
          </li>
        ))}
      </ul>
    </div>
+ ); +}; + +export default <%= capitalize(name) %>List; +Purpose: Displays a list of items with options to delete and edit. +Path: frontend/src/components/ +frontend/component/new/form.ejs.t +Generates a reusable React form component for creating new items. + +ejs + +--- +to: frontend/src/components/<%= capitalize(name) %>Form.js +--- +import React, { useState } from 'react'; +import { useDispatch } from 'react-redux'; +import { create<%= capitalize(name) %> } from '../modules/<%= name %>/actions'; + +const <%= capitalize(name) %>Form = () => { + const [formData, setFormData] = useState({ name: '', description: '' }); + const dispatch = useDispatch(); + + const handleChange = (e) => { + setFormData({ ...formData, [e.target.name]: e.target.value }); + }; + + const handleSubmit = (e) => { + e.preventDefault(); + if (formData.name.trim() === '') return; + dispatch(create<%= capitalize(name) %>(formData)); + setFormData({ name: '', description: '' }); + }; + + return ( +
    <form onSubmit={handleSubmit}>
      <input type="text" name="name" placeholder="Name" value={formData.name} onChange={handleChange} />
      <input type="text" name="description" placeholder="Description" value={formData.description} onChange={handleChange} />
      <button type="submit">Create</button>
    </form>
+ ); +}; + +export default <%= capitalize(name) %>Form; +Purpose: Provides a form for creating new items. +Path: frontend/src/components/ +frontend/component/new/editForm.ejs.t +Generates a React form component for editing existing items. + +ejs + +--- +to: frontend/src/components/<%= capitalize(name) %>EditForm.js +--- +import React, { useState, useEffect } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { update<%= capitalize(name) %> } from '../modules/<%= name %>/actions'; + +const <%= capitalize(name) %>EditForm = ({ itemId }) => { + const dispatch = useDispatch(); + const item = useSelector(state => state.<%= name %>.items.find(i => i._id === itemId)); + + const [formData, setFormData] = useState({ name: '', description: '' }); + + useEffect(() => { + if (item) { + setFormData({ name: item.name, description: item.description }); + } + }, [item]); + + const handleChange = (e) => { + setFormData({ ...formData, [e.target.name]: e.target.value }); + }; + + const handleSubmit = (e) => { + e.preventDefault(); + if (formData.name.trim() === '') return; + dispatch(update<%= capitalize(name) %>(itemId, formData)); + }; + + if (!item) return

<p>Loading...</p>;

  return (
    <form onSubmit={handleSubmit}>
      <input type="text" name="name" placeholder="Name" value={formData.name} onChange={handleChange} />
      <input type="text" name="description" placeholder="Description" value={formData.description} onChange={handleChange} />
      <button type="submit">Update</button>
    </form>
+ ); +}; + +export default <%= capitalize(name) %>EditForm; +Purpose: Provides a form for editing existing items. +Path: frontend/src/components/ +b. API Service Templates +frontend/service/new/api.ejs.t +Generates an API service for handling HTTP requests. + +ejs + +--- +to: frontend/src/services/<%= name %>Api.js +--- +import axios from 'axios'; + +const API_URL = '/api/<%= name %>s'; + +export const create<%= capitalize(name) %> = (data) => axios.post(API_URL, data); +export const fetch<%= capitalize(name) %>s = () => axios.get(API_URL); +export const update<%= capitalize(name) %> = (id, data) => axios.put(`${API_URL}/${id}`, data); +export const delete<%= capitalize(name) %> = (id) => axios.delete(`${API_URL}/${id}`); +Purpose: Centralizes API calls for CRUD operations. +Path: frontend/src/services/ +3. Additional Templates +a. Root Reducer Template +frontend/redux/module/new/rootReducer.ejs.t +Generates the root reducer by combining all module reducers. + +ejs + +--- +to: frontend/src/modules/rootReducer.js +--- +import { combineReducers } from 'redux'; +<% modules.forEach(module => { %> +import { <%= capitalize(module) %>Reducer } from './<%= module %>'; +<% }); %> + +const rootReducer = combineReducers({ + <% modules.forEach(module => { %> + <%= module %>: <%= capitalize(module) %>Reducer, + <% }); %> +}); + +export default rootReducer; +Purpose: Combines all module reducers into a single root reducer. +Path: frontend/src/modules/ +b. Store Configuration Template +frontend/redux/module/new/store.ejs.t +Generates the Redux store configuration. + +ejs + +--- +to: frontend/src/modules/store.js +--- +import { createStore, applyMiddleware, compose } from 'redux'; +import thunk from 'redux-thunk'; +import rootReducer from './rootReducer'; + +const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; +const store = createStore( + rootReducer, + composeEnhancers(applyMiddleware(thunk)) +); + +export default store; +Purpose: Sets up the Redux store with middleware and DevTools integration. +Path: frontend/src/modules/ +Using the Templates +Hygen templates streamline the process of adding new routes, handlers, models, Redux modules, and React components. Here's how to utilize them effectively: + +1. Generating a New Backend Model +To create a new Mongoose model: + +bash + +$ hygen model new --name product +Parameters: + +--name: Name of the model (e.g., product, user, order). +What Happens: + +Generates a new model file in backend/models/. +2. Generating a New Backend Handler and Route +To create CRUD handlers and routes for a new entity: + +bash + +$ hygen handler new --name product +$ hygen route new --name product --method post +Parameters: + +--name: Name of the entity (e.g., product, user). +--method: HTTP methods as needed (e.g., post, get, put, delete). +What Happens: + +Generates handler methods in backend/app/handlers/. +Defines routes in backend/app/routes/. +Injects handlers and routes into respective configuration files. +3. Generating a New Redux Module +To create a Redux module for state management: + +bash + +$ hygen module new --name product +Parameters: + +--name: Name of the module (e.g., product, user). +What Happens: + +Generates actions.js, reducer.js, and index.js in frontend/src/modules/product/. +Injects the new reducer into the Redux store configuration. +4. 
Generating React Components +To create React components for listing and managing items: + +bash + +$ hygen component new --name productList +$ hygen component new --name productForm +$ hygen component new --name productEditForm +Parameters: + +--name: Name of the component (e.g., productList, productForm). +What Happens: + +Generates ProductList.js, ProductForm.js, and ProductEditForm.js in frontend/src/components/. +These components are ready to be integrated into the application. +5. Generating API Services +To create an API service for handling HTTP requests: + +bash + +$ hygen service new --name product +Parameters: + +--name: Name of the service (e.g., product, user). +What Happens: + +Generates productApi.js in frontend/src/services/. +Centralizes API calls for the specified entity. +Advantages of Using Comprehensive Hygen Templates +End-to-End Automation: Covers both backend and frontend components, ensuring seamless integration. +Consistency: Maintains a uniform structure across all parts of the application. +Scalability: Simplifies the addition of new features and entities as the application grows. +Efficiency: Reduces the time spent on repetitive tasks, allowing developers to focus on core functionalities. +Maintainability: Facilitates easier code management by enforcing standardized patterns. +Best Practices +Customize Templates: Adapt Hygen templates to align with your project's specific requirements and coding standards. +Version Control: Keep templates under version control to track changes and collaborate effectively with team members. +Regular Updates: Continuously refine and update templates to incorporate new practices and optimizations. +Comprehensive Documentation: Document how to use and modify templates to ensure all team members can utilize them effectively. +Modular Design: Structure templates to promote modularity, making it easier to manage and extend functionalities. +References +[[Hygen Documentation]] +[[Express.js Documentation]] +[[React Documentation]] +[[Redux Documentation]] +[[Zettelkasten Method]] +[[Ivy Lee Method]] +[[Progressive Summarization]] + + + + +You said: +hey everybody welcome back to a new +video on this channel and today we have +a look into BFF best friends forever yes +of course but let's see what BFF means +in the context of app development and +how to use that pattern right after the +intro let's +[Music] +go in last week's video we talked a lot +about using KNX with different backends +Last weeks video +which more or less meant using different +backends like hono Express and fastify +in the nodejs or like JavaScript +environment and then use h3's +compatibility layer to use them inside +knjs and lots of people said like hey +yeah that's great but I don't have like +a JavaScript back end I don't have +something I can integrate into KN so +External APIs make sense +instead I use something like d Jango or +laravel or spring boot or maybe some +bunch of third party apis as well which +is totally valid I mean you don't have +to write everything in JavaScript anyway +right if you have something that needs +hyper performance and machine learning +then maybe another language might be a +really good idea or your backend team +your dedicated backend team is just more +proficient in that so no problem with +that but let's see what the BFF part +means and how we can still improve the +usage of external like backend services +or third party apis in nox and with +Nitro so once again BFF yes best friends +What does BFF mean? 
+forever like KNX and you are of course +obviously but in this video it's more +about the backend for frontend +architectural pattern and now you might +wonder wait wait wait wait Alex back end +for front end like we we have a backend +and we have a front end so why do we +need anything in between let's have a +look commonly you have exactly that +setup a front end and a general back end +that serves all the data for the front +end but what if for example multiple +applications quarrying that back end and +of course your back end team doesn't +want to build routes for every single of +these applications to cater their needs +so let's say we have a front end and +that front end only needs a user's ID +username birth data and email but of +course the back end gives all the things +like tons of other unnecessary data +which kind of makes sense because in +other calls you might need them so now +you could start and say okay yeah we +have routes we have filters we have +things to improve that on the one hand +this increases the strain on your +backend team plus it's always a question +whether the back end should really focus +on what data needs to be served then +applying a general way of fetching data +and their needs and this is where the +back and for front and comes in right so +we really need something here in the +middle especially if we don't only have +a backend but maybe also data from other +services like headless CMS thirdparty +apis and maybe we can't even control the +back end at all and say hey this is the +data that we get that's it we have to do +the manipulation and transformation on +our own so let's see how it works with a +back end for front end and then how to +implement it with no Nitro and if we +move from this schematic all the way +over to the back and for front and one +you'll see okay there's not that much +happening except that the front end is +now calling the BFF the BFF is calling +the back end the back end is returning +data to BFF and the back end for front +end is returning the data to the front +end and in here the data transformation +will happen so we strip all the +unnecessary content +Benefits of a BFF +now you might say yeah come on for these +few Fields sure I get you but I've seen +projects where you have really like +kilobytes probably sometimes even +megabytes of data uh that you don't +really want to send at all because for +some requests you might just need a list +of names for example to preview some +articles or to yeah have a simple list +overview page and this is where the BFF +is really shining so if your general +backend doesn't allow that to do or +maybe you want to do some things on top +of it like caching for example right or +say oh yeah I have to enrich the data +with data from other apis then we really +need that kind of back end for front end +because you also don't necessarily want +to integrate all the third-party apis in +your general backend because if you want +to switch out a service well then you +have to do it for all the applications +and it will also increase the strain on +your back end while if you say oh yeah +in this case we want to use another +let's say image provider then why not +switching that out in the BFF that is +bound to your application and not the +back end so yeah this is definitely +something we can do and the best part is +we can just imagine the backend our case +as not under our control that makes +things a bit easier just saying like hey +dear backend Engineers can we Implement +that and then we have some 
big back and +forth um sometimes it's also totally +useful don't get me wrong but if we have +all the data in a way or another then we +can also fetch them so let's see what we +can do in knon Nitro to more or less +make the whole thing happen I will also +refer to some of the videos for example +about caching in Nitro and so on so on +so if you haven't seen these definitely +take a look in some more in-depth +content but let's get started all right +Setup of our Demo Application +folks our demo application is as minimal +as usual as you know already so we have +the compatibility date set I also made a +video if you don't know about that but +don't worry it won't do much for now we +have that little background color so it +will be dark in the background and white +text and then we have our future +compatibility version 4 to opt into the +KNX 4 break making changes whenever they +will be released as next 4 you're +already prepared also made a video about +that link in the description and then we +have an app app. view so we use the new +app folder the structure from Next 4 and +then in our app. view we just say we +have a user we fetch it from the very +simple Json placeholder typic cod.com +API and then we show the content and if +we go to the browser then this will look +like this okay nothing crazy so far +right we fetched some data and we're +good but what if we want to do more like +we said in the example maybe we just +want to strip everything except these +fields ID name username email so we want +to get rid of the rest because just +Remove data in the front end only? +imagine having a big user object not +only with these data but maybe with like +attach comments and this and that and +this will all be fetched by the back end +of course that's not really what we want +so maybe let's remove them and of course +now we could start saying yeah but you +can just do that in the front end you +can just say take the user object delete +the fields or like the structur it or +whatever and you're good but the problem +there is that then the data will still +be transmitted through wire which means +slower page speed right like we need to +S like get lots of data that we don't +even need and especially the bigger the +data the harder the thing is I've +already shown how to solve the problem +with use Fetch and use Asing data with +the transform function which we can just +showcase here real quick and then see +why this is the solution only half way +through and we might still need a BFF so +as mentioned we can use the transform +function here and say okay we have some +input we don't know what it is and then +we just say like okay you know what id +is input. 
ID can just type it as any +here for now um and then we had the uh +name is input name the same thing with +uh the username is also input the +username and last but not least we +wanted the email which is input email +and if we now eventually switch back to +the browser we'll see okay this is gone +that's great perfect the issue is like +sure if we take a look at the page +Source it's also only in there right +that works fine but if we now say okay +we would navigate on the client side +from A to B like the first request of +course happens on the server side so not +too much data is sent but if we then do +a client side request then we have the +data again because the original API will +be queried and no matter how you +transform your data it's still too much +because you can't just say hey API give +me less we're it's not under our control +in this example so instead we want to +Moving the transformation to the BFF +move the whole transformation over to +our back end for front end which we'll +build right now with Nitro and H3 in our +nox application also this of course can +be a standalone service you don't have +to bake it into KN and also if you use +something else you use Plain View feel +free to just spin up an Nitro server and +do the same but with NX it's a little +bit easier due to well end to end type +safety for example so let's get started +first of course we want to create a +server folder and in here we have a new +API and let's say we called users +similar to our uh example here and then +we have an ID and it's a typescript file +of course so then we create an event +handler uh and more or less we can copy +the code from here because we still have +to f fetch the original API right so we +what we want to do is we always want to +fetch that API but of course we can't +use transform here that's only for use +fetch uh and use Asing data so we have +const result is await the whole call and +now we can just say like okay we return +more or less uh what we did before here +and instead of input well we just say um +result and now of course also yes this +is unknown by default so we can just +type it as end +or you know what let's just do a record +string any and that will solve most of +the issues around that that's also fine +um and now we know okay things are in +there we can have way nicer types but +that's um that's for later of course so +one more thing we want to do is we want +to get the ID so it's say okay we want +to use get router pram here we say pass +in the event and get the ID if there is +no ID or if it's zero then we just throw +a new error created by create error just +saying uh all right no ID supplied uh +provided might be easier and then we of +course have the status code which is um +400 that's fine okay now we want to +insert that ID here and let's see if we +now call the whole thing if we now call +oops if we now call the whole um API +what it will return for that we move +back in the browser and we just use API +users one and here we see that's exactly +what we need right that's exactly the +data that was also shown here in our +page now the only thing it's left to do +is we actually have to call that API in +our very own application so to do this +we go into our app here and then we say +Okay instead of the whole code here we +can just say use fetch API users ID and +the best part is just say one here for +example this already gives us the +wonderful types safety even though we +don't have many types here um it will +eventually still help us when we have 
+them in place so that seems pretty good +let's see if it works let's go back to +the browser and go to our application +refresh it and it still works as it +should lovely okay so far so good now +Unlocked possibilities +this doesn't seem like a huge +Improvement but the best part is now we +can do a lot of things with that data so +first of all we can cach it which we'll +do next and then why not show all the +users that have been accessed like the +last I don't know two three four five +users uh throughout the whole page so if +someone clicks on a user well then uh +that user will be saved in some kind of +storage and all the other people on the +index page they will see oh yeah these +are the the users that were recently +shown this is kind of an inspirational +feature from a project I've worked on +already uh which basically looks like uh +looks like this so here we also have +like recently searched people and if I +search for more people they will just be +added here and I think that's pretty +helpful the best part is also here I +used the back end for front end mainly +because the data was not really under +our control um so yeah let's let's go +ahead with the caching and then with the +recent users and um last but not least +we can also show a few more tricks here +and there so let's get started the +Using BFF for Caching +caching part is quite interesting uh +I've also made a video about that +caching in Nitro and once again you +could think oh yeah we just use theine +cache demand Handler here but I would +suggest not to mainly because we also +want to implement the recent users +functionality soon so we rather don't +want to catch the whole API in point +because we still want to register +whether or not a user was requested +instead we could just say okay let's +create a function that is caching the +whole API call and the transformation so +basically this part here should be +cached and we can do that of course in +the same file but let's just say we have +server UTS users. 
TS and then in here we +could say export const uh get user then +this is uh a defined cached a defined +cached function and eventually we have +the ID here which can be a +string and eventually we want to do all +the things in here so let's just quickly +get this whole code over here paste it +in and this is already looking pretty +okay if we need an async to make sure +the await works we have this part in +place great one more thing to suggest +it's always nice to also provide the H3 +event especially if you deploy at some +point at the edge because um let's +quickly import that here H3 went from H3 +because if you for example use club for +workers I think that's even a better +example then you really want to make +sure that these are waiting for all the +calls that might happen and don't have +any dangling promises so uh with theine +cach function you can make sure that's +the case and even we don't need the +event here at all we can just provide it +and will still be considered and then +event at wait until will be called but +that's just as a tiny hit so we have to +get user function now and instead of +calling what we had here before we can +just use get user instead of the code +that we had back here we can just write +get user and then pass in the ID that we +have here we await it and say const user +is get user +wonderful we also need the event of +course that's what we passed in before +and from here we can even say okay later +on to do um make sure the user is added +as recent and then we're fine and of +course we want to return the user still +otherwise things might break okay so +last but not least we want to set up our +cached function because we have it here +but now we can say let's disable S SWR +and have a Max a age of I don't know 10 +or just say like straight away um yeah +let's let's do max age of it doesn't +matter much because the data is stale +anyway like it's it's not changing so we +can do that can even leave SWR on for +that example um and if you don't know +what all of these is definitely check +out the cashing uh in in Nitro video uh +where explained all these in Deb so if +we just open the da tools and refresh +the page then we'll see wow 8 +milliseconds response time that's pretty +decent that's rather clear that there is +a cach behind because in 8 milliseconds +you can't do any API call so we simply +decided to cach the data from our back +end without them being involved at all +it's all cach and also not for only for +the application if you are in another +application carying the same back end +well you can Define your own cach times +or even if you have a different route +and so on so on so you're fully +autonomous in your decision what to cash +Do more with the data +and for how long now last but least +let's implement the recent users thing +and then we're good to go with a very +simple example of back for front end and +for the recent users we still start once +again in our users. 
Ts for the UIL and +we have an export const here saying or +maybe let's just make a function +straight away get recent users so +somehow we want to get them right it is +probably async so we need this for sure +and then we also have an export async +function uh +that uh adds uh recent +user and then we need the user data here +so we say user user user we don't have a +type for that yet maybe time to finally +type things so say type user is ID +string name String username string and +email string what a surprise here +because we don't have any types here we +can just say as user and then we're fine +and in here we want to return a promise +with the +users and eventually here we can decide +uh to either ignore that and just say +okay this is void right this is also +fine or we say you know what um we want +to get the new users as well after +adding them so we can even say here okay +promise user that's really up to the +implementation detail but of course +nothing is in here so far so let's +improve that okay so first of all we +want to get all the users so let's say +const users or let's say const current +users equals um get users of +course get maybe get recent users is a +bit better +um we want to await the whole thing as +it async and then we have the users in +theory we also want to set uh a user +let's say recent user limit and we can +set it to three so now what we can do is +we want to actually add the user to the +array and eventually if the whole array +is too long well then we just decide to +uh remove the the last one so what we +can do is we can say current users do +push um you can also unshift it it +doesn't matter if it's the the front or +the end it's yeah it doesn't matter much +we can let's just do unshift for here +that's fine we add the user in here now +we say if current users length if it's +greater than the recent user limit well +then of course we want to make sure to +say current users. 
pop remove the last +one that was added to the list and we're +good and now we have to save the users +again so this this is will be a little +to do for us and same idea we can just +then say return current users uh and +then we fulfilled the contract we set up +with the types so how do we save the +users and how do we get the things well +this we'll figure out now first we need +a key for uh wherever this is stored so +we just say resend users that's fine and +now we use nitro's uh integration with +un storage so we use un storage under +the hood and the use storage function in +night also made a video about that link +in the description as usual so we'll +briefly go through that more details are +in the mentioned video and the first +thing we want to do is of course we want +to say const users equals use +storage then we can Define name here if +we want to um but we can just leave the +default for now get items or in this +case just get item uh we use the key we +just want this one item which is an +array saved in there uh and in here we +want to aade it as well and then just +say return users so now we also want to +make sure it is of the right type so we +don't want to type it key but we want to +type it +user and then we're good to go the only +thing is it could be null that's true so +worst worst case we just do this and +we're fine so in case this can't be +retrieved for whatever reason we just +make sure it will be an empty array and +now we can get the whole thing perfect +so last but not least we want to save +the users so to save the users we just +say a wait we use also use storage once +again and we want to say set item in the +same scenario with the same key and the +values will be current users so also +here we don't even need to type it +because of course it will be inferred by +the current users that I provided here +so then we save the users let's even +keep the comment I think it's pretty +nice and then we're good now the only +thing is we need to implement it somehow +right we need to call these things +somewhere so what we can now do is in +our idts here we know what kind of user +will be fetched so there we what we have +to do we can just say okay you know what +add user or add recent +user and then we just call uh this with +user we await it and we're good to go so +this will make sure that all users that +are fetched will be saved as recent +users users until uh there are three in +there then the the first one will be +removed and last but not least we +actually want to make sure this is also +possible to show in our application so +we need an API in point to show it and +we need to be able to fetch different +users so that's cool let's start with +Additional API endpoint +the API in point so instead of API uh +users idts we can just say uh API users +info resent for example and in here +that's also an event handler and we just +return +uh get uh recent users and that's all we +need to do luckily because we nicely +extracted it already so what we can do +as a test already is we can go back to +the browser in here we can refresh that +page and now we can say users info +recent and see if it's in there or not +and the answer is it is in there so if +we refresh this page a couple times and +then go back we see on resent user one +two and three all the same okay if we +now would say let's just take users two +and users five in here and now we go +back to info recent well then they're in +here as well perfect so our assignment +is kind of done now we just have to do 
+Front end logic +the front end part to make sure it's +changeable and then we're good to go so +let's get going to implement it I would +just say we do very very simple +variation of the whole thing we just say +we have a constant number here it's a +ref right we just start with a zero here +um then we have this wonderful use fetch +function where we say okay this will be +uh an A G function then could also be +computed that's also fine and in here we +want to use +number value uh we can move into a +template string to make it look a bit +nicer that's never hurts and now we want +to change that number based on well uh +whenever something changes so we could +just say let's have some buttons here uh +button V4 for n in five uh and then say +fetch user +n and add click we just say number +equals n and now you might wonder if +this actually works because it it looks +like black magic a little bit but yes it +does and also this was covered in our +use Asing data versus use fetch video so +let's have a look in the browser and see +if this is doing what it should and I +admit the buttons are not nice I'm not a +UI designer right and not not the point +of the video so if you click it it will +just magically work because use fetch is +watching The Ref and we're good and last +but not least what we also want to do is +refetch the data now whenever we change +something to see the recent users so +back to the code how do we do that well +we can just say we have another const uh +data recent users here and we say await +use fetch uh and then we just +say slash +API uses INF for recent and in here we +could uh just trigger the +watch and we watch user so in every user +changes recent users will also be +retched and now what we could do we +could uh do a UL in Li V4 maybe in h2v +before resend +users user in users uh users and recent +users and then we +just say user.name +here and maybe user. 
ID and then we're good so let's see if this works and try it out in the browser and here we are clicking around and seeing things will change and two is in 4 three one is out five five okay multiple clicking on five doesn't work because it doesn't refresh the data that's also fine uh we could even have a trigger if we want to but that's okay because data is already there why should we and this could also be radio buttons instead of just buttons of course semantic HTML would be better
Summary
but I think for the demo that's sufficient and here we are we implemented a little back and for front and with some own functionality without bothering any back and Def um it's fast due to the caching it's all super close we have the type safety and we're good to go I think this is a very nice capability of what KNX together with H3 un storage Nitro and all the tools around can do within yeah just a couple minutes any questions let me know in the comments please uh always always I always check them out and answer I'm back from vacation now so the videos we seen were record a little bit ahead um maybe seen the 10 I'm just kidding anyway uh let me know if there's any questions and uh next week next Friday next video you got it talk to you soon folks and happy hacking
[Music]
oh
[Music]
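The server pieces described in the transcript above, condensed into a sketch. It assumes a Nuxt/Nitro project with its usual auto-imports (defineEventHandler, getRouterParam, createError, defineCachedFunction, useStorage, $fetch) and the JSONPlaceholder API used in the video; names such as getUser, addRecentUser and the 'recent-users' storage key are illustrative, not taken verbatim from the video's code.

```ts
// server/utils/users.ts -- cached upstream call plus "recently viewed" helpers
export interface User {
  id: number
  name: string
  username: string
  email: string
}

const RECENT_KEY = 'recent-users'
const RECENT_USER_LIMIT = 3

// Cache the origin call and the field-stripping so the BFF can answer without
// hitting the upstream API on every request (tune maxAge/swr to taste).
export const getUser = defineCachedFunction(
  async (id: string): Promise<User> => {
    const result = await $fetch<Record<string, any>>(
      `https://jsonplaceholder.typicode.com/users/${id}`
    )
    // Strip everything except what the front end actually renders.
    return { id: result.id, name: result.name, username: result.username, email: result.email }
  },
  { name: 'getUser', getKey: (id: string) => id, maxAge: 60, swr: true }
)

export async function getRecentUsers(): Promise<User[]> {
  return (await useStorage().getItem<User[]>(RECENT_KEY)) ?? []
}

export async function addRecentUser(user: User): Promise<User[]> {
  const currentUsers = await getRecentUsers()
  currentUsers.unshift(user)
  if (currentUsers.length > RECENT_USER_LIMIT) {
    currentUsers.pop()
  }
  await useStorage().setItem(RECENT_KEY, currentUsers)
  return currentUsers
}
```

```ts
// server/api/users/[id].ts -- the BFF endpoint the page calls via useFetch('/api/users/1')
export default defineEventHandler(async (event) => {
  const id = getRouterParam(event, 'id')
  if (!id) {
    throw createError({ statusCode: 400, statusMessage: 'No id provided' })
  }

  const user = await getUser(id)
  // Record the user as recently viewed so a recent-users endpoint can expose it.
  await addRecentUser(user)
  return user
})
```

A sibling route such as server/api/users/info/recent.ts would simply return getRecentUsers() from its own defineEventHandler; that is the endpoint the demo page re-fetches (via useFetch with a watch on the selected user) whenever the chosen user changes.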
Form
Collect and validate form data.

Usage

Use the Form component to validate form data using schema libraries such as Yup, Zod, Joi, Valibot, or your own validation logic.

It works with the FormGroup component to display error messages around form elements automatically.

The Form component requires two props:

state - a reactive object holding the form's state.
schema - a schema object from a validation library like Yup, Zod, Joi or Valibot.

Note that no validation library is included by default, so ensure you install the one you need.

Custom validation

Use the validate prop to apply your own validation logic.

The validation function must return a list of errors with the following attributes:

message - Error message for display.
path - Path to the form element corresponding to the name attribute.

Note that it can be used alongside the schema prop to handle complex use cases. It can also be used to integrate with other validation libraries, such as Vuelidate.

Backend validation

You can manually set errors after form submission if required. To do this, simply use the form.setErrors function to set the errors as needed.

Input events

The Form component automatically triggers validation upon submit, input, blur or change events.

This ensures that any errors are displayed as soon as the user interacts with the form elements. You can control when validation happens using the validate-on prop.

Note that the input event is not triggered until after the initial blur event. This is to prevent the form from being validated as the user is typing. You can override this behavior by setting the eager-validation prop on FormGroup to true.

Error event

You can listen to the @error event to handle errors. This event is triggered when the form is submitted and contains an array of FormError objects with the following fields:

id - the identifier of the form element.
path - the path to the form element matching the name.
message - the error message to display.

For example, the handler can focus the first input element that has an error after the form is submitted.

FormGroup
Display a label and additional information around a form element.

Usage

Use the FormGroup component around an Input, Textarea, Select or a SelectMenu with a label. The