diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..ee2de08 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,4 @@ +[run] +omit = + src/clients/CatalogClient.py + src/clients/baseclient.py diff --git a/.env b/.env old mode 100644 new mode 100755 index d12f9b9..46f109c --- a/.env +++ b/.env @@ -18,8 +18,7 @@ SERVICE_WIZARD_ADMIN_ROLE="SERVICE_WIZARD_ADMIN" CATALOG_ADMIN_TOKEN="REDACTED" # Kubernetes configs +# Note this also creates a toleration V1Toleration(effect="NoSchedule", key=namespace, operator="Exists") KUBECONFIG="~/.kube/config" -NAMESPACE="staging-dynamic-services" # Note this also creates a toleration V1Toleration(effect="NoSchedule", key=namespace, operator="Exists") +NAMESPACE="staging-dynamic-services" USE_INCLUSTER_CONFIG="false" -TAINT_TOLERATION_EXPRESSIONS="" -# APP_AFFINITY_FILE="app-affinity.yaml" # path to files that force apps to run on specific nodes diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index b44891a..b64f2cc 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -1,12 +1,10 @@ - - name: "Code Scanning - Action" on: push: - branches: [main, develop] + branches: [ main, develop ] pull_request: - branches: [main, develop] + branches: [ main, develop ] schedule: # ┌───────────── minute (0 - 59) # │ ┌───────────── hour (0 - 23) diff --git a/.github/workflows/manual-build.yml b/.github/workflows/manual-build.yml index 944f903..56928e3 100644 --- a/.github/workflows/manual-build.yml +++ b/.github/workflows/manual-build.yml @@ -1,7 +1,7 @@ --- name: Manual Build & Push on: - workflow_dispatch: + workflow_dispatch: jobs: build-push: uses: kbase/.github/.github/workflows/reusable_build-push.yml@main diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f14d783..4514f75 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [ "3.11" ] + python-version: [ 
"3.12" ] steps: @@ -32,12 +32,10 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Install dependencies - # tried VaultVulp/action-pipenv but pytest wasn't on the path post action shell: bash - run: | - pip install pipenv - pipenv sync --system --dev + run: bash scripts/update_dependencies.sh - name: Run pre-commit Hooks shell: bash @@ -45,7 +43,7 @@ jobs: - name: Run tests shell: bash - run: PYTHONPATH=. pytest --cov=src --cov-report=xml test + run: bash scripts/run_tests.sh - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3611898..17cbea5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,20 +1,20 @@ repos: -- repo: https://github.com/pre-commit/pre-commit-hooks + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.2.0 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files -- repo: https://github.com/psf/black + - repo: https://github.com/psf/black rev: 23.7.0 hooks: - - id: black - language_version: python3.11 + - id: black + language_version: python3.12 -- repo: https://github.com/pycqa/flake8 + - repo: https://github.com/pycqa/flake8 rev: 6.1.0 hooks: - - id: flake8 - args: [--config, pyproject.toml] + - id: flake8 + args: [ --config, pyproject.toml ] diff --git a/Dockerfile b/Dockerfile index 614de64..654cff4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11 +FROM python:3.12.0-bookworm RUN mkdir -p /app WORKDIR /app diff --git a/Pipfile b/Pipfile index 8b85d8c..2bfcc8d 100644 --- a/Pipfile +++ b/Pipfile @@ -4,21 +4,23 @@ verify_ssl = true name = "pypi" [packages] -fastapi = "==0.95.2" -uvicorn = {version = "==0.22.0", extras = ["standard"]} -sentry-sdk = "==1.25.0" +fastapi = "==0.103.2" +uvicorn = "==0.23.2" +sentry-sdk = "==1.31.0" 
PySocks = "==1.7.1" requests = "==2.31.0" -prometheus-fastapi-instrumentator = "==6.0.0" -pydantic = "==1.10.8" +prometheus-fastapi-instrumentator = "==6.1.0" cacheout = "==0.14.1" jinja-cli = "==1.2.2" -python-dotenv = "==0.19.1" -httpx = "==0.24.1" -kubernetes = "==26.1.0" +python-dotenv = "==1.0.0" +httpx = "==0.25.0" +kubernetes = "==28.1.0" +flake8-annotations = "==3.0.1" +chardet = "==5.2.0" [dev-packages] pytest = "==7.3.1" +python-dotenv = "==1.0.0" pytest-cov = "==4.0.0" requests_mock = "==1.9.3" pre-commit = "==3.3.3" @@ -27,4 +29,4 @@ flake8 = "==6.1.0" pytest_kind = "==22.11.1" [requires] -python_version = "3.11" +python_version = "3.12" diff --git a/Pipfile.lock b/Pipfile.lock index f70f9ba..433d1f3 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "d7045c364409ddbebc1ce0db5808b04438b847df1f3611043919ad25adc59d06" + "sha256": "b3728afa746ebabefcee447407b6da98f7bd5b5a294f2f2c3d3eff62d6baca3e" }, "pipfile-spec": 6, "requires": { - "python_version": "3.11" + "python_version": "3.12" }, "sources": [ { @@ -16,6 +16,14 @@ ] }, "default": { + "annotated-types": { + "hashes": [ + "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43", + "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d" + ], + "markers": "python_version >= '3.8'", + "version": "==0.6.0" + }, "anyio": { "hashes": [ "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780", @@ -31,12 +39,21 @@ ], "version": "==1.4.2" }, + "attrs": { + "hashes": [ + "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", + "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" + ], + "markers": "python_version >= '3.7'", + "version": "==23.1.0" + }, "cacheout": { "hashes": [ "sha256:060bf1c4af3d3903aa634814b547674ef16ec690b7f3576b373420bc29e5e4cb", "sha256:492b04e5c622f764085baf174f36b38ab287f154a6fa84c3ac29ee890c30662b" ], "index": "pypi", + "markers": "python_version >= 
'3.7'", "version": "==0.14.1" }, "cachetools": { @@ -55,110 +72,152 @@ "markers": "python_version >= '3.6'", "version": "==2023.7.22" }, + "chardet": { + "hashes": [ + "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", + "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==5.2.0" + }, "charset-normalizer": { "hashes": [ - "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96", - "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c", - "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710", - "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706", - "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020", - "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252", - "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad", - "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329", - "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a", - "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f", - "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6", - "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4", - "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a", - "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46", - "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2", - "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23", - "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace", - "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd", - "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982", - "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10", - 
"sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2", - "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea", - "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09", - "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5", - "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149", - "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489", - "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9", - "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80", - "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592", - "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3", - "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6", - "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed", - "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c", - "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200", - "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a", - "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e", - "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d", - "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6", - "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623", - "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669", - "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3", - "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa", - "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9", - "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2", - "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f", - "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1", 
- "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4", - "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a", - "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8", - "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3", - "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029", - "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f", - "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959", - "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22", - "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7", - "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952", - "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346", - "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e", - "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d", - "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299", - "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd", - "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a", - "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3", - "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037", - "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94", - "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c", - "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858", - "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a", - "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449", - "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c", - "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918", - 
"sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1", - "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c", - "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac", - "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa" + "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843", + "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786", + "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e", + "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8", + "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4", + "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa", + "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d", + "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82", + "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7", + "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895", + "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d", + "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a", + "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382", + "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678", + "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b", + "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e", + "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741", + "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4", + "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596", + "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9", + "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69", + "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c", 
+ "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77", + "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13", + "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459", + "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e", + "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7", + "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908", + "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a", + "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f", + "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8", + "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482", + "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d", + "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d", + "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545", + "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34", + "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86", + "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6", + "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe", + "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e", + "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc", + "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7", + "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd", + "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c", + "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557", + "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a", + "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89", + 
"sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078", + "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e", + "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4", + "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403", + "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0", + "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89", + "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115", + "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9", + "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05", + "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a", + "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec", + "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56", + "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38", + "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479", + "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c", + "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e", + "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd", + "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186", + "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455", + "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c", + "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65", + "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78", + "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287", + "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df", + "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43", + "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1", 
+ "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7", + "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989", + "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a", + "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63", + "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884", + "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649", + "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810", + "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828", + "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4", + "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2", + "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd", + "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5", + "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe", + "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293", + "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e", + "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e", + "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8" ], "markers": "python_full_version >= '3.7.0'", - "version": "==3.2.0" + "version": "==3.3.0" }, "click": { "hashes": [ - "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd", - "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5" + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" ], "markers": "python_version >= '3.7'", - "version": "==8.1.6" + "version": "==8.1.7" }, "fastapi": { "hashes": [ - "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982", - 
"sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f" + "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e", + "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.103.2" + }, + "flake8": { + "hashes": [ + "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23", + "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5" + ], + "markers": "python_full_version >= '3.8.1'", + "version": "==6.1.0" + }, + "flake8-annotations": { + "hashes": [ + "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade", + "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a" ], "index": "pypi", - "version": "==0.95.2" + "markers": "python_full_version >= '3.8.1'", + "version": "==3.0.1" }, "google-auth": { "hashes": [ - "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce", - "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873" + "sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3", + "sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda" ], - "markers": "python_version >= '3.6'", - "version": "==2.22.0" + "markers": "python_version >= '3.7'", + "version": "==2.23.3" }, "h11": { "hashes": [ @@ -170,59 +229,20 @@ }, "httpcore": { "hashes": [ - "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888", - "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87" - ], - "markers": "python_version >= '3.7'", - "version": "==0.17.3" - }, - "httptools": { - "hashes": [ - "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd", - "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35", - "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b", - "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0", 
- "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589", - "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd", - "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90", - "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a", - "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf", - "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3", - "sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d", - "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2", - "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a", - "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e", - "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717", - "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932", - "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d", - "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c", - "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40", - "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38", - "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51", - "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5", - "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a", - "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201", - "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339", - "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755", - "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c", - "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649", - "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796", - 
"sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd", - "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9", - "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1", - "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d", - "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4", - "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb" + "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9", + "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced" ], - "version": "==0.6.0" + "markers": "python_version >= '3.8'", + "version": "==0.18.0" }, "httpx": { "hashes": [ - "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd", - "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd" + "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100", + "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875" ], "index": "pypi", - "version": "==0.24.1" + "markers": "python_version >= '3.8'", + "version": "==0.25.0" }, "idna": { "hashes": [ @@ -250,11 +270,12 @@ }, "kubernetes": { "hashes": [ - "sha256:5854b0c508e8d217ca205591384ab58389abdae608576f9c9afc35a3c76a366c", - "sha256:e3db6800abf7e36c38d2629b5cb6b74d10988ee0cba6fba45595a7cbe60c0042" + "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d", + "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9" ], "index": "pypi", - "version": "==26.1.0" + "markers": "python_version >= '3.6'", + "version": "==28.1.0" }, "markupsafe": { "hashes": [ @@ -262,8 +283,11 @@ "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e", "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431", "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686", + "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c", 
"sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559", "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc", + "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb", + "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939", "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c", "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0", "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4", @@ -271,6 +295,7 @@ "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575", "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba", "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d", + "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd", "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3", "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00", "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155", @@ -279,6 +304,7 @@ "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f", "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8", "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b", + "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007", "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24", "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea", "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198", @@ -286,9 +312,12 @@ "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee", "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be", "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2", + "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1", 
"sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707", "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6", + "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c", "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58", + "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823", "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779", "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636", "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c", @@ -307,11 +336,21 @@ "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9", "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57", "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc", - "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2" + "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc", + "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2", + "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11" ], "markers": "python_version >= '3.7'", "version": "==2.1.3" }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, "oauthlib": { "hashes": [ "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", @@ -330,11 +369,12 @@ }, "prometheus-fastapi-instrumentator": { "hashes": [ - "sha256:6f66a951a4801667f7311d161f3aebfe0cd202391d0f067fbbe169792e2d987b", - "sha256:f1ddd0b8ead75e71d055bdf4cb7e995ec6a6ca63543245e7bbc5ca9b14c45191" + "sha256:1820d7a90389ce100f7d1285495ead388818ae0882e761c1f3e6e62a410bdf13", + "sha256:2279ac1cf5b9566a4c3a07f78c9c5ee19648ed90976ab87d73d672abc1bfa017" ], "index": "pypi", - "version": 
"==6.0.0" + "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'", + "version": "==6.1.0" }, "pyasn1": { "hashes": [ @@ -352,47 +392,141 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", "version": "==0.3.0" }, + "pycodestyle": { + "hashes": [ + "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f", + "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67" + ], + "markers": "python_version >= '3.8'", + "version": "==2.11.1" + }, "pydantic": { "hashes": [ - "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375", - "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277", - "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d", - "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4", - "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca", - "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c", - "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01", - "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18", - "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68", - "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887", - "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459", - "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4", - "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5", - "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e", - "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1", - "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33", - "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a", - "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56", - 
"sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108", - "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2", - "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4", - "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878", - "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0", - "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e", - "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6", - "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f", - "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800", - "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea", - "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f", - "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b", - "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1", - "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd", - "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319", - "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab", - "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85", - "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f" + "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7", + "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1" ], - "index": "pypi", - "version": "==1.10.8" + "markers": "python_version >= '3.7'", + "version": "==2.4.2" + }, + "pydantic-core": { + "hashes": [ + "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e", + "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33", + "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7", + "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7", + 
"sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea", + "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4", + "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0", + "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7", + "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94", + "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff", + "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82", + "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd", + "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893", + "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e", + "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d", + "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901", + "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9", + "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c", + "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7", + "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891", + "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f", + "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a", + "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9", + "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5", + "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e", + "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a", + "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c", + "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f", + "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514", + "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b", 
+ "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302", + "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096", + "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0", + "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27", + "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884", + "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a", + "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357", + "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430", + "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221", + "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325", + "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4", + "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05", + "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55", + "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875", + "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970", + "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc", + "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6", + "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f", + "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b", + "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d", + "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15", + "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118", + "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee", + "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e", + "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6", + 
"sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208", + "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede", + "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3", + "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e", + "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada", + "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175", + "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a", + "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c", + "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f", + "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58", + "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f", + "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a", + "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a", + "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921", + "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e", + "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904", + "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776", + "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52", + "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf", + "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8", + "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f", + "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b", + "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63", + "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c", + "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f", + "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468", 
+ "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e", + "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab", + "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2", + "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb", + "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb", + "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132", + "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b", + "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607", + "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934", + "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698", + "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e", + "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561", + "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de", + "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b", + "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a", + "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595", + "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402", + "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881", + "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429", + "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5", + "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7", + "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c", + "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531", + "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6", + "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521" + ], + "markers": "python_version >= '3.7'", + "version": "==2.10.1" + }, + 
"pyflakes": { + "hashes": [ + "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774", + "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc" + ], + "markers": "python_version >= '3.8'", + "version": "==3.1.0" }, "pysocks": { "hashes": [ @@ -400,7 +534,7 @@ "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.7.1" }, "python-dateutil": { @@ -413,15 +547,18 @@ }, "python-dotenv": { "hashes": [ - "sha256:14f8185cc8d494662683e6914addcb7e95374771e707601dfc70166946b4c4b8", - "sha256:bbd3da593fc49c249397cbfbcc449cf36cb02e75afc8157fcc6a81df6fb7750a" + "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba", + "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a" ], "index": "pypi", - "version": "==0.19.1" + "markers": "python_version >= '3.8'", + "version": "==1.0.0" }, "pyyaml": { "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", @@ -429,7 +566,10 @@ "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", @@ -437,9 +577,12 @@ "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", @@ -454,7 +597,9 @@ "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", @@ -471,6 +616,7 @@ "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.31.0" }, "requests-oauthlib": { @@ -491,19 +637,11 @@ }, "sentry-sdk": { "hashes": [ - "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a", - "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e" + "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291", + "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b" ], "index": "pypi", - "version": "==1.25.0" - }, - "setuptools": { - "hashes": [ - "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", - "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235" - ], - "markers": "python_version >= '3.7'", - "version": "==68.0.0" + "version": "==1.31.0" }, "six": { "hashes": [ @@ -531,175 +669,36 @@ }, "typing-extensions": { "hashes": [ - "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36", - "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2" + "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", + "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" ], - "markers": "python_version >= '3.7'", - "version": "==4.7.1" + "markers": "python_version >= '3.8'", + "version": "==4.8.0" }, "urllib3": { "hashes": [ - "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f", - "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14" + "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", + "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.16" + "markers": "python_version >= '3.6'", + "version": 
"==1.26.18" }, "uvicorn": { - "extras": [ - "standard" - ], "hashes": [ - "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8", - "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996" + "sha256:1f9be6558f01239d4fdf22ef8126c39cb1ad0addf76c40e760549d2c2f43ab53", + "sha256:4d3cc12d7727ba72b64d12d3cc7743124074c0a69f7b201512fc50c3e3f1569a" ], "index": "pypi", - "version": "==0.22.0" - }, - "uvloop": { - "hashes": [ - "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d", - "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1", - "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595", - "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b", - "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05", - "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8", - "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20", - "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded", - "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c", - "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8", - "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474", - "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f", - "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62", - "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376", - "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c", - "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e", - "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b", - "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4", - "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578", - "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811", - 
"sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d", - "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738", - "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa", - "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9", - "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539", - "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c", - "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718", - "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667", - "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c", - "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024" - ], - "version": "==0.17.0" - }, - "watchfiles": { - "hashes": [ - "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911", - "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda", - "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154", - "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af", - "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d", - "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c", - "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48", - "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c", - "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545", - "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e", - "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120", - "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7", - "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8", - "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc", - "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056", - 
"sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193", - "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3", - "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf", - "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79", - "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1", - "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b", - "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0" - ], - "version": "==0.19.0" + "markers": "python_version >= '3.8'", + "version": "==0.23.2" }, "websocket-client": { "hashes": [ - "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd", - "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d" + "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24", + "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df" ], - "markers": "python_version >= '3.7'", - "version": "==1.6.1" - }, - "websockets": { - "hashes": [ - "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd", - "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f", - "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998", - "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82", - "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788", - "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa", - "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f", - "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4", - "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7", - "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f", - "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd", - "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69", 
- "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb", - "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b", - "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016", - "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac", - "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4", - "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb", - "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99", - "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e", - "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54", - "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf", - "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007", - "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3", - "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6", - "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86", - "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1", - "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61", - "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11", - "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8", - "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f", - "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931", - "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526", - "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016", - "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae", - "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd", - "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b", - 
"sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311", - "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af", - "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152", - "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288", - "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de", - "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97", - "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d", - "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d", - "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca", - "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0", - "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9", - "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b", - "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e", - "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128", - "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d", - "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c", - "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5", - "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6", - "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b", - "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b", - "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280", - "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c", - "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c", - "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f", - "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20", - "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8", 
- "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb", - "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602", - "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf", - "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0", - "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74", - "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0", - "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564" - ], - "version": "==11.0.3" + "markers": "python_version >= '3.8'", + "version": "==1.6.4" }, "xmltodict": { "hashes": [ @@ -737,6 +736,7 @@ "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==23.7.0" }, "certifi": { @@ -757,153 +757,168 @@ }, "charset-normalizer": { "hashes": [ - "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96", - "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c", - "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710", - "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706", - "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020", - "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252", - "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad", - "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329", - "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a", - "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f", - "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6", - "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4", - "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a", - "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46", - 
"sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2", - "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23", - "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace", - "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd", - "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982", - "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10", - "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2", - "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea", - "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09", - "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5", - "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149", - "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489", - "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9", - "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80", - "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592", - "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3", - "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6", - "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed", - "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c", - "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200", - "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a", - "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e", - "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d", - "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6", - "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623", - "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669", 
- "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3", - "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa", - "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9", - "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2", - "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f", - "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1", - "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4", - "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a", - "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8", - "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3", - "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029", - "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f", - "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959", - "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22", - "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7", - "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952", - "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346", - "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e", - "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d", - "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299", - "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd", - "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a", - "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3", - "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037", - "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94", - 
"sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c", - "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858", - "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a", - "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449", - "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c", - "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918", - "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1", - "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c", - "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac", - "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa" + "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843", + "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786", + "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e", + "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8", + "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4", + "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa", + "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d", + "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82", + "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7", + "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895", + "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d", + "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a", + "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382", + "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678", + "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b", + "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e", 
+ "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741", + "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4", + "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596", + "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9", + "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69", + "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c", + "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77", + "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13", + "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459", + "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e", + "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7", + "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908", + "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a", + "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f", + "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8", + "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482", + "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d", + "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d", + "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545", + "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34", + "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86", + "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6", + "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe", + "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e", + "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc", + 
"sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7", + "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd", + "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c", + "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557", + "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a", + "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89", + "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078", + "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e", + "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4", + "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403", + "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0", + "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89", + "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115", + "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9", + "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05", + "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a", + "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec", + "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56", + "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38", + "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479", + "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c", + "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e", + "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd", + "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186", + "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455", + "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c", 
+ "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65", + "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78", + "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287", + "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df", + "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43", + "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1", + "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7", + "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989", + "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a", + "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63", + "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884", + "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649", + "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810", + "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828", + "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4", + "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2", + "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd", + "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5", + "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe", + "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293", + "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e", + "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e", + "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8" ], "markers": "python_full_version >= '3.7.0'", - "version": "==3.2.0" + "version": "==3.3.0" }, "click": { "hashes": [ - "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd", - 
"sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5" + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" ], "markers": "python_version >= '3.7'", - "version": "==8.1.6" + "version": "==8.1.7" }, "coverage": { "extras": [ "toml" ], "hashes": [ - "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34", - "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e", - "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7", - "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b", - "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3", - "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985", - "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95", - "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2", - "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a", - "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74", - "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd", - "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af", - "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54", - "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865", - "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214", - "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54", - "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe", - "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0", - "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321", - "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446", - "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e", - 
"sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527", - "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12", - "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f", - "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f", - "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84", - "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479", - "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e", - "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873", - "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70", - "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0", - "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977", - "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51", - "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28", - "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1", - "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254", - "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1", - "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd", - "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689", - "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d", - "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543", - "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9", - "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637", - "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071", - "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482", - "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1", - "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b", 
- "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5", - "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a", - "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393", - "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a", - "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba" + "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1", + "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63", + "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9", + "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312", + "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3", + "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb", + "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25", + "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92", + "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda", + "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148", + "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6", + "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216", + "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a", + "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640", + "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836", + "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c", + "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f", + "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2", + "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901", + "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed", + 
"sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a", + "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074", + "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc", + "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84", + "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083", + "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f", + "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c", + "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c", + "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637", + "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2", + "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82", + "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f", + "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce", + "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef", + "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f", + "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611", + "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c", + "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76", + "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9", + "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce", + "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9", + "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf", + "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf", + "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9", + "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6", + "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2", 
+ "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a", + "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a", + "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf", + "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738", + "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a", + "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4" ], "markers": "python_version >= '3.8'", - "version": "==7.3.0" + "version": "==7.3.2" }, "distlib": { "hashes": [ @@ -914,27 +929,27 @@ }, "filelock": { "hashes": [ - "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81", - "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec" + "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4", + "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd" ], - "markers": "python_version >= '3.7'", - "version": "==3.12.2" + "markers": "python_version >= '3.8'", + "version": "==3.12.4" }, "flake8": { "hashes": [ "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23", "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5" ], - "index": "pypi", + "markers": "python_full_version >= '3.8.1'", "version": "==6.1.0" }, "identify": { "hashes": [ - "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f", - "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54" + "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54", + "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d" ], "markers": "python_version >= '3.8'", - "version": "==2.5.26" + "version": "==2.5.30" }, "idna": { "hashes": [ @@ -978,11 +993,11 @@ }, "packaging": { "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" + 
"sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", + "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" ], "markers": "python_version >= '3.7'", - "version": "==23.1" + "version": "==23.2" }, "pathspec": { "hashes": [ @@ -994,19 +1009,19 @@ }, "platformdirs": { "hashes": [ - "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d", - "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d" + "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3", + "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e" ], "markers": "python_version >= '3.7'", - "version": "==3.10.0" + "version": "==3.11.0" }, "pluggy": { "hashes": [ - "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", - "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3" + "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", + "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" ], - "markers": "python_version >= '3.7'", - "version": "==1.2.0" + "markers": "python_version >= '3.8'", + "version": "==1.3.0" }, "pre-commit": { "hashes": [ @@ -1014,15 +1029,16 @@ "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==3.3.3" }, "pycodestyle": { "hashes": [ - "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0", - "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8" + "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f", + "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67" ], "markers": "python_version >= '3.8'", - "version": "==2.11.0" + "version": "==2.11.1" }, "pyflakes": { "hashes": [ @@ -1046,6 +1062,7 @@ "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" ], "index": "pypi", + "markers": "python_version >= '3.7'", 
"version": "==7.3.1" }, "pytest-cov": { @@ -1054,6 +1071,7 @@ "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==4.0.0" }, "pytest-kind": { @@ -1061,12 +1079,23 @@ "sha256:35fd99e4f94e0374834b5893ee4a95e33ad5dc1c18080356f01fadf21224e830", "sha256:ae7a4c753fcbbf9e44a0cc587d5219a0b8b2b1e7bcc9cbe14234f745dd5db681" ], - "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==22.11.1" }, + "python-dotenv": { + "hashes": [ + "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba", + "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.0.0" + }, "pyyaml": { "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", @@ -1074,7 +1103,10 @@ "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", 
"sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", @@ -1082,9 +1114,12 @@ "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", @@ -1099,7 +1134,9 @@ "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", @@ -1116,6 +1153,7 @@ "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.31.0" }, "requests-mock": { @@ -1123,16 +1161,15 @@ 
"sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970", "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba" ], - "index": "pypi", "version": "==1.9.3" }, "setuptools": { "hashes": [ - "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", - "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235" + "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87", + "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a" ], - "markers": "python_version >= '3.7'", - "version": "==68.0.0" + "markers": "python_version >= '3.8'", + "version": "==68.2.2" }, "six": { "hashes": [ @@ -1144,19 +1181,19 @@ }, "urllib3": { "hashes": [ - "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f", - "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14" + "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", + "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.16" + "markers": "python_version >= '3.6'", + "version": "==1.26.18" }, "virtualenv": { "hashes": [ - "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02", - "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc" + "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b", + "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752" ], "markers": "python_version >= '3.7'", - "version": "==20.24.3" + "version": "==20.24.5" } } } diff --git a/README.md b/README.md index e2a70f1..865efe1 100644 --- a/README.md +++ b/README.md @@ -1,36 +1,44 @@ # Service Wizard 2 +[![codecov](https://codecov.io/gh/kbase/service_wizard2/graph/badge.svg?token=JxuP8XOFwU)](https://codecov.io/gh/kbase/service_wizard2) The service wizard manages the lifecycle of "dynamic services" 
in KBase. The previous service wizard talked directly to rancher1, this one talks directly to kubernetes. -Dynamic services are responsible for providing data and/or UI components for the KBase UI and Narrative. +Dynamic services are responsible for providing data and/or UI components for the KBase UI and Narrative. # Known issues -* Still does not allow you to update environmental variables for a service that was launched once, it requires a new deployment. + +* Still does not allow you to update environmental variables for a service that was launched once, it requires a new + deployment. * Starting up too many services causes the status endpoint to not respond. * Only supports one type of toleration for now. * Doesn't completely support multiple replicas for now. * Doesn't support volumes, only bind mounts -* Doesn't yet support forcing a dynamic service to land on a specific host (e.g. staticnarrative service, htmlfilsetservice) or define behavior for multiple replicas on specific hosts -* If the catalog admin is not valid, you get an authentication error, but its not clear that its the auth token from the service rather than from the user request - +* Doesn't yet support forcing a dynamic service to land on a specific host (e.g. staticnarrative service, + htmlfilsetservice) or define behavior for multiple replicas on specific hosts +* If the catalog admin is not valid, you get an authentication error, but it's not clear that it's the auth token from the + service rather than from the user request # Environment Variables -The following environment variables are used to configure the application: +The following environment variables are used to configure the application. +Ensure that all the required environment variables are properly set before running the application.
See [.env](.env) file for example +## *Required Environment Variables* + ## Client URLs - `AUTH_SERVICE_URL`: Defines the URL of the authentication service used for user authentication and authorization. - `CATALOG_URL`: Sets the URL for the catalog service, which manages and provides access to application catalogs. -- `AUTH_LEGACY_URL`: Defines the URL of the legacy authentication service to be appended to the env inside the dynamic service - +- `AUTH_LEGACY_URL`: Defines the URL of the legacy authentication service to be appended to the env inside the dynamic + service ## Service Wizard URLs -- `EXTERNAL_SW_URL`: Specifies the URL for the external Service Wizard. +- `EXTERNAL_SW_URL`: Specifies the URL for the external Service Wizard. Also serves as identifier for Sentry - `EXTERNAL_DS_URL`: Sets the URL for the external Dynamic Services. -- `KBASE_SERVICES_ENDPOINT`: Specifies the endpoint URL for the KBase service, which provides various functionalities for the application. +- `KBASE_SERVICES_ENDPOINT`: Specifies the endpoint URL for the KBase service, which provides various functionalities + for the application. - `KBASE_ROOT_ENDPOINT`: Specifies the root endpoint URL for KBase. - `ROOT_PATH`: Specifies the root path for the application. @@ -43,18 +51,33 @@ See [.env](.env) file for example ## Kubernetes configs -- `KUBECONFIG`: Specifies the path to the kubeconfig file. This environment variable is required when `USE_INCLUSTER_CONFIG` is set to "false", else it will read from the default location. +- `KUBECONFIG`: Specifies the path to the kubeconfig file. This environment variable is required + when `USE_INCLUSTER_CONFIG` is set to "false", else it will read from the default location. - `NAMESPACE`: Specifies the namespace for the application where it operates. -- `USE_INCLUSTER_CONFIG`: A boolean flag indicating whether the application should use in-cluster configuration. 
Set it to "true" to use in-cluster configuration or "false" to use an external configuration file. +- `USE_INCLUSTER_CONFIG`: A boolean flag indicating whether the application should use in-cluster configuration. Set it + to "true" to use in-cluster configuration or "false" to use an external configuration file. +**NOTE THAT** setting the `KUBECONFIG` environment variable will have no effect when `USE_INCLUSTER_CONFIG` is set to " +true". The application will automatically use the in-cluster configuration provided by the underlying infrastructure. If +you want to use an external configuration file, ensure that `USE_INCLUSTER_CONFIG` is set to "false" and provide the +path to the configuration file using the `KUBECONFIG` environment variable. -**NOTE THAT** setting the `KUBECONFIG` environment variable will have no effect when `USE_INCLUSTER_CONFIG` is set to "true". The application will automatically use the in-cluster configuration provided by the underlying infrastructure. If you want to use an external configuration file, ensure that `USE_INCLUSTER_CONFIG` is set to "false" and provide the path to the configuration file using the `KUBECONFIG` environment variable. +**NOTE THAT** setting `NAMESPACE` also creates a toleration V1Toleration(effect="NoSchedule", key=namespace, operator="Exists") -Ensure that all the required environment variables are properly set before running the application. +## *Optional Environment Variables* +## Telemetry and Miscellaneous configs +- `SENTRY_DSN`: The DSN for the sentry instance to use for error reporting +- `METRICS_USERNAME` : The username for the /metrics endpoint which can be used by prometheus +- `METRICS_PASSWORD` : The password for the /metrics endpoint which can be used by prometheus + **NOTE THAT** the `/metrics` endpoint will not be available unless both the username and password are set. +- `DOTENV_FILE_LOCATION`: The location of the .env file to use for local development. 
Defaults to .env +- `LOG_LEVEL`: The log level to use for the application. Defaults to INFO # Code Review Request + +* Organization and error handling for authorization, files in random places from ripping out FASTAPI parts. * Organization and directory structure of APP * Organization and directory structure of TESTS * Organization and directory structure of TESTS (unit tests) @@ -65,115 +88,84 @@ Ensure that all the required environment variables are properly set before runni * Dependency system design (passing around request.app.state) * Caching * Async/await - +* # Local Development + This repo uses a pipenv to manage dependencies. To install pipenv, run `pip install pipenv` To install dependencies, run + ``` pipenv --python 3.11-service_wizard2 pipenv install --dev pipenv shell ``` + To start the server, run + ``` -uvicorn --host 0.0.0.0 --factory src.factory:create_app --reload --port 1234 -``` +PYTHONPATH=.:src uvicorn --host 0.0.0.0 --factory factory:create_app --reload --port 1234``` + To install pre-commit hook and test it + ``` pre-commit install pre-commit run --all-files ``` - - -Convenience scripts are provided in the [scripts](scripts) directory to setup the pipenv environment and install dependencies. +Convenience scripts are provided in the [scripts](scripts) directory to setup the pipenv environment and install +dependencies. In order to connect to a kubernetes cluster, you will need to have a kubeconfig file in your home directory. The kubeconfig file is typically located at `~/.kube/config`. -Read more about kubeconfig files [here](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/). -Ensure that your context is set to the correct cluster and namespace and matches the environmental variables in the [env](test/.env) file. - +Read more about kubeconfig +files [here](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/). 
+Ensure that your context is set to the correct cluster and namespace and matches the environment variables in +the [env](test/.env) file. # PYCHARM -You can run the service in pycharm as well, but you will need to set the following parameters in the run configuration: -script path =`/Users/XXX/.local/share/virtualenvs/service_wizard2-vG0FwGFD/bin/uvicorn` -parameters = `--reload --port 5002 --host 0.0.0.0 --factory src.factory:create_app ` +You can run the service in pycharm as well, but you will need to set the following parameters in the run configuration: +parameters = `PYTHONPATH=.:src uvicorn --host 0.0.0.0 --factory factory:create_app --reload --port 1234` ## Usage OpenAPI documentation is provided at the `/docs` endpoint of the server (in KBase, this is at `/service/service_wizard2/docs`, for example [https://ci.kbase.us/services/service_wizard2/docs](https://ci.kbase.us/services/service_wizard2/docs)). +However, the RPC endpoints are not documented. See the [original service wizard spec](documentation/ServiceWizard_Artifacts/ServiceWizard.spec) for details on how to use the endpoint. + + ### Error codes -Error codes are listed in [errors.py](src/service/errors.py). +Errors are returned as JSONRPC errors. ## Administration -To start the service Docker container: +* Ensure the appropriate kubernetes roles/rolebindings/ are in place for the service account + used by the service. +* Ensure that the namespace is created for both the Service Wizard and the Dynamic Services. +* Ensure that the environment is properly configured for the service. -* The collections listed in - [collection_and_field_names.py](src/common/storage/collection_and_field_names.py) must be - created in ArangoDB. The collections are not created automatically to allow service admins - to specify sharding to their liking. Indexes are created automatically, assuming the collections - exist.
-* The environment variables listed in - [collections_config.toml.jinja](collections_config.toml.jinja) - must be provided to the Docker container, unless their default values are acceptable. - In particular, database access and credential information must be provided. ## File structure -* `/src/service` - service code -* `/src/loaders/[collection ID]` - loader code for collections, e.g. `/loaders/gtdb` -* `/src/common` - shared loader and service code -* `/src/common/storage` - data connection and access methods +* `/src/clients` - KBase and Kubernetes clients with caches +* `/src/configs` - the configuration for the app +* `/src/dependencies` - shared service code +* `/src/models` - models for the app returns, logic for calculating service status, other models +* `/src/routes` - the routes for the app +* `/src/rpc` - the RPC endpoints for the app and common code * `/test/src` - test code. Subdirectories should mirror the folder structure above, e.g. - `/test/src/service` contains service test code +* `/test/ServiceWizard_Artifacts` - the original Service Wizard related code ## Development +* Update the release notes in the [RELEASE_NOTES.md](RELEASE_NOTES.md) file. +* You can run the app via `docker-compose.yaml` +* You can update your credentials in your `kubeconfig` to deploy and launch the app in Rancher2 Desktop -### Adding code - -* In this alpha / prototype stage, we will be PRing (do not push directly) to `main`. In the - future, once we want to deploy beyond CI, we will add a `develop` branch. -* The PR creator merges the PR and deletes branches (after builds / tests / linters complete). -* To add new data products, see [Adding data products](/docs/adding_data_products.md) - -#### Timestamps - -* Timestamps visible in the API must be fully qualified ISO8601 timestamps in the format - `2023-01-29T21:41:48.867140+00:00`. -* Timestamps may be stored in the database as either the above format or as Unix epoch - milliseconds, depending on the use case. 
-* If timestamps are stored as epoch ms, they must be converted to the ISO8601 format prior to - returning them via the API. - -### Versioning - -* The code is versioned according to [Semantic Versioning](https://semver.org/). -* The version must be updated in - * `/src/common/version.py` - * `/RELEASE_NOTES.md` - * any test files that test the version - -### Code requirements for prototype code: - -* Any code committed must at least have a test file that imports it and runs a noop test so that - the code is shown with no coverage in the coverage statistics. This will make it clear what - code needs tests when we move beyond the prototype stage. -* Each module should have its own test file. Eventually these will be expanded into unit tests - (or integration tests in the case of app.py) -* Any code committed must have regular code and user documentation so that future devs - converting the code to production can understand it. -* Release notes are not strictly necessary while deploying to CI, but a concrete version (e.g. - no `-dev*` or `-prototype*` suffix) will be required outside of that environment. On a case by - case basis, add release notes and bump the prototype version (e.g. 0.1.0-prototype3 -> - 0.1.0-prototype4) for changes that should be documented. ### Running tests @@ -184,38 +176,3 @@ pipenv sync --dev # only the first time or when Pipfile.lock changes pipenv shell PYTHONPATH=. pytest test ``` - -## TODO - -* Logging ip properly (X-RealIP, X-Forwarded-For) - * Add request ID to logs and return in errors - * Compare log entries to SDK and see what we should keep - * Take a look at the jgi-kbase IDmapper service - -### Prior to declaring this a non-prototype - -* Coverage badge in Readme -* Run through all code, refactor to production quality -* Add tests where missing (which is a lot) and inspect current tests for completeness and quality - * E.g. 
don't assume existing tests are any good - * Async testing help - https://tonybaloney.github.io/posts/async-test-patterns-for-pytest-and-unittest.html -* Build & push tool images in GHA - * Consider using a base image for each tool with a "real" image that builds from the base image. - The "real" image should just copy the files into the image and set the entry point. This will - make GHA builds a lot faster - * Alternatively use docker's GHA cache feature - * Manual push only is probably fine, these images won't change that often -* JobRunner repo should be updated to push the callback server to a GHA KBase namespace -* Testing tool containers - * DO NOT import the tool specific scripts and / or run them directly in tests, as that will - require all their dependencies to be installed, creating dependency hell. - * Instead - * Test as a black box using `docker run` - * This won't work for gtdb_tk, probably. Automated testing for that is going to be - problematic. - * If necessary, add a `Dockerfile.test` dockerfile to build a test specific image and run - tests in there. - * Either mount a directory in which to save the coverage info or `docker cp` it when the - run is complete - * Figure out how to merge the various coverage files. diff --git a/codecov.yml b/codecov.yml index f9df2ef..7e7d9f6 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,2 +1,3 @@ ignore: - - "src/clients" + - "src/clients/baseclient.py" + - "src/clients/CatalogClient.py" diff --git a/docker-compose.yaml b/docker-compose.yaml index f59e33a..5338504 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,6 +1,7 @@ version: '3' # This docker-compose is for developer convenience, not for running in production. 
+# Be careful: this mounts your kubeconfig file into the container, giving this application access to your k8s connections services: @@ -14,4 +15,6 @@ services: ports: - "5001:5000" env_file: - - .env + - .env + volumes: + - ~/.kube/config:/root/.kube/config diff --git a/k8/README.md b/k8/README.md index c1f2621..bb0be0d 100644 --- a/k8/README.md +++ b/k8/README.md @@ -1,4 +1,4 @@ -# K8 Deployment Files -* You can use these to deploy into rancher desktop -* You will have to volume mount in kubconfig into the container, this is not yet added -* +# K8 Deployment Files For Local Testing and Development + +* You can use these to deploy into rancher desktop or podman desktop +* You will have to volume mount in kubeconfig in the service wizard container or use an incluster_config diff --git a/k8/deployment.yaml b/k8/deployment.yaml index 1b17f9a..af82df2 100644 --- a/k8/deployment.yaml +++ b/k8/deployment.yaml @@ -23,14 +23,14 @@ spec: ports: - containerPort: 5000 readinessProbe: - failureThreshold: 3 - httpGet: - path: /status/ - port: 5000 - scheme: HTTP - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 1 + failureThreshold: 3 + httpGet: + path: /status/ + port: 5000 + scheme: HTTP + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 securityContext: allowPrivilegeEscalation: false capabilities: @@ -44,5 +44,5 @@ spec: drop: - ALL envFrom: - - configMapRef: - name: service-wizard2-env + - configMapRef: + name: service-wizard2-env diff --git a/k8/env-cfg.yaml b/k8/env-cfg.yaml index d79de23..8f2d059 100644 --- a/k8/env-cfg.yaml +++ b/k8/env-cfg.yaml @@ -4,12 +4,12 @@ metadata: name: service-wizard2-env namespace: staging data: - NAMESPACE: "ci-dynamic-services" - AUTH_SERVICE_URL: "https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login" - KBASE_ENDPOINT: "https://ci.kbase.us/services" - CATALOG_URL: "https://ci.kbase.us/services/catalog/" - CATALOG_ADMIN_TOKEN: "62IYPZGS7O773DBLZZCSE542BP4C2E7G" - KUBECONFIG: "~/.kube/config" - ADMIN_ROLE_1: 
"KBASE_ADMIN" - ADMIN_ROLE_2: "CATALOG_ADMIN" - ADMIN_ROLE_3: "SERVICE_WIZARD_ADMIN" + NAMESPACE: "ci-dynamic-services" + AUTH_SERVICE_URL: "https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login" + KBASE_ENDPOINT: "https://ci.kbase.us/services" + CATALOG_URL: "https://ci.kbase.us/services/catalog/" + CATALOG_ADMIN_TOKEN: "62IYPZGS7O773DBLZZCSE542BP4C2E7G" + KUBECONFIG: "~/.kube/config" + ADMIN_ROLE_1: "KBASE_ADMIN" + ADMIN_ROLE_2: "CATALOG_ADMIN" + ADMIN_ROLE_3: "SERVICE_WIZARD_ADMIN" diff --git a/k8/roles.yaml b/k8/roles.yaml index a872e83..9827645 100644 --- a/k8/roles.yaml +++ b/k8/roles.yaml @@ -4,12 +4,12 @@ metadata: name: resource-manager-role namespace: staging-dynamic-services rules: - - apiGroups: [""] # Default core API group - resources: ["pods", "pods/log", "deployments", "services", "ingresses"] - verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] - - apiGroups: ["apps"] - resources: ["deployments"] - verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] - - apiGroups: ["networking.k8s.io"] - resources: ["ingresses"] - verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + - apiGroups: [ "" ] + resources: [ "pods", "pods/log", "deployments", "services", "ingresses" ] + verbs: [ "get", "list", "watch", "create", "update", "patch", "delete" ] + - apiGroups: [ "apps" ] + resources: [ "deployments" ] + verbs: [ "get", "list", "watch", "create", "update", "patch", "delete" ] + - apiGroups: [ "networking.k8s.io" ] + resources: [ "ingresses" ] + verbs: [ "get", "list", "watch", "create", "update", "patch", "delete" ] diff --git a/k8/service.yaml b/k8/service.yaml index 46d19e3..3beeb66 100644 --- a/k8/service.yaml +++ b/k8/service.yaml @@ -1,4 +1,3 @@ - --- apiVersion: v1 kind: Service @@ -8,9 +7,9 @@ metadata: app.kubernetes.io/name: service-wizard2 spec: ports: - - port: 5000 - targetPort: 5000 + - port: 5000 + targetPort: 5000 selector: app.kubernetes.io/name: service-wizard2 status: - 
loadBalancer: {} + loadBalancer: { } diff --git a/scripts/bootstrap_test_environment.sh b/scripts/bootstrap_test_environment.sh deleted file mode 100644 index 9030847..0000000 --- a/scripts/bootstrap_test_environment.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# Check if a Pipenv environment already exists -if pipenv --venv &>/dev/null; then - echo "Pipenv environment already exists. No need to bootstrap" -else - # If Pipenv environment doesn't exist, create a new one - pipenv --python 3.11-service_wizard2 - echo "Created new Pipenv environment." - - # Install dependencies - pipenv install --dev - pipenv sync - echo "Installed dependencies." -fi - -# Activate the Pipenv environment -pipenv shell - -# Sync the dependencies -pipenv sync diff --git a/scripts/entrypoint-dev.sh b/scripts/entrypoint-dev.sh deleted file mode 100644 index 80e28d1..0000000 --- a/scripts/entrypoint-dev.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# FastAPI recommends running a single process service per docker container instance as below, -# and scaling via adding more containers. 
If we need to run multiple processes, use guvicorn as -# a process manger as described in the FastAPI docs -exec uvicorn --host 0.0.0.0 --port 5000 --factory src.factory:create_app --reload diff --git a/scripts/entrypoint.sh b/scripts/entrypoint.sh index 3eb1259..eab069e 100755 --- a/scripts/entrypoint.sh +++ b/scripts/entrypoint.sh @@ -5,4 +5,4 @@ # a process manger as described in the FastAPI docs -exec uvicorn --host 0.0.0.0 --port 5000 --factory src.factory:create_app +PYTHONPATH=.:src exec uvicorn --host 0.0.0.0 --port 5000 --factory factory:create_app diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh new file mode 100644 index 0000000..1057e94 --- /dev/null +++ b/scripts/run_tests.sh @@ -0,0 +1,4 @@ +#!/bin/bash +# Helper script to run tests + +PYTHONPATH=.:src pipenv run pytest --cov=src --cov-report term-missing --cov-fail-under=99 --cov-report=xml:coverage.xml -W ignore::DeprecationWarning test diff --git a/scripts/update_dependencies.sh b/scripts/update_dependencies.sh index 34c24a1..4ae5dd4 100644 --- a/scripts/update_dependencies.sh +++ b/scripts/update_dependencies.sh @@ -1,8 +1,19 @@ #!/bin/bash + +if [ -n "$GITHUB_ACTION" ]; then + echo "This step is intended to be run from Github Actions" + pip install pipenv + rm Pipfile.lock + pipenv install --dev + pipenv sync --system --dev + exit 0 +fi + + if [[ -n $VIRTUAL_ENV ]]; then echo "Pipenv shell is activated and ready for updates" - rm Pipfile.lock + rm Pipfile.lock pipenv install --dev pipenv sync echo "Updated dependencies for: `which python`" diff --git a/src/clients/CachedAuthClient.py b/src/clients/CachedAuthClient.py index 88d8d31..123d87a 100644 --- a/src/clients/CachedAuthClient.py +++ b/src/clients/CachedAuthClient.py @@ -4,7 +4,7 @@ from cacheout import LRUCache from fastapi import HTTPException -from src.configs.settings import Settings, get_settings +from configs.settings import Settings, get_settings class UserAuthRoles: @@ -23,14 +23,14 @@ def is_admin_or_owner(self, owners: 
list[str]) -> bool: class CachedAuthClient: - valid_tokens = LRUCache(ttl=10) - - def __init__(self, settings: Settings): + def __init__(self, settings: Settings | None = None, valid_tokens_cache: LRUCache | None = None): """ Initialize the CachedAuthClient :param settings: The settings to use, or use the default settings if not provided + :param valid_tokens_cache: The cache to use for valid tokens, or use a new LRUCache if not provided """ - self.settings = get_settings() if not settings else settings + self.settings = get_settings() if settings is None else settings + self.valid_tokens = LRUCache(ttl=10) if valid_tokens_cache is None else valid_tokens_cache self.auth_url = self.settings.auth_service_url self.admin_roles = self.settings.admin_roles @@ -74,10 +74,10 @@ def _validate_token(self, token: str) -> UserAuthRoles: :raises: HTTPException if the token is invalid, expired, or the auth service is down or the auth URL is incorrect """ # TODO Try catch validate errors, auth service URL is bad, etc - username, roles = self.validate_and_get_username_roles(token) + username, roles = self.validate_and_get_username_auth_roles(token) return UserAuthRoles(username=username, user_roles=roles, admin_roles=self.admin_roles, token=token) - def validate_and_get_username_roles(self, token: str) -> tuple[str, list[str]]: + def validate_and_get_username_auth_roles(self, token: str) -> tuple[str, list[str]]: """ This calls out the auth service to validate the token and get the username and auth roles :param token: The token to validate diff --git a/src/clients/CachedCatalogClient.py b/src/clients/CachedCatalogClient.py index 5782a4f..b02c735 100644 --- a/src/clients/CachedCatalogClient.py +++ b/src/clients/CachedCatalogClient.py @@ -2,25 +2,30 @@ from cacheout import LRUCache -from src.clients.CatalogClient import Catalog -from src.configs.settings import Settings, get_settings +from clients.CatalogClient import Catalog +from configs.settings import Settings, get_settings 
-def get_module_name_hash(module_name: str = None): +def get_module_name_hash(module_name: str) -> str: """ Calculate the MD5 hash of a module name and return the first 20 characters of the hexadecimal digest. This is not a valid DNS name as it doesn't guarantee to start or end with an alphanumeric character. - This doesn't actually get used anywhere, its just here because it was like this in SW1 + This doesn't actually get used anywhere, it's just here because it was like this in SW1 :param module_name: The name of the module. :return: The MD5 hash of the module name. """ return hashlib.md5(module_name.encode()).hexdigest()[:20] -def _get_key(module_name: str, version: str = "release") -> str: +def _clean_version(version: str | int | None) -> str: if version is None: version = "release" - return module_name + "-" + version + + return str(version) + + +def _get_key(module_name: str, version: str = "release") -> str: + return str(module_name) + "-" + str(_clean_version(version)) class CachedCatalogClient: @@ -31,7 +36,7 @@ class CachedCatalogClient: cc: Catalog - def __init__(self, settings: Settings, catalog: Catalog = None): + def __init__(self, settings: Settings, catalog: Catalog | None = None): settings = get_settings() if not settings else settings self.cc = Catalog(url=settings.catalog_url, token=settings.catalog_admin_token) if not catalog else catalog @@ -50,7 +55,7 @@ def get_combined_module_info(self, module_name: str, version: str = "release") - key = _get_key(module_name, version) combined_module_info = self.module_info_cache.get(key=key, default=None) if not combined_module_info: - combined_module_info = self.cc.get_module_version({"module_name": module_name, "version": version}) + combined_module_info = self.cc.get_module_version({"module_name": module_name, "version": _clean_version(version)}) combined_module_info["owners"] = self.cc.get_module_info({"module_name": module_name})["owners"] self.module_info_cache.set(key=key, 
value=combined_module_info) if combined_module_info.get("dynamic_service") != 1: @@ -68,14 +73,14 @@ def list_service_volume_mounts(self, module_name: str, version: str = "release") key = _get_key(module_name, version) mounts = self.module_volume_mount_cache.get(key=key, default=None) if not mounts: - mounts_list = self.cc.list_volume_mounts(filter={"module_name": module_name, "version": version, "client_group": "service", "function_name": "service"}) + mounts_list = self.cc.list_volume_mounts(filter={"module_name": module_name, "version": _clean_version(version), "client_group": "service", "function_name": "service"}) mounts = [] if len(mounts_list) > 0: mounts = mounts_list[0]["volume_mounts"] self.module_volume_mount_cache.set(key=key, value=mounts) return mounts - def get_secure_params(self, module_name: str, version: str = "release"): + def get_secure_params(self, module_name: str, version: str = "release") -> list: """ Retrieve the secure config parameters for a module from the catalog. :param module_name: The name of the module. @@ -85,11 +90,11 @@ def get_secure_params(self, module_name: str, version: str = "release"): key = _get_key(module_name, version) secure_config_params = self.secure_config_cache.get(key=key, default=None) if not secure_config_params: - secure_config_params = self.cc.get_secure_config_params({"module_name": module_name, "version": version}) + secure_config_params = self.cc.get_secure_config_params({"module_name": module_name, "version": _clean_version(version)}) self.secure_config_cache.set(key=key, value=secure_config_params) return secure_config_params - def get_hash_to_name_mappings(self): + def get_hash_to_name_mappings(self) -> dict[str, dict]: """ Retrieve the hashes of dynamic service modules from the catalog. 
Connects to the catalog using the provided request, retrieves the list of basic module diff --git a/src/clients/KubernetesClients.py b/src/clients/KubernetesClients.py index 9648361..12c52a5 100644 --- a/src/clients/KubernetesClients.py +++ b/src/clients/KubernetesClients.py @@ -2,10 +2,11 @@ from typing import Optional from cacheout import LRUCache +from fastapi.requests import Request from kubernetes import config -from kubernetes.client import CoreV1Api, AppsV1Api, NetworkingV1Api +from kubernetes.client import CoreV1Api, AppsV1Api, NetworkingV1Api, V1Deployment -from src.configs.settings import Settings +from configs.settings import Settings class K8sClients: @@ -38,9 +39,16 @@ def __init__( ValueError: If more than one Kubernetes client is provided or if none are provided. """ - if sum(x is not None for x in [k8s_core_client, k8s_app_client, k8s_network_client]) > 1: + clients_and_types = [(k8s_core_client, CoreV1Api), (k8s_app_client, AppsV1Api), (k8s_network_client, NetworkingV1Api)] + + num_clients_provided = sum(x is not None for x in [k8s_core_client, k8s_app_client, k8s_network_client]) + if num_clients_provided not in [0, 3]: raise ValueError("All k8s_clients should either be all None or all provided") + for client, expected_type in clients_and_types: + if client is not None and not isinstance(client, expected_type): + raise TypeError(f"Expected client of type {expected_type}, but got {type(client)}") + if k8s_core_client is None: if settings.use_incluster_config is True: # Use a service account token if running in a k8s cluster @@ -59,3 +67,32 @@ def __init__( self.network_client = k8s_network_client self.service_status_cache = LRUCache(ttl=10) self.all_service_status_cache = LRUCache(ttl=10) + + +def get_k8s_core_client(request: Request) -> CoreV1Api: + return request.app.state.k8s_clients.core_client + + +def get_k8s_app_client(request: Request) -> AppsV1Api: + return request.app.state.k8s_clients.app_client + + +def 
get_k8s_networking_client(request: Request) -> NetworkingV1Api: + return request.app.state.k8s_clients.network_client + + +def get_k8s_service_status_cache(request: Request) -> LRUCache: + return request.app.state.k8s_clients.service_status_cache + + +def get_k8s_all_service_status_cache(request: Request) -> LRUCache: + return request.app.state.k8s_clients.all_service_status_cache + + +def check_service_status_cache(request: Request, label_selector_text: str) -> V1Deployment: + cache = get_k8s_service_status_cache(request) + return cache.get(label_selector_text, None) + + +def populate_service_status_cache(request: Request, label_selector_text: str, data: V1Deployment): + get_k8s_service_status_cache(request).set(label_selector_text, data) diff --git a/src/clients/README.md b/src/clients/README.md new file mode 100644 index 0000000..77be545 --- /dev/null +++ b/src/clients/README.md @@ -0,0 +1,2 @@ +# CLIENTS +* baseclient and CatalogClient are autogenerated kb-sdk clients diff --git a/src/clients/ServiceWizardClient.py b/src/clients/ServiceWizardClient.py deleted file mode 100644 index ab5e5a8..0000000 --- a/src/clients/ServiceWizardClient.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################ -# -# Autogenerated by the KBase type compiler - -# any changes made here will be overwritten -# -############################################################ - -from __future__ import print_function - -# the following is a hack to get the baseclient to import whether we're in a -# package or not. This makes pep8 unhappy hence the annotations. 
-try: - # baseclient and this client are in a package - from .baseclient import BaseClient as _BaseClient # @UnusedImport -except ImportError: - # no they aren't - from baseclient import BaseClient as _BaseClient # @Reimport - - -class ServiceWizard(object): - def __init__( - self, - url=None, - timeout=30 * 60, - user_id=None, - password=None, - token=None, - ignore_authrc=False, - trust_all_ssl_certificates=False, - auth_svc="https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login", - ): - if url is None: - raise ValueError("A url is required") - self._service_ver = None - self._client = _BaseClient( - url, - timeout=timeout, - user_id=user_id, - password=password, - token=token, - ignore_authrc=ignore_authrc, - trust_all_ssl_certificates=trust_all_ssl_certificates, - auth_svc=auth_svc, - ) - - def version(self, context=None): - """ - Get the version of the deployed service wizard endpoint. - :returns: instance of String - """ - return self._client.call_method("ServiceWizard.version", [], self._service_ver, context) - - def start(self, service, context=None): - """ - Try to start the specified service; this will generate an error if the - specified service cannot be started. If the startup did not give any - errors, then the status of the running service is provided. - :param service: instance of type "Service" (module_name - the name of - the service module, case-insensitive version - specify the - service version, which can be either: (1) full git commit hash of - the module version (2) semantic version or semantic version - specification Note: semantic version lookup will only work for - released versions of the module. (3) release tag, which is one of: - dev | beta | release This information is always fetched from the - Catalog, so for more details on specifying the version, see the - Catalog documentation for the get_module_version method.) 
-> - structure: parameter "module_name" of String, parameter "version" - of String - :returns: instance of type "ServiceStatus" (module_name - name of - the service module version - semantic version number of - the service module git_commit_hash - git commit hash of the - service module release_tags - list of release tags currently - for this service module (dev/beta/release) url - the - url of the service up - 1 if the service is up, 0 - otherwise status - status of the service as reported by - rancher health - health of the service as reported by - Rancher TODO: add something to return: string - last_request_timestamp;) -> structure: parameter "module_name" of - String, parameter "version" of String, parameter "git_commit_hash" - of String, parameter "release_tags" of list of String, parameter - "hash" of String, parameter "url" of String, parameter "up" of - type "boolean", parameter "status" of String, parameter "health" - of String - """ - return self._client.call_method("ServiceWizard.start", [service], self._service_ver, context) - - def stop(self, service, context=None): - """ - Try to stop the specified service; this will generate an error if the - specified service cannot be stopped. If the stop did not give any - errors, then the status of the stopped service is provided. - :param service: instance of type "Service" (module_name - the name of - the service module, case-insensitive version - specify the - service version, which can be either: (1) full git commit hash of - the module version (2) semantic version or semantic version - specification Note: semantic version lookup will only work for - released versions of the module. (3) release tag, which is one of: - dev | beta | release This information is always fetched from the - Catalog, so for more details on specifying the version, see the - Catalog documentation for the get_module_version method.) 
-> - structure: parameter "module_name" of String, parameter "version" - of String - :returns: instance of type "ServiceStatus" (module_name - name of - the service module version - semantic version number of - the service module git_commit_hash - git commit hash of the - service module release_tags - list of release tags currently - for this service module (dev/beta/release) url - the - url of the service up - 1 if the service is up, 0 - otherwise status - status of the service as reported by - rancher health - health of the service as reported by - Rancher TODO: add something to return: string - last_request_timestamp;) -> structure: parameter "module_name" of - String, parameter "version" of String, parameter "git_commit_hash" - of String, parameter "release_tags" of list of String, parameter - "hash" of String, parameter "url" of String, parameter "up" of - type "boolean", parameter "status" of String, parameter "health" - of String - """ - return self._client.call_method("ServiceWizard.stop", [service], self._service_ver, context) - - def list_service_status(self, params, context=None): - """ - :param params: instance of type "ListServiceStatusParams" (not yet - implemented funcdef pause(Service service) returns (ServiceStatus - status);) -> structure: parameter "is_up" of type "boolean", - parameter "module_names" of list of String - :returns: instance of list of type "ServiceStatus" (module_name - - name of the service module version - semantic version - number of the service module git_commit_hash - git commit hash of - the service module release_tags - list of release tags - currently for this service module (dev/beta/release) url - - the url of the service up - 1 if the service is up, - 0 otherwise status - status of the service as reported by - rancher health - health of the service as reported by - Rancher TODO: add something to return: string - last_request_timestamp;) -> structure: parameter "module_name" of - String, parameter "version" of String, 
parameter "git_commit_hash" - of String, parameter "release_tags" of list of String, parameter - "hash" of String, parameter "url" of String, parameter "up" of - type "boolean", parameter "status" of String, parameter "health" - of String - """ - return self._client.call_method("ServiceWizard.list_service_status", [params], self._service_ver, context) - - def get_service_status(self, service, context=None): - """ - For a given service, check on the status. If the service is down or - not running, this function will attempt to start or restart the - service once, then return the status. - This function will throw an error if the specified service cannot be - found or encountered errors on startup. - :param service: instance of type "Service" (module_name - the name of - the service module, case-insensitive version - specify the - service version, which can be either: (1) full git commit hash of - the module version (2) semantic version or semantic version - specification Note: semantic version lookup will only work for - released versions of the module. (3) release tag, which is one of: - dev | beta | release This information is always fetched from the - Catalog, so for more details on specifying the version, see the - Catalog documentation for the get_module_version method.) 
-> - structure: parameter "module_name" of String, parameter "version" - of String - :returns: instance of type "ServiceStatus" (module_name - name of - the service module version - semantic version number of - the service module git_commit_hash - git commit hash of the - service module release_tags - list of release tags currently - for this service module (dev/beta/release) url - the - url of the service up - 1 if the service is up, 0 - otherwise status - status of the service as reported by - rancher health - health of the service as reported by - Rancher TODO: add something to return: string - last_request_timestamp;) -> structure: parameter "module_name" of - String, parameter "version" of String, parameter "git_commit_hash" - of String, parameter "release_tags" of list of String, parameter - "hash" of String, parameter "url" of String, parameter "up" of - type "boolean", parameter "status" of String, parameter "health" - of String - """ - return self._client.call_method("ServiceWizard.get_service_status", [service], self._service_ver, context) - - def get_service_status_without_restart(self, service, context=None): - """ - :param service: instance of type "Service" (module_name - the name of - the service module, case-insensitive version - specify the - service version, which can be either: (1) full git commit hash of - the module version (2) semantic version or semantic version - specification Note: semantic version lookup will only work for - released versions of the module. (3) release tag, which is one of: - dev | beta | release This information is always fetched from the - Catalog, so for more details on specifying the version, see the - Catalog documentation for the get_module_version method.) 
-> - structure: parameter "module_name" of String, parameter "version" - of String - :returns: instance of type "ServiceStatus" (module_name - name of - the service module version - semantic version number of - the service module git_commit_hash - git commit hash of the - service module release_tags - list of release tags currently - for this service module (dev/beta/release) url - the - url of the service up - 1 if the service is up, 0 - otherwise status - status of the service as reported by - rancher health - health of the service as reported by - Rancher TODO: add something to return: string - last_request_timestamp;) -> structure: parameter "module_name" of - String, parameter "version" of String, parameter "git_commit_hash" - of String, parameter "release_tags" of list of String, parameter - "hash" of String, parameter "url" of String, parameter "up" of - type "boolean", parameter "status" of String, parameter "health" - of String - """ - return self._client.call_method("ServiceWizard.get_service_status_without_restart", [service], self._service_ver, context) - - def get_service_log(self, params, context=None): - """ - :param params: instance of type "GetServiceLogParams" (optional - instance_id to get logs for a specific instance. Otherwise logs - from all instances are returned, TODO: add line number - constraints.) -> structure: parameter "service" of type "Service" - (module_name - the name of the service module, case-insensitive - version - specify the service version, which can be either: - (1) full git commit hash of the module version (2) semantic - version or semantic version specification Note: semantic version - lookup will only work for released versions of the module. (3) - release tag, which is one of: dev | beta | release This - information is always fetched from the Catalog, so for more - details on specifying the version, see the Catalog documentation - for the get_module_version method.) 
-> structure: parameter - "module_name" of String, parameter "version" of String, parameter - "instance_id" of String - :returns: instance of list of type "ServiceLog" -> structure: - parameter "instance_id" of String, parameter "log" of list of - String - """ - return self._client.call_method("ServiceWizard.get_service_log", [params], self._service_ver, context) - - def get_service_log_web_socket(self, params, context=None): - """ - returns connection info for a websocket connection to get realtime service logs - :param params: instance of type "GetServiceLogParams" (optional - instance_id to get logs for a specific instance. Otherwise logs - from all instances are returned, TODO: add line number - constraints.) -> structure: parameter "service" of type "Service" - (module_name - the name of the service module, case-insensitive - version - specify the service version, which can be either: - (1) full git commit hash of the module version (2) semantic - version or semantic version specification Note: semantic version - lookup will only work for released versions of the module. (3) - release tag, which is one of: dev | beta | release This - information is always fetched from the Catalog, so for more - details on specifying the version, see the Catalog documentation - for the get_module_version method.) 
-> structure: parameter - "module_name" of String, parameter "version" of String, parameter - "instance_id" of String - :returns: instance of list of type "ServiceLogWebSocket" -> - structure: parameter "instance_id" of String, parameter - "socket_url" of String - """ - return self._client.call_method("ServiceWizard.get_service_log_web_socket", [params], self._service_ver, context) - - def status(self, context=None): - return self._client.call_method("ServiceWizard.status", [], self._service_ver, context) diff --git a/test/rpc_tests/test_rpc.py b/src/clients/__init__.py similarity index 100% rename from test/rpc_tests/test_rpc.py rename to src/clients/__init__.py diff --git a/src/configs/__init__.py b/src/configs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/configs/settings.py b/src/configs/settings.py index 118aabf..6d02cf9 100644 --- a/src/configs/settings.py +++ b/src/configs/settings.py @@ -39,22 +39,22 @@ class Settings: def get_settings() -> Settings: """ Get the settings for the service wizard. These are read from environment variables and then cached. + All variables should be strings. To read more about the variables, see the README.md file. 
:return: A Settings object """ required_variables = [ - "NAMESPACE", - "AUTH_SERVICE_URL", "AUTH_LEGACY_URL", - "CATALOG_URL", + "AUTH_SERVICE_URL", "CATALOG_ADMIN_TOKEN", - "EXTERNAL_SW_URL", + "CATALOG_URL", "EXTERNAL_DS_URL", - "ROOT_PATH", + "EXTERNAL_SW_URL", "KBASE_ROOT_ENDPOINT", "KBASE_SERVICES_ENDPOINT", + "NAMESPACE", + "ROOT_PATH", ] - # Treat all variables as strings for var in required_variables: value = os.environ.get(var) if not value: @@ -70,13 +70,9 @@ def get_settings() -> Settings: if role ] - # At least one required admin role must be set if len(admin_roles) == 0: raise EnvironmentVariableError("At least one admin role (KBASE_ADMIN_ROLE, CATALOG_ADMIN_ROLE, or SERVICE_WIZARD_ADMIN_ROLE) must be set in the .env file") - # USE_INCLUSTER_CONFIG is a boolean that takes precedence over KUBECONFIG - # USE_INCLUSTER_CONFIG works when running in a k8s cluster - # KUBECONFIG works when running locally and is good for local development if "KUBECONFIG" not in os.environ and "USE_INCLUSTER_CONFIG" not in os.environ: raise EnvironmentVariableError("At least one of the environment variables 'KUBECONFIG' or 'USE_INCLUSTER_CONFIG' must be set") @@ -95,5 +91,5 @@ def get_settings() -> Settings: namespace=os.environ.get("NAMESPACE"), root_path=os.environ.get("ROOT_PATH"), use_incluster_config=os.environ.get("USE_INCLUSTER_CONFIG", "").lower() == "true", - vcs_ref=os.environ.get("GIT_COMMIT_HASH"), + vcs_ref=os.environ.get("GIT_COMMIT_HASH", "unknown"), ) diff --git a/src/dependencies/__init__.py b/src/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/dependencies/k8_wrapper.py b/src/dependencies/k8_wrapper.py index df544f0..ac354ee 100644 --- a/src/dependencies/k8_wrapper.py +++ b/src/dependencies/k8_wrapper.py @@ -1,9 +1,7 @@ -import logging import re import time from typing import Optional, List -from cacheout import LRUCache from fastapi import Request from kubernetes import client from kubernetes.client import ( @@ -14,52 
+12,27 @@ V1IngressSpec, V1IngressRule, ApiException, - CoreV1Api, - AppsV1Api, - NetworkingV1Api, V1HTTPIngressPath, V1IngressBackend, - V1Deployment, V1HTTPIngressRuleValue, V1Toleration, ) -from src.configs.settings import get_settings - - -def get_k8s_core_client(request: Request) -> CoreV1Api: - return request.app.state.k8s_clients.core_client - - -def get_k8s_app_client(request: Request) -> AppsV1Api: - return request.app.state.k8s_clients.app_client - - -def get_k8s_networking_client(request: Request) -> NetworkingV1Api: - return request.app.state.k8s_clients.network_client - - -def _get_k8s_service_status_cache(request: Request) -> LRUCache: - return request.app.state.k8s_clients.service_status_cache - - -def _get_k8s_all_service_status_cache(request: Request) -> LRUCache: - return request.app.state.k8s_clients.all_service_status_cache - - -def check_service_status_cache(request: Request, label_selector_text) -> V1Deployment: - cache = _get_k8s_service_status_cache(request) - return cache.get(label_selector_text, None) - - -def populate_service_status_cache(request: Request, label_selector_text, data: list): - _get_k8s_service_status_cache(request).set(label_selector_text, data) +from clients.KubernetesClients import ( + get_k8s_core_client, + get_k8s_app_client, + get_k8s_networking_client, + get_k8s_all_service_status_cache, + check_service_status_cache, + populate_service_status_cache, +) +from configs.settings import get_settings def get_pods_in_namespace( k8s_client: client.CoreV1Api, - field_selector=None, - label_selector="dynamic-service=true", + field_selector: str | None = None, + label_selector: str = "dynamic-service=true", ) -> client.V1PodList: """ Retrieve a list of pods in a specific namespace based on the provided field and label selectors. 
@@ -73,52 +46,52 @@ def get_pods_in_namespace( return pod_list -def v1_volume_mount_factory(mounts): +def v1_volume_mount_factory(mounts: List[str]) -> tuple[list[client.V1Volume], list[client.V1VolumeMount]]: volumes = [] volume_mounts = [] if mounts: for i, mount in enumerate(mounts): + if not mount: + raise ValueError(f"Empty mount provided at index {i}") + mount_parts = mount.split(":") + + # Check that mount string is split into 3 parts if len(mount_parts) != 3: - logging.error(f"Invalid mount format: {mount}") + raise ValueError(f"Invalid mount format: {mount}. Expected format: host_path:mount_path:ro/rw") + + # Ensure third part is either "ro" or "rw" + if mount_parts[2] not in ["ro", "rw"]: + raise ValueError(f"Invalid permission in mount: {mount}. Expected 'ro' or 'rw' but got {mount_parts[2]}") + volumes.append(client.V1Volume(name=f"volume-{i}", host_path=client.V1HostPathVolumeSource(path=mount_parts[0]))) # This is your host path volume_mounts.append(client.V1VolumeMount(name=f"volume-{i}", mount_path=mount_parts[1], read_only=bool(mount_parts[2] == "ro"))) # This is your container path return volumes, volume_mounts -def _sanitize_deployment_name(module_name, module_git_commit_hash): +def sanitize_deployment_name(module_name: str, module_git_commit_hash: str) -> tuple[str, str]: """ - Create a deployment name based on the module name and git commit hash. But adhere to kubernetes api naming rules and be a valid DNS label - :param module_name: - :param module_git_commit_hash: - :return: + Create a deployment name based on the module name and git commit hash. + Adhere to Kubernetes API naming rules and create valid DNS labels. 
+ :param module_name: Name of the module + :param module_git_commit_hash: Git commit hash of the module + :return: Deployment name and service name """ - - sanitized_module_name = re.sub(r"[^a-zA-Z0-9]", "-", module_name) short_git_sha = module_git_commit_hash[:7] - + # 2 characters for 'd-', 7 characters for short_git_sha, 2 characters for '-d', and 1 character for the middle dash + reserved_length = len("d-") + len(short_git_sha) + len("-d") + 1 # +1 for the middle dash + available_length = 63 - reserved_length + sanitized_module_name = re.sub(r"[^a-zA-Z0-9-]", "-", module_name)[:available_length] deployment_name = f"d-{sanitized_module_name}-{short_git_sha}-d".lower() service_name = f"s-{sanitized_module_name}-{short_git_sha}-s".lower() - - # If the deployment name is too long, shorten it - if len(deployment_name) > 63: - excess_length = len(deployment_name) - 63 - deployment_name = f"d-{sanitized_module_name[:-excess_length]}-{short_git_sha}-d" - service_name = f"s-{sanitized_module_name[:-excess_length]}-{short_git_sha}-s" - return deployment_name, service_name - # TODO: Add a test for this function - # TODO: add documentation about maximum length of deployment name being 63 characters, - # Test the function with a very long module name and a git commit hash - # sanitize_deployment_name("My_Module_Name"*10, "7f6d03cf556b2a1e610fd70b68924a2f6700ae44") - -def create_clusterip_service(request, module_name, module_git_commit_hash, labels) -> client.V1Service: +def create_clusterip_service(request: Request, module_name: str, module_git_commit_hash: str, labels: dict[str, str]) -> client.V1Service: core_v1_api = get_k8s_core_client(request) - deployment_name, service_name = _sanitize_deployment_name(module_name, module_git_commit_hash) + deployment_name, service_name = sanitize_deployment_name(module_name, module_git_commit_hash) # Define the service service = V1Service( @@ -137,7 +110,7 @@ def create_clusterip_service(request, module_name, module_git_commit_hash, 
label return core_v1_api.create_namespaced_service(namespace=get_settings().namespace, body=service) -def _ensure_ingress_exists(request): +def _ensure_ingress_exists(request: Request) -> V1Ingress: # This ensures that the main service wizard ingress exists, and if it doesn't, creates it. # This should only ever be called once, or if in case someone deletes the ingress for it settings = request.app.state.settings @@ -157,57 +130,52 @@ def _ensure_ingress_exists(request): try: return networking_v1_api.read_namespaced_ingress(name="dynamic-services", namespace=settings.namespace) except ApiException as e: - if e.status == 404: + if e.status == 404: # Ingress Not Found return networking_v1_api.create_namespaced_ingress(namespace=settings.namespace, body=ingress) - else: - raise + raise -def _path_exists_in_ingress(ingress, path): +def path_exists_in_ingress(ingress: V1Ingress, path: str) -> bool: """Check if a path already exists in an ingress with one rule only""" - if ingress.spec.rules[0].http: - for existing_path in ingress.spec.rules[0].http.paths: - if existing_path.path == path: - return True + if ingress.spec.rules and ingress.spec.rules[0].http: + return any(existing_path.path == path for existing_path in ingress.spec.rules[0].http.paths) return False -class InvalidIngressError(Exception): - pass - - -def _update_ingress_with_retries(request, new_path, namespace, retries=3): - for retry in range(retries): +def _update_ingress_with_retries(request: Request, new_path: V1HTTPIngressPath, namespace: str, retries: int = 3): + for attempt in range(retries): try: ingress = _ensure_ingress_exists(request) # Initialize http attribute with an empty paths list if it is None if ingress.spec.rules[0].http is None: ingress.spec.rules[0].http = V1HTTPIngressRuleValue(paths=[]) # Only append the path if it doesn't exist already - if not _path_exists_in_ingress(ingress, new_path.path): + if not path_exists_in_ingress(ingress, new_path.path): 
ingress.spec.rules[0].http.paths.append(new_path) get_k8s_networking_client(request).replace_namespaced_ingress(name=ingress.metadata.name, namespace=namespace, body=ingress) break # if the operation was successful, break the retry loop except ApiException as e: - if e.status not in {409, 422} or retry == retries - 1: - # re-raise the exception on the last retry, or if the error is not a conflict - raise - else: + if e.status in {409, 422} and attempt < retries - 1: + # Sleep and retry if the error is a conflict, and we haven't reached the max retries time.sleep(1) + continue + raise def update_ingress_to_point_to_service(request: Request, module_name: str, git_commit_hash: str): settings = request.app.state.settings namespace = settings.namespace - deployment_name, service_name = _sanitize_deployment_name(module_name, git_commit_hash) + deployment_name, service_name = sanitize_deployment_name(module_name, git_commit_hash) # Need to sync this with Status methods path = f"/{settings.external_ds_url.split('/')[-1]}/{module_name}.{git_commit_hash}(/|$)(.*)" new_path = V1HTTPIngressPath(path=path, path_type="ImplementationSpecific", backend=V1IngressBackend(service={"name": service_name, "port": {"number": 5000}})) _update_ingress_with_retries(request=request, new_path=new_path, namespace=namespace) -def create_and_launch_deployment(request, module_name, module_git_commit_hash, image, labels, annotations, env, mounts) -> client.V1LabelSelector: - deployment_name, service_name = _sanitize_deployment_name(module_name, module_git_commit_hash) +def create_and_launch_deployment( + request: Request, module_name: str, module_git_commit_hash: str, image: str, labels: list, annotations: dict, env: dict, mounts: list +) -> client.V1LabelSelector: + deployment_name, service_name = sanitize_deployment_name(module_name, module_git_commit_hash) namespace = request.app.state.settings.namespace annotations["k8s_deployment_name"] = deployment_name @@ -236,7 +204,7 @@ class 
DuplicateLabelsException(Exception): pass -def _get_deployment_status(request, label_selector_text) -> Optional[client.V1Deployment]: +def _get_deployment_status(request: Request, label_selector_text: str) -> Optional[client.V1Deployment]: deployment_status = check_service_status_cache(request, label_selector_text) if deployment_status is not None: return deployment_status @@ -256,17 +224,17 @@ def _get_deployment_status(request, label_selector_text) -> Optional[client.V1De return deployment_status -def query_k8s_deployment_status(request, module_name, module_git_commit_hash) -> client.V1Deployment: +def query_k8s_deployment_status(request: Request, module_name: str, module_git_commit_hash: str) -> client.V1Deployment: label_selector_text = f"us.kbase.module.module_name={module_name.lower()}," + f"us.kbase.module.git_commit_hash={module_git_commit_hash}" return _get_deployment_status(request, label_selector_text) -def get_k8s_deployment_status_from_label(request, label_selector: client.V1LabelSelector) -> client.V1Deployment: +def get_k8s_deployment_status_from_label(request: Request, label_selector: client.V1LabelSelector) -> client.V1Deployment: label_selector_text = ",".join([f"{key}={value}" for key, value in label_selector.match_labels.items()]) return _get_deployment_status(request, label_selector_text) -def get_k8s_deployments(request, label_selector="us.kbase.dynamicservice=true") -> List[client.V1Deployment]: +def get_k8s_deployments(request: Request, label_selector: str = "us.kbase.dynamicservice=true") -> List[client.V1Deployment]: """ Get all deployments with the given label selector. This is cached for 5 minutes. 
:param request: Request object @@ -274,7 +242,7 @@ def get_k8s_deployments(request, label_selector="us.kbase.dynamicservice=true") :return: A list of deployments """ - cache = _get_k8s_all_service_status_cache(request) + cache = get_k8s_all_service_status_cache(request) cached_deployments = cache.get(label_selector, None) if cached_deployments is not None: return cached_deployments @@ -287,22 +255,22 @@ def get_k8s_deployments(request, label_selector="us.kbase.dynamicservice=true") return deployments -def delete_deployment(request, module_name, module_git_commit_hash) -> str: - deployment_name, _ = _sanitize_deployment_name(module_name, module_git_commit_hash) +def delete_deployment(request: Request, module_name: str, module_git_commit_hash: str) -> str: + deployment_name, _ = sanitize_deployment_name(module_name, module_git_commit_hash) namespace = request.app.state.settings.namespace get_k8s_app_client(request).delete_namespaced_deployment(name=deployment_name, namespace=namespace) return deployment_name -def scale_replicas(request, module_name, module_git_commit_hash, replicas: int) -> client.V1Deployment: +def scale_replicas(request: Request, module_name: str, module_git_commit_hash: str, replicas: int) -> client.V1Deployment: deployment = query_k8s_deployment_status(request, module_name, module_git_commit_hash) namespace = request.app.state.settings.namespace deployment.spec.replicas = replicas return get_k8s_app_client(request).replace_namespaced_deployment(name=deployment.metadata.name, namespace=namespace, body=deployment) -def get_logs_for_first_pod_in_deployment(request, module_name, module_git_commit_hash): - deployment_name, _ = _sanitize_deployment_name(module_name, module_git_commit_hash) +def get_logs_for_first_pod_in_deployment(request: Request, module_name: str, module_git_commit_hash: str) -> tuple[str, str] | tuple[str, list[str]]: + deployment_name, _ = sanitize_deployment_name(module_name, module_git_commit_hash) namespace = 
request.app.state.settings.namespace label_selector_text = f"us.kbase.module.module_name={module_name.lower()}," + f"us.kbase.module.git_commit_hash={module_git_commit_hash}" diff --git a/src/dependencies/lifecycle.py b/src/dependencies/lifecycle.py index ae1df30..4d6d68c 100644 --- a/src/dependencies/lifecycle.py +++ b/src/dependencies/lifecycle.py @@ -7,24 +7,16 @@ from fastapi import Request from kubernetes.client import ApiException -from src.clients.baseclient import ServerError -from src.configs.settings import Settings # noqa: F401 -from src.dependencies.k8_wrapper import ( +from clients.baseclient import ServerError +from configs.settings import Settings # noqa: F401 +from dependencies.k8_wrapper import ( create_and_launch_deployment, create_clusterip_service, update_ingress_to_point_to_service, scale_replicas, ) -from src.dependencies.status import get_service_status_with_retries, lookup_module_info -from src.models.models import DynamicServiceStatus - - -class ServiceAlreadyExistsException(HTTPException): - """ - Exception to be raised when a service already exists. - """ - - pass +from dependencies.status import get_service_status_with_retries, lookup_module_info +from models import DynamicServiceStatus def get_env(request, module_name, module_version) -> Dict[str, str]: @@ -53,7 +45,7 @@ def get_env(request, module_name, module_version) -> Dict[str, str]: return environ_map -def get_volume_mounts(request, module_name, module_version): +def get_volume_mounts(request, module_name, module_version) -> list[str]: """ Get the volume mounts from the KBase Catalog for a module and set it up for the container to use. 
:param request: The request object @@ -62,12 +54,12 @@ def get_volume_mounts(request, module_name, module_version): :return: """ volume_mounts = request.app.state.catalog_client.list_service_volume_mounts(module_name, module_version) + mounts = [] if len(volume_mounts) > 0: - mounts = [] for vol in volume_mounts: mount_type = "ro" if vol["read_only"] > 0 else "rw" mounts.append(f"{vol['host_dir']}:{vol['container_dir']}:{mount_type}") - return mounts + return mounts def _setup_metadata(module_name, requested_module_version, git_commit_hash, version, git_url) -> Tuple[Dict, Dict]: @@ -130,7 +122,6 @@ def _create_and_launch_deployment_helper( return False except ApiException as e: if e.status == 409: # AlreadyExistsError - logging.warning(e.body) return True else: detail = traceback.format_exc() diff --git a/src/dependencies/logs.py b/src/dependencies/logs.py index d8862d6..2a8cf41 100644 --- a/src/dependencies/logs.py +++ b/src/dependencies/logs.py @@ -2,10 +2,10 @@ from fastapi.requests import Request -from src.clients.baseclient import ServerError -from src.dependencies.k8_wrapper import get_logs_for_first_pod_in_deployment -from src.dependencies.status import lookup_module_info -from src.rpc.models import JSONRPCResponse +from clients.baseclient import ServerError +from dependencies.k8_wrapper import get_logs_for_first_pod_in_deployment +from dependencies.status import lookup_module_info +from rpc.models import JSONRPCResponse def get_service_log(request: Request, module_name: str, module_version: str) -> JSONRPCResponse | list[dict[str, Any]] | None: @@ -18,7 +18,7 @@ def get_service_log(request: Request, module_name: str, module_version: str) -> :param module_version: The module version, normalization not required :return: Logs for a single pod in the deployment """ - user_auth_roles = request.state.user_auth_roles # type: UserAuthRoles + user_auth_roles = request.state.user_auth_roles module_info = lookup_module_info(request, module_name, module_version) tags 
= module_info.release_tags @@ -29,7 +29,7 @@ def get_service_log(request: Request, module_name: str, module_version: str) -> return [{"instance_id": pod_name, "log": logs}] -def get_service_log_web_socket(request: Request, module_name: str, module_version: str) -> List[dict]: +def get_service_log_web_socket(request: Request, module_name: str, module_version: str) -> List[dict]: # pragma: no cover """ Get logs for a service. This isn't used anywhere but can require a dependency on rancher if implemented. diff --git a/src/dependencies/middleware.py b/src/dependencies/middleware.py index 00c646c..ad71125 100644 --- a/src/dependencies/middleware.py +++ b/src/dependencies/middleware.py @@ -1,7 +1,9 @@ -from fastapi import HTTPException, Request -from fastapi import Header, Cookie +import logging -from src.clients.CachedAuthClient import CachedAuthClient # noqa: F401 +from fastapi import HTTPException +from fastapi import Header, Cookie, Request + +from clients.CachedAuthClient import CachedAuthClient # noqa: F401 # Constants ALPHANUMERIC_PATTERN = r"^[a-zA-Z0-9]+$" @@ -16,19 +18,24 @@ def is_authorized( description="KBase auth token", ), kbase_session: str = Cookie(None, regex=ALPHANUMERIC_PATTERN), + method: str | None = None, + payload: dict | None = None, ) -> bool: """ Check if the user is authorized to access the endpoint in general. This does not check if the user is authorized to STOP or VIEW LOGS for specific services. 
+ :param request: The request to check :param authorization: The authorization header :param kbase_session: The kbase_session cookie + :param method: The method being called to log :return: A boolean indicating if the user is authorized or not """ if not authorization and not kbase_session: + logging.warning(f"No authorization header or kbase_session cookie provided for {method} payload: {payload}") raise HTTPException( - status_code=400, - detail="Please provide the 'Authorization' header or 'kbase_session' cookie", + status_code=401, + detail=f"Please provide the 'Authorization' header or 'kbase_session' cookie for {method} payload: {payload} ", ) try: ac = request.app.state.auth_client # type: CachedAuthClient @@ -41,4 +48,5 @@ def is_authorized( elif e.status_code == 404: raise e else: + logging.warning("Invalid or expired token") raise HTTPException(status_code=400, detail="Invalid or expired token") diff --git a/src/dependencies/status.py b/src/dependencies/status.py index b7cb9d5..1367dd4 100644 --- a/src/dependencies/status.py +++ b/src/dependencies/status.py @@ -1,13 +1,13 @@ import logging import time -from typing import List, Dict +from typing import List, Dict, Optional, Any from fastapi import Request, HTTPException -from src.clients.baseclient import ServerError -from src.configs.settings import get_settings -from src.dependencies.k8_wrapper import query_k8s_deployment_status, get_k8s_deployments, DuplicateLabelsException -from src.models.models import DynamicServiceStatus, CatalogModuleInfo +from clients.baseclient import ServerError +from configs.settings import get_settings +from dependencies.k8_wrapper import query_k8s_deployment_status, get_k8s_deployments, DuplicateLabelsException +from models import DynamicServiceStatus, CatalogModuleInfo def lookup_module_info(request: Request, module_name: str, git_commit: str) -> CatalogModuleInfo: @@ -62,7 +62,7 @@ def get_service_status_with_retries(request, module_name, version, retries=10) - :return: """ 
# Validate request in catalog first - lookup_module_info(request=request, module_name=module_name, git_commit=version) # type: 'CatalogModuleInfo' + lookup_module_info(request=request, module_name=module_name, git_commit=version) # Then check kubernetes for _ in range(retries): try: @@ -89,15 +89,15 @@ def get_dynamic_service_status_helper(request, module_name, version) -> DynamicS """ Retrieve the status of a service based on the module version and git commit hash. :param request: The request object used to retrieve module information. - :param version: - :param module_name: - + :param version: The version of the module requested (not normalized). + :param module_name: The name of the module. :return: The service status. + :raises HTTPException: If the service is not found with the given module name and version. """ - module_info = lookup_module_info(request=request, module_name=module_name, git_commit=version) # type: 'CatalogModuleInfo' + module_info = lookup_module_info(request=request, module_name=module_name, git_commit=version) - deployment = query_k8s_deployment_status(request, module_name=module_name, module_git_commit_hash=module_info.git_commit_hash) # type: 'V1Deployment' + deployment = query_k8s_deployment_status(request, module_name=module_name, module_git_commit_hash=module_info.git_commit_hash) if deployment: return DynamicServiceStatus( url=module_info.url, @@ -117,6 +117,11 @@ def get_dynamic_service_status_helper(request, module_name, version) -> DynamicS raise HTTPException(status_code=404, detail=f"No dynamic service found with module_name={module_name} and version={version}") +class IncompleteDeploymentAnnotationError(Exception): + def __init__(self, deployment_name): + super().__init__(f"Deployment '{deployment_name}' has missing or None 'module_name' or 'git_commit_hash' annotations.") + + def get_all_dynamic_service_statuses(request: Request, module_name, module_version) -> List[DynamicServiceStatus]: if module_name or module_version: 
logging.debug("dropping list_service_status params since SW1 doesn't use them") @@ -134,14 +139,17 @@ def get_all_dynamic_service_statuses(request: Request, module_name, module_versi # TODO see if you need to get the list based on running deployments or based on the catalog dynamic_service_statuses = [] for deployment in deployment_statuses: - deployment = deployment # type: 'V1Deployment' + deployment = deployment try: - module_name = deployment.metadata.annotations["module_name"] - git_commit = deployment.metadata.annotations["git_commit_hash"] - except KeyError: + module_name = deployment.metadata.annotations.get("module_name") + git_commit = deployment.metadata.annotations.get("git_commit_hash") + if not module_name or not git_commit: + raise IncompleteDeploymentAnnotationError(deployment.metadata.name) + except IncompleteDeploymentAnnotationError: # If someone deployed a bad service into this namespace, this will protect this query from failing continue - module_info = lookup_module_info(request=request, module_name=module_name, git_commit=git_commit) # type: 'CatalogModuleInfo' + + module_info = lookup_module_info(request=request, module_name=module_name, git_commit=git_commit) dynamic_service_statuses.append( DynamicServiceStatus( url=module_info.url, @@ -157,10 +165,21 @@ def get_all_dynamic_service_statuses(request: Request, module_name, module_versi unavailable_replicas=deployment.status.unavailable_replicas, ) ) + + # Deployments were found, but none of them had the correct annotations, they were missing + # deployment.metadata.annotations.get("module_name") + # deployment.metadata.annotations.get("git_commit_hash") + if len(dynamic_service_statuses) == 0: + raise HTTPException( + status_code=404, + detail=f"No dynamic services found in kubernetes cluster with namespace={get_settings().namespace} and labels=dynamic-service=true! 
Or " + f"they were found and they were missing the module_name and git_commit_hash annotations!", + ) + return dynamic_service_statuses -def get_status(request: Request, module_name: str, version: str) -> Dict: +def get_status(request: Request, module_name: Optional[Any] = None, version: Optional[Any] = None) -> Dict: if module_name or version: logging.debug("dropping get_status params since SW1 doesn't use them") @@ -173,7 +192,7 @@ def get_status(request: Request, module_name: str, version: str) -> Dict: } -def get_version(request: Request, module_name, version) -> List[str]: +def get_version(request: Request, module_name: Optional[Any] = None, version: Optional[Any] = None) -> List[str]: if module_name or version: logging.debug("dropping get_version params since SW1 doesn't use them") diff --git a/src/factory.py b/src/factory.py index 2c9142c..df01c44 100644 --- a/src/factory.py +++ b/src/factory.py @@ -3,64 +3,68 @@ from typing import Optional import sentry_sdk -from cacheout import LRUCache # noqa F401 from dotenv import load_dotenv from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware -from prometheus_fastapi_instrumentator import Instrumentator -from src.clients.CachedAuthClient import CachedAuthClient -from src.clients.CachedCatalogClient import CachedCatalogClient -from src.clients.KubernetesClients import K8sClients -from src.configs.settings import get_settings, Settings -from src.routes.authenticated_routes import router as sw2_authenticated_router -from src.routes.rpc import router as sw2_rpc_router -from src.routes.unauthenticated_routes import router as sw2_unauthenticated_router +from clients.CachedAuthClient import CachedAuthClient +from clients.CachedCatalogClient import CachedCatalogClient +from clients.KubernetesClients import K8sClients +from configs.settings import get_settings, Settings +from routes.authenticated_routes import router as sw2_authenticated_router +from routes.metrics_routes import router as 
metrics_router +from routes.rpc_route import router as sw2_rpc_router +from routes.unauthenticated_routes import router as sw2_unauthenticated_router def create_app( catalog_client: Optional[CachedCatalogClient] = None, auth_client: Optional[CachedAuthClient] = None, - k8s_clients: K8sClients = None, + k8s_clients: Optional[K8sClients] = None, settings: Optional[Settings] = None, ) -> FastAPI: """ Create the app with the required dependencies. :param catalog_client: An instance of CachedCatalogClient :param auth_client: An instance of CachedAuthClient - :param k8s_clients: An instance of K8sClients - :param settings: An instance of Settings + :param k8s_clients: An instance of K8sClients + :param settings: An instance of Settings :return: Fastapi app and clients saved it its state attribute """ + logging.basicConfig(level=os.environ.get("LOG_LEVEL", "INFO")) - logging.basicConfig(level=logging.DEBUG) - load_dotenv(os.environ.get("DOTENV_FILE_LOCATION", ".env")) + if os.environ.get("DOTENV_FILE_LOCATION"): + load_dotenv(os.environ.get("DOTENV_FILE_LOCATION", ".env")) + + if not settings: + settings = get_settings() - # Instrumentation for Sentry connection - # This is an administrator telemetry setting and should not be used for local development if os.environ.get("SENTRY_DSN"): sentry_sdk.init( dsn=os.environ["SENTRY_DSN"], traces_sample_rate=1.0, http_proxy=os.environ.get("HTTP_PROXY"), + environment=settings.external_ds_url, ) - if not settings: - settings = get_settings() + app = FastAPI(root_path=settings.root_path) # type: FastAPI - # Set up the state of the app with various clients. Note, when running multiple threads, these will each have their own cache + # Set up the state of the app with various clients. 
+ # Note, when running multiple threads, these will each have their own cache app.state.settings = settings app.state.catalog_client = catalog_client or CachedCatalogClient(settings=settings) app.state.k8s_clients = k8s_clients if k8s_clients else K8sClients(settings=settings) app.state.auth_client = auth_client if auth_client else CachedAuthClient(settings=settings) + # Add the routes app.include_router(sw2_authenticated_router) app.include_router(sw2_unauthenticated_router) app.include_router(sw2_rpc_router) - # Middleware Do we need this? + app.add_middleware(GZipMiddleware, minimum_size=1000) - # Instrumentation for prometheus metrics - Instrumentator().instrument(app).expose(app) + + if os.environ.get("METRICS_USERNAME") and os.environ.get("METRICS_PASSWORD"): + app.include_router(router=metrics_router) return app diff --git a/src/models/__init__.py b/src/models/__init__.py new file mode 100644 index 0000000..60280ce --- /dev/null +++ b/src/models/__init__.py @@ -0,0 +1 @@ +from .models import * # noqa F401 F403 diff --git a/src/models/models.py b/src/models/models.py index cd7ee3f..584aa5b 100644 --- a/src/models/models.py +++ b/src/models/models.py @@ -41,7 +41,7 @@ class DynamicServiceStatus(BaseModel): git_commit_hash: str # Git commit hash of the service status: ServiceStatus # Service status based on replica counts version: str # Version of the service - hash: str = None + hash: str | None = None release_tags: List[str] # List of release tags for the service url: str # URL of the service module_name: str # Name of the service module diff --git a/src/models/rancher1_models.py b/src/models/rancher1_models.py deleted file mode 100644 index cea824b..0000000 --- a/src/models/rancher1_models.py +++ /dev/null @@ -1,31 +0,0 @@ -from enum import Enum - - -class Rancher1ServiceState(Enum): - ACTIVE = "active" # Indicates that the service is currently running or active. - UPGRADING = "upgrading" # Indicates that the service is in the process of being upgraded. 
- UPGRADING_ROLLBACK = "upgrading-rollback" # Indicates that the service is in the process of rolling back an upgrade. - DEACTIVATED = "deactivated" # Indicates that the service is not running or inactive. - REMOVED = "removed" # Indicates that the service has been removed. - - -class Rancher1HealthState(Enum): - HEALTHY = "healthy" # Indicates that the service is functioning correctly and in a healthy state. - UNHEALTHY = "unhealthy" # Indicates that the service is experiencing issues or is in an unhealthy state. - INITIALIZING = "initializing" # Indicates that the service is in the process of initializing. - INITIALIZING_ROLLBACK = "initializing-rollback" # Indicates that the service is rolling back the initialization process. - UPGRADING = "upgrading" # Indicates that the service is in the process of being upgraded. - UPGRADING_ROLLBACK = "upgrading-rollback" # Indicates that the service is rolling back an upgrade. - UNKNOWN = "unknown" # Indicates that the health state of the service is not known or cannot be determined. 
- - -class ContainerState(str, Enum): - CREATED = "created" - RUNNING = "running" - EXITED = "exited" - WAITING = "waiting" - TERMINATED = "terminated" - CRASH_LOOP_BACK_OFF = "crash-loop-backoff" - IMAGE_PULL_BACK_OFF = "image-pull-backoff" - INIT = "init" - PENDING = "pending" diff --git a/src/routes/__init__.py b/src/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/routes/authenticated_routes.py b/src/routes/authenticated_routes.py index e541996..239f3af 100644 --- a/src/routes/authenticated_routes.py +++ b/src/routes/authenticated_routes.py @@ -1,7 +1,6 @@ from fastapi import APIRouter, Depends, Request, Header, Cookie -from src.clients.CachedAuthClient import CachedAuthClient # noqa: F401 -from src.dependencies.middleware import is_authorized, ALPHANUMERIC_PATTERN +from dependencies.middleware import is_authorized, ALPHANUMERIC_PATTERN router = APIRouter( tags=["authenticated"], @@ -25,6 +24,6 @@ def whoami( ), kbase_session: str = Cookie(None, regex=ALPHANUMERIC_PATTERN), ): - cac = request.app.state.auth_client # type: CachedAuthClient + cac = request.app.state.auth_client - return cac.validate_and_get_username_roles(token=authorization if authorization else kbase_session) + return cac.validate_and_get_username_auth_roles(token=authorization if authorization else kbase_session) diff --git a/src/routes/metrics_routes.py b/src/routes/metrics_routes.py new file mode 100644 index 0000000..d14e1bf --- /dev/null +++ b/src/routes/metrics_routes.py @@ -0,0 +1,17 @@ +import os + +from fastapi import APIRouter, Depends +from fastapi.responses import PlainTextResponse +from fastapi.security import HTTPBasic, HTTPBasicCredentials +from prometheus_client import generate_latest + +router = APIRouter(tags=["metrics"]) + +security = HTTPBasic() + + +@router.get("/metrics", response_class=PlainTextResponse) +def get_metrics(credentials: HTTPBasicCredentials = Depends(security)): + if credentials.username != os.environ["METRICS_USERNAME"] or 
credentials.password != os.environ["METRICS_PASSWORD"]: + return PlainTextResponse("Unauthorized", status_code=401) + return generate_latest() diff --git a/src/routes/rpc.py b/src/routes/rpc.py deleted file mode 100644 index 8481799..0000000 --- a/src/routes/rpc.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import Callable - -from fastapi import Request, APIRouter, HTTPException, Depends -from fastapi.encoders import jsonable_encoder -from fastapi.responses import Response, JSONResponse - -from src.rpc import authenticated_routes, unauthenticated_routes -from src.rpc.common import validate_rpc_request, rpc_auth -from src.rpc.error_responses import ( - method_not_found, -) -from src.rpc.models import JSONRPCResponse - -router = APIRouter( - tags=["rpc"], - responses={404: {"description": "Not found"}}, -) - -# No KBase Token Required -unauthenticated_routes_mapping = { - "ServiceWizard.list_service_status": unauthenticated_routes.list_service_status, - "ServiceWizard.status": unauthenticated_routes.status, - "ServiceWizard.version": unauthenticated_routes.version, - "ServiceWizard.get_service_status_without_restart": unauthenticated_routes.get_service_status_without_restart, -} -# Valid KBase Token Required -kbase_token_required = { - "ServiceWizard.start": authenticated_routes.start, - "ServiceWizard.get_service_status": authenticated_routes.start, -} -# Valid KBase Token and Admin or username in [owners] in kbase.yaml required -admin_or_owner_required = { - "ServiceWizard.get_service_log": authenticated_routes.get_service_log, - "ServiceWizard.stop": authenticated_routes.stop, -} - -authenticated_routes_mapping = {**kbase_token_required, **admin_or_owner_required} - -# Combine the dictionaries -known_methods = {**unauthenticated_routes_mapping, **authenticated_routes_mapping} - - -async def get_body(request: Request): - return await request.body() - - -@router.post("/rpc", response_model=None) -@router.post("/rpc/", response_model=None) -@router.post("/", 
response_model=None) -def json_rpc(request: Request, body: bytes = Depends(get_body)) -> Response | HTTPException | JSONRPCResponse | JSONResponse: - method, params, jrpc_id = validate_rpc_request(body) - request_function: Callable = known_methods.get(method) - if request_function is None: - return method_not_found(method=method, jrpc_id=jrpc_id) - - if request_function in authenticated_routes_mapping.values(): - request.state.user_auth_roles = rpc_auth(request, jrpc_id) - - valid_response = request_function(request, params, jrpc_id) # type:JSONRPCResponse - converted_response = jsonable_encoder(valid_response) - if "error" in converted_response: - return JSONResponse(content=converted_response, status_code=500) - return JSONResponse(content=converted_response, status_code=200) diff --git a/src/routes/rpc_route.py b/src/routes/rpc_route.py new file mode 100644 index 0000000..d0048f5 --- /dev/null +++ b/src/routes/rpc_route.py @@ -0,0 +1,21 @@ +from fastapi import Request, APIRouter, HTTPException, Depends +from fastapi.responses import Response, JSONResponse + +from rpc.handlers.json_rpc_handler import json_rpc_helper +from rpc.models import JSONRPCResponse + +router = APIRouter( + tags=["rpc"], + responses={404: {"description": "Not found"}}, +) + + +async def get_body(request: Request): + return await request.body() + + +@router.post("/rpc", response_model=None) +@router.post("/rpc/", response_model=None) +@router.post("/", response_model=None) +def json_rpc(request: Request, body: bytes = Depends(get_body)) -> Response | HTTPException | JSONRPCResponse | JSONResponse: + return json_rpc_helper(request, body) diff --git a/src/routes/unauthenticated_routes.py b/src/routes/unauthenticated_routes.py index 719e07d..51ec74f 100644 --- a/src/routes/unauthenticated_routes.py +++ b/src/routes/unauthenticated_routes.py @@ -1,7 +1,6 @@ from fastapi import APIRouter, Request -from src.dependencies.status import get_version, get_status -from src.configs.settings import 
Settings # noqa: F401 +from dependencies.status import get_version, get_status router = APIRouter( tags=["unauthenticated"], diff --git a/src/rpc/__init__.py b/src/rpc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/rpc/common.py b/src/rpc/common.py index 49841bc..f22441c 100644 --- a/src/rpc/common.py +++ b/src/rpc/common.py @@ -1,30 +1,18 @@ import json import traceback -from typing import Callable +from typing import Callable, Any -from fastapi import HTTPException -from starlette.requests import Request +from fastapi import HTTPException, Request -from src.clients.CachedAuthClient import UserAuthRoles, CachedAuthClient # noqa: F401 -from src.clients.baseclient import ServerError -from src.dependencies.middleware import is_authorized -from src.rpc.error_responses import ( - token_validation_failed, - json_rpc_response_to_exception, +from clients.CachedAuthClient import UserAuthRoles, CachedAuthClient # noqa: F401 +from clients.baseclient import ServerError +from rpc.error_responses import ( no_params_passed, ) -from src.rpc.models import ErrorResponse, JSONRPCResponse +from rpc.models import ErrorResponse, JSONRPCResponse -class AuthException(Exception): - pass - - -class AuthServiceException(Exception): - pass - - -def validate_rpc_request(body): +def validate_rpc_request(body) -> tuple[str, list[dict], str]: """ Validate the JSON-RPC request body to ensure methods and params are present and of the correct type. :param body: The JSON-RPC request body @@ -46,7 +34,7 @@ def validate_rpc_request(body): params = json_data.get("params", []) jrpc_id = json_data.get("id", 0) - if not isinstance(method, str) or not isinstance(params, list): + if not isinstance(method, str) or not isinstance(params, list): raise ServerError(message=f"`method` must be a valid SW1 method string. Params must be a dictionary. 
{json_data}", code=-32600, name="Invalid Request") return method, params, jrpc_id @@ -66,18 +54,22 @@ def validate_rpc_response(response: JSONRPCResponse): return response -def rpc_auth(request: Request, jrpc_id: str) -> UserAuthRoles: - # Extract the Authorization header and the kbase_session cookie +def get_user_auth_roles(request: Request, jrpc_id: str, method: str) -> tuple[Any, None] | tuple[None, JSONRPCResponse]: authorization = request.headers.get("Authorization") kbase_session = request.cookies.get("kbase_session") - - # Call the authenticated_user function - authorized = is_authorized(request=request, kbase_session=kbase_session, authorization=authorization) - if not authorized: - raise AuthException(json_rpc_response_to_exception(token_validation_failed(jrpc_id))) - - ac = request.app.state.auth_client # type: CachedAuthClient - return ac.get_user_auth_roles(token=authorization or kbase_session) + try: + return request.app.state.auth_client.get_user_auth_roles(token=authorization or kbase_session), None + except HTTPException as e: + return None, JSONRPCResponse( + id=jrpc_id, + error=ErrorResponse( + message=f"Authentication required for ServiceWizard.{method}", + code=-32000, + name="Authentication error", + error=f"{e.detail}", + ), + ) + # Something unexpected happened, but we STILL don't want to authorize the request! def handle_rpc_request( @@ -88,14 +80,24 @@ def handle_rpc_request( ) -> JSONRPCResponse: method_name = action.__name__ try: - params = params[0] + first_param = params[0] + if not isinstance(first_param, dict): + return JSONRPCResponse( + id=jrpc_id, + error=ErrorResponse( + message=f"Invalid params for ServiceWizard.{method_name}", + code=-32602, + name="Invalid params", + error=f"Params must be a dictionary. 
Got {type(first_param)}", + ), + ) except IndexError: return no_params_passed(method=method_name, jrpc_id=jrpc_id) # This is for backwards compatibility with SW1 logging functions, as they pass in the "service" dictionary instead of the module_name and version - service = params.get("service", {}) - module_name = service.get("module_name", params.get("module_name")) - module_version = service.get("version", params.get("version")) + service = first_param.get("service", {}) + module_name = service.get("module_name", first_param.get("module_name")) + module_version = service.get("version", first_param.get("version")) try: result = action(request, module_name, module_version) diff --git a/src/rpc/error_responses.py b/src/rpc/error_responses.py index 876d671..9b02f77 100644 --- a/src/rpc/error_responses.py +++ b/src/rpc/error_responses.py @@ -1,13 +1,13 @@ from fastapi.responses import JSONResponse -from src.rpc.models import ErrorResponse, JSONRPCResponse +from rpc.models import ErrorResponse, JSONRPCResponse -def method_not_found(method, jrpc_id) -> JSONRPCResponse: +def method_not_found(method: str, jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse(id=jrpc_id, error=ErrorResponse(message=f"Method '{method}' not found", code=-32601, name="Method not found", error=None)) -def no_params_passed(method, jrpc_id): +def no_params_passed(method: str, jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse( id=jrpc_id, error=ErrorResponse( @@ -19,7 +19,7 @@ def no_params_passed(method, jrpc_id): ) -def not_enough_params(method, jrpc_id): +def not_enough_params(method: str, jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse( id=jrpc_id, error=ErrorResponse( @@ -31,7 +31,7 @@ def not_enough_params(method, jrpc_id): ) -def invalid_params(method, jrpc_id): +def invalid_params(method: str, jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse( id=jrpc_id, error=ErrorResponse( @@ -43,7 +43,7 @@ def invalid_params(method, jrpc_id): ) -def 
no_authenticated_headers_passed(jrpc_id): +def no_authenticated_headers_passed(jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse( id=jrpc_id, error=ErrorResponse( @@ -55,7 +55,7 @@ def no_authenticated_headers_passed(jrpc_id): ) -def token_validation_failed(jrpc_id): +def token_validation_failed(jrpc_id: object) -> JSONRPCResponse: return JSONRPCResponse( id=jrpc_id, error=ErrorResponse( @@ -67,5 +67,5 @@ def token_validation_failed(jrpc_id): ) -def json_rpc_response_to_exception(content: JSONRPCResponse, status_code=500): - return JSONResponse(content=content.dict(), status_code=status_code) +def json_rpc_response_to_exception(content: JSONRPCResponse, status_code: int = 500) -> JSONResponse: + return JSONResponse(content=content.model_dump(), status_code=status_code) diff --git a/src/rpc/handlers/__init__.py b/src/rpc/handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/rpc/authenticated_routes.py b/src/rpc/handlers/authenticated_handlers.py similarity index 57% rename from src/rpc/authenticated_routes.py rename to src/rpc/handlers/authenticated_handlers.py index 7c2f395..3648e09 100644 --- a/src/rpc/authenticated_routes.py +++ b/src/rpc/handlers/authenticated_handlers.py @@ -1,17 +1,9 @@ -import logging - from fastapi import Request -from src.dependencies import logs -from src.dependencies.lifecycle import start_deployment, stop_deployment -from src.rpc.common import handle_rpc_request -from src.rpc.models import JSONRPCResponse - -logging.basicConfig(level=logging.INFO) - - -def start(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: - return handle_rpc_request(request, params, jrpc_id, start_deployment) +from dependencies import logs +from dependencies.lifecycle import stop_deployment +from rpc.common import handle_rpc_request +from rpc.models import JSONRPCResponse def stop(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: diff --git a/src/rpc/handlers/json_rpc_handler.py 
b/src/rpc/handlers/json_rpc_handler.py new file mode 100644 index 0000000..b74fa0d --- /dev/null +++ b/src/rpc/handlers/json_rpc_handler.py @@ -0,0 +1,57 @@ +from typing import Callable, Any + +from fastapi import Request, Response, HTTPException +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse + +from rpc.common import validate_rpc_request, get_user_auth_roles +from rpc.error_responses import method_not_found +from rpc.handlers import unauthenticated_handlers, authenticated_handlers +from rpc.models import JSONRPCResponse + +# No KBase Token Required +unauthenticated_routes_mapping = { + "ServiceWizard.list_service_status": unauthenticated_handlers.list_service_status, + "ServiceWizard.status": unauthenticated_handlers.status, + "ServiceWizard.version": unauthenticated_handlers.version, + "ServiceWizard.get_service_status_without_restart": unauthenticated_handlers.get_service_status_without_restart, + "ServiceWizard.start": unauthenticated_handlers.start, + "ServiceWizard.get_service_status": unauthenticated_handlers.start, +} + +# Valid KBase Token and Admin or username in [owners] in kbase.yaml required +admin_or_owner_required = { + "ServiceWizard.get_service_log": authenticated_handlers.get_service_log, + "ServiceWizard.stop": authenticated_handlers.stop, +} +# Use star unpacking to create a mapping of known routes +known_methods = {**unauthenticated_routes_mapping, **admin_or_owner_required} + + +def function_requires_auth(request_function: Callable) -> bool: + return request_function in admin_or_owner_required.values() + + +def json_rpc_helper(request: Request, body: bytes) -> Response | HTTPException | JSONRPCResponse | JSONResponse: + method, params, jrpc_id = validate_rpc_request(body) + request_function_candidate = known_methods.get(method) + if request_function_candidate is None: + mnf_response = jsonable_encoder(method_not_found(method=method, jrpc_id=jrpc_id)) + return JSONResponse(content=mnf_response, 
status_code=500) + + request_function: Callable[[Request, list[dict[Any, Any]], str], JSONRPCResponse] = request_function_candidate + + if function_requires_auth(request_function): + user_auth_roles, auth_error = get_user_auth_roles(request, jrpc_id, method) + if auth_error: + return JSONResponse(content=jsonable_encoder(auth_error), status_code=500) + else: + request.state.user_auth_roles = user_auth_roles + + valid_response = request_function(request, params, jrpc_id) # type:JSONRPCResponse + + converted_response = jsonable_encoder(valid_response) + + if "error" in converted_response: + return JSONResponse(content=converted_response, status_code=500) + return JSONResponse(content=converted_response, status_code=200) diff --git a/src/rpc/unauthenticated_routes.py b/src/rpc/handlers/unauthenticated_handlers.py similarity index 57% rename from src/rpc/unauthenticated_routes.py rename to src/rpc/handlers/unauthenticated_handlers.py index 913e15e..4cf2ea7 100644 --- a/src/rpc/unauthenticated_routes.py +++ b/src/rpc/handlers/unauthenticated_handlers.py @@ -1,8 +1,9 @@ from fastapi.requests import Request -from src.dependencies.status import get_all_dynamic_service_statuses, get_service_status_without_retries, get_version, get_status -from src.rpc.common import handle_rpc_request -from src.rpc.models import JSONRPCResponse +from dependencies.lifecycle import start_deployment +from dependencies.status import get_all_dynamic_service_statuses, get_service_status_without_retries, get_version, get_status +from rpc.common import handle_rpc_request +from rpc.models import JSONRPCResponse def list_service_status(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: @@ -13,9 +14,15 @@ def get_service_status_without_restart(request: Request, params: list[dict], jrp return handle_rpc_request(request, params, jrpc_id, get_service_status_without_retries) -def status(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: +def start(request: Request, 
params: list[dict], jrpc_id: str) -> JSONRPCResponse: + return handle_rpc_request(request, params, jrpc_id, start_deployment) + + +def status(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: # noqa F811 + params = [{}] return handle_rpc_request(request, params, jrpc_id, get_status) -def version(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: +def version(request: Request, params: list[dict], jrpc_id: str) -> JSONRPCResponse: # noqa F811 + params = [{}] return handle_rpc_request(request, params, jrpc_id, get_version) diff --git a/src/rpc/models.py b/src/rpc/models.py index 9ca6d00..cd03de0 100644 --- a/src/rpc/models.py +++ b/src/rpc/models.py @@ -1,4 +1,4 @@ -from typing import Optional, Any +from typing import Any, Union from pydantic import BaseModel @@ -7,25 +7,28 @@ class ErrorResponse(BaseModel): message: str code: int name: str - error: str = None + error: str | None = None class JSONRPCResponse(BaseModel): version: str = "1.0" - id: Optional[int | str] - error: Optional[ErrorResponse] - result: Any = None + id: Union[int, str] | None = 0 + error: ErrorResponse | None = None + result: Any | None = None - def dict(self, *args, **kwargs): - response_dict = super().dict(*args, **kwargs) - if self.result is None: - response_dict.pop("result", None) + def model_dump(self, *args, **kwargs) -> dict[str, Any]: + # Default behavior for the serialization + serialized_data = super().model_dump(*args, **kwargs) - if self.error is None: - response_dict.pop("error", None) - response_dict.pop("version", None) + # Custom logic to exclude fields based on their values + if serialized_data.get("result") is None: + serialized_data.pop("result", None) - if self.id is None: - response_dict.pop("id", None) + if serialized_data.get("error") is None: + serialized_data.pop("error", None) + serialized_data.pop("version", None) - return response_dict + if serialized_data.get("id") is None: + serialized_data.pop("id", None) + + return 
serialized_data diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/conftest.py b/test/conftest.py index 4be0ff6..3ac3afe 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,50 +1,23 @@ +import logging import os +from glob import glob import pytest from dotenv import load_dotenv +logging.basicConfig(level=logging.INFO) + + +def _as_module(fixture_path: str) -> str: + return fixture_path.replace("/", ".").replace("\\", ".").replace(".py", "") + @pytest.fixture(autouse=True) def load_environment(): - # Ensure that the environment variables are loaded before running the tests - load_dotenv() + # Set DOTENV_FILE_LOCATION to override the default .env file location + load_dotenv(os.environ.get("DOTENV_FILE_LOCATION", ".env")) + if os.environ.get("PYCHARM_HOSTED"): + load_dotenv("src/.env") -@pytest.fixture(autouse=True) -def generate_kubeconfig(): - # Generate a kubeconfig file for testing - # Overwrite kubeconfig - os.environ["KUBECONFIG"] = "test_kubeconfig_file" - kubeconfig_path = os.environ["KUBECONFIG"] - - kubeconfig_content = """\ -apiVersion: v1 -kind: Config -current-context: test-context -clusters: -- name: test-cluster - cluster: - server: https://test-api-server - insecure-skip-tls-verify: true -contexts: -- name: test-context - context: - cluster: test-cluster - user: test-user -users: -- name: test-user - user: - exec: - command: echo - apiVersion: client.authentication.k8s.io/v1alpha1 - args: - - "access_token" -""" - - with open(kubeconfig_path, "w") as kubeconfig_file: - kubeconfig_file.write(kubeconfig_content.strip()) - - yield - - # Clean up the generated kubeconfig file after the tests - os.remove(kubeconfig_path) +pytest_plugins = [_as_module(fixture) for fixture in glob("src/fixtures/[!_]*.py") + glob("test/src/fixtures/[!_]*.py")] diff --git a/test/src/clients/test_CachedAuthClient.py b/test/src/clients/test_CachedAuthClient.py new file mode 100644 index 
0000000..5ac9f37 --- /dev/null +++ b/test/src/clients/test_CachedAuthClient.py @@ -0,0 +1,108 @@ +from unittest.mock import patch, Mock + +import pytest +from cacheout import LRUCache +from fastapi import HTTPException + +from clients.CachedAuthClient import CachedAuthClient, UserAuthRoles +from configs.settings import get_settings + + +@pytest.fixture +def valid_tokens_cache(): + # Using a real cache, mocking the cache seems like its not helpful + # note, bool(MagicMock) == False + # would have to client.valid_tokens.get.side_effect = [None, MagicMock()] to simulate cache behavior + cache = LRUCache(ttl=10) + return cache + + +@pytest.fixture +def client(valid_tokens_cache): + settings = get_settings() + client = CachedAuthClient(settings=settings, valid_tokens_cache=valid_tokens_cache) + # Note: If valid_tokens is a MagicMock, this clear() call will be mocked and won't raise any errors. + # If it's an actual LRUCache instance, it will execute the clear() method of LRUCache. + client.valid_tokens.clear() + return client + + +# No Cache +def test_validate_and_get_username_auth_roles_valid_token(client): + with patch("requests.get", return_value=Mock(status_code=200, json=lambda: {"user": "testuser", "customroles": ["user", "admin"]})): + username, roles = client.validate_and_get_username_auth_roles(token="valid_token") + uar = UserAuthRoles(username=username, user_roles=roles, admin_roles=get_settings().admin_roles, token="valid_token") + + assert username == "testuser" + assert roles == ["user", "admin"] + assert uar.is_admin is False + assert uar.is_admin_or_owner(owners=["testuser"]) is True + + +# No Cache +def test_validate_and_get_username_auth_roles_invalid_token(client): + with patch("requests.get", return_value=Mock(status_code=401, json=lambda: {"error": "Invalid token"})): + with pytest.raises(HTTPException) as excinfo: + client.validate_and_get_username_auth_roles(token="invalid_token") + assert excinfo.value.status_code == 401 + assert 
excinfo.value.detail == "Invalid token" + + +# No Cache +def test_validate_and_get_username_auth_roles_auth_service_down(client): + with patch("requests.get", side_effect=Exception("Auth service error")): + with pytest.raises(HTTPException) as excinfo: + client.validate_and_get_username_auth_roles(token="any_token") + assert excinfo.value.status_code == 500 + assert excinfo.value.detail == "Auth service is down or bad request" + + +# No Cache +def test_validate_and_get_username_auth_roles_bad_url(client): + with patch("requests.get", return_value=Mock(status_code=404, json=lambda: {"error": "Not Found"})): + with pytest.raises(HTTPException) as excinfo: + client.validate_and_get_username_auth_roles(token="any_token") + assert excinfo.value.status_code == 404 + assert excinfo.value.detail == "Auth URL not configured correctly" + + +def test_is_authorized_invalid_token(client): + with patch("requests.get", return_value=Mock(status_code=401, json=lambda: {"error": "Invalid token"})): + with pytest.raises(HTTPException) as excinfo: + client.is_authorized(token="invalid_token") + assert excinfo.value.status_code == 401 + assert excinfo.value.detail == "Invalid token" + + +def test_is_admin_valid_token_admin_role(client: CachedAuthClient): + with patch("requests.get", return_value=Mock(status_code=200, json=lambda: {"user": "adminuser", "customroles": ["user", get_settings().admin_roles[0]]})): + assert client.is_admin(token="valid_token") + + +def test_is_admin_valid_token_non_admin_role(client): + with patch("requests.get", return_value=Mock(status_code=200, json=lambda: {"user": "regularuser", "customroles": ["user"]})): + assert not client.is_admin(token="valid_token") + + +def test_get_user_auth_roles_cached(client): + # Mocking a cached entry for this token + client.valid_tokens.set("cached_token", UserAuthRoles(username="cacheduser", user_roles=["user"], admin_roles=["admin"], token="cached_token")) + + user_auth_roles = client.get_user_auth_roles("cached_token") 
+ assert user_auth_roles.username == "cacheduser" + assert "user" in user_auth_roles.user_roles + + +def test_get_user_auth_roles_not_cached(client): + with patch("requests.get", return_value=Mock(status_code=200, json=lambda: {"user": "testuser", "customroles": ["user", "admin"]})): + user_auth_roles = client.get_user_auth_roles("new_token") + assert user_auth_roles.username == "testuser" + assert "admin" in user_auth_roles.user_roles + + +def test_get_user_auth_roles_with_invalid_token(client): + with patch("requests.get", return_value=Mock(status_code=401, json=lambda: {"error": "Invalid token"})): + with pytest.raises(HTTPException) as excinfo: + client.get_user_auth_roles(token="invalid_token") + assert excinfo.value.status_code == 401 + assert excinfo.value.detail == "Invalid token" diff --git a/test/src/clients/test_CachedCatalogClient.py b/test/src/clients/test_CachedCatalogClient.py new file mode 100644 index 0000000..6e28117 --- /dev/null +++ b/test/src/clients/test_CachedCatalogClient.py @@ -0,0 +1,125 @@ +import hashlib +from unittest.mock import Mock + +import pytest + +from clients.CachedCatalogClient import CachedCatalogClient, get_module_name_hash, _get_key, _clean_version +from clients.CatalogClient import Catalog +from configs.settings import get_settings + + +@pytest.fixture +def mocked_catalog(): + return Mock() + + +@pytest.fixture +def client(mocked_catalog): + ccc = CachedCatalogClient(settings=get_settings(), catalog=mocked_catalog) + ccc.module_hash_mappings_cache.clear() + ccc.module_info_cache.clear() + ccc.module_volume_mount_cache.clear() + ccc.secure_config_cache.clear() + return ccc + + +def test_get_combined_module_info(client, mocked_catalog): + mocked_catalog.get_module_version.return_value = {"module_name": "test_module", "git_commit_hash": "abcdef123456", "dynamic_service": 1} + mocked_catalog.get_module_info.return_value = {"owners": ["user1", "user2"]} + + result = client.get_combined_module_info(module_name="test_module", 
version="release") + expected_result = {"module_name": "test_module", "git_commit_hash": "abcdef123456", "dynamic_service": 1, "owners": ["user1", "user2"]} + assert result == expected_result + + +def test_list_service_volume_mounts(client, mocked_catalog): + mocked_catalog.list_volume_mounts.return_value = [{"volume_mounts": [{"path": "/data"}]}] + + result = client.list_service_volume_mounts(module_name="test_module", version="release") + assert result == [{"path": "/data"}] + + +def test_get_secure_params(client, mocked_catalog): + mocked_catalog.get_secure_config_params.return_value = {"param1": "value1", "param2": "value2"} + + result = client.get_secure_params(module_name="test_module", version="release") + assert result == {"param1": "value1", "param2": "value2"} + + +def test_get_hash_to_name_mappings(client, mocked_catalog): + mocked_catalog.list_basic_module_info.return_value = [{"module_name": "test_module", "dynamic_service": 1}, {"module_name": "another_module", "dynamic_service": 0}] + + result = client.get_hash_to_name_mappings() + assert result == {get_module_name_hash("test_module"): "test_module"} + + +def test_get_combined_module_info_not_dynamic_service(client, mocked_catalog): + mocked_catalog.get_module_version.return_value = {"module_name": "test_module", "git_commit_hash": "abcdef123456", "dynamic_service": 0} + mocked_catalog.get_module_info.return_value = {"owners": ["user1", "user2"]} + + with pytest.raises(ValueError, match="not marked as a dynamic service"): + client.get_combined_module_info(module_name="test_module", version="release") + + +def test_get_combined_module_info_cached(client, mocked_catalog): + cached_info = {"module_name": "cached_module", "git_commit_hash": "abcdef123456", "dynamic_service": 1, "owners": ["user1", "user2"]} + client.module_info_cache.set(key="cached_module-release", value=cached_info) + + result = client.get_combined_module_info(module_name="cached_module", version="release") + assert result == 
cached_info + + +def test_list_service_volume_mounts_no_mounts(client, mocked_catalog): + mocked_catalog.list_volume_mounts.return_value = [] + result = client.list_service_volume_mounts(module_name="test_module", version="release") + assert result == [] + + +def test_list_service_volume_mounts_cached(client, mocked_catalog): + cached_mounts = [{"path": "/cached_data"}] + client.module_volume_mount_cache.set(key="cached_module-release", value=cached_mounts) + + result = client.list_service_volume_mounts(module_name="cached_module", version="release") + assert result == cached_mounts + + +def test_get_secure_params_cached(client, mocked_catalog): + cached_params = {"param1": "cached_value1", "param2": "cached_value2"} + client.secure_config_cache.set(key="cached_module-release", value=cached_params) + + result = client.get_secure_params(module_name="cached_module", version="release") + assert result == cached_params + + +def test_get_hash_to_name_mappings_cached(client, mocked_catalog): + cached_mappings = {get_module_name_hash("cached_module"): "cached_module"} + client.module_hash_mappings_cache.set(key="module_hash_mappings", value=cached_mappings) + + result = client.get_hash_to_name_mappings() + assert result == cached_mappings + + +def test_clean_version(): + assert _clean_version(None) == "release" + assert _clean_version("dev") == "dev" + + +def test_get_key(): + assert _get_key("module_name", "version") == "module_name-version" + assert _get_key("module_name") == "module_name-release" + + +def test_get_module_name_hash(): + result = get_module_name_hash("test_module") + expected_result = hashlib.md5("test_module".encode()).hexdigest()[:20] + assert result == expected_result + + +def test_cached_catalog_client_default_init(mocked_catalog): + ccc = CachedCatalogClient(settings=None) + assert isinstance(ccc.cc, Catalog) + + +def test_cached_catalog_client_custom_catalog(mocked_catalog): + ccc = CachedCatalogClient(settings=get_settings(), 
catalog=mocked_catalog) + assert ccc.cc == mocked_catalog diff --git a/test/src/clients/test_KubernetesClients.py b/test/src/clients/test_KubernetesClients.py new file mode 100644 index 0000000..6d7e3e0 --- /dev/null +++ b/test/src/clients/test_KubernetesClients.py @@ -0,0 +1,125 @@ +from unittest.mock import patch, Mock + +import kubernetes +import pytest +from kubernetes.client import CoreV1Api, AppsV1Api, NetworkingV1Api + +from clients.KubernetesClients import ( + K8sClients, + get_k8s_core_client, + get_k8s_app_client, + get_k8s_networking_client, + get_k8s_service_status_cache, + get_k8s_all_service_status_cache, + check_service_status_cache, + populate_service_status_cache, +) +from configs.settings import get_settings + + +@pytest.fixture +def settings(): + settings = get_settings() + return settings + + +def test_k8s_clients_all_none(settings): + with patch("kubernetes.config.load_kube_config"): + with patch("kubernetes.client.CoreV1Api", return_value=Mock(spec=CoreV1Api)): + with patch("kubernetes.client.AppsV1Api", return_value=Mock(spec=AppsV1Api)): + with patch("kubernetes.client.NetworkingV1Api", return_value=Mock(spec=NetworkingV1Api)): + client = K8sClients(settings=settings) + assert isinstance(client.core_client, CoreV1Api) + assert isinstance(client.app_client, AppsV1Api) + assert isinstance(client.network_client, NetworkingV1Api) + + +def test_k8s_clients_all_provided(settings): + core_client_mock = Mock(spec=CoreV1Api) + app_client_mock = Mock(spec=AppsV1Api) + network_client_mock = Mock(spec=NetworkingV1Api) + client = K8sClients(settings, k8s_core_client=core_client_mock, k8s_app_client=app_client_mock, k8s_network_client=network_client_mock) + assert client.core_client == core_client_mock + assert client.app_client == app_client_mock + assert client.network_client == network_client_mock + + +def test_k8s_clients_mixed_clients(settings): + with pytest.raises(ValueError, match="All k8s_clients should either be all None or all provided"): + 
K8sClients(settings, k8s_core_client=Mock(spec=CoreV1Api)) + + with pytest.raises(ValueError, match="All k8s_clients should either be all None or all provided"): + K8sClients(settings, k8s_core_client=Mock(spec=CoreV1Api), k8s_app_client=Mock(spec=AppsV1Api)) + + with pytest.raises(ValueError, match="All k8s_clients should either be all None or all provided"): + K8sClients(settings, k8s_core_client=Mock(spec=CoreV1Api), k8s_app_client=Mock(spec=AppsV1Api), k8s_network_client=None) + + +def test_k8s_clients_incluster_config(settings): + with patch("kubernetes.config.load_incluster_config"): + with patch("kubernetes.client.CoreV1Api", return_value=Mock(spec=CoreV1Api)): + with patch("kubernetes.client.AppsV1Api", return_value=Mock(spec=AppsV1Api)): + with patch("kubernetes.client.NetworkingV1Api", return_value=Mock(spec=NetworkingV1Api)): + client = K8sClients(settings) + assert isinstance(client.core_client, CoreV1Api) + assert isinstance(client.app_client, AppsV1Api) + assert isinstance(client.network_client, NetworkingV1Api) + + +def test_k8s_clients_invalid_client_types(settings): + invalid_client = "invalid_client" + valid_core_client = Mock(spec=CoreV1Api) + valid_app_client = Mock(spec=AppsV1Api) + valid_network_client = Mock(spec=NetworkingV1Api) + + client_combinations = { + CoreV1Api: (invalid_client, valid_app_client, valid_network_client), + AppsV1Api: (valid_core_client, invalid_client, valid_network_client), + NetworkingV1Api: (valid_core_client, valid_app_client, invalid_client), + } + + for expected_type, (core, app, net) in client_combinations.items(): + with pytest.raises(TypeError, match=f"Expected client of type {expected_type}, but got"): + K8sClients(settings, k8s_core_client=core, k8s_app_client=app, k8s_network_client=net) + + +def test_k8s_clients_config_load_errors(settings): + with pytest.raises(kubernetes.config.config_exception.ConfigException, match="Invalid kube-config file. 
No configuration found."): + settings.use_incluster_config = False + settings.kubeconfig = "/invalid_path/to/kubeconfig" + K8sClients(settings) + + with pytest.raises(kubernetes.config.config_exception.ConfigException, match="Service host/port is not set."): + settings.use_incluster_config = True + K8sClients(settings) + + +def test_getter_functions(mock_request): + # Mock the Kubernetes clients and caches in the app state + mock_core_client = Mock() + mock_app_client = Mock() + mock_network_client = Mock() + mock_service_status_cache = Mock() + mock_all_service_status_cache = Mock() + + mock_request.app.state.k8s_clients.core_client = mock_core_client + mock_request.app.state.k8s_clients.app_client = mock_app_client + mock_request.app.state.k8s_clients.network_client = mock_network_client + mock_request.app.state.k8s_clients.service_status_cache = mock_service_status_cache + mock_request.app.state.k8s_clients.all_service_status_cache = mock_all_service_status_cache + + # Test each getter function and assert the results + assert get_k8s_core_client(mock_request) == mock_core_client + assert get_k8s_app_client(mock_request) == mock_app_client + assert get_k8s_networking_client(mock_request) == mock_network_client + assert get_k8s_service_status_cache(mock_request) == mock_service_status_cache + assert get_k8s_all_service_status_cache(mock_request) == mock_all_service_status_cache + + # Define label selector text and data for cache testing + label_selector_text = "example_selector" + data = ["data1", "data2"] + + # Test check_service_status_cache and populate_service_status_cache + assert check_service_status_cache(mock_request, label_selector_text) == mock_service_status_cache.get.return_value + populate_service_status_cache(mock_request, label_selector_text, data) + mock_service_status_cache.set.assert_called_once_with(label_selector_text, data) diff --git a/test/src/configs/test_configs.py b/test/src/configs/test_configs.py deleted file mode 100644 index 
fbd8e1c..0000000 --- a/test/src/configs/test_configs.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -from unittest.mock import patch - -import pytest - -from src.configs.settings import EnvironmentVariableError, get_settings - - -def test_get_settings_success(setup_env_variables): - # Test case for successful retrieval of settings - settings = get_settings() - - assert settings.namespace == "test_namespace" - assert settings.auth_service_url == "http://test_auth_service" - assert settings.kbase_endpoint == "http://test_kbase" - assert settings.catalog_url == "http://test_catalog" - assert settings.catalog_admin_token == "test_catalog_token" - assert settings.kubeconfig == "/path/to/kubeconfig" - assert settings.admin_roles == ["kbase_admin", "catalog_admin", "service_wizard"] - - -@patch.dict(os.environ, clear=True) -def test_get_settings_missing_variables(): - # Test case for missing environment variables - with pytest.raises(EnvironmentVariableError): - get_settings() - - -def test_get_settings_missing_admin_roles(setup_env_variables): - # Test case for missing admin roles - os.environ.pop("KBASE_ADMIN_ROLE") - os.environ.pop("CATALOG_ADMIN_ROLE") - os.environ.pop("SERVICE_WIZARD_ROLE") - - with pytest.raises(EnvironmentVariableError): - get_settings() - - -def test_get_settings_empty_admin_roles(setup_env_variables): - # Test case for empty admin roles - os.environ["KBASE_ADMIN_ROLE"] = "" - os.environ["CATALOG_ADMIN_ROLE"] = "" - os.environ["SERVICE_WIZARD_ROLE"] = "" - - with pytest.raises(EnvironmentVariableError): - get_settings() - - -def test_get_settings_single_admin_role(setup_env_variables): - # Test case for setting only one admin role - os.environ["KBASE_ADMIN_ROLE"] = "kbase_admin" - os.environ["CATALOG_ADMIN_ROLE"] = "" - os.environ["SERVICE_WIZARD_ROLE"] = "" - - settings = get_settings() - - assert settings.admin_roles == ["kbase_admin"] - - -@pytest.fixture(autouse=True) -def setup_env_variables(): - # Clear the cache for get_settings() to ensure 
that the environment variables are reloaded - get_settings.cache_clear() - - # Set up the required environment variables for testing - os.environ["NAMESPACE"] = "test_namespace" - os.environ["AUTH_SERVICE_URL"] = "http://test_auth_service" - os.environ["KBASE_ENDPOINT"] = "http://test_kbase" - os.environ["CATALOG_URL"] = "http://test_catalog" - os.environ["CATALOG_ADMIN_TOKEN"] = "test_catalog_token" - os.environ["KUBECONFIG"] = "/path/to/kubeconfig" - os.environ["KBASE_ADMIN_ROLE"] = "kbase_admin" - os.environ["CATALOG_ADMIN_ROLE"] = "catalog_admin" - os.environ["SERVICE_WIZARD_ROLE"] = "service_wizard" - - yield - - # Clean up the environment variables after testing - os.environ.pop("NAMESPACE", None) - os.environ.pop("AUTH_SERVICE_URL", None) - os.environ.pop("KBASE_ENDPOINT", None) - os.environ.pop("CATALOG_URL", None) - os.environ.pop("CATALOG_ADMIN_TOKEN", None) - os.environ.pop("KUBECONFIG", None) - os.environ.pop("KBASE_ADMIN_ROLE", None) - os.environ.pop("CATALOG_ADMIN_ROLE", None) - os.environ.pop("SERVICE_WIZARD_ROLE", None) diff --git a/test/src/configs/test_configs_dotenv.py b/test/src/configs/test_configs_dotenv.py deleted file mode 100644 index 2528d8d..0000000 --- a/test/src/configs/test_configs_dotenv.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -import pytest -from dotenv import load_dotenv - -from src.configs.settings import EnvironmentVariableError, get_settings - - -def test_missing_roles_and_clear_settings_cache(): - get_settings() - - del os.environ["KBASE_ADMIN_ROLE"] - del os.environ["CATALOG_ADMIN_ROLE"] - del os.environ["SERVICE_WIZARD_ROLE"] - get_settings.cache_clear() - with pytest.raises(EnvironmentVariableError): - get_settings() - - # Load environment again - load_dotenv() - get_settings() - - -@pytest.fixture(autouse=True) -def clear_cache(): - # Clear the cache for get_settings() to ensure that the environment variables are reloaded - get_settings.cache_clear() diff --git a/test/src/configs/test_settings.py 
b/test/src/configs/test_settings.py new file mode 100644 index 0000000..0bb1c68 --- /dev/null +++ b/test/src/configs/test_settings.py @@ -0,0 +1,119 @@ +import os + +import pytest + +from configs.settings import get_settings, EnvironmentVariableError + + +@pytest.fixture +def cleared_settings(): + """Fixture to clear the cache of the get_settings function and then return the Settings object.""" + """ In theory these tests could clobber the state of os.environ for each other if run in parallel""" + get_settings.cache_clear() + return get_settings() + + +def test_get_settings_from_env(cleared_settings): + """Keep this test in sync with the .env file""" + assert cleared_settings.namespace == "staging-dynamic-services" + assert cleared_settings.auth_service_url == "https://ci.kbase.us/services/auth/api/V2/me" + assert cleared_settings.auth_legacy_url == "https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login" + assert cleared_settings.kbase_root_endpoint == "https://ci.kbase.us" + assert cleared_settings.kbase_services_endpoint == "https://ci.kbase.us/services" + assert cleared_settings.catalog_url == "https://ci.kbase.us/services/catalog" + assert cleared_settings.catalog_admin_token == "REDACTED" + assert cleared_settings.kubeconfig == "test_kubeconfig_file" + assert cleared_settings.admin_roles == ["KBASE_ADMIN", "CATALOG_ADMIN", "SERVICE_WIZARD_ADMIN"] + assert cleared_settings.external_ds_url == "https://ci.kbase.us/dynamic_services" + assert cleared_settings.external_sw_url == "https://ci.kbase.us/services/service_wizard" + assert cleared_settings.git_url == "https://github.com/kbase/service_wizard2" + assert cleared_settings.root_path == "/" + assert cleared_settings.use_incluster_config is False + assert cleared_settings.vcs_ref == os.environ.get("GIT_COMMIT_HASH", "unknown") + + +def test_missing_env(cleared_settings): + env_vars_and_expected_errors = { + "AUTH_SERVICE_URL": "AUTH_SERVICE_URL is not set in the .env file", + "AUTH_LEGACY_URL": 
"AUTH_LEGACY_URL is not set in the .env file", + "CATALOG_ADMIN_TOKEN": "CATALOG_ADMIN_TOKEN is not set in the .env file", + "CATALOG_URL": "CATALOG_URL is not set in the .env file", + "EXTERNAL_DS_URL": "EXTERNAL_DS_URL is not set in the .env file", + "EXTERNAL_SW_URL": "EXTERNAL_SW_URL is not set in the .env file", + "KBASE_ROOT_ENDPOINT": "KBASE_ROOT_ENDPOINT is not set in the .env file", + "KBASE_SERVICES_ENDPOINT": "KBASE_SERVICES_ENDPOINT is not set in the .env file", + "NAMESPACE": "NAMESPACE is not set in the .env file", + "ROOT_PATH": "ROOT_PATH is not set in the .env file", + } + + for env_var, expected_error in env_vars_and_expected_errors.items(): + original_value = os.environ.get(env_var) + os.environ.pop(env_var, None) # Temporarily remove the env variable to simulate it being missing + + # Clear the cache for get_settings + get_settings.cache_clear() + + with pytest.raises(EnvironmentVariableError, match=expected_error): + get_settings() + if original_value: # Restore the original value after the test for this variable + os.environ[env_var] = original_value + + +def test_missing_admin_roles(): + admin_roles_vars = ["KBASE_ADMIN_ROLE", "CATALOG_ADMIN_ROLE", "SERVICE_WIZARD_ADMIN_ROLE"] + admin_roles_values = ["KBASE_ADMIN", "CATALOG_ADMIN", "SERVICE_WIZARD_ADMIN"] + expected_error = "At least one admin role (KBASE_ADMIN_ROLE, CATALOG_ADMIN_ROLE, or SERVICE_WIZARD_ADMIN_ROLE) must be set in the .env file" + + # Test for cases where 0, 1, or 2 of the admin roles are set + for i in range(3): + # Clear all the admin roles first + for role_var in admin_roles_vars: + if role_var in os.environ: + os.environ.pop(role_var) + + # Set i number of admin roles + for j in range(i): + os.environ[admin_roles_vars[j]] = admin_roles_values[j] + + # Clear the cache for get_settings + get_settings.cache_clear() + + # If no roles are set, an error should be raised. Otherwise, get_settings should succeed. 
+ if i == 0: + with pytest.raises(EnvironmentVariableError) as exc_info: + get_settings() + assert str(exc_info.value) == expected_error + else: + settings = get_settings() + assert settings.admin_roles == admin_roles_values[:i] + + # Restore the original admin roles after testing + for role_var in admin_roles_vars: + original_value = os.environ.get(role_var) + if original_value: + os.environ[role_var] = original_value + + +def test_missing_kube_config_or_incluster_config(): + # Back up the original values of the environment variables + original_kubeconfig = os.environ.get("KUBECONFIG") + original_use_incluster_config = os.environ.get("USE_INCLUSTER_CONFIG") + + # Remove the KUBECONFIG and USE_INCLUSTER_CONFIG environment variables + if "KUBECONFIG" in os.environ: + os.environ.pop("KUBECONFIG") + if "USE_INCLUSTER_CONFIG" in os.environ: + os.environ.pop("USE_INCLUSTER_CONFIG") + + # Clear the cache for get_settings + get_settings.cache_clear() + + # Expect an error since neither KUBECONFIG nor USE_INCLUSTER_CONFIG is set + with pytest.raises(EnvironmentVariableError, match="At least one of the environment variables 'KUBECONFIG' or 'USE_INCLUSTER_CONFIG' must be set"): + get_settings() + + # Restore the original values of the environment variables after testing + if original_kubeconfig: + os.environ["KUBECONFIG"] = original_kubeconfig + if original_use_incluster_config: + os.environ["USE_INCLUSTER_CONFIG"] = original_use_incluster_config diff --git a/test/src/dependencies/test_deps.py b/test/src/dependencies/test_deps.py deleted file mode 100644 index 2c3a59e..0000000 --- a/test/src/dependencies/test_deps.py +++ /dev/null @@ -1,127 +0,0 @@ -import pytest -import requests_mock -from cacheout import LRUCache -from fastapi.testclient import TestClient - -from src.configs.settings import get_settings -from src.factory import create_app - - -@pytest.fixture -def app(): - token_cache = LRUCache(maxsize=100, ttl=300) - catalog_cache = LRUCache(maxsize=100, ttl=300) - 
return create_app(token_cache=token_cache, catalog_cache=catalog_cache) - - -@pytest.fixture -def client_with_authorization(app): - def _get_client_with_authorization(authorization_value="faketoken", cookies=None): - client = TestClient(app) - client.headers["Authorization"] = f"{authorization_value}" - if cookies: - client.cookies["kbase_session"] = f"{authorization_value}" - return client - - return _get_client_with_authorization - - -@pytest.fixture -def client(app): - with TestClient(app) as test_client: - yield test_client - - -@pytest.fixture -def auth_service_mock(auth_url=None, user="testuser", custom_roles=None): - if auth_url is None: - auth_url = get_settings().auth_service_url - - if custom_roles is None: - custom_roles = list() - - with requests_mock.Mocker() as mocker: - # Mock the response from the AUTH_SERVICE_URL endpoint - mocker.get(auth_url, json={"user": user, "customroles": custom_roles}, status_code=200) - yield mocker - - -def test_get_bad_token(client_with_authorization, auth_service_mock): - with client_with_authorization("_bad_token_") as client: - response = client.get("/get_service_log/123/123") - assert response.status_code == 422 - assert ( - response.json() - == { - "detail": [ - { - "ctx": {"pattern": "^[a-zA-Z0-9]+$"}, - "loc": ["header", "Authorization"], - "msg": 'string does not match regex "^[a-zA-Z0-9]+$"', - "type": "value_error.str.regex", - } - ] - } - != {"instance_id": "123", "logs": ["log1", "log2"]} - ) - - -def test_get_service_log(client_with_authorization, auth_service_mock): - with client_with_authorization() as client: - response = client.get("/get_service_log/123/123") - assert response.status_code == 200 - assert response.json() == {"instance_id": "123", "logs": ["log1", "log2"]} - - -def test_missing_auth(client): - response = client.get("/get_service_log/123/123") - assert response.status_code == 400 - assert response.json() == {"detail": "Please provide the 'Authorization' header or 'kbase_session' cookie"} - 
- -def test_successful_authentication(client_with_authorization, auth_service_mock): - with client_with_authorization() as client: - response = client.get("/get_service_log/123/123") - assert response.status_code == 200 - assert response.json() == {"instance_id": "123", "logs": ["log1", "log2"]} - - -def test_token_cache(client_with_authorization, auth_service_mock): - with client_with_authorization("cachedtoken") as client: - # Test Token Cache Miss - response = client.get("/get_service_log/456/456") - assert auth_service_mock.call_count == 1 # Cache miss, so one call to authentication service - assert response.status_code == 200 - assert response.json() == {"instance_id": "456", "logs": ["log1", "log2"]} - - # Test Token Cache Hit - response = client.get("/get_service_log/123/123") - assert auth_service_mock.call_count == 1 # Cache hit, so no call to authentication service - assert response.status_code == 200 - assert response.json() == {"instance_id": "123", "logs": ["log1", "log2"]} - - with client_with_authorization("cachedtoken2") as client: - # Test Token Cache Miss - response = client.get("/get_service_log/456/456") - assert auth_service_mock.call_count == 2 # Cache miss, so one call to authentication service - assert response.status_code == 200 - assert response.json() == {"instance_id": "456", "logs": ["log1", "log2"]} - - # Test Token Cache Hit - response = client.get("/get_service_log/123/123") - assert auth_service_mock.call_count == 2 # Cache hit, so no call to authentication service - assert response.status_code == 200 - assert response.json() == {"instance_id": "123", "logs": ["log1", "log2"]} - - -# def test_list_service_status_rpc(client_with_authorization, auth_service_mock): -# #TODO Mock out kubernetes -# with client_with_authorization() as client: -# headers = {"Content-Type": "application/json"} # Set the content type to JSON -# payload = { -# "method": "ServiceWizard.list_service_status", -# "id": 22, -# "params": [{"module": "onerepotest"}] 
-# } -# response = client.post("/rpc/", data=json.dumps(payload), headers=headers) -# print(response.json()) diff --git a/test/src/dependencies/test_helpers.py b/test/src/dependencies/test_helpers.py new file mode 100644 index 0000000..c3f7b93 --- /dev/null +++ b/test/src/dependencies/test_helpers.py @@ -0,0 +1,85 @@ +import traceback + +from kubernetes.client import V1DeploymentStatus, V1LabelSelector, V1PodTemplateSpec, V1ObjectMeta, V1DeploymentSpec, V1Deployment + +from configs.settings import get_settings +from models.models import DynamicServiceStatus, CatalogModuleInfo + + +def get_running_deployment_status(deployment_name) -> DynamicServiceStatus: + module_info = sample_catalog_module_info() + deployment = create_sample_deployment(deployment_name=deployment_name, ready_replicas=1, replicas=1, available_replicas=1, unavailable_replicas=0) + deployment_status = _create_deployment_status(module_info, deployment) + return deployment_status + + +def get_stopped_deployment_status(deployment_name) -> DynamicServiceStatus: + module_info = sample_catalog_module_info() + deployment = create_sample_deployment(deployment_name=deployment_name, ready_replicas=0, available_replicas=0, unavailable_replicas=1, replicas=0) + deployment_status = _create_deployment_status(module_info, deployment) + return deployment_status + + +def _create_deployment_status(module_info, deployment) -> DynamicServiceStatus: + return DynamicServiceStatus( + url=module_info.url, + version=module_info.version, + module_name=module_info.module_name, + release_tags=module_info.release_tags, + git_commit_hash=module_info.git_commit_hash, + deployment_name=deployment.metadata.name, + replicas=deployment.spec.replicas, + updated_replicas=deployment.status.updated_replicas, + ready_replicas=deployment.status.ready_replicas, + available_replicas=deployment.status.available_replicas, + unavailable_replicas=deployment.status.unavailable_replicas, + ) + + +def create_sample_deployment(deployment_name, 
replicas, ready_replicas, available_replicas, unavailable_replicas, module_name="test_module", module_version="test_version"): + deployment_status = V1DeploymentStatus( + updated_replicas=replicas, ready_replicas=ready_replicas, available_replicas=available_replicas, unavailable_replicas=unavailable_replicas + ) + + selector = V1LabelSelector(match_labels={"app": deployment_name}) + + pod_template = V1PodTemplateSpec(metadata=V1ObjectMeta(labels={"app": deployment_name})) + + deployment_spec = V1DeploymentSpec(replicas=replicas, selector=selector, template=pod_template) + + # Add annotations to the metadata + annotations = { + "module_name": module_name, + "git_commit_hash": module_version, + "version": module_version, + } + metadata = V1ObjectMeta(name=deployment_name, annotations=annotations) + + deployment = V1Deployment(metadata=metadata, spec=deployment_spec, status=deployment_status) + + return deployment + + +def sample_catalog_module_info(module_name="test_module", git_commit_hash="test_hash", version="test_version", release_tags=None, owners=None) -> CatalogModuleInfo: + if owners is None: + owners = ["test_owner"] + if release_tags is None: + release_tags = ["test_tag"] + + settings = get_settings() + m_info = {"module_name": module_name, "git_commit_hash": git_commit_hash, "version": version, "release_tags": release_tags, "owners": owners} + return CatalogModuleInfo( + # Need to sync this URL with kubernetes methods + url=f"{settings.external_ds_url}/{m_info['module_name']}.{m_info['git_commit_hash']}", + version=m_info["version"], + module_name=m_info["module_name"], + release_tags=m_info["release_tags"], + git_commit_hash=m_info["git_commit_hash"], + owners=m_info["owners"], + ) + + +def assert_exception_correct(got: Exception, expected: Exception): + err = "".join(traceback.TracebackException.from_exception(got).format()) + assert got.args == expected.args, err + assert type(got) == type(expected) # noqa E721 diff --git 
a/test/src/dependencies/test_k8_wrapper.py b/test/src/dependencies/test_k8_wrapper.py index 16a316d..67dd294 100644 --- a/test/src/dependencies/test_k8_wrapper.py +++ b/test/src/dependencies/test_k8_wrapper.py @@ -1,182 +1,410 @@ -from unittest.mock import create_autospec +import time +from unittest.mock import call, patch, MagicMock import pytest from cacheout import LRUCache -from dotenv import load_dotenv -from fastapi.testclient import TestClient -from kubernetes import config, client -from pytest_kind import KindCluster - -from src.dependencies.middleware import is_authorized -from src.clients.CatalogClient import Catalog -from src.configs.settings import get_settings -from src.factory import create_app - - -@pytest.fixture(autouse=True) -def load_environment(): - # Ensure that the environment variables are loaded before running the tests - load_dotenv("/Users/bsadkhin/modules/kbase/service_wizard2/.env") - - -@pytest.fixture(scope="session") -def kind_cluster(): - # Will need to load_env to run this function - cluster = KindCluster("service-wizard") - # For race conditions: - try: - cluster.delete() - except Exception as e: - print(e) - print("Creating cluster") - cluster.create() - # Create a namespace - try: - cluster.kubectl("create", "namespace", get_settings().namespace) - except Exception as e: - print(e) - - yield cluster - cluster.delete() - - -@pytest.fixture -def k8_api_client(kind_cluster): - kubeconfig_path = str(kind_cluster.kubeconfig_path) - config.load_kube_config(config_file=kubeconfig_path) - api_client = client.ApiClient() - yield api_client - - -@pytest.fixture -def mock_catalog_client(): - cc = create_autospec(Catalog) - - """ - from biokbase.catalog.Client import Catalog - cc = Catalog(url="https://ci.kbase.us/services/catalog") - cc.version() - cc.get_module_version({"module_name": "NarrativeService", "version": "8a9bb32f9e2ec5169815b984de8e8df550699630"}) - """ - cc_result = { - "module_name": "NarrativeService", - "released": 1, - 
"released_timestamp": None, - "notes": "", - "timestamp": 1651522838549, - "registration_id": "1651522838549_531b1651-c528-4112-bf69-20d78a479020", - "version": "0.5.2", - "git_commit_hash": "8a9bb32f9e2ec5169815b984de8e8df550699630", - "git_commit_message": "Merge pull request #92 from kbaseapps/fix_get_narrative_doc_worksheets\n\nFix get narrative doc worksheets", - "narrative_methods": [], - "local_functions": [], - "docker_img_name": "dockerhub-ci.kbase.us/kbase:narrativeservice.8a9bb32f9e2ec5169815b984de8e8df550699630", - "dynamic_service": 1, - "release_timestamp": 1651522963611, - "git_url": "https://github.com/kbaseapps/NarrativeService", - "release_tags": ["release", "beta", "dev"], - } - cc.get_combined_module_info.return_value = cc_result - cc.get_secure_config_params.return_value = [ - { - "module_name": "NarrativeService", - "version": "", - "param_name": "service_token", - "param_value": "", - "is_password": 1, - }, - { - "module_name": "NarrativeService", - "version": "", - "param_name": "ws_admin_token", - "param_value": "", - "is_password": 1, - }, - ] - cc.list_volume_mounts.return_value = [ - { - "module_name": "NarrativeService", - "function_name": "service", - "client_group": "service", - "volume_mounts": [{"host_dir": "/data/static_narratives", "container_dir": "/kb/module/work/nginx", "read_only": 0}], - } +from kubernetes import client +from kubernetes.client import ( + V1Ingress, + V1HTTPIngressRuleValue, + V1IngressRule, + V1IngressSpec, + V1IngressBackend, + V1HTTPIngressPath, + V1Service, + V1ServiceSpec, + V1ServicePort, + ApiException, + V1LabelSelector, +) + +from configs.settings import get_settings +from dependencies.k8_wrapper import ( + get_pods_in_namespace, + v1_volume_mount_factory, + sanitize_deployment_name, + create_clusterip_service, + update_ingress_to_point_to_service, + path_exists_in_ingress, + create_and_launch_deployment, + query_k8s_deployment_status, + get_k8s_deployment_status_from_label, + get_k8s_deployments, + 
delete_deployment, + scale_replicas, + DuplicateLabelsException, + get_logs_for_first_pod_in_deployment, +) + +# Import the necessary Kubernetes client classes if not already imported + + +# Reusable Sample Data +sample_field_selector = "test-field_selector" +sample_label_selector = "test-label-selector" + +# Sample Data +sample_module_name = "test_module" +sample_git_commit_hash = "1234567" +sample_image = "test_image" +sample_labels = {"test_label": "label_value"} +sample_annotations = {"test_annotation": "annotation_value"} +sample_env = {"TEST_ENV": "value"} +sample_mounts_ro = ["/host/path:/container/path:ro"] +sample_mounts_rw = ["/host/path:/container/path:ro"] + +# Sample Kubernetes Objects +sample_deployment = client.V1Deployment( + metadata=client.V1ObjectMeta(name="mock_deployment_name"), + spec=client.V1DeploymentSpec( + replicas=1, # initial replica count + selector=client.V1LabelSelector(match_labels={"key": "value"}), # example selector + template=client.V1PodTemplateSpec( + metadata=client.V1ObjectMeta(labels={"key": "value"}), spec=client.V1PodSpec(containers=[client.V1Container(name="container-name", image="container-image")]) + ), + ), +) + + +def test_get_pods_in_namespace(mock_request): + namespace = mock_request.app.state.settings.namespace + corev1api = mock_request.app.state.k8_clients.corev1api + get_pods_in_namespace(corev1api, field_selector=sample_field_selector, label_selector=sample_label_selector) + assert corev1api.list_namespaced_pod.call_args == call(namespace, field_selector="test-field_selector", label_selector="test-label-selector") + + +def test_v1_volume_mount_factory(): + for mount in sample_mounts_ro, sample_mounts_rw: + volumes, volume_mounts = v1_volume_mount_factory(mount) + expected_volumes = [client.V1Volume(name=f"volume-{0}", host_path=client.V1HostPathVolumeSource(path=mount[0].split(":")[0]))] + expected_volume_mounts = [client.V1VolumeMount(name=f"volume-{0}", mount_path=mount[0].split(":")[1], 
read_only=mount[0].split(":")[2] == "ro")] + assert volumes == expected_volumes + assert volume_mounts == expected_volume_mounts + + # Test for empty or None mounts + for bad_mount in [[""], [None]]: + with pytest.raises(ValueError, match="Empty mount provided"): + v1_volume_mount_factory(bad_mount) + + # Test for mounts without 3 parts + bad_format_mount = ["path1:/container1"] + with pytest.raises(ValueError, match="Invalid mount format"): + v1_volume_mount_factory(bad_format_mount) + + # Test for invalid ro/rw values + invalid_ro_rw = ["path1:/container1:invalid"] + with pytest.raises(ValueError, match="Invalid permission in mount"): + v1_volume_mount_factory(invalid_ro_rw) + + # Test for mount with more than 3 parts + extra_parts_mount = ["path1:/container1:ro:extra"] + with pytest.raises(ValueError, match="Invalid mount format"): + v1_volume_mount_factory(extra_parts_mount) + + # Test for multiple valid mounts + multiple_mounts = ["path1:/container1:ro", "path2:/container2:rw"] + volumes, volume_mounts = v1_volume_mount_factory(multiple_mounts) + expected_volumes = [client.V1Volume(name=f"volume-{i}", host_path=client.V1HostPathVolumeSource(path=mount.split(":")[0])) for i, mount in enumerate(multiple_mounts)] + expected_volume_mounts = [ + client.V1VolumeMount(name=f"volume-{i}", mount_path=mount.split(":")[1], read_only=mount.split(":")[2] == "ro") for i, mount in enumerate(multiple_mounts) ] + assert volumes == expected_volumes + assert volume_mounts == expected_volume_mounts + + +@pytest.mark.parametrize( + "module_name, git_commit_hash, expected_deployment_name", + [ + ("test_module", "1234567", "d-test-module-1234567-d"), + ("test.module", "7654321", "d-test-module-7654321-d"), + ("TEST_MODULE", "abcdefg", "d-test-module-abcdefg-d"), + ("test@module", "7654321", "d-test-module-7654321-d"), + ("test!module", "7654321", "d-test-module-7654321-d"), + ("test*module", "7654321", "d-test-module-7654321-d"), + ("test.module.with.many.dots", "7654321", 
"d-test-module-with-many-dots-7654321-d"), + ("a" * 64, "1234567", "d-" + "a" * (63 - len("d---d") - 7) + "-1234567-d"), + ("", "1234567", "d--1234567-d"), + ("a" * 64, "1234567", "d-" + "a" * (63 - len("d---d") - 7) + "-1234567-d"), # Testing truncation for really long module names + ], +) +def test_sanitize_deployment_name(module_name, git_commit_hash, expected_deployment_name): + # When we sanitize the deployment name + deployment_name, _ = sanitize_deployment_name(module_name, git_commit_hash) + # Then the deployment name should match the expected format + assert deployment_name == expected_deployment_name + assert len(deployment_name) <= 63 + + +@patch("dependencies.k8_wrapper.get_k8s_core_client") +def test_create_clusterip_service(mock_get_k8s_core_client, mock_request): + mock_get_k8s_core_client.return_value.create_namespaced_service.return_value = "success" + result = create_clusterip_service(mock_request, sample_module_name, sample_git_commit_hash, sample_labels) + assert result == "success" + + # Also, let's assert that the mocked method was called with the expected parameters + _, service_name = sanitize_deployment_name(sample_module_name, sample_git_commit_hash) + mock_get_k8s_core_client.return_value.create_namespaced_service.assert_called_once_with( + namespace=get_settings().namespace, + body=V1Service( + api_version="v1", + kind="Service", + metadata=client.V1ObjectMeta(name=service_name, labels=sample_labels), + spec=V1ServiceSpec(selector=sample_labels, ports=[V1ServicePort(port=5000, target_port=5000)], type="ClusterIP"), + ), + ) + + +@patch("dependencies.k8_wrapper._ensure_ingress_exists") +def test_update_ingress_to_point_to_service(mock__ensure_ingress_exists, example_ingress, mock_request): + # Good ingress, no exceptions + mock__ensure_ingress_exists.return_value = example_ingress + with patch("time.sleep"): + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert time.sleep.call_count == 0 + 
+ assert mock__ensure_ingress_exists.call_args == call(mock_request) + + # Ingress rules is None + with patch("time.sleep"): + # Force Initialize http attribute with an empty paths list if it is None + example_ingress.spec.rules[0].http = None + mock__ensure_ingress_exists.return_value = example_ingress + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert time.sleep.call_count == 0 + + assert mock__ensure_ingress_exists.call_args == call(mock_request) + + # Unhandled exception + api_exception = ApiException(408) + mock__ensure_ingress_exists.side_effect = api_exception + with pytest.raises(ApiException) as e: + with patch("time.sleep"): + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert time.sleep.call_count == 0 + assert e.type == ApiException + assert e.value == api_exception + + # Handled exception with a sleep to wait in case something else is changing ingress + api_exception = ApiException(409) + mock__ensure_ingress_exists.side_effect = api_exception + with pytest.raises(ApiException) as e: + with patch("time.sleep"): + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert time.sleep.call_count == 2 + assert e.type == ApiException + assert e.value == api_exception + + +def test_ensure_ingress_exists(mock_request, example_ingress): + with patch("time.sleep"), patch("dependencies.k8_wrapper.get_k8s_networking_client") as mock_get_k8s_networking_client: + mock_networking_v1_api = MagicMock() + mock_get_k8s_networking_client.return_value = mock_networking_v1_api + mock_networking_v1_api.read_namespaced_ingress.return_value = example_ingress + + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + + mock_networking_v1_api.read_namespaced_ingress.assert_called_once_with(name="dynamic-services", namespace=mock_request.app.state.settings.namespace) + + # Non 404 case + with 
pytest.raises(ApiException) as e: + mock_networking_v1_api.read_namespaced_ingress.side_effect = ApiException(409, "Conflict") + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert e.type == ApiException + assert e.value.status == 409 + + # 404 case + + mock_networking_v1_api.read_namespaced_ingress.side_effect = ApiException(404, "Not Found") + update_ingress_to_point_to_service(mock_request, sample_module_name, sample_git_commit_hash) + assert mock_networking_v1_api.create_namespaced_ingress.call_count == 1 + + # The fixture has this field filled out, so delete it! + example_ingress.spec.rules[0].http = None + + mock_networking_v1_api.create_namespaced_ingress.assert_called_once_with(namespace=mock_request.app.state.settings.namespace, body=example_ingress) - yield cc +def test_path_exists_in_ingress(): + # 1. Test when the path exists + test_path1 = "/test-path" + http_paths = [V1HTTPIngressPath(path=test_path1, path_type="Prefix", backend=V1IngressBackend(service=None, resource=None))] -@pytest.fixture -def v1_core_client(k8_api_client): - v1_core = client.CoreV1Api(k8_api_client) - yield v1_core + ingress_spec = V1IngressSpec(rules=[V1IngressRule(host="host", http=V1HTTPIngressRuleValue(paths=http_paths))]) + ingress = V1Ingress( + api_version="networking.k8s.io/v1", + kind="Ingress", + metadata=client.V1ObjectMeta( + name="dynamic-services", + annotations={ + "nginx.ingress.kubernetes.io/rewrite-target": "/$2", + }, + ), + spec=ingress_spec, + ) + + assert path_exists_in_ingress(ingress, test_path1) is True + + # 2. Test when the path doesn't exist + test_path2 = "/nonexistent-path" + assert path_exists_in_ingress(ingress, test_path2) is False + + # 3. 
Test when there are multiple paths + test_path3 = "/another-path" + ingress.spec.rules[0].http.paths.append(V1HTTPIngressPath(path=test_path3, path_type="Prefix", backend=V1IngressBackend(service=None, resource=None))) + + assert path_exists_in_ingress(ingress, test_path3) is True + + # 4. Test when there's no rule specified + ingress_no_rule = V1Ingress( + api_version="networking.k8s.io/v1", + kind="Ingress", + metadata=client.V1ObjectMeta( + name="dynamic-services", + annotations={ + "nginx.ingress.kubernetes.io/rewrite-target": "/$2", + }, + ), + spec=V1IngressSpec(), + ) + + assert path_exists_in_ingress(ingress_no_rule, test_path1) is False + + +@patch("dependencies.k8_wrapper.sanitize_deployment_name", return_value=("mock_deployment_name", "mock_service_name")) +@patch("dependencies.k8_wrapper.v1_volume_mount_factory", return_value=([], [])) +def test_create_and_launch_deployment(mock_v1_volume_mount_factory, mock_sanitize_deployment_name, mock_request): + selector = create_and_launch_deployment( + request=mock_request, + module_name=sample_module_name, + module_git_commit_hash=sample_git_commit_hash, + image=sample_image, + labels=sample_labels, + annotations=sample_annotations, + env=sample_env, + mounts=sample_mounts_ro, + ) + expected_selector = V1LabelSelector(match_expressions=None, match_labels={"us.kbase.module.git_commit_hash": "1234567", "us.kbase.module.module_name": "test_module"}) + assert selector == expected_selector + mock_sanitize_deployment_name.assert_called_once_with(sample_module_name, sample_git_commit_hash) + mock_v1_volume_mount_factory.assert_called_once_with(sample_mounts_ro) + + args, kwargs = mock_request.app.state.k8s_clients.app_client.create_namespaced_deployment.call_args + actual_deployment_body = kwargs["body"] + + # Validate the relevant parts of the deployment + assert actual_deployment_body.metadata.name == "mock_deployment_name" + assert actual_deployment_body.metadata.labels == sample_labels + assert 
actual_deployment_body.metadata.annotations == sample_annotations + assert actual_deployment_body.spec.template.spec.containers[0].image == sample_image + + +@patch("dependencies.k8_wrapper._get_deployment_status") +def test_query_k8s_deployment_status(mock_get_deployment_status, mock_request): + query_k8s_deployment_status(mock_request, sample_module_name, sample_git_commit_hash) + expected_label_selector = "us.kbase.module.module_name=test_module,us.kbase.module.git_commit_hash=1234567" + mock_get_deployment_status.assert_called_once_with(mock_request, expected_label_selector) + + +@patch("dependencies.k8_wrapper._get_deployment_status") +def test_get_k8s_deployment_status_from_label(mock_get_deployment_status, mock_request): + label_selector = client.V1LabelSelector(match_labels={"key1": "value1", "key2": "value2"}) + + # Call the function + get_k8s_deployment_status_from_label(mock_request, label_selector) + + # Validate that _get_deployment_status was called with the correct label selector + expected_label_selector = "key1=value1,key2=value2" + mock_get_deployment_status.assert_called_once_with(mock_request, expected_label_selector) -@pytest.fixture -def apps_v1_client(k8_api_client): - apps_v1 = client.AppsV1Api(k8_api_client) - yield apps_v1 +@patch("dependencies.k8_wrapper.get_k8s_all_service_status_cache") +def test_get_k8s_deployments(mock_get_k8s_all_service_status_cache, mock_request): + expected_label_selector = "us.kbase.dynamicservice=true" + all_service_status_cache = MagicMock(spec=LRUCache) + mock_request.app.state.k8s_clients.all_service_status_cache = all_service_status_cache + mock_get_k8s_all_service_status_cache.return_value = all_service_status_cache + # Scenario 1: Deployments are in the cache + example_deployments = ["deployment1", "deployment2"] + all_service_status_cache.get.return_value = example_deployments -@pytest.fixture -def app(kind_cluster, mock_catalog_client, v1_core_client, apps_v1_client): - token_cache = 
LRUCache(maxsize=100, ttl=300) - catalog_cache = LRUCache(maxsize=100, ttl=300) - app = create_app( - token_cache=token_cache, - catalog_cache=catalog_cache, - catalog_client=mock_catalog_client, - k8s_app_client=apps_v1_client, - k8s_core_client=v1_core_client, + assert get_k8s_deployments(mock_request) == example_deployments + + all_service_status_cache.get.assert_called_with(expected_label_selector, None) + + assert all_service_status_cache.set.call_count == 0 + + # Scenario 2: Deployments not in cache, fetch from K8s with no deployments matching label + all_service_status_cache.get.return_value = None + get_k8s_deployments(mock_request) + mock_request.app.state.k8s_clients.app_client.list_namespaced_deployment.assert_called_with(mock_request.app.state.settings.namespace, label_selector=expected_label_selector) + all_service_status_cache.set.assert_called_with(expected_label_selector, mock_request.app.state.k8s_clients.app_client.list_namespaced_deployment().items) + + # Scenario 3: Deployments not in cache, fetch from K8s with one or more deployments matching label + all_service_status_cache.get.return_value = None + mock_request.app.state.k8s_clients.app_client.list_namespaced_deployment.return_value.items = example_deployments + get_k8s_deployments(mock_request) + assert get_k8s_deployments(mock_request) == example_deployments + all_service_status_cache.set.assert_called_with(expected_label_selector, example_deployments) + + +@patch("dependencies.k8_wrapper.sanitize_deployment_name", return_value=("mock_deployment_name", "mock_service_name")) +def test_delete_deployment(mock_sanitize_deployment_name, mock_request): + result = delete_deployment(mock_request, sample_module_name, sample_git_commit_hash) + mock_sanitize_deployment_name.assert_called_once_with(sample_module_name, sample_git_commit_hash) + mock_request.app.state.k8s_clients.app_client.delete_namespaced_deployment.assert_called_once_with( + name="mock_deployment_name", 
namespace=mock_request.app.state.settings.namespace ) - app.dependency_overrides[is_authorized] = lambda: ... - return app - - -@pytest.fixture -def client_with_authorization(app): - def _get_client_with_authorization(authorization_value="faketoken", cookies=None): - client = TestClient(app) - client.headers["Authorization"] = f"{authorization_value}" - if cookies: - client.cookies["kbase_session"] = f"{authorization_value}" - return client - - return _get_client_with_authorization - - -# -# def test_get_start(client_with_authorization): -# with client_with_authorization() as client: -# response = client.get("/start/?module_name=StaticNarrative&version=beta") -# -# assert response.json() != [] -# assert response.json() == [123] -# assert response.status_code == 200 - - -def test_get_status_nonexistent(client_with_authorization): - with client_with_authorization() as client: - response = client.get("/get_service_status?module_name=StaticNarrative&version=beta") - assert response.json() != [] - assert response.json() == [123] - assert response.status_code == 200 - - -def test_get_good_status(client_with_authorization): - { - "git_commit_hash": "8a9bb32f9e2ec5169815b984de8e8df550699630", - "status": "active", - "version": "0.5.2", - "hash": "8a9bb32f9e2ec5169815b984de8e8df550699630", - "release_tags": ["release", "beta", "dev"], - "url": "https://ci.kbase.us:443/dynserv/8a9bb32f9e2ec5169815b984de8e8df550699630.NarrativeService", - "module_name": "NarrativeService", - "health": "healthy", - "up": 1, - } - - with client_with_authorization() as client: - response = client.get("/get_service_status?module_name=NarrativeService&version=beta") - assert response.json() != [] - assert response.json() == [123] - assert response.status_code == 200 + assert result == "mock_deployment_name" + + +@patch("dependencies.k8_wrapper.query_k8s_deployment_status") +def test_scale_replicas(mock_query_deployment_status, mock_request): + desired_replicas = 3 + 
mock_query_deployment_status.return_value = sample_deployment + scale_replicas(mock_request, sample_module_name, sample_git_commit_hash, desired_replicas) + mock_query_deployment_status.assert_called_once_with(mock_request, sample_module_name, sample_git_commit_hash) + mock_request.app.state.k8s_clients.app_client.replace_namespaced_deployment.assert_called_once_with( + name="mock_deployment_name", namespace=mock_request.app.state.settings.namespace, body=sample_deployment + ) + + +@patch("dependencies.k8_wrapper.check_service_status_cache") +@patch("dependencies.k8_wrapper.get_k8s_all_service_status_cache") +def test__get_deployment_status(mock_get_k8s_all_service_status_cache, mock_check_service_status_cache, mock_request): + """Testing through the public interface""" + mock_request.app.state.k8s_clients.service_status_cache = MagicMock() + # + # # Scenario 1: Deployment is in the cache + mock_check_service_status_cache.return_value = sample_deployment + scale_replicas(mock_request, sample_module_name, sample_git_commit_hash, 123) + + # # Scenario 2: Deployment is not in the cache, need to look up k8 api + mock_check_service_status_cache.return_value = None + mock_request.app.state.k8s_clients.app_client.list_namespaced_deployment.return_value.items = [sample_deployment] + scale_replicas(mock_request, sample_module_name, sample_git_commit_hash, 123) + + # # Scenario 3: Deployment is not in the cache, need to look up k8 api but multiple deployments match + mock_check_service_status_cache.return_value = None + mock_request.app.state.k8s_clients.app_client.list_namespaced_deployment.return_value.items = [sample_deployment, sample_deployment] + with pytest.raises(DuplicateLabelsException): + scale_replicas(mock_request, sample_module_name, sample_git_commit_hash, 123) + + +def test_get_logs_for_first_pod_in_deployment(mock_request): + # Pod is found + mock_request.app.state.k8s_clients.core_client.list_namespaced_pod.return_value.items = [MagicMock()] + 
get_logs_for_first_pod_in_deployment(mock_request, sample_module_name, sample_git_commit_hash) + mock_request.app.state.k8s_clients.core_client.list_namespaced_pod.assert_called_once_with( + mock_request.app.state.settings.namespace, label_selector="us.kbase.module.module_name=test_module,us.kbase.module.git_commit_hash=1234567" + ) + mock_request.app.state.k8s_clients.core_client.read_namespaced_pod_log.assert_called_once_with( + name=mock_request.app.state.k8s_clients.core_client.list_namespaced_pod().items[0].metadata.name, namespace=mock_request.app.state.settings.namespace, timestamps=True + ) + # No Pod is found + label_selector_text = f"us.kbase.module.module_name={sample_module_name.lower()}," + f"us.kbase.module.git_commit_hash={sample_git_commit_hash}" + + mock_request.app.state.k8s_clients.core_client.list_namespaced_pod.return_value.items = None + expected_message = (f"No Matching Pods in namespace:{mock_request.app.state.settings.namespace} could be found with label_selector" f"={label_selector_text}",) * 2 + + assert get_logs_for_first_pod_in_deployment(mock_request, sample_module_name, sample_git_commit_hash) == expected_message diff --git a/test/src/dependencies/test_lifecycle.py b/test/src/dependencies/test_lifecycle.py new file mode 100644 index 0000000..69cf7df --- /dev/null +++ b/test/src/dependencies/test_lifecycle.py @@ -0,0 +1,216 @@ +import logging +import re +from unittest.mock import patch + +import pytest +from fastapi import HTTPException +from kubernetes.client import ApiException + +from clients.baseclient import ServerError +from dependencies import lifecycle +from models import ServiceStatus, DynamicServiceStatus +from test.src.dependencies import test_helpers as tlh + + +def test_simple_get_volume_mounts(mock_request): + mock_request.app.state.catalog_client.list_service_volume_mounts.return_value = [ + {"host_dir": "host1", "container_dir": "container1", "read_only": 1}, + {"host_dir": "host2", "container_dir": "container2", 
"read_only": 0}, + ] + result = lifecycle.get_volume_mounts(mock_request, None, None) + expected_result = ["host1:container1:ro", "host2:container2:rw"] + assert result == expected_result + + +def test_simple_setup_metadata(): + module_name = "test_module" + requested_module_version = "1.0" + git_commit_hash = "hash123" + version = "1.0" + git_url = "https://github.com/test/repo" + + labels, annotations = lifecycle._setup_metadata(module_name, requested_module_version, git_commit_hash, version, git_url) + assert labels == { + "us.kbase.dynamicservice": "true", + "us.kbase.module.git_commit_hash": git_commit_hash, + "us.kbase.module.module_name": module_name.lower(), + } + assert annotations == { + "git_commit_hash": git_commit_hash, + "module_name": module_name, + "module_version_from_request": requested_module_version, + "us.kbase.catalog.moduleversion": version, + "description": re.sub(r"^(https?://)", "", git_url), + "k8s_deployment_name": "to_be_overwritten", + "k8s_service_name": "to_be_overwritten", + } + + +def test_simple_get_env(mock_request): + envs = lifecycle.get_env(request=mock_request, module_name=None, module_version=None) + s = mock_request.app.state.settings + + expected_environ_map = { + "KBASE_ENDPOINT": s.kbase_services_endpoint, + "AUTH_SERVICE_URL": s.auth_legacy_url, + "AUTH_SERVICE_URL_ALLOW_INSECURE": "false", + "KBASE_SECURE_CONFIG_PARAM_test_secure_param_name": "test_secure_param_value", + } + for item in expected_environ_map: + assert expected_environ_map[item] == envs[item] + + +@patch("dependencies.lifecycle.scale_replicas") +@patch("dependencies.lifecycle.get_service_status_with_retries") +@patch("dependencies.lifecycle._create_cluster_ip_service_helper") +@patch("dependencies.lifecycle._update_ingress_for_service_helper") +@patch("dependencies.lifecycle._setup_metadata") +@patch("dependencies.lifecycle._create_and_launch_deployment_helper") +def test_start_deployment( + _create_and_launch_deployment_helper_mock, + 
_setup_metadata_mock, + _update_ingress_for_service_helper_mock, + _create_cluster_ip_service_helper_mock, + get_service_status_with_retries_mock, + scale_replicas_mock, + mock_request, +): + # Test Deployment Does Not Already exist, no need to scale replicas + _create_and_launch_deployment_helper_mock.return_value = False + _setup_metadata_mock.return_value = {}, {} + get_service_status_with_retries_mock.return_value = tlh.get_stopped_deployment_status("tester") + + rv = lifecycle.start_deployment(request=mock_request, module_name="test_module", module_version="dev") + scale_replicas_mock.assert_not_called() + assert rv == tlh.get_stopped_deployment_status("tester") + + # Test Deployment Already Exists, need to scale instead of recreate + _create_and_launch_deployment_helper_mock.return_value = True + lifecycle.start_deployment(request=mock_request, module_name="test_module", module_version="dev") + scale_replicas_mock.assert_called_once() # + + +@patch("dependencies.lifecycle.create_and_launch_deployment") +def test__create_and_launch_deployment_helper(mock_create_and_launch, mock_request): + # Test truthiness based on api exception + module_name = "test_module" + git_commit_hash = "hash123" + image = "test_image" + labels = {} + annotations = {} + env = {} + mounts = [] + + mock_exception = ApiException(status=409) + mock_create_and_launch.side_effect = mock_exception + + # Act + result = lifecycle._create_and_launch_deployment_helper( + annotations=annotations, env=env, image=image, labels=labels, module_git_commit_hash=git_commit_hash, module_name=module_name, mounts=mounts, request=mock_request + ) + + # Assert + assert result is True + + mock_create_and_launch.side_effect = None + result = lifecycle._create_and_launch_deployment_helper( + annotations=annotations, env=env, image=image, labels=labels, module_git_commit_hash=git_commit_hash, module_name=module_name, mounts=mounts, request=mock_request + ) + assert result is False + + with 
pytest.raises(HTTPException) as e: + mock_create_and_launch.side_effect = ApiException(status=500) + lifecycle._create_and_launch_deployment_helper( + annotations=annotations, env=env, image=image, labels=labels, module_git_commit_hash=git_commit_hash, module_name=module_name, mounts=mounts, request=mock_request + ) + assert e.value.status_code == 500 + + +@patch("dependencies.lifecycle.create_clusterip_service") +@patch.object(logging, "warning") +def test__create_cluster_ip_service_helper(mock_logging_warning, mock_create_clusterip_service, mock_request): + # Test truthiness based on api exception + module_name = "test_module" + git_commit_hash = "hash123" + labels = {} + + mock_create_clusterip_service.side_effect = None + lifecycle._create_cluster_ip_service_helper(request=mock_request, module_name=module_name, catalog_git_commit_hash=git_commit_hash, labels=labels) + assert mock_create_clusterip_service.call_count == 1 + assert mock_logging_warning.call_count == 0 + + mock_create_clusterip_service.side_effect = ApiException(status=409) + lifecycle._create_cluster_ip_service_helper(request=mock_request, module_name=module_name, catalog_git_commit_hash=git_commit_hash, labels=labels) + mock_logging_warning.assert_called_once_with("Service already exists, skipping creation") + assert mock_logging_warning.call_count == 1 + assert mock_create_clusterip_service.call_count == 2 + + with pytest.raises(HTTPException) as e: + mock_create_clusterip_service.side_effect = ApiException(status=500) + lifecycle._create_cluster_ip_service_helper(request=mock_request, module_name=module_name, catalog_git_commit_hash=git_commit_hash, labels=labels) + assert e.value.status_code == 500 + assert mock_logging_warning.call_count == 1 + assert mock_create_clusterip_service.call_count == 3 + + +@patch("dependencies.lifecycle.update_ingress_to_point_to_service") +@patch.object(logging, "warning") +def test_create_and_launch_deployment_helper(mock_logging_warning, 
mock_update_ingress_to_point_to_service, mock_request): + # Test truthiness based on api exception + module_name = "test_module" + git_commit_hash = "hash123" + + mock_update_ingress_to_point_to_service.side_effect = None + lifecycle._update_ingress_for_service_helper(request=mock_request, module_name=module_name, git_commit_hash=git_commit_hash) + assert mock_update_ingress_to_point_to_service.call_count == 1 + assert mock_logging_warning.call_count == 0 + + mock_update_ingress_to_point_to_service.side_effect = ApiException(status=409) + lifecycle._update_ingress_for_service_helper(request=mock_request, module_name=module_name, git_commit_hash=git_commit_hash) + assert mock_update_ingress_to_point_to_service.call_count == 2 + assert mock_logging_warning.call_count == 1 + mock_logging_warning.assert_called_once_with("Ingress already exists, skipping creation") + + with pytest.raises(HTTPException) as e: + mock_update_ingress_to_point_to_service.side_effect = ApiException(status=500) + lifecycle._update_ingress_for_service_helper(request=mock_request, module_name=module_name, git_commit_hash=git_commit_hash) + assert e.value.status_code == 500 + assert mock_update_ingress_to_point_to_service.call_count == 3 + assert mock_logging_warning.call_count == 1 + + +@patch("dependencies.lifecycle.scale_replicas") +def test_stop_deployment(mock_scale_replicas, mock_request): + mock_request.state.user_auth_roles.is_admin_or_owner.return_value = False + with pytest.raises(ServerError) as e: + lifecycle.stop_deployment(request=mock_request, module_name="test_module", module_version="test_version") + assert mock_request.state.user_auth_roles.is_admin_or_owner.call_count == 1 + assert e.value.code == -32000 + assert e.value.message == "Only admins or module owners can stop dynamic services" + + mock_request.state.user_auth_roles.is_admin_or_owner.return_value = True + + deployment = tlh.create_sample_deployment(deployment_name="test_deployment_name", replicas=0, ready_replicas=0, 
available_replicas=0, unavailable_replicas=0) + + mock_scale_replicas.return_value = deployment + + rv = lifecycle.stop_deployment(request=mock_request, module_name="test_module", module_version="test_version") + + dds = DynamicServiceStatus( + git_commit_hash="test_hash", + status=ServiceStatus.STOPPED, + version="test_version", + hash="test_hash", + release_tags=["test_tag"], + url="https://ci.kbase.us/dynamic_services/test_module.test_hash", + module_name="test_module", + health=ServiceStatus.STOPPED, + up=0, + deployment_name="test_deployment_name", + replicas=0, + updated_replicas=0, + ready_replicas=0, + available_replicas=0, + unavailable_replicas=0, + ) + assert rv == dds diff --git a/test/src/dependencies/test_logs.py b/test/src/dependencies/test_logs.py new file mode 100644 index 0000000..d98c309 --- /dev/null +++ b/test/src/dependencies/test_logs.py @@ -0,0 +1,34 @@ +from unittest.mock import patch, Mock + +import pytest + +from clients.baseclient import ServerError +from dependencies.logs import get_service_log_web_socket, get_service_log +from models import CatalogModuleInfo + +# Sample test data +mock_module_info = Mock(spec=CatalogModuleInfo) +mock_module_info.release_tags = [] +mock_module_info.owners = ["owner1"] + + +@patch("dependencies.logs.get_logs_for_first_pod_in_deployment", return_value=("pod1", "sample_logs")) +@patch("dependencies.status.lookup_module_info", return_value=mock_module_info) +def test_get_service_log(mock_lookup_module_info, mock_get_logs_for_first_pod_in_deployment, mock_request): + # Test for owner trying to access logs of a dev service + mock_request.app.state.user_auth_roles.is_admin_or_owner.return_value = True + logs = get_service_log(mock_request, "test_module", "test_version") + assert logs == [{"instance_id": "pod1", "log": "sample_logs"}] + + # Test for non-admin, non-owner user trying to access logs of a non-dev service + mock_request.state.user_auth_roles.is_admin_or_owner.return_value = False + with 
pytest.raises(ServerError): + get_service_log(mock_request, "test_module", "test_version") + + +# Test for the not implemented function +def test_get_service_log_web_socket(): + mock_request = Mock() + + with pytest.raises(NotImplementedError): + get_service_log_web_socket(mock_request, "test_module", "test_version") diff --git a/test/src/dependencies/test_middleware.py b/test/src/dependencies/test_middleware.py new file mode 100644 index 0000000..f3ac1fe --- /dev/null +++ b/test/src/dependencies/test_middleware.py @@ -0,0 +1,31 @@ +import pytest +from fastapi import HTTPException + +from dependencies.middleware import is_authorized + + +@pytest.mark.parametrize( + "authorization, kbase_session, auth_client_response, expected", + [ + (None, None, None, HTTPException(401, detail="Please provide the 'Authorization' header or 'kbase_session' cookie for None payload: None ")), + ("validToken", None, True, True), # Valid token, no kbase_session, auth_client returns True + (None, "validSession", True, True), # No token, valid kbase_session, auth_client returns True + ("validToken", None, HTTPException(401), HTTPException(401)), # auth_client raises 401 + ("validToken", None, HTTPException(500), HTTPException(500, detail="Auth service is down")), # auth_client raises 500 + ("validToken", None, HTTPException(404), HTTPException(404)), # auth_client raises 404 + ("validToken", None, HTTPException(403), HTTPException(400, detail="Invalid or expired token")), # auth_client raises any other status code + ], +) +def test_is_authorized(authorization, kbase_session, auth_client_response, expected, mock_request): + if isinstance(auth_client_response, HTTPException): + mock_request.app.state.auth_client.is_authorized.side_effect = auth_client_response + else: + mock_request.app.state.auth_client.is_authorized.return_value = auth_client_response + + if isinstance(expected, HTTPException): + with pytest.raises(HTTPException) as exc_info: + is_authorized(mock_request, authorization, 
kbase_session) + assert exc_info.value.status_code == expected.status_code + assert exc_info.value.detail == expected.detail + else: + assert is_authorized(mock_request, authorization, kbase_session) == expected diff --git a/test/src/dependencies/test_status.py b/test/src/dependencies/test_status.py new file mode 100644 index 0000000..ea3b1ac --- /dev/null +++ b/test/src/dependencies/test_status.py @@ -0,0 +1,191 @@ +from unittest.mock import patch + +import pytest +from fastapi import HTTPException + +import clients.baseclient +from dependencies.k8_wrapper import DuplicateLabelsException +from dependencies.status import ( + lookup_module_info, + get_service_status_without_retries, + get_service_status_with_retries, + get_dynamic_service_status_helper, + get_status, + get_version, + get_all_dynamic_service_statuses, +) +from models import CatalogModuleInfo +from test.src.dependencies.test_helpers import assert_exception_correct, get_running_deployment_status, sample_catalog_module_info, create_sample_deployment + +sample_module_name = "test_module" +sample_git_commit = "test_hash" + + +def test_lookup_module_info(mock_request): + # Good request + lookup_module_info(mock_request, sample_module_name, sample_git_commit) + mock_request.app.state.catalog_client.get_combined_module_info.assert_called_once_with(sample_module_name, sample_git_commit) + + # Catalog is down + mock_request.app.state.catalog_client.get_combined_module_info.side_effect = clients.baseclient.ServerError(name="test", code=0, message=0) + with pytest.raises(HTTPException): + lookup_module_info(mock_request, sample_module_name, sample_git_commit) + + # Something unexpected happens + mock_request.app.state.catalog_client.get_combined_module_info.side_effect = Exception() + evr = CatalogModuleInfo( + url="No Valid URL Found, or possible programming error ", + version=sample_git_commit, + module_name=sample_module_name, + release_tags=[], + git_commit_hash="test_hash", + owners=["Unknown"], + ) + + 
assert lookup_module_info(mock_request, sample_module_name, sample_git_commit) == evr + + +@patch("dependencies.status.get_service_status_with_retries") +def test_get_service_status_without_retries(mock_get_service_status_with_retries, mock_request): + get_service_status_without_retries(mock_request, sample_module_name, sample_git_commit) + mock_get_service_status_with_retries.assert_called_once_with(mock_request, sample_module_name, sample_git_commit, retries=0) + + +@patch("time.sleep") +@patch("dependencies.status.get_dynamic_service_status_helper") +@patch("dependencies.status.lookup_module_info") +def test_get_service_status_with_retries( + mock_lookup_module_info, mock_get_dynamic_service_status_helper, mock_sleep, mock_request, example_dynamic_service_status_up, example_dynamic_service_status_down +): + # Test ServerError + mock_get_dynamic_service_status_helper.side_effect = clients.baseclient.ServerError(name="test", code=0, message="Server Error!") + with pytest.raises(HTTPException) as exc_info: + get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=1) + expected_exception = HTTPException(status_code=500, detail="test: 0. 
Server Error!\n") + assert_exception_correct(got=exc_info.value, expected=expected_exception) + + # Test DuplicateLabelsException + mock_get_dynamic_service_status_helper.side_effect = DuplicateLabelsException() + with pytest.raises(HTTPException) as exc_info: + get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=1) + expected_exception = HTTPException(status_code=500, detail="Duplicate labels found in deployment, an admin screwed something up!") + assert_exception_correct(got=exc_info.value, expected=expected_exception) + + # Test General Exception + mock_get_dynamic_service_status_helper.side_effect = Exception("Some unexpected error!") + with pytest.raises(Exception) as exc_info: # Catch the exception + get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=1) + expected_exception = Exception("Failed to get service status after maximum retries") + assert_exception_correct(got=exc_info.value, expected=expected_exception) + + # Reset the side effect for the next tests + mock_get_dynamic_service_status_helper.side_effect = None + + with pytest.raises(Exception) as e: + get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=10) + assert_exception_correct(got=e.value, expected=Exception("Failed to get service status after maximum retries")) + + # Deployment is up + mock_get_dynamic_service_status_helper.return_value = example_dynamic_service_status_up + rv = get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=10) + assert rv.up + + # Deployment is down + mock_get_dynamic_service_status_helper.return_value = example_dynamic_service_status_down + rv = get_service_status_with_retries(mock_request, sample_module_name, sample_git_commit, retries=10) + assert not rv.up + assert rv.replicas == 0 + + +@patch("dependencies.status.lookup_module_info") +@patch("dependencies.status.query_k8s_deployment_status") +def 
test_get_dynamic_service_status_helper(mock_query_k8s_deployment_status, mock_lookup_module_info, mock_request): + # Found it! + mock_lookup_module_info.return_value = sample_catalog_module_info() + mock_query_k8s_deployment_status.return_value = create_sample_deployment("test", 1, 1, 1, 0) + rv = get_dynamic_service_status_helper(mock_request, sample_module_name, sample_git_commit) + expected_dss = get_running_deployment_status("test") + assert rv == expected_dss + + # Test the case where no dynamic service is found + mock_query_k8s_deployment_status.return_value = None + with pytest.raises(HTTPException) as e: + get_dynamic_service_status_helper(mock_request, sample_module_name, sample_git_commit) + expected_exception = HTTPException(status_code=404, detail=f"No dynamic service found with module_name={sample_module_name} and version={sample_git_commit}") + assert e.value.status_code == 404 + assert e.value.detail == expected_exception.detail + assert_exception_correct(e.value, expected_exception) + + +@patch("dependencies.status.get_k8s_deployments") +def test_get_all_dynamic_service_statuses(mock_get_k8s_deployments, mock_request): + # No Deployments found + mock_get_k8s_deployments.return_value = [] + with pytest.raises(HTTPException) as e: + get_all_dynamic_service_statuses(mock_request, sample_module_name, sample_git_commit) + # No kubernetes found! 
+ expected_exception = HTTPException( + status_code=404, + detail=f"No deployments found in kubernetes cluster with namespace=" f"{mock_request.app.state.settings.namespace} and labels=dynamic-service=true!", + ) + assert e.value.status_code == 404 + assert e.value.detail == expected_exception.detail + assert_exception_correct(e.value, expected_exception) + + # Get running deployment + mock_get_k8s_deployments.return_value = [create_sample_deployment("test", 1, 1, 1, 0)] + rv = get_all_dynamic_service_statuses(mock_request, sample_module_name, sample_git_commit) + expected_dss = get_running_deployment_status("test") + assert rv == [expected_dss] + + # Inject a bad key + mock_get_k8s_deployments.return_value = [create_sample_deployment("test", 1, 1, 1, 0)] + mock_get_k8s_deployments.return_value[0].metadata.annotations["module_name"] = None + mock_get_k8s_deployments.return_value[0].metadata.annotations["git_commit_hash"] = None + with pytest.raises(HTTPException) as e: + get_all_dynamic_service_statuses(mock_request, sample_module_name, sample_git_commit) + + expected_exception = HTTPException( + status_code=404, + detail=f"No dynamic services found in kubernetes cluster with namespace={mock_request.app.state.settings.namespace} and " + f"labels=dynamic-service=true! 
Or " + f"they were found and they were missing the module_name and git_commit_hash annotations!", + ) + assert e.value.status_code == expected_exception.status_code + assert e.value.detail == expected_exception.detail + assert_exception_correct(e.value, expected_exception) + + # NO dynamic services found in the catalog + mock_request.app.state.catalog_client.get_hash_to_name_mappings.return_value = None + with pytest.raises(HTTPException) as e: + get_all_dynamic_service_statuses(mock_request, sample_module_name, sample_git_commit) + expected_exception = HTTPException(status_code=404, detail="No dynamic services found in catalog!") + assert e.value.status_code == 404 + assert e.value.detail == expected_exception.detail + assert_exception_correct(e.value, expected_exception) + + +def test_get_status(mock_request): + mock_request.app.state.settings.vcs_ref = "1.2.3" + result = get_status(mock_request) + expected = { + "git_commit_hash": "1.2.3", + "state": "OK", + "version": "1.2.3", + "message": "", + "git_url": "https://github.com/kbase/service_wizard2", + } + assert result == expected + + result_with_params = get_status(mock_request, module_name="some_module", version="some_version") + assert result_with_params == expected + + +def test_get_version(mock_request): + mock_request.app.state.settings.vcs_ref = "1.2.3" + result = get_version(mock_request) + expected = ["1.2.3"] + assert result == expected + + result_with_params = get_version(mock_request, module_name="some_module", version="some_version") + assert result_with_params == expected diff --git a/test/src/fixtures/README.md b/test/src/fixtures/README.md new file mode 100644 index 0000000..48702cd --- /dev/null +++ b/test/src/fixtures/README.md @@ -0,0 +1,2 @@ +# Fixtures +* Various helpers to be used throughout the application, loaded by conftest.py diff --git a/test/src/fixtures/fixtures.py b/test/src/fixtures/fixtures.py new file mode 100644 index 0000000..c2bd54a --- /dev/null +++ 
b/test/src/fixtures/fixtures.py @@ -0,0 +1,137 @@ +import os +from unittest.mock import MagicMock + +import pytest +from fastapi import Request +from kubernetes import client +from kubernetes.client import CoreV1Api, AppsV1Api, NetworkingV1Api +from kubernetes.client import V1Ingress, V1IngressSpec, V1IngressRule + +from src.clients.CachedCatalogClient import CachedCatalogClient +from src.clients.KubernetesClients import K8sClients +from src.configs.settings import get_settings +from src.models import DynamicServiceStatus + + +@pytest.fixture(autouse=True) +def mock_request(): + return get_example_mock_request() + + +@pytest.fixture(autouse=True) +def example_ingress(): + return get_example_ingress() + + +@pytest.fixture(autouse=True) +def generate_kubeconfig(): + # Generate a kubeconfig file for testing + # Overwrite kubeconfig + os.environ["KUBECONFIG"] = "test_kubeconfig_file" + kubeconfig_path = os.environ["KUBECONFIG"] + + kubeconfig_content = """\ +apiVersion: v1 +kind: Config +current-context: test-context +clusters: +- name: test-cluster + cluster: + server: https://test-api-server + insecure-skip-tls-verify: true +contexts: +- name: test-context + context: + cluster: test-cluster + user: test-user +users: +- name: test-user + user: + exec: + command: echo + apiVersion: client.authentication.k8s.io/v1alpha1 + args: + - "access_token" +""" + + with open(kubeconfig_path, "w") as kubeconfig_file: + kubeconfig_file.write(kubeconfig_content.strip()) + + yield + + # Clean up the generated kubeconfig file after the tests + os.remove(kubeconfig_path) + + +def get_example_mock_request(): + request = MagicMock(spec=Request) + request.app.state.settings = get_settings() + + mock_module_info = { + "git_commit_hash": "test_hash", + "version": "test_version", + "git_url": "https://github.com/test/repo", + "module_name": "test_module", + "release_tags": ["test_tag"], + "owners": ["test_owner"], + "docker_img_name": "test_img_name", + } + + request.app.state.catalog_client 
= MagicMock(autospec=CachedCatalogClient) + request.app.state.catalog_client.get_combined_module_info.return_value = mock_module_info + request.app.state.catalog_client.list_service_volume_mounts.return_value = [] + request.app.state.catalog_client.get_secure_params.return_value = [{"param_name": "test_secure_param_name", "param_value": "test_secure_param_value"}] + + mock_k8s_clients = MagicMock(autospec=K8sClients) + mock_k8s_clients.network_client = MagicMock(autospec=NetworkingV1Api) + mock_k8s_clients.app_client = MagicMock(autospec=AppsV1Api) + mock_k8s_clients.core_client = MagicMock(autospec=CoreV1Api) + request.app.state.k8s_clients = mock_k8s_clients + request.app.state.mock_module_info = mock_module_info + + return request + + +def get_example_ingress(): + settings = get_settings() + ingress_spec = V1IngressSpec(rules=[V1IngressRule(host=settings.kbase_root_endpoint.replace("https://", "").replace("https://", ""), http=None)]) # no paths specified + ingress = V1Ingress( + api_version="networking.k8s.io/v1", + kind="Ingress", + metadata=client.V1ObjectMeta( + name="dynamic-services", + annotations={ + "nginx.ingress.kubernetes.io/rewrite-target": "/$2", + }, + ), + spec=ingress_spec, + ) + + ingress_spec.rules = [V1IngressRule(host="ci.kbase.us", http=None)] + return ingress + + +@pytest.fixture(autouse=True) +def example_dynamic_service_status_up(): + return get_example_dynamic_service_status(replicas=1) + + +@pytest.fixture(autouse=True) +def example_dynamic_service_status_down(): + return get_example_dynamic_service_status(replicas=0) + + +def get_example_dynamic_service_status(replicas=1): + return DynamicServiceStatus( + url="test_url", + version="test_version", + module_name="test_module_name", + release_tags=["test_tag"], + git_commit_hash="test_hash", + deployment_name="test_deployment_name", + replicas=replicas, + updated_replicas=1, + ready_replicas=1, + available_replicas=1, + unavailable_replicas=1, + ) diff --git 
a/test/src/models/test_models.py b/test/src/models/test_models.py new file mode 100644 index 0000000..4dda528 --- /dev/null +++ b/test/src/models/test_models.py @@ -0,0 +1,67 @@ +from models import DynamicServiceStatus, ServiceStatus, ServiceHealth + + +def test_model_creation(): + data = { + "git_commit_hash": "abcdef123456", + "version": "1.0.0", + "release_tags": ["beta", "latest"], + "url": "http://example.com", + "module_name": "TestModule", + "deployment_name": "test-deployment", + "replicas": 2, + "available_replicas": 2, + } + + model = DynamicServiceStatus(**data) + + assert model.git_commit_hash == "abcdef123456" + assert model.version == "1.0.0" + assert model.release_tags == ["beta", "latest"] + assert model.url == "http://example.com" + assert model.module_name == "TestModule" + assert model.deployment_name == "test-deployment" + assert model.replicas == 2 + assert model.available_replicas == 2 + assert model.up == 1 + assert model.status == ServiceStatus.RUNNING + assert model.health == ServiceHealth.HEALTHY + + +def test_model_with_error_status(): + data = { + "git_commit_hash": "abcdef123456", + "version": "1.0.0", + "release_tags": ["beta", "latest"], + "url": "http://example.com", + "module_name": "TestModule", + "deployment_name": "test-deployment", + "replicas": 3, + "available_replicas": 2, # This should trigger the ERROR status + } + + model = DynamicServiceStatus(**data) + + assert model.status == ServiceStatus.ERROR + + +def test_calculate_up(): + assert DynamicServiceStatus.calculate_up(0, 0) == 0 + assert DynamicServiceStatus.calculate_up(2, 0) == 0 + assert DynamicServiceStatus.calculate_up(0, 2) == 0 + assert DynamicServiceStatus.calculate_up(2, 2) == 1 + + +def test_calculate_status(): + assert DynamicServiceStatus.calculate_status(0, 0) == ServiceStatus.STOPPED + assert DynamicServiceStatus.calculate_status(2, 0) == ServiceStatus.STARTING + assert DynamicServiceStatus.calculate_status(0, 2) == ServiceStatus.STOPPED + assert 
DynamicServiceStatus.calculate_status(2, 2) == ServiceStatus.RUNNING + + +def test_calculate_health(): + assert DynamicServiceStatus.calculate_health(0, 0) == ServiceHealth.UNHEALTHY + assert DynamicServiceStatus.calculate_health(2, 0) == ServiceHealth.UNHEALTHY + assert DynamicServiceStatus.calculate_health(0, 2) == ServiceHealth.UNHEALTHY + assert DynamicServiceStatus.calculate_health(2, 2) == ServiceHealth.HEALTHY + assert DynamicServiceStatus.calculate_health(3, 2) == ServiceHealth.DEGRADED diff --git a/test/src/routes/test_authenticated_routes.py b/test/src/routes/test_authenticated_routes.py index 2fe85ca..4ea1ac6 100644 --- a/test/src/routes/test_authenticated_routes.py +++ b/test/src/routes/test_authenticated_routes.py @@ -1,34 +1,96 @@ -def test_get_good_status(client_with_authorization): - # sw.start({"module_name": "StaticNarrative", "version": "beta"}) - - # rv = { - # "git_commit_hash": "64df4dc3c09b225a9468a73e7129f1cf1631ae4e", - # "status": "active", - # "version": "0.0.15", - # "hash": "64df4dc3c09b225a9468a73e7129f1cf1631ae4e", - # "release_tags": ["beta", "dev"], - # "url": "https://ci.kbase.us:443/dynserv/64df4dc3c09b225a9468a73e7129f1cf1631ae4e.StaticNarrative", - # "module_name": "StaticNarrative", - # "health": "healthy", - # "up": 1, - # } - - # sw.start({"module_name": "NarrativeService", "version": "release"}) - - # rv = { - # "git_commit_hash": "8a9bb32f9e2ec5169815b984de8e8df550699630", - # "status": "active", - # "version": "0.5.2", - # "hash": "8a9bb32f9e2ec5169815b984de8e8df550699630", - # "release_tags": ["release", "beta", "dev"], - # "url": "https://ci.kbase.us:443/dynserv/8a9bb32f9e2ec5169815b984de8e8df550699630.NarrativeService", - # "module_name": "NarrativeService", - # "health": "healthy", - # "up": 1, - # } - - with client_with_authorization() as client: - response = client.get("/get_service_status?module_name=NarrativeService&version=beta") - assert response.json() != [] - assert response.json() == [123] +from unittest.mock 
import MagicMock, patch + +import pytest +from fastapi.testclient import TestClient + +from clients.CachedAuthClient import CachedAuthClient +from factory import create_app + + +# Create app fixture +@pytest.fixture +def app(): + return create_app() + + +def test_whoami_without_auth(app): + test_client = TestClient(app) + response = test_client.get("/whoami/") + assert response.status_code == 401 + + +def test_whoami_with_bad_auth(app): + test_client = TestClient(app) + response = test_client.get("/whoami/", cookies={"kbase_session": "invalid_session"}) + assert response.status_code == 422 + expected_response = { + "detail": [ + { + "type": "string_pattern_mismatch", + "loc": ["cookie", "kbase_session"], + "msg": "String should match pattern '^[" "a-zA-Z0-9]+$'", + "input": "invalid_session", + "ctx": {"pattern": "^[a-zA-Z0-9]+$"}, + "url": "https://errors.pydantic.dev/2.4/v/string_pattern_mismatch", + }, + { + "type": "string_pattern_mismatch", + "loc": ["cookie", "kbase_session"], + "msg": "String should match pattern '^[a-zA-Z0-9]+$'", + "input": "invalid_session", + "ctx": {"pattern": "^[a-zA-Z0-9]+$"}, + "url": "https://errors.pydantic.dev/2.4/v/string_pattern_mismatch", + }, + ] + } + + assert response.json() == expected_response + + # Test with authorization header + test_client.headers["Authorization"] = "invalid_header" + response = test_client.get( + "/whoami/", + ) + assert response.status_code == 422 + + expected_response["detail"][0]["input"] = "invalid_header" + expected_response["detail"][1]["input"] = "invalid_header" + + expected_response["detail"][0]["loc"] = ["header", "Authorization"] + expected_response["detail"][1]["loc"] = ["header", "Authorization"] + + assert response.json() == expected_response + + +def test_whoami_with_mocked_auth_client(): + mock_auth_client = MagicMock(spec=CachedAuthClient) + + app_with_mock_auth = create_app(auth_client=mock_auth_client) + test_client = TestClient(app_with_mock_auth) + response = 
test_client.get("/whoami/", cookies={"kbase_session": "invalid_session"}) + assert response.status_code == 422 + + response = test_client.get("/whoami/", cookies={"kbase_session": "validsession"}) + assert response.status_code == 200 + + mock_auth_client.validate_and_get_username_auth_roles.assert_called_with(token="validsession") + + +def test_get_metrics(): + TEST_USERNAME = "testuser" + TEST_PASSWORD = "testpass" + + with patch.dict("os.environ", {"METRICS_USERNAME": TEST_USERNAME, "METRICS_PASSWORD": TEST_PASSWORD}): + test_client = TestClient(create_app()) + + # Test with correct credentials + response = test_client.get("/metrics", auth=(TEST_USERNAME, TEST_PASSWORD)) + assert response.status_code == 200 + + # Test with incorrect credentials + response = test_client.get("/metrics", auth=("wrongusername", "wrongpassword")) + assert response.status_code == 401 + + # Test without credentials + response = test_client.get("/metrics") + assert response.status_code == 401 diff --git a/test/src/routes/test_rpc_route.py b/test/src/routes/test_rpc_route.py new file mode 100644 index 0000000..4b0ee7b --- /dev/null +++ b/test/src/routes/test_rpc_route.py @@ -0,0 +1,132 @@ +from unittest.mock import MagicMock, ANY +from unittest.mock import patch + +import pytest +from fastapi.testclient import TestClient + +from clients import KubernetesClients +from clients.CachedAuthClient import CachedAuthClient +from clients.CachedCatalogClient import CachedCatalogClient +from factory import create_app +from rpc.handlers.json_rpc_handler import known_methods, admin_or_owner_required + + +@pytest.fixture +def app(): + with patch("rpc.handlers.authenticated_handlers.stop_deployment") as mock_stop: + mock_stop.__name__ = "stop_deployment" # Set the __name__ attribute + yield create_app( + auth_client=MagicMock(autospec=CachedAuthClient), + catalog_client=MagicMock(autospec=CachedCatalogClient), + k8s_clients=MagicMock(autospec=KubernetesClients.K8sClients), + ) + + +@pytest.fixture +def 
test_client(app): + return TestClient(app) + + +def test_unknown_method(test_client): + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=("unknown_method", {}, 1)): + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": "unknown_method", "id": 1}) + assert response.status_code == 500 + + +def mock_request_function(*args, **kwargs): + return {"result": "mocked_response"} + + +def test_unauthenticated_route(): + method = next(iter(known_methods.keys())) # Get the first known method + # if method in admin_or_owner_required: + # return # skip if it's an authenticated route + + # Mock the known_methods dictionary to return the mock_request_function for the given method + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_request_function}): + app = create_app( + auth_client=MagicMock(autospec=CachedAuthClient), + catalog_client=MagicMock(autospec=CachedCatalogClient), + k8s_clients=MagicMock(autospec=KubernetesClients.K8sClients), + ) + test_client = TestClient(app) + payload_with_params = {"jsonrpc": "2.0", "method": method, "params": [{"module_name": "sample_module", "version": "sample_version"}], "id": 1} + response = test_client.post("/rpc", json=payload_with_params) + assert response.status_code == 200 # Assuming the handler function for this method returns a successful response + assert response.json() == {"result": "mocked_response"} + + +def test_authenticated_route_no_auth(test_client): + method = next(iter(admin_or_owner_required.keys())) # Get the first authenticated method + # Mocking validate_rpc_request and get_user_auth_roles (to return an auth error) + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {}, 1)): + with patch("rpc.handlers.json_rpc_handler.get_user_auth_roles", return_value=(None, "Auth Error")): + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 500 + + +def 
test_authenticated_route_with_auth(test_client): + method = next(iter(admin_or_owner_required.keys())) # Get the first authenticated method + # Mocking validate_rpc_request and get_user_auth_roles (to return valid auth roles) + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {}, 1)): + with patch("rpc.handlers.json_rpc_handler.get_user_auth_roles", return_value=(["admin"], None)): + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_request_function}): + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 200 + assert response.json() == {"result": "mocked_response"} + + +def test_known_method_error_response(test_client): + method = next(iter(known_methods.keys())) # Get the first known method + + def mock_error_function(*args, **kwargs): + return {"error": "Some error occurred."} + + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_error_function}): + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 500 + assert response.json() == {"error": "Some error occurred."} + + +def test_request_function_called_correctly(test_client): + method = next(iter(known_methods.keys())) # Get the first known method + # Mock the request_function to track its calls + mock_function = MagicMock(return_value={"result": "mocked_response"}) + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_function}): + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {"param1": "value1"}, 1)): + test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + mock_function.assert_called_once_with(ANY, {"param1": "value1"}, 1) # Using 'anything()' to ignore matching the request argument + + +def test_request_function_sets_user_auth_roles(test_client): + method = 
next(iter(admin_or_owner_required.keys())) # Get the first authenticated method + + # Mocking validate_rpc_request to return the method and get_user_auth_roles to return valid auth roles and no errors + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {}, 1)): + with patch("rpc.handlers.json_rpc_handler.get_user_auth_roles", return_value=(["admin"], None)): + # Mock the known_methods dictionary to use the side_effect_check for this method + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_request_function}): + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 200 + assert response.json() == {"result": "mocked_response"} + + +def test_authenticated_route(test_client): + method = next(iter(admin_or_owner_required.keys())) # Get the first authenticated method + + # Scenario 1: Test with an auth error + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {}, 1)): + with patch("rpc.handlers.json_rpc_handler.get_user_auth_roles", return_value=(None, {"error": "Authentication failed."})): + with patch("rpc.handlers.json_rpc_handler.function_requires_auth", return_value=True): # Mock to return that function requires authentication + response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 500 + assert response.json() == {"error": "Authentication failed."} + + # Scenario 2: Test with successful authentication + with patch("rpc.handlers.json_rpc_handler.validate_rpc_request", return_value=(method, {}, 1)): + with patch("rpc.handlers.json_rpc_handler.get_user_auth_roles", return_value=(["admin"], None)): + with patch("rpc.handlers.json_rpc_handler.function_requires_auth", return_value=True): # Mock to return that function requires authentication + with patch.dict("rpc.handlers.json_rpc_handler.known_methods", {method: mock_request_function}): + 
response = test_client.post("/rpc", json={"jsonrpc": "2.0", "method": method, "id": 1}) + assert response.status_code == 200 + assert response.json() == {"result": "mocked_response"} diff --git a/test/src/routes/test_unauthenticated_routes.py b/test/src/routes/test_unauthenticated_routes.py new file mode 100644 index 0000000..eed5290 --- /dev/null +++ b/test/src/routes/test_unauthenticated_routes.py @@ -0,0 +1,23 @@ +import pytest +from fastapi.testclient import TestClient + +from factory import create_app + + +@pytest.fixture +def app(): + return create_app() + + +def test_status(app): + client = TestClient(app) + response = client.get("/status") + assert response.status_code == 200 + assert response.json() == {"git_commit_hash": "unknown", "git_url": "https://github.com/kbase/service_wizard2", "message": "", "state": "OK", "version": "unknown"} + + +def test_version(app): + client = TestClient(app) + response = client.get("/version") + assert response.status_code == 200 + assert response.json() == ["unknown"] # 'None' in pycharm diff --git a/test/src/rpc/test_common.py b/test/src/rpc/test_common.py new file mode 100644 index 0000000..0bd51a5 --- /dev/null +++ b/test/src/rpc/test_common.py @@ -0,0 +1,118 @@ +import json +from unittest.mock import MagicMock + +import pytest +from fastapi import HTTPException + +from clients.baseclient import ServerError +from rpc.common import validate_rpc_request, validate_rpc_response, get_user_auth_roles, handle_rpc_request +from rpc.models import JSONRPCResponse, ErrorResponse + + +def test_validate_rpc_request_invalid_json(): + with pytest.raises(ServerError, match="Parse error JSON format"): + validate_rpc_request(b"invalid json") + + +def test_validate_rpc_request_invalid_request(): + with pytest.raises(ServerError, match="Invalid Request"): + validate_rpc_request(json.dumps([]).encode("utf-8")) + + +def test_validate_rpc_request_valid_json(): + method, params, jrpc_id = validate_rpc_request(json.dumps({"method": 
"test_method", "params": [], "id": 1}).encode("utf-8")) + assert method == "test_method" + assert params == [] + assert jrpc_id == 1 + + +def test_validate_rpc_response_invalid_response(): + response = JSONRPCResponse(id=1) + result = validate_rpc_response(response) + assert isinstance(result, HTTPException) + assert result.status_code == 500 + assert "Programming Error: Invalid JSON-RPC response format" in result.detail + + +def test_validate_rpc_response_valid_response(): + response = validate_rpc_response(JSONRPCResponse(id=1, result="test_result")) + assert response.id == 1 + assert response.result == "test_result" + + +def test_get_user_auth_roles_auth_error(): + request = MagicMock() + request.headers = {"Authorization": None} + request.cookies = {"kbase_session": None} + request.app.state.auth_client.get_user_auth_roles.side_effect = HTTPException(status_code=401, detail="Unauthorized") + _, error = get_user_auth_roles(request, "1", "test_method") + assert error.id == "1" + assert isinstance(error.error, ErrorResponse) + + +def test_handle_rpc_request_invalid_params(): + request = MagicMock() + response = handle_rpc_request(request, [], "1", lambda req, module_name, module_version: "test_result") + assert response.id == "1" + assert isinstance(response.error, ErrorResponse) + + +def test_handle_rpc_request_server_error(): + request = MagicMock() + action = MagicMock() + action.__name__ = "test_action" + action.side_effect = ServerError(name="name", message="test server error", code=500) + response = handle_rpc_request(request, [{"module_name": "test_module", "version": "1.0"}], "1", action) + assert response.id == "1" + assert isinstance(response.error, ErrorResponse) + + +def test_handle_rpc_request_success(): + request = MagicMock() + action = MagicMock(return_value="test_result") + action.__name__ = "test_action" + response = handle_rpc_request(request, [{"module_name": "test_module", "version": "1.0"}], "1", action) + assert response.id == "1" + assert 
response.result == ["test_result"] + + +def mock_action(request, module_name, module_version): + return {"test": "data"} + + +# 1. Test when params is an empty list: +def test_handle_rpc_request_no_params(): + response = handle_rpc_request(request=MagicMock(), params=[], jrpc_id="1", action=mock_action) + assert response.error is not None + assert response.error.name == "Invalid params" + assert response.error.message == "No params passed to method mock_action" + + +# 2. Test when the first item in params is not a dictionary: +def test_handle_rpc_request_invalid_params2(): + response = handle_rpc_request(request=MagicMock(), params=["invalid"], jrpc_id="1", action=mock_action) + assert response.error is not None + assert response.error.name == "Invalid params" + assert response.error.message == "Invalid params for ServiceWizard.mock_action" + + +# 3. Test for unexpected exception: +def mock_action_with_exception(request, module_name, module_version): + raise ValueError("Unexpected error") + + +def test_handle_rpc_request_unexpected_exception(): + response = handle_rpc_request(request=MagicMock(), params=[{"module_name": "test", "version": "1.0"}], jrpc_id="1", action=mock_action_with_exception) + assert response.error is not None + assert response.error.name == "Internal error - An internal error occurred on the server while processing the request" + assert "Unexpected error" in response.error.message + + +def test_validate_rpc_request_invalid_method_and_params(): + with pytest.raises(ServerError) as exc_info: + validate_rpc_request(body=json.dumps({"method": 123, "params": "not_a_list", "id": 1}).encode("utf-8")) + + error = exc_info.value + assert error.code == -32600 + assert error.name == "Invalid Request" + assert "`method` must be a valid SW1 method string. Params must be a dictionary." 
in error.message diff --git a/test/src/rpc/test_error_responses.py b/test/src/rpc/test_error_responses.py new file mode 100644 index 0000000..a58f0a5 --- /dev/null +++ b/test/src/rpc/test_error_responses.py @@ -0,0 +1,55 @@ +import pytest +from fastapi.responses import JSONResponse + +from rpc.error_responses import ( + json_rpc_response_to_exception, + method_not_found, + no_params_passed, + not_enough_params, + invalid_params, + no_authenticated_headers_passed, + token_validation_failed, +) +from rpc.models import ErrorResponse, JSONRPCResponse + + +# Functions under test should be imported. + + +@pytest.mark.parametrize( + "func, method, jrpc_id, expected_message, expected_code", + [ + (method_not_found, "testMethod", "1", "Method 'testMethod' not found", -32601), + (no_params_passed, "testMethod", "2", "No params passed to method testMethod", -32602), + (not_enough_params, "testMethod", "3", "Not enough params passed to method testMethod", -32602), + (invalid_params, "testMethod", "4", "Invalid params passed method testMethod, see the spec for more details", -32602), + ( + no_authenticated_headers_passed, + None, + "5", + "Token validation failed: Must supply token: Authentication required for ServiceWizard2 but no authentication header or kbase_session cookie was passed", + -32000, + ), + (token_validation_failed, None, "6", "Token validation failed: Error connecting to auth service: 401 Unauthorized\n10020 Invalid token", -32000), + ], +) +def test_error_functions(func, method, jrpc_id, expected_message, expected_code): + if method: + response = func(method, jrpc_id) + else: + response = func(jrpc_id) + assert isinstance(response, JSONRPCResponse) + assert response.id == jrpc_id + assert response.error.message == expected_message + assert response.error.code == expected_code + + +def test_json_rpc_response_to_exception(): + error_response = ErrorResponse(message="Test Error", code=-32000, name="Server error") + jrpc_response = JSONRPCResponse(id="7", 
error=error_response) + + response = json_rpc_response_to_exception(jrpc_response) + assert isinstance(response, JSONResponse) + assert response.status_code == 500 + # assert response.body == b'{"version":"1.0","id": "7", "error": {"message": "Test Error", "code": -32000, "name": "Server error"}}' + assert response.body == b'{"version":"1.0","id":"7","error":{"message":"Test Error","code":-32000,"name":"Server error","error":null}}' diff --git a/test/src/rpc/test_handlers.py b/test/src/rpc/test_handlers.py new file mode 100644 index 0000000..4aa1817 --- /dev/null +++ b/test/src/rpc/test_handlers.py @@ -0,0 +1,63 @@ +from unittest.mock import Mock, patch + +from fastapi.requests import Request + +from rpc.handlers import authenticated_handlers, unauthenticated_handlers + +from dependencies import logs, status, lifecycle +from dependencies.lifecycle import stop_deployment + +# Mocking the Request object +mock_request = Mock(spec=Request) + +# Common mock params and id +mock_params = [{}] +mock_jrpc_id = "test_id" + + +@patch("rpc.handlers.authenticated_handlers.handle_rpc_request") +def test_stop(mock_handle_rpc): + authenticated_handlers.stop(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, stop_deployment) + + +@patch("rpc.handlers.authenticated_handlers.handle_rpc_request") +def test_get_service_log(mock_handle_rpc): + authenticated_handlers.get_service_log(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, logs.get_service_log) + + +@patch("rpc.handlers.authenticated_handlers.handle_rpc_request") +def test_get_service_log_web_socket(mock_handle_rpc): + authenticated_handlers.get_service_log_web_socket(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, logs.get_service_log_web_socket) + + 
+@patch("rpc.handlers.unauthenticated_handlers.handle_rpc_request") +def test_list_service_status(mock_handle_rpc): + unauthenticated_handlers.list_service_status(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, status.get_all_dynamic_service_statuses) + + +@patch("rpc.handlers.unauthenticated_handlers.handle_rpc_request") +def test_get_service_status_without_restart(mock_handle_rpc): + unauthenticated_handlers.get_service_status_without_restart(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, status.get_service_status_without_retries) + + +@patch("rpc.handlers.unauthenticated_handlers.handle_rpc_request") +def test_start(mock_handle_rpc): + unauthenticated_handlers.start(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, lifecycle.start_deployment) + + +@patch("rpc.handlers.unauthenticated_handlers.handle_rpc_request") +def test_status(mock_handle_rpc): + unauthenticated_handlers.status(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, status.get_status) + + +@patch("rpc.handlers.unauthenticated_handlers.handle_rpc_request") +def test_version(mock_handle_rpc): + unauthenticated_handlers.version(mock_request, mock_params, mock_jrpc_id) + mock_handle_rpc.assert_called_once_with(mock_request, mock_params, mock_jrpc_id, status.get_version) diff --git a/test/src/rpc/test_rpc_models.py b/test/src/rpc/test_rpc_models.py new file mode 100644 index 0000000..ffa166a --- /dev/null +++ b/test/src/rpc/test_rpc_models.py @@ -0,0 +1,132 @@ +import pytest +from pydantic import ValidationError + +from rpc.models import ErrorResponse, JSONRPCResponse + + +# 1. Test that the models can be instantiated with valid data. 
# NOTE(review): reconstructed from the newline-collapsed patch — model
# creation/validation tests from test/src/rpc/test_rpc_models.py.


def test_error_response_creation():
    """ErrorResponse accepts message/code/name and leaves the optional error detail as None."""
    data = {
        "message": "An error occurred",
        "code": 400,
        "name": "BadRequest",
    }
    response = ErrorResponse(**data)
    assert response.message == data["message"]
    assert response.code == data["code"]
    assert response.name == data["name"]
    assert response.error is None


def test_jsonrpc_response_creation():
    """JSONRPCResponse defaults: version "1.0", id 0, error None."""
    data = {
        "result": "Success",
    }
    response = JSONRPCResponse(**data)
    assert response.version == "1.0"
    assert response.id == 0
    assert response.result == data["result"]
    assert response.error is None


# 2. Test that the models raise validation errors for invalid data.


def test_invalid_error_response_creation():
    """ErrorResponse requires more than just a message — missing fields must fail validation."""
    data = {
        "message": "An error occurred",
    }
    with pytest.raises(ValidationError):
        ErrorResponse(**data)


def test_invalid_jsonrpc_response_creation():
    """A non-string version (int 2) must fail validation."""
    data = {
        "version": 2,
    }
    with pytest.raises(ValidationError):
        JSONRPCResponse(**data)


# 3. Test the custom logic in the model_dump method.
# NOTE(review): reconstructed from the newline-collapsed patch — model_dump
# tests (rest of test/src/rpc/test_rpc_models.py) followed by all of
# test/src/test_factory.py.


def test_model_dump():
    """model_dump drops error/version on success, and drops result on error."""
    data = {
        "result": "Success",
    }
    response = JSONRPCResponse(**data)
    serialized_data = response.model_dump()

    # Success path: only the result is serialized.
    assert "result" in serialized_data
    assert "error" not in serialized_data
    assert "version" not in serialized_data
    assert serialized_data["result"] == "Success"

    error_data = {
        "message": "An error occurred",
        "code": 400,
        "name": "BadRequest",
    }
    response_with_error = JSONRPCResponse(error=ErrorResponse(**error_data))
    serialized_data_with_error = response_with_error.model_dump()

    # Error path: error and version are serialized; result is omitted.
    assert "error" in serialized_data_with_error
    assert "version" in serialized_data_with_error
    assert "result" not in serialized_data_with_error
    assert serialized_data_with_error["error"]["message"] == "An error occurred"


# Tests for ErrorResponse


def test_full_error_response_creation():
    """All four ErrorResponse fields, including the optional error detail string, round-trip."""
    data = {"message": "An error occurred", "code": 500, "name": "InternalServerError", "error": "Some error string"}
    response = ErrorResponse(**data)
    assert response.message == data["message"]
    assert response.code == data["code"]
    assert response.name == data["name"]
    assert response.error == data["error"]


# Tests for JSONRPCResponse


def test_full_jsonrpc_response_creation():
    """Explicitly supplied version/id/result/error values round-trip unchanged."""
    data = {"version": "1.0", "id": "some-id", "result": "Success", "error": None}
    response = JSONRPCResponse(**data)
    assert response.version == data["version"]
    assert response.id == data["id"]
    assert response.result == data["result"]
    assert response.error == data["error"]


def test_jsonrpc_model_dump_with_both_fields():
    """When both result and error are set, model_dump keeps both."""
    data = {
        "result": "Success",
        "error": {
            "message": "An error occurred",
            "code": 400,
            "name": "BadRequest",
        },
    }
    response = JSONRPCResponse(**data)
    serialized_data = response.model_dump()
    assert "error" in serialized_data
    assert "result" in serialized_data


def test_jsonrpc_model_dump_with_id():
    """A non-None id is preserved in the serialized output."""
    data = {"id": "some-id", "result": "Success"}
    response = JSONRPCResponse(**data)
    serialized_data = response.model_dump()
    assert serialized_data["id"] == "some-id"


def test_jsonrpc_model_dump_without_id():
    """A None id is dropped from the serialized output."""
    data = {"id": None, "result": "Success"}
    response = JSONRPCResponse(**data)
    serialized_data = response.model_dump()
    assert "id" not in serialized_data


# ---- new file: test/src/test_factory.py ----
from unittest.mock import patch, Mock

import pytest
from fastapi import FastAPI

from factory import create_app, sw2_authenticated_router, sw2_unauthenticated_router, sw2_rpc_router
from routes.metrics_routes import router as metrics_router


@pytest.fixture
def mock_env_vars(monkeypatch):
    """Mock the environment variables create_app reads."""
    monkeypatch.setenv("LOG_LEVEL", "DEBUG")
    monkeypatch.setenv("SENTRY_DSN", "mock_sentry_dsn")
    monkeypatch.setenv("METRICS_USERNAME", "user")
    monkeypatch.setenv("METRICS_PASSWORD", "password")
    monkeypatch.setenv("DOTENV_FILE_LOCATION", ".env")


@pytest.fixture
def mock_clients():
    """Mocked service clients stored on the app state."""
    return {
        "catalog_client": Mock(),
        "auth_client": Mock(),
        "k8s_clients": Mock(),
    }


def test_create_app_with_defaults(mock_env_vars, mock_clients):
    """create_app wires mocked clients into app.state, mounts every router, and initializes Sentry once."""
    with patch("factory.CachedCatalogClient", return_value=mock_clients["catalog_client"]), patch(
        "factory.CachedAuthClient", return_value=mock_clients["auth_client"]
    ), patch("factory.K8sClients", return_value=mock_clients["k8s_clients"]), patch("sentry_sdk.init") as mock_sentry_init:
        app = create_app()

        assert isinstance(app, FastAPI)
        # Clients are exposed via the app's state.
        assert app.state.catalog_client == mock_clients["catalog_client"]
        assert app.state.auth_client == mock_clients["auth_client"]
        assert app.state.k8s_clients == mock_clients["k8s_clients"]

        # Every route from each router must be registered on the app.
        all_paths = [route.path for route in app.routes]
        for path in sw2_authenticated_router.routes:
            assert path.path in all_paths

        for path in sw2_unauthenticated_router.routes:
            assert path.path in all_paths

        for path in sw2_rpc_router.routes:
            assert path.path in all_paths

        for path in metrics_router.routes:
            assert path.path in all_paths

        mock_sentry_init.assert_called_once_with(
            dsn="mock_sentry_dsn", traces_sample_rate=1.0, http_proxy=None, environment="https://ci.kbase.us/dynamic_services"
        )


def test_create_app_without_metrics(mock_env_vars, mock_clients, monkeypatch):
    """Without METRICS_USERNAME/METRICS_PASSWORD the metrics router is not mounted."""
    monkeypatch.delenv("METRICS_USERNAME", raising=False)
    monkeypatch.delenv("METRICS_PASSWORD", raising=False)

    with patch("factory.CachedCatalogClient", return_value=mock_clients["catalog_client"]), patch(
        "factory.CachedAuthClient", return_value=mock_clients["auth_client"]
    ), patch("factory.K8sClients", return_value=mock_clients["k8s_clients"]), patch("sentry_sdk.init") as mock_sentry_init:
        app = create_app()

        # The metrics router must be absent.
        router_names = [r.name for r in app.routes]
        assert "metrics_router" not in router_names
        mock_sentry_init.assert_called_once_with(
            dsn="mock_sentry_dsn", traces_sample_rate=1.0, http_proxy=None, environment="https://ci.kbase.us/dynamic_services"
        )


# You can expand with more test functions or scenarios as needed