diff --git a/docker/teach-inference.Dockerfile b/docker/teach-inference.Dockerfile
deleted file mode 100644
index 96db343c..00000000
--- a/docker/teach-inference.Dockerfile
+++ /dev/null
@@ -1,45 +0,0 @@
-FROM ubuntu:18.04
-
-# install python3.8
-# hadolint ignore=DL3008
-RUN apt-get update -qq \
-    && apt-get install --no-install-recommends -y build-essential software-properties-common \
-    && add-apt-repository -y ppa:deadsnakes/ppa \
-    && apt-get install --no-install-recommends -y python3.8 python3.8-dev python3.8-distutils \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && apt-get autoremove -y
-
-# register the version in alternatives and set higher priority to 3.8
-RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.6 1
-# hadolint ignore=DL3059
-RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2
-
-# hadolint ignore=DL3027
-RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends \
-    ffmpeg \
-    vim \
-    curl \
-    git
-
-# upgrade pip to latest version
-RUN curl -s https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
-    python3 get-pip.py --force-reinstall && \
-    rm get-pip.py
-
-# Create a folder for the app
-WORKDIR /app
-
-RUN git clone https://github.com/alexa/teach.git .
-# hadolint ignore=DL3059
-RUN pip install --no-cache-dir -r requirements.txt
-
-ENV PYTHONPATH ./src
-
-RUN pip install --no-cache-dir -e .
-
-# Download AI2Thor executable
-# hadolint ignore=DL3059
-RUN python3 -c "from teach.settings import get_settings; from teach.simulators.simulator_THOR import COMMIT_ID, TEAChController; TEAChController(base_dir=get_settings().AI2THOR_BASE_DIR, download_only=True, commit_id=COMMIT_ID);"
-
-CMD ["/bin/bash"]
diff --git a/poetry.lock b/poetry.lock
index c489c01b..5b226c0c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,9 +1,10 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]] name = "anyio" version = "4.1.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -25,6 +26,7 @@ trio = ["trio (>=0.23)"] name = "astor" version = "0.8.1" description = "Read/rewrite/write Python ASTs" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ @@ -36,6 +38,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -54,6 +57,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "aws-sam-translator" version = "1.81.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ @@ -62,18 +66,19 @@ files = [ ] [package.dependencies] -boto3 = ">=1.19.5,<2.dev0" +boto3 = ">=1.19.5,<2.0.0" jsonschema = ">=3.2,<5" pydantic = ">=1.8,<3" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" version = "2.12.1" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -89,6 +94,7 @@ wrapt = "*" name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -112,6 +118,7 @@ yaml = ["PyYAML"] name = "black" version = "23.11.0" description = "The uncompromising code formatter." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -152,466 +159,49 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.23.3" +version = "1.33.6" description = "The AWS SDK for Python" +category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "boto3-1.23.3-py3-none-any.whl", hash = "sha256:5f27eec9b0a43edbfb3ed5e748837b10219a972f0728fecd78f84ec3629f2092"}, - {file = "boto3-1.23.3.tar.gz", hash = "sha256:9d5ce5ae3ddd4429cf752efe7c9f39691db6c85b6b5f1cfc8861b8f23b72b67a"}, + {file = "boto3-1.33.6-py3-none-any.whl", hash = "sha256:b88f0f305186c5fd41f168e006baa45b7002a33029aec8e5bef373237a172fca"}, + {file = "boto3-1.33.6.tar.gz", hash = "sha256:4f62fc1c7f3ea2d22917aa0aa07b86f119abd90bed3d815e4b52fb3d84773e15"}, ] [package.dependencies] -botocore = ">=1.26.3,<1.27.0" +botocore = ">=1.33.6,<1.34.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.5.0,<0.6.0" +s3transfer = ">=0.8.2,<0.9.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] -[[package]] -name = "boto3-stubs" -version = "1.33.5" -description = "Type annotations for boto3 1.33.5 generated with mypy-boto3-builder 7.21.0" -optional = false -python-versions = ">=3.7" -files = [ - {file = "boto3-stubs-1.33.5.tar.gz", hash = "sha256:40d7a52e60d477822655938083be43a9097a405f1d748ce86f5233685e0cddcc"}, - {file = "boto3_stubs-1.33.5-py3-none-any.whl", hash = "sha256:4f19917a817f5530c5a05924ff009929218664c75140f47fd57e3ba6d477ab48"}, -] - -[package.dependencies] -botocore-stubs = "*" -mypy-boto3-cloudformation = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-dynamodb = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-ec2 = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-lambda = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-rds = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-s3 = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-secretsmanager = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"secretsmanager\""} -mypy-boto3-sqs = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -types-s3transfer = "*" -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)"] -account = ["mypy-boto3-account (>=1.33.0,<1.34.0)"] -acm = ["mypy-boto3-acm (>=1.33.0,<1.34.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.33.0,<1.34.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)", "mypy-boto3-account (>=1.33.0,<1.34.0)", "mypy-boto3-acm (>=1.33.0,<1.34.0)", "mypy-boto3-acm-pca (>=1.33.0,<1.34.0)", "mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-amp (>=1.33.0,<1.34.0)", "mypy-boto3-amplify (>=1.33.0,<1.34.0)", "mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)", "mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)", "mypy-boto3-apigateway (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)", "mypy-boto3-appconfig (>=1.33.0,<1.34.0)", "mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)", "mypy-boto3-appfabric (>=1.33.0,<1.34.0)", "mypy-boto3-appflow (>=1.33.0,<1.34.0)", 
"mypy-boto3-appintegrations (>=1.33.0,<1.34.0)", "mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-application-insights (>=1.33.0,<1.34.0)", "mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-appmesh (>=1.33.0,<1.34.0)", "mypy-boto3-apprunner (>=1.33.0,<1.34.0)", "mypy-boto3-appstream (>=1.33.0,<1.34.0)", "mypy-boto3-appsync (>=1.33.0,<1.34.0)", "mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)", "mypy-boto3-athena (>=1.33.0,<1.34.0)", "mypy-boto3-auditmanager (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)", "mypy-boto3-b2bi (>=1.33.0,<1.34.0)", "mypy-boto3-backup (>=1.33.0,<1.34.0)", "mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)", "mypy-boto3-backupstorage (>=1.33.0,<1.34.0)", "mypy-boto3-batch (>=1.33.0,<1.34.0)", "mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-billingconductor (>=1.33.0,<1.34.0)", "mypy-boto3-braket (>=1.33.0,<1.34.0)", "mypy-boto3-budgets (>=1.33.0,<1.34.0)", "mypy-boto3-ce (>=1.33.0,<1.34.0)", "mypy-boto3-chime (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)", "mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)", "mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)", "mypy-boto3-cloud9 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)", "mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)", "mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)", "mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)", "mypy-boto3-codeartifact (>=1.33.0,<1.34.0)", "mypy-boto3-codebuild (>=1.33.0,<1.34.0)", "mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)", "mypy-boto3-codecommit (>=1.33.0,<1.34.0)", "mypy-boto3-codedeploy (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)", "mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-codepipeline (>=1.33.0,<1.34.0)", "mypy-boto3-codestar (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)", "mypy-boto3-comprehend (>=1.33.0,<1.34.0)", "mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)", "mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)", "mypy-boto3-config (>=1.33.0,<1.34.0)", "mypy-boto3-connect (>=1.33.0,<1.34.0)", "mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)", "mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)", "mypy-boto3-connectcases (>=1.33.0,<1.34.0)", "mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)", "mypy-boto3-controltower (>=1.33.0,<1.34.0)", "mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)", "mypy-boto3-cur (>=1.33.0,<1.34.0)", "mypy-boto3-customer-profiles 
(>=1.33.0,<1.34.0)", "mypy-boto3-databrew (>=1.33.0,<1.34.0)", "mypy-boto3-dataexchange (>=1.33.0,<1.34.0)", "mypy-boto3-datapipeline (>=1.33.0,<1.34.0)", "mypy-boto3-datasync (>=1.33.0,<1.34.0)", "mypy-boto3-datazone (>=1.33.0,<1.34.0)", "mypy-boto3-dax (>=1.33.0,<1.34.0)", "mypy-boto3-detective (>=1.33.0,<1.34.0)", "mypy-boto3-devicefarm (>=1.33.0,<1.34.0)", "mypy-boto3-devops-guru (>=1.33.0,<1.34.0)", "mypy-boto3-directconnect (>=1.33.0,<1.34.0)", "mypy-boto3-discovery (>=1.33.0,<1.34.0)", "mypy-boto3-dlm (>=1.33.0,<1.34.0)", "mypy-boto3-dms (>=1.33.0,<1.34.0)", "mypy-boto3-docdb (>=1.33.0,<1.34.0)", "mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)", "mypy-boto3-drs (>=1.33.0,<1.34.0)", "mypy-boto3-ds (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)", "mypy-boto3-ebs (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)", "mypy-boto3-ecr (>=1.33.0,<1.34.0)", "mypy-boto3-ecr-public (>=1.33.0,<1.34.0)", "mypy-boto3-ecs (>=1.33.0,<1.34.0)", "mypy-boto3-efs (>=1.33.0,<1.34.0)", "mypy-boto3-eks (>=1.33.0,<1.34.0)", "mypy-boto3-eks-auth (>=1.33.0,<1.34.0)", "mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)", "mypy-boto3-elasticache (>=1.33.0,<1.34.0)", "mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)", "mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)", "mypy-boto3-elb (>=1.33.0,<1.34.0)", "mypy-boto3-elbv2 (>=1.33.0,<1.34.0)", "mypy-boto3-emr (>=1.33.0,<1.34.0)", "mypy-boto3-emr-containers (>=1.33.0,<1.34.0)", "mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-entityresolution (>=1.33.0,<1.34.0)", "mypy-boto3-es (>=1.33.0,<1.34.0)", "mypy-boto3-events (>=1.33.0,<1.34.0)", "mypy-boto3-evidently (>=1.33.0,<1.34.0)", "mypy-boto3-finspace (>=1.33.0,<1.34.0)", "mypy-boto3-finspace-data (>=1.33.0,<1.34.0)", "mypy-boto3-firehose (>=1.33.0,<1.34.0)", "mypy-boto3-fis (>=1.33.0,<1.34.0)", "mypy-boto3-fms (>=1.33.0,<1.34.0)", "mypy-boto3-forecast (>=1.33.0,<1.34.0)", "mypy-boto3-forecastquery (>=1.33.0,<1.34.0)", "mypy-boto3-frauddetector (>=1.33.0,<1.34.0)", "mypy-boto3-freetier (>=1.33.0,<1.34.0)", "mypy-boto3-fsx (>=1.33.0,<1.34.0)", "mypy-boto3-gamelift (>=1.33.0,<1.34.0)", "mypy-boto3-glacier (>=1.33.0,<1.34.0)", "mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)", "mypy-boto3-glue (>=1.33.0,<1.34.0)", "mypy-boto3-grafana (>=1.33.0,<1.34.0)", "mypy-boto3-greengrass (>=1.33.0,<1.34.0)", "mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)", "mypy-boto3-groundstation (>=1.33.0,<1.34.0)", "mypy-boto3-guardduty (>=1.33.0,<1.34.0)", "mypy-boto3-health (>=1.33.0,<1.34.0)", "mypy-boto3-healthlake (>=1.33.0,<1.34.0)", "mypy-boto3-honeycode (>=1.33.0,<1.34.0)", "mypy-boto3-iam (>=1.33.0,<1.34.0)", "mypy-boto3-identitystore (>=1.33.0,<1.34.0)", "mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)", "mypy-boto3-importexport (>=1.33.0,<1.34.0)", "mypy-boto3-inspector (>=1.33.0,<1.34.0)", "mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)", "mypy-boto3-inspector2 (>=1.33.0,<1.34.0)", "mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)", "mypy-boto3-iot (>=1.33.0,<1.34.0)", "mypy-boto3-iot-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)", "mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleethub 
(>=1.33.0,<1.34.0)", "mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)", "mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)", "mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)", "mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)", "mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)", "mypy-boto3-iotwireless (>=1.33.0,<1.34.0)", "mypy-boto3-ivs (>=1.33.0,<1.34.0)", "mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)", "mypy-boto3-ivschat (>=1.33.0,<1.34.0)", "mypy-boto3-kafka (>=1.33.0,<1.34.0)", "mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-kendra (>=1.33.0,<1.34.0)", "mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)", "mypy-boto3-keyspaces (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)", "mypy-boto3-kms (>=1.33.0,<1.34.0)", "mypy-boto3-lakeformation (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)", "mypy-boto3-lex-models (>=1.33.0,<1.34.0)", "mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-lightsail (>=1.33.0,<1.34.0)", "mypy-boto3-location (>=1.33.0,<1.34.0)", "mypy-boto3-logs (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)", "mypy-boto3-m2 (>=1.33.0,<1.34.0)", "mypy-boto3-machinelearning (>=1.33.0,<1.34.0)", "mypy-boto3-macie2 (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)", "mypy-boto3-medialive (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)", "mypy-boto3-mediatailor (>=1.33.0,<1.34.0)", "mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)", "mypy-boto3-memorydb (>=1.33.0,<1.34.0)", "mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)", "mypy-boto3-mgh (>=1.33.0,<1.34.0)", "mypy-boto3-mgn (>=1.33.0,<1.34.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)", "mypy-boto3-mobile (>=1.33.0,<1.34.0)", "mypy-boto3-mq (>=1.33.0,<1.34.0)", "mypy-boto3-mturk (>=1.33.0,<1.34.0)", "mypy-boto3-mwaa (>=1.33.0,<1.34.0)", "mypy-boto3-neptune (>=1.33.0,<1.34.0)", "mypy-boto3-neptunedata (>=1.33.0,<1.34.0)", "mypy-boto3-network-firewall (>=1.33.0,<1.34.0)", 
"mypy-boto3-networkmanager (>=1.33.0,<1.34.0)", "mypy-boto3-nimble (>=1.33.0,<1.34.0)", "mypy-boto3-oam (>=1.33.0,<1.34.0)", "mypy-boto3-omics (>=1.33.0,<1.34.0)", "mypy-boto3-opensearch (>=1.33.0,<1.34.0)", "mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)", "mypy-boto3-opsworks (>=1.33.0,<1.34.0)", "mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)", "mypy-boto3-organizations (>=1.33.0,<1.34.0)", "mypy-boto3-osis (>=1.33.0,<1.34.0)", "mypy-boto3-outposts (>=1.33.0,<1.34.0)", "mypy-boto3-panorama (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)", "mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)", "mypy-boto3-personalize (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-events (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-pi (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)", "mypy-boto3-pipes (>=1.33.0,<1.34.0)", "mypy-boto3-polly (>=1.33.0,<1.34.0)", "mypy-boto3-pricing (>=1.33.0,<1.34.0)", "mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)", "mypy-boto3-proton (>=1.33.0,<1.34.0)", "mypy-boto3-qbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-qconnect (>=1.33.0,<1.34.0)", "mypy-boto3-qldb (>=1.33.0,<1.34.0)", "mypy-boto3-qldb-session (>=1.33.0,<1.34.0)", "mypy-boto3-quicksight (>=1.33.0,<1.34.0)", "mypy-boto3-ram (>=1.33.0,<1.34.0)", "mypy-boto3-rbin (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-rds-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-rekognition (>=1.33.0,<1.34.0)", "mypy-boto3-repostspace (>=1.33.0,<1.34.0)", "mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)", "mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)", "mypy-boto3-resource-groups (>=1.33.0,<1.34.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)", "mypy-boto3-robomaker (>=1.33.0,<1.34.0)", "mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)", "mypy-boto3-route53 (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)", "mypy-boto3-route53domains (>=1.33.0,<1.34.0)", "mypy-boto3-route53resolver (>=1.33.0,<1.34.0)", "mypy-boto3-rum (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-s3control (>=1.33.0,<1.34.0)", "mypy-boto3-s3outposts (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-savingsplans (>=1.33.0,<1.34.0)", "mypy-boto3-scheduler (>=1.33.0,<1.34.0)", "mypy-boto3-schemas (>=1.33.0,<1.34.0)", "mypy-boto3-sdb (>=1.33.0,<1.34.0)", "mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)", "mypy-boto3-securityhub (>=1.33.0,<1.34.0)", "mypy-boto3-securitylake (>=1.33.0,<1.34.0)", "mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)", "mypy-boto3-service-quotas (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)", "mypy-boto3-servicediscovery 
(>=1.33.0,<1.34.0)", "mypy-boto3-ses (>=1.33.0,<1.34.0)", "mypy-boto3-sesv2 (>=1.33.0,<1.34.0)", "mypy-boto3-shield (>=1.33.0,<1.34.0)", "mypy-boto3-signer (>=1.33.0,<1.34.0)", "mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)", "mypy-boto3-sms (>=1.33.0,<1.34.0)", "mypy-boto3-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)", "mypy-boto3-snowball (>=1.33.0,<1.34.0)", "mypy-boto3-sns (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)", "mypy-boto3-ssm (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)", "mypy-boto3-sso (>=1.33.0,<1.34.0)", "mypy-boto3-sso-admin (>=1.33.0,<1.34.0)", "mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)", "mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)", "mypy-boto3-storagegateway (>=1.33.0,<1.34.0)", "mypy-boto3-sts (>=1.33.0,<1.34.0)", "mypy-boto3-support (>=1.33.0,<1.34.0)", "mypy-boto3-support-app (>=1.33.0,<1.34.0)", "mypy-boto3-swf (>=1.33.0,<1.34.0)", "mypy-boto3-synthetics (>=1.33.0,<1.34.0)", "mypy-boto3-textract (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-query (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-write (>=1.33.0,<1.34.0)", "mypy-boto3-tnb (>=1.33.0,<1.34.0)", "mypy-boto3-transcribe (>=1.33.0,<1.34.0)", "mypy-boto3-transfer (>=1.33.0,<1.34.0)", "mypy-boto3-translate (>=1.33.0,<1.34.0)", "mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)", "mypy-boto3-voice-id (>=1.33.0,<1.34.0)", "mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)", "mypy-boto3-waf (>=1.33.0,<1.34.0)", "mypy-boto3-waf-regional (>=1.33.0,<1.34.0)", "mypy-boto3-wafv2 (>=1.33.0,<1.34.0)", "mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)", "mypy-boto3-wisdom (>=1.33.0,<1.34.0)", "mypy-boto3-workdocs (>=1.33.0,<1.34.0)", "mypy-boto3-worklink (>=1.33.0,<1.34.0)", "mypy-boto3-workmail (>=1.33.0,<1.34.0)", "mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)", "mypy-boto3-xray (>=1.33.0,<1.34.0)"] -amp = ["mypy-boto3-amp (>=1.33.0,<1.34.0)"] -amplify = ["mypy-boto3-amplify (>=1.33.0,<1.34.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.33.0,<1.34.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.33.0,<1.34.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.33.0,<1.34.0)"] -appflow = ["mypy-boto3-appflow (>=1.33.0,<1.34.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.33.0,<1.34.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.33.0,<1.34.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.33.0,<1.34.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.33.0,<1.34.0)"] -appstream = ["mypy-boto3-appstream (>=1.33.0,<1.34.0)"] -appsync = ["mypy-boto3-appsync (>=1.33.0,<1.34.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)"] -athena = ["mypy-boto3-athena (>=1.33.0,<1.34.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.33.0,<1.34.0)"] -autoscaling = 
["mypy-boto3-autoscaling (>=1.33.0,<1.34.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)"] -b2bi = ["mypy-boto3-b2bi (>=1.33.0,<1.34.0)"] -backup = ["mypy-boto3-backup (>=1.33.0,<1.34.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.33.0,<1.34.0)"] -batch = ["mypy-boto3-batch (>=1.33.0,<1.34.0)"] -bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)"] -bedrock = ["mypy-boto3-bedrock (>=1.33.0,<1.34.0)"] -bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)"] -bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)"] -bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.33.0,<1.34.0)"] -boto3 = ["boto3 (==1.33.5)", "botocore (==1.33.5)"] -braket = ["mypy-boto3-braket (>=1.33.0,<1.34.0)"] -budgets = ["mypy-boto3-budgets (>=1.33.0,<1.34.0)"] -ce = ["mypy-boto3-ce (>=1.33.0,<1.34.0)"] -chime = ["mypy-boto3-chime (>=1.33.0,<1.34.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)"] -cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.33.0,<1.34.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.33.0,<1.34.0)"] -cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.33.0,<1.34.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.33.0,<1.34.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.33.0,<1.34.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.33.0,<1.34.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.33.0,<1.34.0)"] -codestar = ["mypy-boto3-codestar (>=1.33.0,<1.34.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.33.0,<1.34.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)"] -compute-optimizer = 
["mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)"] -config = ["mypy-boto3-config (>=1.33.0,<1.34.0)"] -connect = ["mypy-boto3-connect (>=1.33.0,<1.34.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.33.0,<1.34.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)"] -controltower = ["mypy-boto3-controltower (>=1.33.0,<1.34.0)"] -cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)"] -cur = ["mypy-boto3-cur (>=1.33.0,<1.34.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)"] -databrew = ["mypy-boto3-databrew (>=1.33.0,<1.34.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.33.0,<1.34.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.33.0,<1.34.0)"] -datasync = ["mypy-boto3-datasync (>=1.33.0,<1.34.0)"] -datazone = ["mypy-boto3-datazone (>=1.33.0,<1.34.0)"] -dax = ["mypy-boto3-dax (>=1.33.0,<1.34.0)"] -detective = ["mypy-boto3-detective (>=1.33.0,<1.34.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.33.0,<1.34.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.33.0,<1.34.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.33.0,<1.34.0)"] -discovery = ["mypy-boto3-discovery (>=1.33.0,<1.34.0)"] -dlm = ["mypy-boto3-dlm (>=1.33.0,<1.34.0)"] -dms = ["mypy-boto3-dms (>=1.33.0,<1.34.0)"] -docdb = ["mypy-boto3-docdb (>=1.33.0,<1.34.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)"] -drs = ["mypy-boto3-drs (>=1.33.0,<1.34.0)"] -ds = ["mypy-boto3-ds (>=1.33.0,<1.34.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.33.0,<1.34.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)"] -ebs = ["mypy-boto3-ebs (>=1.33.0,<1.34.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.33.0,<1.34.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)"] -ecr = ["mypy-boto3-ecr (>=1.33.0,<1.34.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.33.0,<1.34.0)"] -ecs = ["mypy-boto3-ecs (>=1.33.0,<1.34.0)"] -efs = ["mypy-boto3-efs (>=1.33.0,<1.34.0)"] -eks = ["mypy-boto3-eks (>=1.33.0,<1.34.0)"] -eks-auth = ["mypy-boto3-eks-auth (>=1.33.0,<1.34.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.33.0,<1.34.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)"] -elb = ["mypy-boto3-elb (>=1.33.0,<1.34.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.33.0,<1.34.0)"] -emr = ["mypy-boto3-emr (>=1.33.0,<1.34.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.33.0,<1.34.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.33.0,<1.34.0)"] -es = ["mypy-boto3-es (>=1.33.0,<1.34.0)"] -essential = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)"] -events = ["mypy-boto3-events (>=1.33.0,<1.34.0)"] -evidently = ["mypy-boto3-evidently (>=1.33.0,<1.34.0)"] -finspace = ["mypy-boto3-finspace (>=1.33.0,<1.34.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.33.0,<1.34.0)"] -firehose = ["mypy-boto3-firehose (>=1.33.0,<1.34.0)"] -fis = ["mypy-boto3-fis (>=1.33.0,<1.34.0)"] -fms = ["mypy-boto3-fms (>=1.33.0,<1.34.0)"] 
-forecast = ["mypy-boto3-forecast (>=1.33.0,<1.34.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.33.0,<1.34.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.33.0,<1.34.0)"] -freetier = ["mypy-boto3-freetier (>=1.33.0,<1.34.0)"] -fsx = ["mypy-boto3-fsx (>=1.33.0,<1.34.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.33.0,<1.34.0)"] -glacier = ["mypy-boto3-glacier (>=1.33.0,<1.34.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)"] -glue = ["mypy-boto3-glue (>=1.33.0,<1.34.0)"] -grafana = ["mypy-boto3-grafana (>=1.33.0,<1.34.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.33.0,<1.34.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.33.0,<1.34.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.33.0,<1.34.0)"] -health = ["mypy-boto3-health (>=1.33.0,<1.34.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.33.0,<1.34.0)"] -honeycode = ["mypy-boto3-honeycode (>=1.33.0,<1.34.0)"] -iam = ["mypy-boto3-iam (>=1.33.0,<1.34.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.33.0,<1.34.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)"] -importexport = ["mypy-boto3-importexport (>=1.33.0,<1.34.0)"] -inspector = ["mypy-boto3-inspector (>=1.33.0,<1.34.0)"] -inspector-scan = ["mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.33.0,<1.34.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)"] -iot = ["mypy-boto3-iot (>=1.33.0,<1.34.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.33.0,<1.34.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.33.0,<1.34.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.33.0,<1.34.0)"] -ivs = ["mypy-boto3-ivs (>=1.33.0,<1.34.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.33.0,<1.34.0)"] -kafka = ["mypy-boto3-kafka (>=1.33.0,<1.34.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)"] -kendra = ["mypy-boto3-kendra (>=1.33.0,<1.34.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.33.0,<1.34.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.33.0,<1.34.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)"] 
-kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)"] -kms = ["mypy-boto3-kms (>=1.33.0,<1.34.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.33.0,<1.34.0)"] -lambda = ["mypy-boto3-lambda (>=1.33.0,<1.34.0)"] -launch-wizard = ["mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.33.0,<1.34.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.33.0,<1.34.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.33.0,<1.34.0)"] -location = ["mypy-boto3-location (>=1.33.0,<1.34.0)"] -logs = ["mypy-boto3-logs (>=1.33.0,<1.34.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)"] -m2 = ["mypy-boto3-m2 (>=1.33.0,<1.34.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.33.0,<1.34.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.33.0,<1.34.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)"] -marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)"] -marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)"] -medialive = ["mypy-boto3-medialive (>=1.33.0,<1.34.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.33.0,<1.34.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.33.0,<1.34.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.33.0,<1.34.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.33.0,<1.34.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)"] -mgh = ["mypy-boto3-mgh (>=1.33.0,<1.34.0)"] -mgn = ["mypy-boto3-mgn (>=1.33.0,<1.34.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)"] -mobile = ["mypy-boto3-mobile (>=1.33.0,<1.34.0)"] -mq = ["mypy-boto3-mq (>=1.33.0,<1.34.0)"] -mturk = ["mypy-boto3-mturk (>=1.33.0,<1.34.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.33.0,<1.34.0)"] -neptune = ["mypy-boto3-neptune (>=1.33.0,<1.34.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.33.0,<1.34.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.33.0,<1.34.0)"] -networkmanager = ["mypy-boto3-networkmanager 
(>=1.33.0,<1.34.0)"] -nimble = ["mypy-boto3-nimble (>=1.33.0,<1.34.0)"] -oam = ["mypy-boto3-oam (>=1.33.0,<1.34.0)"] -omics = ["mypy-boto3-omics (>=1.33.0,<1.34.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.33.0,<1.34.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.33.0,<1.34.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)"] -organizations = ["mypy-boto3-organizations (>=1.33.0,<1.34.0)"] -osis = ["mypy-boto3-osis (>=1.33.0,<1.34.0)"] -outposts = ["mypy-boto3-outposts (>=1.33.0,<1.34.0)"] -panorama = ["mypy-boto3-panorama (>=1.33.0,<1.34.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)"] -personalize = ["mypy-boto3-personalize (>=1.33.0,<1.34.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.33.0,<1.34.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)"] -pi = ["mypy-boto3-pi (>=1.33.0,<1.34.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.33.0,<1.34.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)"] -pipes = ["mypy-boto3-pipes (>=1.33.0,<1.34.0)"] -polly = ["mypy-boto3-polly (>=1.33.0,<1.34.0)"] -pricing = ["mypy-boto3-pricing (>=1.33.0,<1.34.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)"] -proton = ["mypy-boto3-proton (>=1.33.0,<1.34.0)"] -qbusiness = ["mypy-boto3-qbusiness (>=1.33.0,<1.34.0)"] -qconnect = ["mypy-boto3-qconnect (>=1.33.0,<1.34.0)"] -qldb = ["mypy-boto3-qldb (>=1.33.0,<1.34.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.33.0,<1.34.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.33.0,<1.34.0)"] -ram = ["mypy-boto3-ram (>=1.33.0,<1.34.0)"] -rbin = ["mypy-boto3-rbin (>=1.33.0,<1.34.0)"] -rds = ["mypy-boto3-rds (>=1.33.0,<1.34.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.33.0,<1.34.0)"] -redshift = ["mypy-boto3-redshift (>=1.33.0,<1.34.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.33.0,<1.34.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.33.0,<1.34.0)"] -repostspace = ["mypy-boto3-repostspace (>=1.33.0,<1.34.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.33.0,<1.34.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.33.0,<1.34.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)"] -route53 = ["mypy-boto3-route53 (>=1.33.0,<1.34.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.33.0,<1.34.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.33.0,<1.34.0)"] -rum = ["mypy-boto3-rum (>=1.33.0,<1.34.0)"] -s3 = ["mypy-boto3-s3 (>=1.33.0,<1.34.0)"] -s3control = ["mypy-boto3-s3control (>=1.33.0,<1.34.0)"] -s3outposts = ["mypy-boto3-s3outposts 
(>=1.33.0,<1.34.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.33.0,<1.34.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.33.0,<1.34.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.33.0,<1.34.0)"] -schemas = ["mypy-boto3-schemas (>=1.33.0,<1.34.0)"] -sdb = ["mypy-boto3-sdb (>=1.33.0,<1.34.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.33.0,<1.34.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.33.0,<1.34.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.33.0,<1.34.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)"] -ses = ["mypy-boto3-ses (>=1.33.0,<1.34.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.33.0,<1.34.0)"] -shield = ["mypy-boto3-shield (>=1.33.0,<1.34.0)"] -signer = ["mypy-boto3-signer (>=1.33.0,<1.34.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)"] -sms = ["mypy-boto3-sms (>=1.33.0,<1.34.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.33.0,<1.34.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)"] -snowball = ["mypy-boto3-snowball (>=1.33.0,<1.34.0)"] -sns = ["mypy-boto3-sns (>=1.33.0,<1.34.0)"] -sqs = ["mypy-boto3-sqs (>=1.33.0,<1.34.0)"] -ssm = ["mypy-boto3-ssm (>=1.33.0,<1.34.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)"] -sso = ["mypy-boto3-sso (>=1.33.0,<1.34.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.33.0,<1.34.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.33.0,<1.34.0)"] -sts = ["mypy-boto3-sts (>=1.33.0,<1.34.0)"] -support = ["mypy-boto3-support (>=1.33.0,<1.34.0)"] -support-app = ["mypy-boto3-support-app (>=1.33.0,<1.34.0)"] -swf = ["mypy-boto3-swf (>=1.33.0,<1.34.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.33.0,<1.34.0)"] -textract = ["mypy-boto3-textract (>=1.33.0,<1.34.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.33.0,<1.34.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.33.0,<1.34.0)"] -tnb = ["mypy-boto3-tnb (>=1.33.0,<1.34.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.33.0,<1.34.0)"] -transfer = ["mypy-boto3-transfer (>=1.33.0,<1.34.0)"] -translate = ["mypy-boto3-translate (>=1.33.0,<1.34.0)"] -trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.33.0,<1.34.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)"] -waf = ["mypy-boto3-waf (>=1.33.0,<1.34.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.33.0,<1.34.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.33.0,<1.34.0)"] -wellarchitected = 
["mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.33.0,<1.34.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.33.0,<1.34.0)"] -worklink = ["mypy-boto3-worklink (>=1.33.0,<1.34.0)"] -workmail = ["mypy-boto3-workmail (>=1.33.0,<1.34.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.33.0,<1.34.0)"] -workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)"] -xray = ["mypy-boto3-xray (>=1.33.0,<1.34.0)"] - [[package]] name = "botocore" -version = "1.26.10" +version = "1.33.6" description = "Low-level, data-driven core of boto 3." +category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "botocore-1.26.10-py3-none-any.whl", hash = "sha256:8a4a984bf901ccefe40037da11ba2abd1ddbcb3b490a492b7f218509c99fc12f"}, - {file = "botocore-1.26.10.tar.gz", hash = "sha256:5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c"}, + {file = "botocore-1.33.6-py3-none-any.whl", hash = "sha256:14282cd432c0683770eee932c43c12bb9ad5730e23755204ad102897c996693a"}, + {file = "botocore-1.33.6.tar.gz", hash = "sha256:938056bab831829f90e09ecd70dd6b295afd52b1482f5582ee7a11d8243d9661"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" - -[package.extras] -crt = ["awscrt (==0.13.8)"] - -[[package]] -name = "botocore-stubs" -version = "1.33.5" -description = "Type annotations and code completion for botocore" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "botocore_stubs-1.33.5-py3-none-any.whl", hash = "sha256:af2d6f5a00c003df38bd28ac21a52d4d23cce9d5cc9f864656a85b569b88657c"}, - {file = "botocore_stubs-1.33.5.tar.gz", hash = "sha256:826147bc4134cffd5310c36065131e9955d434b30ddb0ccecb16fc66ea3461fd"}, -] - -[package.dependencies] -types-awscrt = "*" +urllib3 = {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""} [package.extras] -botocore = ["botocore"] +crt = ["awscrt (==0.19.17)"] [[package]] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -623,6 +213,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -687,6 +278,7 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -698,6 +290,7 @@ files = [ name = "cfn-lint" version = "0.83.3" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ @@ -721,6 +314,7 @@ sympy = ">=1.0.0" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -820,6 +414,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -834,6 +429,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "cloudpathlib" version = "0.16.0" description = "pathlib-style classes for cloud storage services." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -855,6 +451,7 @@ s3 = ["boto3"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -866,6 +463,7 @@ files = [ name = "convert-case" version = "1.2.3" description = "Convert between string cases with built-in case inference." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -884,6 +482,7 @@ tests = ["assertpy", "coverage", "freezegun", "mock", "pytest", "pytest-mocha", name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -951,6 +550,7 @@ toml = ["tomli"] name = "cryptography" version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -996,6 +596,7 @@ test-randomorder = ["pytest-randomly"] name = "darglint" version = "1.8.1" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1007,6 +608,7 @@ files = [ name = "decopatch" version = "1.4.10" description = "Create decorators easily in python." +category = "dev" optional = false python-versions = "*" files = [ @@ -1021,6 +623,7 @@ makefun = ">=1.5.0" name = "distlib" version = "0.3.7" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -1032,6 +635,7 @@ files = [ name = "dnspython" version = "2.4.2" description = "DNS toolkit" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1051,6 +655,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1072,6 +677,7 @@ ssh = ["paramiko (>=2.4.3)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1083,6 +689,7 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1101,6 +708,7 @@ gmpy2 = ["gmpy2"] name = "email-validator" version = "2.1.0.post1" description = "A robust email address syntax and deliverability validation library." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1116,6 +724,7 @@ idna = ">=2.0.0" name = "emma-common" version = "2.3.1" description = "Common modules which are used a lot throughout EMMA repositories" +category = "main" optional = false python-versions = ">=3.9,<3.11" files = [] @@ -1143,6 +752,7 @@ resolved_reference = "c52a5b573306c92f42fd46f70abc00506a58b6f8" name = "eradicate" version = "2.3.0" description = "Removes commented-out code." +category = "dev" optional = false python-versions = "*" files = [ @@ -1154,6 +764,7 @@ files = [ name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1168,6 +779,7 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1182,6 +794,7 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "fastapi" version = "0.99.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1210,6 +823,7 @@ all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" name = "filelock" version = "3.13.1" description = "A platform independent file lock." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1226,6 +840,7 @@ typing = ["typing-extensions (>=4.8)"] name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1242,6 +857,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flake8-bandit" version = "4.1.1" description = "Automated security testing with bandit and flake8." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1257,6 +873,7 @@ flake8 = ">=5.0.0" name = "flake8-broken-line" version = "1.0.0" description = "Flake8 plugin to forbid backslashes for line breaks" +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1271,6 +888,7 @@ flake8 = ">5" name = "flake8-bugbear" version = "23.11.28" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1289,6 +907,7 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", name = "flake8-commas" version = "2.1.0" description = "Flake8 lint for trailing commas." +category = "dev" optional = false python-versions = "*" files = [ @@ -1303,6 +922,7 @@ flake8 = ">=2" name = "flake8-comprehensions" version = "3.14.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1317,6 +937,7 @@ flake8 = ">=3.0,<3.2.0 || >3.2.0" name = "flake8-debugger" version = "4.1.2" description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1332,6 +953,7 @@ pycodestyle = "*" name = "flake8-docstrings" version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1347,6 +969,7 @@ pydocstyle = ">=2.1" name = "flake8-eradicate" version = "1.5.0" description = "Flake8 plugin to find commented out code" +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1363,6 +986,7 @@ flake8 = ">5" name = "flake8-isort" version = "6.1.1" description = "flake8 plugin that integrates isort" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1381,6 +1005,7 @@ test = ["pytest"] name = "flake8-quotes" version = "3.3.2" description = "Flake8 lint for quotes." +category = "dev" optional = false python-versions = "*" files = [ @@ -1394,6 +1019,7 @@ flake8 = "*" name = "flake8-rst-docstrings" version = "0.3.0" description = "Python docstring reStructuredText (RST) validator for flake8" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1413,6 +1039,7 @@ develop = ["build", "twine"] name = "flake8-string-format" version = "0.3.0" description = "string format checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" files = [ @@ -1427,6 +1054,7 @@ flake8 = "*" name = "fsspec" version = "2023.10.0" description = "File-system specification" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1462,6 +1090,7 @@ tqdm = ["tqdm"] name = "gitdb" version = "4.0.11" description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1476,6 +1105,7 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.40" description = "GitPython is a Python library used to interact with Git repositories" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1493,6 +1123,7 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -1504,6 +1135,7 @@ files = [ name = "gunicorn" version = "21.2.0" description = "WSGI HTTP Server for UNIX" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1524,6 +1156,7 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1535,6 +1168,7 @@ files = [ name = "httpcore" version = "1.0.2" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1549,13 +1183,14 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httptools" version = "0.6.1" description = "A collection of framework independent HTTP protocol utils." 
+category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -1604,6 +1239,7 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "httpx" version = "0.25.2" description = "The next generation HTTP client." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1614,20 +1250,21 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = "==1.*" +httpcore = ">=1.0.0,<2.0.0" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "huggingface-hub" version = "0.19.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -1661,6 +1298,7 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t name = "hypothesis" version = "6.91.0" description = "A library for property-based testing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1693,6 +1331,7 @@ zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] name = "identify" version = "2.5.32" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1707,6 +1346,7 @@ license = ["ukkonen"] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1718,6 +1358,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1729,6 +1370,7 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1746,6 +1388,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1757,6 +1400,7 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1774,6 +1418,7 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1785,6 +1430,7 @@ files = [ name = "jschema-to-python" version = "1.2.3" description = "Generate source code for Python classes from a JSON schema." 
+category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -1801,6 +1447,7 @@ pbr = "*" name = "jsondiff" version = "2.0.0" description = "Diff JSON and JSON-like structures in Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -1812,6 +1459,7 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1826,6 +1474,7 @@ jsonpointer = ">=1.9" name = "jsonpickle" version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1842,6 +1491,7 @@ testing-libs = ["simplejson", "ujson"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1853,6 +1503,7 @@ files = [ name = "jsonschema" version = "4.20.0" description = "An implementation of JSON Schema validation for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1874,6 +1525,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-path" version = "0.3.2" description = "JSONSchema Spec with object-oriented paths" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -1891,6 +1543,7 @@ requests = ">=2.31.0,<3.0.0" name = "jsonschema-specifications" version = "2023.11.2" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1905,10 +1558,10 @@ referencing = ">=0.31.0" name = "junit-xml" version = "1.9" description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +category = "dev" optional = false python-versions = "*" files = [ - {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, ] @@ -1919,6 +1572,7 @@ six = "*" name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1964,6 +1618,7 @@ files = [ name = "loguru" version = "0.7.2" description = "Python logging made (stupidly) simple" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1982,6 +1637,7 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio name = "makefun" version = "1.15.2" description = "Small library to dynamically create python functions." +category = "dev" optional = false python-versions = "*" files = [ @@ -1993,6 +1649,7 @@ files = [ name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2017,6 +1674,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2040,16 +1698,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2086,6 +1734,7 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2097,6 +1746,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2108,6 +1758,7 @@ files = [ name = "methodtools" version = "0.4.7" description = "Expand standard functools to methods" +category = "main" optional = false python-versions = "*" files = [ @@ -2125,6 +1776,7 @@ test = ["functools32 (>=3.2.3-2)", "pytest (>=4.6.7)", "pytest-cov (>=2.6.1)"] name = "more-itertools" version = "10.1.0" description = "More routines for operating on iterables, beyond itertools" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2136,6 +1788,7 @@ files = [ name = "moto" version = "4.2.10" description = "" +category = "dev" optional = false 
python-versions = ">=3.7" files = [ @@ -2199,6 +1852,7 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" +category = "dev" optional = false python-versions = "*" files = [ @@ -2216,6 +1870,7 @@ tests = ["pytest (>=4.6)"] name = "multipart" version = "0.2.4" description = "Parser for multipart/form-data." +category = "dev" optional = false python-versions = "*" files = [ @@ -2227,6 +1882,7 @@ files = [ name = "mypy" version = "1.7.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2270,122 +1926,11 @@ install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] -[[package]] -name = "mypy-boto3-cloudformation" -version = "1.33.0" -description = "Type annotations for boto3.CloudFormation 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-cloudformation-1.33.0.tar.gz", hash = "sha256:e1c27b910ffce96ed0ae126e4c5755bdd90a56dc7548e543264334a35608df0a"}, - {file = "mypy_boto3_cloudformation-1.33.0-py3-none-any.whl", hash = "sha256:f56659fa8d804017075e3fb27ac8df17cd6c9ea3f686d79832bf5add51eb2965"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-dynamodb" -version = "1.33.0" -description = "Type annotations for boto3.DynamoDB 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-dynamodb-1.33.0.tar.gz", hash = "sha256:2cfe1089c89de61b1ec0e69a72ba3e6865a013ea0a37d318ab564983785d42f9"}, - {file = "mypy_boto3_dynamodb-1.33.0-py3-none-any.whl", hash = "sha256:619ea2cc311ced0ecb44b6e8d3bf3dd851fb7c53a34128b4ff6d6e6a11fdd41f"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-ec2" -version = "1.33.0" -description = "Type annotations for boto3.EC2 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-ec2-1.33.0.tar.gz", hash = "sha256:07578937f974f6129a79d88f788e53a0a38d22e5a9c0c63d589f24e39b8fb90d"}, - {file = "mypy_boto3_ec2-1.33.0-py3-none-any.whl", hash = "sha256:ac24fe3a7d849735c84bf53abaa2ba809e39d2582e98bd5538adecb585fce9b0"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-lambda" -version = "1.33.0" -description = "Type annotations for boto3.Lambda 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-lambda-1.33.0.tar.gz", hash = "sha256:beac0cb4b94f83a444242db16f601405bdfb6c15808c2c52720224d907e7af40"}, - {file = "mypy_boto3_lambda-1.33.0-py3-none-any.whl", hash = "sha256:10e0f04168f4064e89ba136218162003f1cb6826dcbfa95ca982d3cb792fc9f7"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-rds" -version = "1.33.0" -description = "Type annotations for boto3.RDS 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-rds-1.33.0.tar.gz", hash = 
"sha256:2a50e40aa473b34e6651672a4233873b5fafbe42218c33af27a8ecf8571e9169"}, - {file = "mypy_boto3_rds-1.33.0-py3-none-any.whl", hash = "sha256:dda63ea8b2358e549a73001ad7b56c3b4ce3da90f00b520aa37e3ada99e4ab69"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-s3" -version = "1.33.2" -description = "Type annotations for boto3.S3 1.33.2 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-s3-1.33.2.tar.gz", hash = "sha256:f54a3ad3288f4e4719ebada3dde68c320507b0fc451d59bc68af7e6ab15cbdad"}, - {file = "mypy_boto3_s3-1.33.2-py3-none-any.whl", hash = "sha256:9d463df6def30de31a467d49ab92ff7795d46709d56eff6f52216a08bac27918"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-secretsmanager" -version = "1.33.0" -description = "Type annotations for boto3.SecretsManager 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-secretsmanager-1.33.0.tar.gz", hash = "sha256:ea765e79988689a2cf6ba9307666aa8a3784f715b371b8fdebcb7694f4e92b9a"}, - {file = "mypy_boto3_secretsmanager-1.33.0-py3-none-any.whl", hash = "sha256:f0f1552ed294fd2f09ca38fd1af025149eeadde49500e0fca948ad1ada7d9c3f"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-sqs" -version = "1.33.0" -description = "Type annotations for boto3.SQS 1.33.0 service generated with mypy-boto3-builder 7.20.3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-sqs-1.33.0.tar.gz", hash = "sha256:81f4838e81cbb0c088a10e287922fdf6a3f317cbab6647993ab9dbd567c0e8fb"}, - {file = "mypy_boto3_sqs-1.33.0-py3-none-any.whl", hash = "sha256:81f71d5f461e5e670d2ca93df92c93efdd7c29be33eabf8475df5f071e638583"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2397,6 +1942,7 @@ files = [ name = "networkx" version = "3.2.1" description = "Python package for creating and manipulating graphs and networks" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -2415,6 +1961,7 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -2429,6 +1976,7 @@ setuptools = "*" name = "numpy" version = "1.26.2" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -2474,6 +2022,7 @@ files = [ name = "nvidia-cublas-cu11" version = "11.10.3.66" description = "CUBLAS native runtime libraries" +category = "main" optional = false python-versions = ">=3" files = [ @@ -2489,6 +2038,7 @@ wheel = "*" name = "nvidia-cuda-nvrtc-cu11" version = "11.7.99" description = "NVRTC native runtime libraries" +category = "main" optional = false python-versions = ">=3" files = [ @@ -2505,6 +2055,7 @@ wheel = "*" name = "nvidia-cuda-runtime-cu11" version = "11.7.99" description = "CUDA Runtime native Libraries" +category = "main" optional = false python-versions = ">=3" files = [ @@ -2520,6 +2071,7 @@ wheel = "*" name = "nvidia-cudnn-cu11" version = "8.5.0.96" description = "cuDNN runtime libraries" +category = "main" optional = false python-versions = ">=3" files = [ @@ -2535,6 +2087,7 @@ wheel = "*" name = "openapi-schema-validator" version = "0.6.2" description = "OpenAPI schema validation for Python" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -2551,6 +2104,7 @@ rfc3339-validator = "*" name = "openapi-spec-validator" version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -2568,6 +2122,7 @@ openapi-schema-validator = ">=0.6.0,<0.7.0" name = "orjson" version = "3.9.10" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2627,6 +2182,7 @@ files = [ name = "overrides" version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2638,6 +2194,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2649,6 +2206,7 @@ files = [ name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2660,6 +2218,7 @@ files = [ name = "pathable" version = "0.4.3" description = "Object-oriented paths" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -2671,6 +2230,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2682,6 +2242,7 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2693,6 +2254,7 @@ files = [ name = "pep8-naming" version = "0.13.3" description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2707,6 +2269,7 @@ flake8 = ">=5.0.0" name = "pillow" version = "10.1.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2774,6 +2337,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2789,6 +2353,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2804,6 +2369,7 @@ testing = ["pytest", "pytest-benchmark"] name = "ply" version = "3.11" description = "Python Lex & Yacc" +category = "main" optional = false python-versions = "*" files = [ @@ -2815,6 +2381,7 @@ files = [ name = "poethepoet" version = "0.24.4" description = "A task runner that works well with poetry." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2833,6 +2400,7 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "pre-commit" version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2851,6 +2419,7 @@ virtualenv = ">=20.10.0" name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -2862,6 +2431,7 @@ files = [ name = "py-partiql-parser" version = "0.4.2" description = "Pure Python PartiQL Parser" +category = "dev" optional = false python-versions = "*" files = [ @@ -2876,6 +2446,7 @@ dev = ["black (==22.6.0)", "flake8", "mypy (==0.971)", "pytest"] name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2887,6 +2458,7 @@ files = [ name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2898,6 +2470,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2909,6 +2482,7 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2962,6 +2536,7 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2979,6 +2554,7 @@ toml = ["tomli (>=1.2.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2990,6 +2566,7 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3005,6 +2582,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" optional = false python-versions = ">=3.6.8" files = [ @@ -3019,6 +2597,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pytest" version = "7.4.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3041,6 +2620,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3061,6 +2641,7 @@ histogram = ["pygal", "pygaljs"] name = "pytest-cases" version = "3.8.1" description = "Separate test code from test cases in pytest." +category = "dev" optional = false python-versions = "*" files = [ @@ -3077,6 +2658,7 @@ packaging = "*" name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3095,6 +2677,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-httpx" version = "0.27.0" description = "Send responses to httpx." 
+category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -3103,16 +2686,17 @@ files = [ ] [package.dependencies] -httpx = "==0.25.*" -pytest = "==7.*" +httpx = ">=0.25.0,<0.26.0" +pytest = ">=7.0.0,<8.0.0" [package.extras] -testing = ["pytest-asyncio (==0.21.*)", "pytest-cov (==4.*)"] +testing = ["pytest-asyncio (>=0.21.0,<0.22.0)", "pytest-cov (>=4.0.0,<5.0.0)"] [[package]] name = "pytest-xdist" version = "3.5.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3133,6 +2717,7 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3147,6 +2732,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3161,6 +2747,7 @@ cli = ["click (>=5.0)"] name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -3183,6 +2770,7 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-multipart" version = "0.0.6" description = "A streaming multipart parser for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3197,6 +2785,7 @@ dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatc name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -3220,6 +2809,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3279,6 +2869,7 @@ files = [ name = "referencing" version = "0.31.1" description = "JSON Referencing + Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3294,6 +2885,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3391,6 +2983,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3412,6 +3005,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "responses" version = "0.24.1" description = "A utility library for mocking out the `requests` Python library." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3431,6 +3025,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "restructuredtext-lint" version = "1.4.0" description = "reStructuredText linter" +category = "dev" optional = false python-versions = "*" files = [ @@ -3444,6 +3039,7 @@ docutils = ">=0.11,<1.0" name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3458,6 +3054,7 @@ six = "*" name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3476,6 +3073,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.13.2" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3584,6 +3182,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -3598,6 +3197,7 @@ pyasn1 = ">=0.1.3" name = "rule-engine" version = "4.1.0" description = "A lightweight, optionally typed expression language with a custom grammar for matching arbitrary Python objects." +category = "main" optional = false python-versions = "*" files = [ @@ -3610,25 +3210,27 @@ python-dateutil = ">=2.7,<3.0" [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.8.2" description = "An Amazon S3 Transfer Manager" +category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, + {file = "s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:c9e56cbe88b28d8e197cf841f1f0c130f246595e77ae5b5a05b69fe7cb83de76"}, + {file = "s3transfer-0.8.2.tar.gz", hash = "sha256:368ac6876a9e9ed91f6bc86581e319be08188dc60d50e0d56308ed5765446283"}, ] [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" +botocore = ">=1.33.2,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "sarif-om" version = "1.0.4" description = "Classes implementing the SARIF 2.1.0 object model." 
+category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -3644,6 +3246,7 @@ pbr = "*" name = "setuptools" version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3660,6 +3263,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3671,6 +3275,7 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3682,6 +3287,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3693,6 +3299,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -3704,6 +3311,7 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "dev" optional = false python-versions = "*" files = [ @@ -3715,6 +3323,7 @@ files = [ name = "sshpubkeys" version = "3.3.1" description = "SSH public key parser" +category = "dev" optional = false python-versions = ">=3" files = [ @@ -3733,6 +3342,7 @@ dev = ["twine", "wheel", "yapf"] name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3751,6 +3361,7 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3765,6 +3376,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3779,6 +3391,7 @@ mpmath = ">=0.19" name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3790,6 +3403,7 @@ files = [ name = "torch" version = "1.13.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3830,6 +3444,7 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3850,6 +3465,7 @@ telegram = ["requests"] name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3867,21 +3483,11 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2 doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -[[package]] -name = "types-awscrt" -version = "0.19.19" -description = "Type annotations and code completion for awscrt" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "types_awscrt-0.19.19-py3-none-any.whl", hash = "sha256:a577c4d60a7fb7e21b436a73207a66f6ba50329d578b347934c5d99d4d612901"}, - {file = "types_awscrt-0.19.19.tar.gz", hash = "sha256:850d5ad95d8f337b15fb154790f39af077faf5c08d43758fd750f379a87d5f73"}, -] - [[package]] name = "types-pyyaml" version = "6.0.12.12" description = "Typing stubs for PyYAML" +category = "dev" optional = false python-versions = "*" files = [ @@ -3889,21 +3495,11 @@ files = [ {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, ] -[[package]] -name = "types-s3transfer" -version = "0.8.2" -description = "Type annotations and code completion for s3transfer" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "types_s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:5e084ebcf2704281c71b19d5da6e1544b50859367d034b50080d5316a76a9418"}, - {file = "types_s3transfer-0.8.2.tar.gz", hash = "sha256:2e41756fcf94775a9949afa856489ac4570308609b0493dfbd7b4d333eb423e6"}, -] - [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3915,6 +3511,7 @@ files = [ name = "ujson" version = "5.8.0" description = "Ultra fast JSON encoder and decoder for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3985,6 +3582,7 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4001,6 +3599,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.24.0.post1" description = "The lightning-fast ASGI server." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4016,7 +3615,7 @@ httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standar python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -4027,6 +3626,7 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.19.0" description = "Fast implementation of asyncio event loop on top of libuv" +category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -4071,6 +3671,7 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" name = "virtualenv" version = "20.24.7" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4091,6 +3692,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "watchfiles" version = "0.21.0" description = "Simple, modern and high performance file watching and code reload in python." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4178,6 +3780,7 @@ anyio = ">=3.0.0" name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4194,6 +3797,7 @@ test = ["websockets"] name = "websockets" version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4275,6 +3879,7 @@ files = [ name = "wemake-python-styleguide" version = "0.18.0" description = "The strictest and most opinionated python linter ever" +category = "dev" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -4308,6 +3913,7 @@ typing_extensions = ">=4.0,<5.0" name = "werkzeug" version = "3.0.1" description = "The comprehensive WSGI web application library." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4325,6 +3931,7 @@ watchdog = ["watchdog (>=2.3)"] name = "wheel" version = "0.42.0" description = "A built-package format for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4339,6 +3946,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4353,6 +3961,7 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] name = "wirerope" version = "0.4.7" description = "'Turn functions and methods into fully controllable objects'" +category = "main" optional = false python-versions = "*" files = [ @@ -4370,6 +3979,7 @@ test = ["pytest (>=4.6.7)", "pytest-cov (>=2.6.1)"] name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4449,6 +4059,7 @@ files = [ name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" +category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -4459,4 +4070,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.10" -content-hash = "7b1e563a0b190bd7ee98795475401ac79931377aaa192143194d0e7a9d4e62cd" +content-hash = "b5f6968fbd5059ad93280881ade908a8b3c944f3fa2cabc065c58105b03770d2" diff --git a/pyproject.toml b/pyproject.toml index 3e00fdf3..af967f5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,6 @@ cmd = "pytest -v --durations=40 --color=yes --junitxml=pytest.xml --cov=src -m ' python = ">=3.9,<3.10" torch = ">=1.13.1,<2" rich = ">=12.5.1" -boto3 = "1.23.3" Pillow = ">=9.2.0" httpx = ">=0.23.0" orjson = ">=3.8.0" @@ -61,10 +60,6 @@ isort = ">=5.10.1" pytest-cases = ">=3.6.13" pytest-xdist = ">=2.5.0" pytest-cov = ">=3.0.0" -boto3-stubs = { extras = [ - "essential", - "secretsmanager", -], version = ">=1.26.27" } moto = { extras = ["all"], version = ">=4.0.5" } pytest-httpx = ">=0.21.0" types-pyyaml = ">=6.0.12.1" diff --git a/scripts/setup-ec2-instance.sh b/scripts/setup-ec2-instance.sh deleted file mode 100644 index ac446c1d..00000000 --- a/scripts/setup-ec2-instance.sh +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env bash - -GITHUB_PAT=$1 - -# ---------------------------- Install GitHub CLI ---------------------------- # -type -p curl >/dev/null || sudo apt install curl -y -curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg && - sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg && - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list >/dev/null && - sudo apt update && - sudo apt install gh -y - -echo "$GITHUB_PAT" | gh auth login --with-token -gh auth setup-git -gh auth status - -# ---------------------- Install pyenv for python sanity --------------------- # -curl https://pyenv.run | bash - -# --------------------- Update the shell config for bash --------------------- # -# shellcheck disable=SC2016 -{ - echo 'export PYENV_ROOT="$HOME/.pyenv"' - echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' - echo 'eval "$(pyenv init -)"' - echo 'eval "$(pyenv virtualenv-init -)"' -} >>~/.bashrc - -# 
------------------------------ Install Poetry ------------------------------ # -curl -sSL https://install.python-poetry.org | python3 - - -# Add Poetry to the PATH -# shellcheck disable=SC2016 -echo 'export PATH="$HOME/.local/bin:$PATH"' >>~/.bashrc - -# Create venvs within the project -/home/ubuntu/.local/bin/poetry config virtualenvs.in-project true - -# Handle temporary poetry issue -# https://github.com/python-poetry/poetry/issues/1917#issuecomment-1251667047 -echo 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring' >>~/.bashrc - -# Add poethepoet for all the poetry hooks -/home/ubuntu/.local/bin/poetry self add 'poethepoet[poetry_plugin]' - -# --------------------- Install Python build dependencies -------------------- # -sudo apt update -y && - sudo apt install -y make build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -# ------------------------------ Set tmux color ------------------------------ # -echo 'set -g default-terminal "screen-256color"' >>~/.tmux.conf - -# ------------------------------- Install CUDA ------------------------------- # -wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-keyring_1.0-1_all.deb && - sudo dpkg -i cuda-keyring_1.0-1_all.deb && - sudo apt update -y && - sudo apt install -y cuda-11-8 - -# shellcheck disable=SC2016 -{ - echo 'export PATH=/usr/local/cuda-11.8/bin${PATH:+:${PATH}}' - echo 'export LD_LIBRARY_PATH=/usr/local/cuda-11.8/lib${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}' -} >>~/.bashrc - -# ------------------------------ Install Docker ------------------------------ # -curl -fsSL https://download.docker.com/linux/ubuntu/gpg | - sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg && - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | - sudo tee /etc/apt/sources.list.d/docker.list >/dev/null && - sudo apt update -y && - sudo apt install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin - -# -------------------------- Install nvidia-docker2 -------------------------- # -curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg -# shellcheck source=/dev/null -distribution=$( - . 
/etc/os-release - echo "$ID""$VERSION_ID" -) && { - curl -s -L https://nvidia.github.io/libnvidia-container/"$distribution"/libnvidia-container.list | - sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | - sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list -} -sudo apt update -y && - sudo apt install -y nvidia-docker2 && - sudo systemctl restart docker - -# ------------------------------ Install AWS CLI ----------------------------- # -sudo apt install -y unzip -curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" -unzip awscliv2.zip -sudo ./aws/install - -# ------------------------------- Login to AWS ------------------------------- # -aws configure --profile TeamProfile -# TODO: Configure profile - -# ---------------------------- Install NFS common ---------------------------- # -sudo apt-get -y install nfs-common - -# ------------------------- Install Amazon EFS Utils ------------------------- # -sudo apt-get -y install git binutils -git clone https://github.com/aws/efs-utils .aws-utils -cd .aws-utils || exit -./build-deb.sh -sudo apt-get -y install ./build/amazon-efs-utils*deb - -# ---------------------------- Create dir for EMMA --------------------------- # -mkdir emma -cd emma || exit - -# ----------------------- Connect EFS for SimBot cache ----------------------- # -mkdir cache -sudo mount -t efs -o tls,accesspoint=fsap-09425fce1a442352e fs-09152325b5864bf9f:/ cache -sudo chmod 777 cache -echo "fs-09152325b5864bf9f:/ /home/ubuntu/emma/cache efs _netdev,noresvport,tls,iam,accesspoint=fsap-09425fce1a442352e 0 0" | sudo tee -a /etc/fstab - -# -------------------- Connect EFS for auxiliary metadata -------------------- # -mkdir auxiliary_metadata -echo 10.0.112.83 fs-0bfa9bdb8b799cb84.efs.us-east-1.amazonaws.com | sudo tee -a /etc/hosts -sudo mount -t efs -o tls,accesspoint=fsap-022169cf9e6e76aa9 fs-0bfa9bdb8b799cb84 auxiliary_metadata -echo "fs-0bfa9bdb8b799cb84:/ /home/ubuntu/emma/auxiliary_metadata efs _netdev,noresvport,tls,iam,accesspoint=fsap-022169cf9e6e76aa9 0 0" | sudo tee -a /etc/fstab - -# ----------------- Setup EFS for staging auxiliary metadata ----------------- # -mkdir staging_auxiliary_metadata -echo 10.0.96.154 fs-02d492af78a2f5b1a.efs.us-east-1.amazonaws.com | sudo tee -a /etc/hosts -sudo mount -t efs -o tls,accesspoint=fsap-01eda13760c255e37 fs-02d492af78a2f5b1a staging_auxiliary_metadata -echo "fs-02d492af78a2f5b1a:/ /home/ubuntu/emma/staging_auxiliary_metadata efs _netdev,noresvport,tls,iam,accesspoint=fsap-01eda13760c255e37 0 0" | sudo tee -a /etc/fstab - -# ----------------------- Import experience-hub project ---------------------- # -git clone https://github.com/emma-simbot/experience-hub experience-hub && - cd experience-hub || exit - -# -------------------------- Prepare Experience hub -------------------------- # -pyenv install -poetry env use "$(pyenv which python)" - -# ----------------------------- Authenticate ECR ----------------------------- # -aws ecr get-login-password --region us-east-1 | sudo docker login --username AWS --password-stdin 020417957102.dkr.ecr.us-east-1.amazonaws.com - -# ------------------------------- Restart shell ------------------------------ # -exec "$SHELL" diff --git a/src/emma_experience_hub/__main__.py b/src/emma_experience_hub/__main__.py index 7c795ff9..6c47374e 100644 --- a/src/emma_experience_hub/__main__.py +++ b/src/emma_experience_hub/__main__.py @@ -3,9 +3,6 @@ from emma_experience_hub.commands.simbot.cli import app as 
simbot_cli -# from emma_experience_hub.commands.teach import app as teach_cli - - app = typer.Typer( add_completion=False, no_args_is_help=True, @@ -15,7 +12,6 @@ app.add_typer(simbot_cli, name="simbot") -# app.add_typer(teach_cli, name="teach") if __name__ == "__main__": app() diff --git a/src/emma_experience_hub/api/clients/__init__.py b/src/emma_experience_hub/api/clients/__init__.py index 6d2592c2..f41274d6 100644 --- a/src/emma_experience_hub/api/clients/__init__.py +++ b/src/emma_experience_hub/api/clients/__init__.py @@ -3,7 +3,6 @@ from emma_experience_hub.api.clients.confirmation_response_classifier import ( ConfirmationResponseClassifierClient, ) -from emma_experience_hub.api.clients.dynamo_db import DynamoDbClient from emma_experience_hub.api.clients.emma_policy import EmmaPolicyClient from emma_experience_hub.api.clients.feature_extractor import FeatureExtractorClient from emma_experience_hub.api.clients.out_of_domain_detector import OutOfDomainDetectorClient diff --git a/src/emma_experience_hub/api/clients/dynamo_db.py b/src/emma_experience_hub/api/clients/dynamo_db.py deleted file mode 100644 index e4158644..00000000 --- a/src/emma_experience_hub/api/clients/dynamo_db.py +++ /dev/null @@ -1,18 +0,0 @@ -import boto3 - -from emma_experience_hub.api.clients.client import Client - - -class DynamoDbClient(Client): - """Base client for connecting to Amazon DynamoDB.""" - - primary_key: str - sort_key: str - data_key: str - - def __init__(self, resource_region: str, table_name: str) -> None: - self._resource_region = resource_region - self._table_name = table_name - - self._db = boto3.resource("dynamodb", self._resource_region) - self._table = self._db.Table(self._table_name) diff --git a/src/emma_experience_hub/api/clients/simbot/__init__.py b/src/emma_experience_hub/api/clients/simbot/__init__.py index 3a6b5181..700cbe83 100644 --- a/src/emma_experience_hub/api/clients/simbot/__init__.py +++ b/src/emma_experience_hub/api/clients/simbot/__init__.py @@ -11,4 +11,3 @@ from emma_experience_hub.api.clients.simbot.placeholder_vision import SimBotPlaceholderVisionClient from emma_experience_hub.api.clients.simbot.qa_intent import SimBotQAIntentClient from emma_experience_hub.api.clients.simbot.session_db import SimBotSessionDbClient -from emma_experience_hub.api.clients.simbot.session_local_db import SimBotSQLLiteClient diff --git a/src/emma_experience_hub/api/clients/simbot/session_db.py b/src/emma_experience_hub/api/clients/simbot/session_db.py index 9cb924cf..899e960d 100644 --- a/src/emma_experience_hub/api/clients/simbot/session_db.py +++ b/src/emma_experience_hub/api/clients/simbot/session_db.py @@ -1,88 +1,120 @@ +import sqlite3 from concurrent.futures import ThreadPoolExecutor -from typing import Any +from pathlib import Path -import boto3 -from boto3.dynamodb.conditions import Key -from botocore.exceptions import ClientError from loguru import logger -from emma_experience_hub.api.clients.dynamo_db import DynamoDbClient from emma_experience_hub.datamodels.simbot import SimBotSessionTurn -class SimBotSessionDbClient(DynamoDbClient): - """Client for storing SimBot session data.""" +class SimBotSessionDbClient: + """Local Client for storing SimBot session data.""" - primary_key = "session_id" - sort_key = "idx" - data_key = "turn" + primary_key: str + sort_key: str + data_key: str + + def __init__(self, db_file: Path) -> None: + self._db_file = db_file + self.create_table() + + def create_table(self) -> None: + """Create table.""" + if self._db_file.exists(): + return + + try: # 
noqa: WPS229 + connection = sqlite3.connect(self._db_file) + sqlite_create_table_query = """CREATE TABLE session_table ( + primary_key TEXT NOT NULL, + sort_key INTEGER NOT NULL, + data_key TEXT NOT NULL, + PRIMARY KEY (primary_key, sort_key) + );""" + + cursor = connection.cursor() + cursor.execute(sqlite_create_table_query) + connection.commit() + logger.info("SQLite table created") + + cursor.close() + + except sqlite3.Error as error: + logger.exception("Error while creating a sqlite table", error) + finally: + if connection: + connection.close() def healthcheck(self) -> bool: """Verify that the DB can be accessed and that it is ready.""" - dynamodb_client = boto3.client( - "dynamodb", region_name=self._resource_region # pyright: ignore - ) - try: - dynamodb_client.describe_table(TableName=self._table_name) - except dynamodb_client.exceptions.ResourceNotFoundException: - logger.exception("Cannot find DynamoDB table") + sqlite3.connect(self._db_file) + except Exception: + logger.exception("Cannot find db table") return False return True def add_session_turn(self, session_turn: SimBotSessionTurn) -> None: """Add a session turn to the table.""" - try: - response = self._table.put_item( - Item={ - self.primary_key: session_turn.session_id, - self.sort_key: session_turn.idx, - self.data_key: session_turn.json(by_alias=True), - }, - ConditionExpression="attribute_not_exists(#sort_key)", - ExpressionAttributeNames={"#sort_key": self.sort_key}, + try: # noqa: WPS229 + connection = sqlite3.connect(self._db_file) + cursor = connection.cursor() + + sqlite_insert_with_param = """INSERT OR REPLACE INTO session_table + (primary_key, sort_key, data_key) + VALUES (?, ?, ?);""" + + data_tuple = ( + session_turn.session_id, + session_turn.idx, + session_turn.json(by_alias=True), ) - logger.debug(response) - except ClientError as err: - logger.exception("Could not add turn to table.") + cursor.execute(sqlite_insert_with_param, data_tuple) + connection.commit() + logger.info("Successfully inserted turn into table") + + cursor.close() - error_code = err.response["Error"]["Code"] # pyright: ignore - if error_code != "ConditionalCheckFailedException": - raise err + except sqlite3.Error as error: + logger.exception("Failed to insert turn into table") + raise error + finally: + if connection: + connection.close() def put_session_turn(self, session_turn: SimBotSessionTurn) -> None: """Put a session turn to the table. If the turn already exists, it WILL overwrite it. """ - try: - self._table.put_item( - Item={ - self.primary_key: session_turn.session_id, - self.sort_key: session_turn.idx, - self.data_key: session_turn.json(by_alias=True), - }, - ) - except ClientError as err: - logger.exception("Could not add turn to table.") - raise err + self.add_session_turn(session_turn) def get_session_turn(self, session_id: str, idx: int) -> SimBotSessionTurn: """Get the session turn from the table.""" - try: - response = self._table.get_item(Key={self.primary_key: session_id, self.sort_key: idx}) - except ClientError as err: - logger.exception("Could not get session turn from table") - raise err + try: # noqa: WPS229 + connection = sqlite3.connect(self._db_file) + cursor = connection.cursor() + + sql_select_query = "select * from session_table where primary_key = ? and sort_key = ?" 
+ cursor.execute(sql_select_query, (session_id, idx)) + turn = cursor.fetchone() + cursor.close() - return SimBotSessionTurn.parse_obj(response["Item"][self.data_key]) + except sqlite3.Error as error: + logger.exception("Failed to read data from table") + raise error + finally: + if connection: + connection.close() + + return SimBotSessionTurn.parse_raw(turn[2]) def get_all_session_turns(self, session_id: str) -> list[SimBotSessionTurn]: """Get all the turns for a given session.""" try: all_raw_turns = self._get_all_session_turns(session_id) - except ClientError as query_err: + except Exception as query_err: logger.exception("Could not query for session turns") raise query_err @@ -92,7 +124,7 @@ def get_all_session_turns(self, session_id: str) -> list[SimBotSessionTurn]: parsed_responses = list( thread_pool.map( SimBotSessionTurn.parse_raw, - (response_item[self.data_key] for response_item in all_raw_turns), + (response_item[2] for response_item in all_raw_turns), ) ) except Exception: @@ -108,18 +140,23 @@ def get_all_session_turns(self, session_id: str) -> list[SimBotSessionTurn]: return sorted_responses - def _get_all_session_turns(self, session_id: str) -> list[dict[str, Any]]: - response = self._table.query(KeyConditionExpression=Key(self.primary_key).eq(session_id)) - - all_response_items = response["Items"] + def _get_all_session_turns(self, session_id: str) -> list[tuple[str, int, str]]: + try: # noqa: WPS229 + connection = sqlite3.connect(self._db_file) + cursor = connection.cursor() - # If not all the instances have been returned, get the next set - while "LastEvaluatedKey" in response: - response = self._table.query( - KeyConditionExpression=Key(self.primary_key).eq(session_id), - ExclusiveStartKey=response["LastEvaluatedKey"], + sql_select_query = ( + "select * from session_table where primary_key = ? 
ORDER BY sort_key" ) - - all_response_items.extend(response["Items"]) - - return all_response_items # type: ignore[unreachable] + cursor.execute(sql_select_query, (session_id,)) + turns = cursor.fetchall() + cursor.close() + + except sqlite3.Error as error: + logger.exception("Failed to read data from table") + raise error + finally: + if connection: + connection.close() + + return turns diff --git a/src/emma_experience_hub/api/clients/simbot/session_local_db.py b/src/emma_experience_hub/api/clients/simbot/session_local_db.py deleted file mode 100644 index 72e931c6..00000000 --- a/src/emma_experience_hub/api/clients/simbot/session_local_db.py +++ /dev/null @@ -1,162 +0,0 @@ -import sqlite3 -from concurrent.futures import ThreadPoolExecutor -from pathlib import Path - -from loguru import logger - -from emma_experience_hub.datamodels.simbot import SimBotSessionTurn - - -class SimBotSQLLiteClient: - """Local Client for storing SimBot session data.""" - - primary_key: str - sort_key: str - data_key: str - - def __init__(self, db_file: Path) -> None: - self._db_file = db_file - self.create_table() - - def create_table(self) -> None: - """Create table.""" - if self._db_file.exists(): - return - - try: # noqa: WPS229 - connection = sqlite3.connect(self._db_file) - sqlite_create_table_query = """CREATE TABLE session_table ( - primary_key TEXT NOT NULL, - sort_key INTEGER NOT NULL, - data_key TEXT NOT NULL, - PRIMARY KEY (primary_key, sort_key) - );""" - - cursor = connection.cursor() - cursor.execute(sqlite_create_table_query) - connection.commit() - logger.info("SQLite table created") - - cursor.close() - - except sqlite3.Error as error: - logger.exception("Error while creating a sqlite table", error) - finally: - if connection: - connection.close() - - def healthcheck(self) -> bool: - """Verify that the DB can be accessed and that it is ready.""" - try: - sqlite3.connect(self._db_file) - except Exception: - logger.exception("Cannot find db table") - return False - - return True - - def add_session_turn(self, session_turn: SimBotSessionTurn) -> None: - """Add a session turn to the table.""" - try: # noqa: WPS229 - connection = sqlite3.connect(self._db_file) - cursor = connection.cursor() - - sqlite_insert_with_param = """INSERT OR REPLACE INTO session_table - (primary_key, sort_key, data_key) - VALUES (?, ?, ?);""" - - data_tuple = ( - session_turn.session_id, - session_turn.idx, - session_turn.json(by_alias=True), - ) - cursor.execute(sqlite_insert_with_param, data_tuple) - connection.commit() - logger.info("Successfully inserted turn into table") - - cursor.close() - - except sqlite3.Error as error: - logger.exception("Failed to insert turn into table") - raise error - finally: - if connection: - connection.close() - - def put_session_turn(self, session_turn: SimBotSessionTurn) -> None: - """Put a session turn to the table. - - If the turn already exists, it WILL overwrite it. - """ - self.add_session_turn(session_turn) - - def get_session_turn(self, session_id: str, idx: int) -> SimBotSessionTurn: - """Get the session turn from the table.""" - try: # noqa: WPS229 - connection = sqlite3.connect(self._db_file) - cursor = connection.cursor() - - sql_select_query = "select * from session_table where primary_key = ? and sort_key = ?" 
- cursor.execute(sql_select_query, (session_id, idx)) - turn = cursor.fetchone() - cursor.close() - - except sqlite3.Error as error: - logger.exception("Failed to read data from table") - raise error - finally: - if connection: - connection.close() - - return SimBotSessionTurn.parse_raw(turn[2]) - - def get_all_session_turns(self, session_id: str) -> list[SimBotSessionTurn]: - """Get all the turns for a given session.""" - try: - all_raw_turns = self._get_all_session_turns(session_id) - except Exception as query_err: - logger.exception("Could not query for session turns") - raise query_err - - with ThreadPoolExecutor() as thread_pool: - # Try parse everything and hope it doesn't crash - try: - parsed_responses = list( - thread_pool.map( - SimBotSessionTurn.parse_raw, - (response_item[2] for response_item in all_raw_turns), - ) - ) - except Exception: - logger.exception( - "Could not parse session turns from response. Returning an empty list." - ) - return [] - - logger.debug(f"Successfully got previous `{len(parsed_responses)}` turns") - - # Sort the responses by the sort key before returning - sorted_responses = sorted(parsed_responses, key=lambda turn: turn.idx) - - return sorted_responses - - def _get_all_session_turns(self, session_id: str) -> list[tuple[str, int, str]]: - try: # noqa: WPS229 - connection = sqlite3.connect(self._db_file) - cursor = connection.cursor() - - sql_select_query = ( - "select * from session_table where primary_key = ? ORDER BY sort_key" - ) - cursor.execute(sql_select_query, (session_id,)) - turns = cursor.fetchall() - cursor.close() - - except sqlite3.Error as error: - logger.exception("Failed to read data from table") - raise error - finally: - if connection: - connection.close() - - return turns diff --git a/src/emma_experience_hub/api/controllers/simbot/clients.py b/src/emma_experience_hub/api/controllers/simbot/clients.py index e2db85cc..544a0c9b 100644 --- a/src/emma_experience_hub/api/controllers/simbot/clients.py +++ b/src/emma_experience_hub/api/controllers/simbot/clients.py @@ -24,7 +24,7 @@ SimBotNLUIntentClient, SimBotPlaceholderVisionClient, SimBotQAIntentClient, - SimBotSQLLiteClient, + SimBotSessionDbClient, ) from emma_experience_hub.common.settings import SimBotSettings @@ -37,7 +37,7 @@ class SimBotControllerClients(BaseModel, arbitrary_types_allowed=True): features: SimBotFeaturesClient nlu_intent: SimBotNLUIntentClient action_predictor: SimbotActionPredictionClient - session_db: SimBotSQLLiteClient + session_db: SimBotSessionDbClient profanity_filter: ProfanityFilterClient out_of_domain_detector: OutOfDomainDetectorClient confirmation_response_classifier: ConfirmationResponseClassifierClient @@ -65,7 +65,7 @@ def from_simbot_settings(cls, simbot_settings: SimBotSettings) -> "SimBotControl timeout=simbot_settings.client_timeout, ), ), - session_db=SimBotSQLLiteClient( + session_db=SimBotSessionDbClient( db_file=Path(simbot_settings.session_local_db_file), ), nlu_intent=SimBotNLUIntentClient( diff --git a/src/emma_experience_hub/api/simbot.py b/src/emma_experience_hub/api/simbot.py index ea7e436e..9018b2e1 100644 --- a/src/emma_experience_hub/api/simbot.py +++ b/src/emma_experience_hub/api/simbot.py @@ -19,7 +19,6 @@ async def startup_event() -> None: """Handle the startup of the API.""" simbot_settings = SimBotSettings.from_env() - # boto3.setup_default_session(profile_name=simbot_settings.aws_profile) state["controller"] = SimBotController.from_simbot_settings(simbot_settings) diff --git a/src/emma_experience_hub/commands/simbot/cli.py 
b/src/emma_experience_hub/commands/simbot/cli.py index a7d3fb8a..bf171ff2 100644 --- a/src/emma_experience_hub/commands/simbot/cli.py +++ b/src/emma_experience_hub/commands/simbot/cli.py @@ -21,10 +21,7 @@ from rich.syntax import Syntax from emma_common.api.gunicorn import create_gunicorn_server -from emma_common.api.instrumentation import instrument_app -from emma_common.aws.cloudwatch import add_cloudwatch_handler_to_logger -from emma_common.logging import InstrumentedInterceptHandler, setup_logging, setup_rich_logging -from emma_experience_hub._version import __version__ # noqa: WPS436 +from emma_common.logging import InterceptHandler, setup_logging, setup_rich_logging from emma_experience_hub.api.simbot import app as simbot_api from emma_experience_hub.common.settings import SimBotSettings from emma_experience_hub.constants.simbot import get_service_registry_file_path @@ -269,14 +266,7 @@ def run_controller_api( simbot_settings = SimBotSettings.from_env() if observability: - instrument_app( - simbot_api, - otlp_endpoint=simbot_settings.otlp_endpoint, - service_name=simbot_settings.opensearch_service_name, - service_version=__version__, - service_namespace="SimBot", - ) - setup_logging(sys.stdout, InstrumentedInterceptHandler()) + setup_logging(sys.stdout, InterceptHandler()) else: setup_rich_logging(rich_traceback_show_locals=False) @@ -288,15 +278,6 @@ def run_controller_api( timeout=timeout, ) - if observability: - add_cloudwatch_handler_to_logger( - boto3_profile_name=simbot_settings.aws_profile, - log_stream_name=simbot_settings.watchtower_log_stream_name, - log_group_name=simbot_settings.watchtower_log_group_name, - send_interval=1, - enable_trace_logging=observability, - ) - server.run() diff --git a/src/emma_experience_hub/commands/teach/__init__.py b/src/emma_experience_hub/commands/teach/__init__.py deleted file mode 100644 index a2b908e8..00000000 --- a/src/emma_experience_hub/commands/teach/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from emma_experience_hub.commands.teach.cli import app diff --git a/src/emma_experience_hub/commands/teach/api.py b/src/emma_experience_hub/commands/teach/api.py deleted file mode 100644 index 0f5c0806..00000000 --- a/src/emma_experience_hub/commands/teach/api.py +++ /dev/null @@ -1,134 +0,0 @@ -import subprocess -from pathlib import Path - -import typer -from rich.console import Console - -from emma_experience_hub.commands.teach.constants import ( - API_CONTAINER_NAME, - DOCKER_NETWORK_NAME, - FEATURE_EXTRACTOR_CONTAINER_NAME, - FEATURE_EXTRACTOR_DEFAULT_PORT, - MODEL_DIR_WITHIN_CONTAINER, - POLICY_API_DEFAULT_PORT, - TEAChDatasetSplit, - TEAChPaths, -) -from emma_experience_hub.common.docker import create_network_if_not_exists, stop_container -from emma_experience_hub.common.system import ( - is_xserver_display_running, - machine_supports_inference_without_display, -) -from emma_experience_hub.common.torch import is_cuda_available - - -console = Console() - - -def launch_feature_extractor_api( - api_port: int = typer.Option( - default=FEATURE_EXTRACTOR_DEFAULT_PORT, - help="Exposed API port for the feature extractor API.", - ), - log_level: str = typer.Option("debug", envvar="LOG_LEVEL", help="Log level for the API"), - use_gpu: bool = typer.Option( - default=is_cuda_available(), help="Run using GPU", show_envvar=False - ), -) -> None: - """Launch the feature extractor API for TEACh. - - This is useful for debugging inference with Policy. - """ - # Stop the container if it is running already. 
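# --- Illustrative sketch (refers to the run_controller_api hunk above, not to the teach
# API module being deleted here; the calls are taken verbatim from that hunk) ---
# With the OpenTelemetry/CloudWatch wiring removed, the observability flag now only
# switches between plain stdout logging and rich console logging.
import sys

from emma_common.logging import InterceptHandler, setup_logging, setup_rich_logging


def configure_logging(observability: bool) -> None:
    """Minimal stand-in for the logging branch in run_controller_api."""
    if observability:
        setup_logging(sys.stdout, InterceptHandler())
    else:
        setup_rich_logging(rich_traceback_show_locals=False)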
- stop_feature_extractor_api() - - paths = TEAChPaths() - - command = [ - "docker", - "run", - "--rm", - f"--name {FEATURE_EXTRACTOR_CONTAINER_NAME}", - f"-p {api_port}:{FEATURE_EXTRACTOR_DEFAULT_PORT}", - '--gpus "device=0"' if use_gpu else "", - f"-v {paths.models.resolve()}:{MODEL_DIR_WITHIN_CONTAINER}", - f"-e LOG_LEVEL={log_level}", - f"-e PERCEPTION_MODEL_FILE={MODEL_DIR_WITHIN_CONTAINER}/{paths.perception_model.name}", - "-e PERCEPTION_CONFIG_FILE=src/emma_perception/constants/vinvl_x152c4_alfred.yaml", - "-e ONLY_PERCEPTION=1", - f"--mount type=bind,source={Path.cwd()}/src/emma_experience_hub/entrypoints/run-teach-api.sh,target=/app/run-teach-api.sh", - '--entrypoint "/bin/bash"', - "-t heriot-watt/emma-simbot:full", - "/app/run-teach-api.sh", - ] - - subprocess.run(" ".join(command), shell=True, check=True) - - -def launch_api( - dataset_split: TEAChDatasetSplit = typer.Option(..., show_envvar=False), - log_level: str = typer.Option( - "debug", envvar="LOG_LEVEL", help="Log level for the Policy and Perception APIs" - ), - clear_output_dir: bool = typer.Option( - default=False, help="Clear the output directory", show_envvar=False - ), - use_gpu: bool = typer.Option( - default=is_cuda_available(), help="Run using GPU", show_envvar=False - ), - api_port: int = typer.Option( - default=POLICY_API_DEFAULT_PORT, - help="Exposed API port from the API container", - show_envvar=False, - ), -) -> None: - """Run the TEACh API for EMMA.""" - # Stop the container if it is running already. - stop_api() - - # Check whether the API is going to be running without a display - should_run_without_display = ( - machine_supports_inference_without_display() and is_xserver_display_running() - ) - if should_run_without_display: - console.log( - "Launch the API to function [b u]without a display[/]. If this is [b u]not desired[/] then verify that your machine supports running inference without a display and that the display is running." 
- ) - create_network_if_not_exists(DOCKER_NETWORK_NAME) - - paths = TEAChPaths() - paths.create_output_dir(clear_output_dir) - - command = [ - "docker", - "run", - "--rm", - f"--name {API_CONTAINER_NAME}", - f"--network {DOCKER_NETWORK_NAME}" if should_run_without_display else "", - f"-p {api_port}:{POLICY_API_DEFAULT_PORT}", - '--gpus "device=0"' if use_gpu else "", - f"-v {paths.models.resolve()}:{MODEL_DIR_WITHIN_CONTAINER}", - f"-v {paths.data.resolve()}:/data:ro", - f"-v {paths.output_frames.resolve()}:/images:ro", - f"-e SPLIT={dataset_split.value}", - f"-e LOG_LEVEL={log_level}", - f"-e PERCEPTION_MODEL_FILE={MODEL_DIR_WITHIN_CONTAINER}/{paths.perception_model.name}", - f"-e POLICY_MODEL_FILE={MODEL_DIR_WITHIN_CONTAINER}/{paths.policy_model.name}", - "-e PERCEPTION_CONFIG_FILE=src/emma_perception/constants/vinvl_x152c4_alfred.yaml", - f"--mount type=bind,source={Path.cwd()}/src/emma_experience_hub/entrypoints/run-teach-api.sh,target=/app/run-teach-api.sh", - '--entrypoint "/bin/bash"', - "-t heriot-watt/emma-simbot:full", - "/app/run-teach-api.sh", - ] - - subprocess.run(" ".join(command), shell=True, check=True) - - -def stop_api() -> None: - """Stop the container running the API.""" - return stop_container(API_CONTAINER_NAME) - - -def stop_feature_extractor_api() -> None: - """Stop the container running the Feature Extractor API.""" - return stop_container(FEATURE_EXTRACTOR_CONTAINER_NAME) diff --git a/src/emma_experience_hub/commands/teach/cli.py b/src/emma_experience_hub/commands/teach/cli.py deleted file mode 100644 index 87a305a4..00000000 --- a/src/emma_experience_hub/commands/teach/cli.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import Optional - -import typer - -from emma_experience_hub.commands.teach.api import ( - launch_api, - launch_feature_extractor_api, - stop_api, - stop_feature_extractor_api, -) -from emma_experience_hub.commands.teach.constants import TEAChDatasetSplit -from emma_experience_hub.commands.teach.dataset import ( - filter_edh_instances, - restore_filtered_edh_instances, -) -from emma_experience_hub.commands.teach.download import ( - download_edh_instances, - download_games, - download_images_for_edh_instances, - download_models, - download_teach_data, -) -from emma_experience_hub.commands.teach.inference import ( - launch_inference_runner, - machine_supports_inference_without_display, - prepare_inference_runner, - stop_inference_runner_container, -) -from emma_experience_hub.commands.teach.metrics import compute_metrics -from emma_experience_hub.commands.teach.xserver import launch_xserver - - -app = typer.Typer( - add_completion=False, - no_args_is_help=True, - short_help="Setup and run EMMA with TEACh.", - help="Simplified commands for setup and running TEACh with EMMA.", -) - - -@app.command() -def prepare_everything( - remote_perception_model_uri: str = typer.Option( - ..., envvar="REMOTE_PERCEPTION_MODEL_URI", help="URI for the Perception model file." - ), - remote_policy_model_uri: str = typer.Option( - ..., envvar="REMOTE_POLICY_MODEL_URI", help="URI for the Policy model file." - ), - count: Optional[int] = typer.Option( - None, - help="Optionally, only download a given number of instances for each dataset split.", - ), -) -> None: - """Prepare everything to run TEAch without thinking about it.""" - confirm = typer.confirm( - "This will download and build everything and might take some time. Are you sure you want to continue?" 
- ) - - if not confirm: - raise typer.Abort() - - download_models(remote_perception_model_uri, remote_policy_model_uri) - download_games() - download_edh_instances(TEAChDatasetSplit.valid_seen, count) - download_edh_instances(TEAChDatasetSplit.valid_unseen, count) - download_images_for_edh_instances(TEAChDatasetSplit.valid_seen) - download_images_for_edh_instances(TEAChDatasetSplit.valid_unseen) - prepare_inference_runner(with_display=True, force_reset=True) - - if machine_supports_inference_without_display(): - prepare_inference_runner(with_display=False, force_reset=True) - - -app.command(rich_help_panel="Download Commands")(download_models) -app.command(rich_help_panel="Download Commands")(download_teach_data) -app.command(rich_help_panel="Download Commands")(download_games) -app.command(rich_help_panel="Download Commands")(download_edh_instances) -app.command(rich_help_panel="Download Commands")(download_images_for_edh_instances) - -app.command(rich_help_panel="API Commands")(launch_api) -app.command(rich_help_panel="API Commands")(stop_api) -app.command(rich_help_panel="API Commands")(launch_feature_extractor_api) -app.command(rich_help_panel="API Commands")(stop_feature_extractor_api) - -app.command(rich_help_panel="Metrics Commands")(compute_metrics) - -app.command(rich_help_panel="Inference Runner Commands")(prepare_inference_runner) -app.command(rich_help_panel="Inference Runner Commands")(launch_inference_runner) -app.command(rich_help_panel="Inference Runner Commands")(launch_xserver) -app.command(rich_help_panel="Inference Runner Commands")(stop_inference_runner_container) - -app.command(rich_help_panel="Dataset Commands")(filter_edh_instances) -app.command(rich_help_panel="Dataset Commands")(restore_filtered_edh_instances) diff --git a/src/emma_experience_hub/commands/teach/constants.py b/src/emma_experience_hub/commands/teach/constants.py deleted file mode 100644 index 29416ab0..00000000 --- a/src/emma_experience_hub/commands/teach/constants.py +++ /dev/null @@ -1,74 +0,0 @@ -from enum import Enum -from pathlib import Path -from shutil import rmtree - - -DOCKER_NETWORK_NAME = "emma-teach-inference" -API_CONTAINER_NAME = "emma-teach-api" -FEATURE_EXTRACTOR_CONTAINER_NAME = "emma-teach-feature-extractor-api" -INFERENCE_RUNNER_CONTAINER_NAME = "teach-inference-runner" -INFERENCE_RUNNER_IMAGE_NAME = "heriot-watt/emma-simbot:teach-inference" -MODEL_DIR_WITHIN_CONTAINER = "/app/models" -FEATURE_EXTRACTOR_DEFAULT_PORT = "5500" -POLICY_API_DEFAULT_PORT = "5000" - - -class TEAChDatasetSplit(Enum): - """Variants of dataset splits available for the TEACh dataset.""" - - # all = "all" - # train = "train" - # valid = "valid" - valid_seen = "valid_seen" - valid_unseen = "valid_unseen" - # test = "test" - # test_seen = "test_seen" - # test_unseen = "test_unseen" - - -class TEAChPaths: - """Paths used for the TEACh API.""" - - storage = Path.cwd().joinpath("storage/", "teach/") - models = storage.joinpath("models/") - - output = storage.joinpath("output/") - output_frames = output.joinpath("frames/") - output_metadata = output.joinpath("metadata/") - - data = storage.joinpath("data/") - data_images = data.joinpath("images/") - data_edh_instances = data.joinpath("edh_instances/") - data_games = data.joinpath("games/") - data_unused_edh_instances = data.joinpath("_unused_edh_instances") - data_filtered_edh_instances = data.joinpath("_filtered_edh_instances") - - policy_model = models.joinpath("policy_model_checkpoint") - perception_model = models.joinpath("perception_model_checkpoint") - - 
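# --- For reference (not part of this diff): directory layout implied by the path constants
# above, relative to the working directory; only the paths defined up to this point are shown ---
#
#   storage/teach/
#   |-- models/
#   |   |-- policy_model_checkpoint
#   |   `-- perception_model_checkpoint
#   |-- output/
#   |   |-- frames/
#   |   `-- metadata/
#   `-- data/
#       |-- images/
#       |-- edh_instances/
#       |-- games/
#       |-- _unused_edh_instances
#       `-- _filtered_edh_instances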
alexa_teach_repo = storage.joinpath("alexa_teach_repo/") - alexa_teach_repo_venv = alexa_teach_repo.joinpath(".venv/") - alexa_teach_repo_python = alexa_teach_repo_venv.joinpath("bin/", "python") - - s3_bucket_name = "emma-simbot" - s3_teach_prefix = "datasets/teach" - s3_edh_instances_prefix = f"{s3_teach_prefix}/edh_instances" - s3_images_prefix = f"{s3_teach_prefix}/images" - - def create_storage_dir(self) -> None: - """Create the storage dir.""" - self.storage.mkdir(parents=True, exist_ok=True) - - def clear_output_dir(self) -> None: - """Clear the output directory of all files.""" - rmtree(self.output) - - def create_output_dir(self, clear_output_dir: bool = False) -> None: - """Create the output directories if they do not exist.""" - if clear_output_dir: - self.clear_output_dir() - - output_dirs = [self.output_frames, self.output_metadata] - - for output_dir in output_dirs: - output_dir.mkdir(parents=True, exist_ok=True) diff --git a/src/emma_experience_hub/commands/teach/dataset.py b/src/emma_experience_hub/commands/teach/dataset.py deleted file mode 100644 index 30029b05..00000000 --- a/src/emma_experience_hub/commands/teach/dataset.py +++ /dev/null @@ -1,127 +0,0 @@ -import random -from typing import Optional - -import typer -from rich.console import Console - -from emma_datasets.datamodels.datasets.teach import TeachEdhInstance as TEAChEDHInstance -from emma_experience_hub.commands.teach.constants import TEAChDatasetSplit, TEAChPaths - - -console = Console() - - -def limit_edh_instances_evaluated(count: int, dataset_split: TEAChDatasetSplit) -> None: - """Limit the number of instances being evalauted.""" - edh_instances_dir = TEAChPaths.data_edh_instances.joinpath(dataset_split.value) - if count > len(list(edh_instances_dir.iterdir())): - raise AssertionError( - "The maximum number of instances is greater than the number of instances available." - ) - - temp_instances_dir = TEAChPaths.data_unused_edh_instances.joinpath(dataset_split.value) - temp_instances_dir.mkdir(parents=True, exist_ok=True) - - all_instance_paths = list(edh_instances_dir.iterdir()) - selected_instances = random.sample(all_instance_paths, count) - unselected_instances = ( - instance_path - for instance_path in all_instance_paths - if instance_path not in selected_instances - ) - - for instance_path in unselected_instances: - instance_path.rename(temp_instances_dir.joinpath(instance_path.name)) - - -def restore_unselected_edh_instances() -> None: - """Restore EDH instances that were not evaluated on.""" - for instance_path in TEAChPaths.data_unused_edh_instances.rglob("*.json"): - instance_path.rename( - TEAChPaths.data_edh_instances.joinpath( - instance_path.parent.parts[-1], instance_path.name - ) - ) - - for unused_dir in TEAChPaths.data_unused_edh_instances.iterdir(): - unused_dir.rmdir() - TEAChPaths.data_unused_edh_instances.rmdir() - - -def filter_edh_instances( - dataset_split: TEAChDatasetSplit = typer.Option( - ..., help="Dataset split to perform filtering on" - ), - max_action_future_length: Optional[int] = typer.Option( - None, help="Set the maximum length of the driver action futures for the instances." - ), - has_interaction_action_in_future: Optional[bool] = typer.Option( - None, help="Ensure future actions contain at least one interaction action." 
- ), -) -> None: - """Filter EDH instances by a set of criteria.""" - paths = TEAChPaths() - instances_dir = paths.data_edh_instances.joinpath(dataset_split.value) - filtered_instances_dir = paths.data_filtered_edh_instances.joinpath(dataset_split.value) - filtered_instances_dir.mkdir(parents=True, exist_ok=True) - - with console.status("Loading all EDH instances..."): - edh_instances = [ - TEAChEDHInstance.parse_file(instance_path) - for instance_path in instances_dir.iterdir() - if instance_path.name.endswith("json") - ] - - console.log(f"{len(edh_instances)} EDH instances found") - - if max_action_future_length: - num_instances_before_filter = len(edh_instances) - edh_instances = [ - instance - for instance in edh_instances - if len(instance.driver_actions_future) <= max_action_future_length - ] - console.log( - f"{num_instances_before_filter - len(edh_instances)} EDH instances have more than {max_action_future_length} action in their future. {len(edh_instances)} EDH instances remaining..." - ) - - if has_interaction_action_in_future: - num_instances_before_filter = len(edh_instances) - edh_instances = [ - instance - for instance in edh_instances - if any(action.obj_interaction_action for action in instance.driver_actions_future) - ] - console.log( - f"{num_instances_before_filter - len(edh_instances)} EDH instances [cyan]do not have an interaction action[/] in their future. {len(edh_instances)} EDH instances remaining..." - ) - - with console.status("Removing instances which do not match the filters..."): - instance_names_to_keep = [f"{instance.instance_id}.json" for instance in edh_instances] - - for instance_path in instances_dir.iterdir(): - if instance_path.name in instance_names_to_keep: - continue - - instance_path.rename(filtered_instances_dir.joinpath(instance_path.name)) - console.log(f"Removed {instance_path}") - - console.rule("Done!") - console.log( - "Run [u]`python -m emma_experience_hub teach restore-filtered-edh-instances`[/] to restore all the filtered instances." - ) - - -def restore_filtered_edh_instances() -> None: - """Restore instances that have been previously filtered.""" - with console.status("Restoring instances which have been previously filtered..."): - for instance_path in TEAChPaths.data_filtered_edh_instances.rglob("*.json"): - instance_path.rename( - TEAChPaths.data_edh_instances.joinpath( - instance_path.parent.parts[-1], instance_path.name - ) - ) - - for unused_dir in TEAChPaths.data_filtered_edh_instances.iterdir(): - unused_dir.rmdir() - TEAChPaths.data_filtered_edh_instances.rmdir() diff --git a/src/emma_experience_hub/commands/teach/download.py b/src/emma_experience_hub/commands/teach/download.py deleted file mode 100644 index 8db25912..00000000 --- a/src/emma_experience_hub/commands/teach/download.py +++ /dev/null @@ -1,238 +0,0 @@ -import logging -import random -import subprocess -from concurrent.futures import ThreadPoolExecutor -from functools import partial -from typing import Optional, cast - -import boto3 -import typer -from rich.console import Console -from rich.progress import BarColumn, MofNCompleteColumn, Progress, TextColumn, TimeRemainingColumn - -from emma_experience_hub.commands.teach.constants import TEAChDatasetSplit, TEAChPaths - - -# Without this, boto print so many logs, it crashes the terminal. 
-logging.getLogger("boto3").setLevel(logging.CRITICAL) -logging.getLogger("botocore").setLevel(logging.CRITICAL) -logging.getLogger("nose").setLevel(logging.CRITICAL) -logging.getLogger("s3transfer").setLevel(logging.CRITICAL) -logging.getLogger("urllib3").setLevel(logging.CRITICAL) - - -console = Console() - -progress = Progress( - TextColumn("[bold blue]{task.description}", justify="right"), - BarColumn(bar_width=None), - "[progress.percentage]{task.percentage:>3.1f}%", - MofNCompleteColumn(), - TimeRemainingColumn(), -) - - -def download_models( - remote_perception_checkpoint_uri: str = typer.Option( - ..., - envvar="REMOTE_PERCEPTION_CHECKPOINT_URI", - help="URI for the Perception model file on S3.", - ), - remote_policy_checkpoint_uri: str = typer.Option( - ..., envvar="REMOTE_POLICY_CHECKPOINT_URI", help="URI for the Policy model file on S3." - ), -) -> None: - """Download EMMA model files.""" - paths = TEAChPaths() - - paths.create_storage_dir() - - # Delete the previous model files if they exist - if paths.policy_model.exists(): - paths.policy_model.unlink() - if paths.perception_model.exists(): - paths.perception_model.unlink() - - # Download Models using S3 - subprocess.run( - f"aws s3 cp {remote_perception_checkpoint_uri} {paths.policy_model.resolve()}", - shell=True, - check=True, - ) - subprocess.run( - f"aws s3 cp {remote_policy_checkpoint_uri} {paths.perception_model.resolve()}", - shell=True, - check=True, - ) - - -def download_games() -> None: - """Download TEACh Games.""" - paths = TEAChPaths() - - for dataset_split in TEAChDatasetSplit: - command = [ - "aws s3 cp", - f"s3://emma-simbot/datasets/teach/games/{dataset_split.value}", - str(paths.data.joinpath("games", dataset_split.value).resolve()), - "--recursive", - ] - subprocess.run(" ".join(command), shell=True, check=True) - - -def download_edh_instances( - dataset_split: TEAChDatasetSplit = typer.Option(..., help="Dataset split to download"), - count: Optional[int] = typer.Option( - None, - help="Only download a number of EDH instances, chosen randomly.", - min=1, - show_envvar=False, - ), - download_images: bool = typer.Option( - default=True, help="Download images for the EDH instances" - ), -) -> None: - """Download TEACh EDH instances.""" - paths = TEAChPaths() - - s3 = boto3.client("s3") - - with console.status("Getting all the paths to download..."): - # Get URIs for all the EDH instances - edh_instances_list: list[str] = [ - raw_object["Key"] - for raw_object in s3.list_objects_v2( - Bucket=paths.s3_bucket_name, - Prefix=f"{paths.s3_edh_instances_prefix}/{dataset_split.value}", - )["Contents"] - ] - - # If desired, randomly sample from the list - if count: - edh_instances_list = random.sample(edh_instances_list, count) - - # Create progress bar tasks - instances_task_id = progress.add_task("Downloading instances", total=len(edh_instances_list)) - - with progress: - # Create the output directory for the EDH instances - instances_output_dir = paths.data.joinpath("edh_instances", dataset_split.value) - instances_output_dir.mkdir(parents=True, exist_ok=True) - - # Download the EDH instances - for instance_key in edh_instances_list: - s3.download_file( - Bucket=paths.s3_bucket_name, - Key=instance_key, - Filename=instances_output_dir.joinpath(instance_key.split("/")[-1]).as_posix(), - ) - - progress.advance(instances_task_id) - - if download_images: - download_images_for_edh_instances(dataset_split) - - -def _get_available_images_for_edh_instance( - edh_instance_id: str, *, dataset_split: TEAChDatasetSplit -) -> 
set[str]: - """Return a set of image paths which should be downloaded for a given EDH instance. - - This also ensure that images which already exist are not downloaded. - """ - s3 = boto3.client("s3") - - paths = TEAChPaths() - images_dir = paths.data_images.joinpath(dataset_split.value) - - # If there are already some images downloaded, get a list of them - existing_images = ( - {image.name for image in images_dir.joinpath(edh_instance_id).iterdir()} - if images_dir.joinpath(edh_instance_id).exists() - else set() - ) - - # Get a list of all the images that can be downloaded for the instance - available_images_for_instance = ( - image_prefix["Key"] - for image_prefix in s3.list_objects_v2( - Bucket=paths.s3_bucket_name, - Prefix="{images_prefix}/{split}/{instance}".format( - images_prefix=paths.s3_images_prefix, - split=dataset_split.value, - instance=edh_instance_id.split(".")[0], - ), - )["Contents"] - if "Key" in image_prefix and "driver" in image_prefix["Key"] - ) - - # Filter out images which already exist - image_prefixes_to_download: set[str] = { - prefix - for prefix in available_images_for_instance - if prefix.split("/")[-1] not in existing_images - } - - return image_prefixes_to_download - - -def download_images_for_edh_instances( - dataset_split: TEAChDatasetSplit = typer.Option(..., help="Dataset split to download") -) -> None: - """Download images for EDH instances.""" - s3 = boto3.client("s3") - paths = TEAChPaths() - - with console.status("Getting list of downloaded instances..."): - downloaded_edh_instance_ids = [ - instance.stem - for instance in paths.data_edh_instances.joinpath(dataset_split.value).iterdir() - ] - - task_id = progress.add_task("Determining which images to download...", start=False, total=0) - - with progress: - with ThreadPoolExecutor() as pool: - instance_image_prefixes_iterator = pool.map( - partial(_get_available_images_for_edh_instance, dataset_split=dataset_split), - downloaded_edh_instance_ids, - ) - - instance_image_prefixes_list = [] - - for prefixes in instance_image_prefixes_iterator: - instance_image_prefixes_list.append(prefixes) - progress.update( - task_id, total=cast(float, progress.tasks[task_id].total) + len(prefixes) - ) - - for instance_image_keys in instance_image_prefixes_list: - progress.update(task_id, description="Downloading images", start=True) - - for image_key in instance_image_keys: - # Create the directory for all the images - images_output_dir = paths.data.joinpath( - "images", dataset_split.value, image_key.split("/")[-2] - ) - images_output_dir.mkdir(parents=True, exist_ok=True) - - # Download the images for the instance - s3.download_file( - Bucket=paths.s3_bucket_name, - Key=image_key, - Filename=images_output_dir.joinpath(image_key.split("/")[-1]).as_posix(), - ) - - progress.advance(task_id) - - -def download_teach_data() -> None: - """Download EVERYTHING related to TEACh. - - This just runs all the other commands sequentially. 
- """ - download_games() - download_edh_instances(TEAChDatasetSplit.valid_seen) - download_edh_instances(TEAChDatasetSplit.valid_unseen) - download_images_for_edh_instances(TEAChDatasetSplit.valid_seen) - download_images_for_edh_instances(TEAChDatasetSplit.valid_unseen) diff --git a/src/emma_experience_hub/commands/teach/inference.py b/src/emma_experience_hub/commands/teach/inference.py deleted file mode 100644 index 2008128a..00000000 --- a/src/emma_experience_hub/commands/teach/inference.py +++ /dev/null @@ -1,225 +0,0 @@ -import subprocess -from shutil import rmtree -from typing import Optional -from venv import create as create_virtualenv - -import typer -from rich.console import Console - -from emma_experience_hub.commands.teach.constants import ( - API_CONTAINER_NAME, - DOCKER_NETWORK_NAME, - INFERENCE_RUNNER_CONTAINER_NAME, - INFERENCE_RUNNER_IMAGE_NAME, - POLICY_API_DEFAULT_PORT, - TEAChDatasetSplit, - TEAChPaths, -) -from emma_experience_hub.commands.teach.dataset import ( - limit_edh_instances_evaluated, - restore_unselected_edh_instances, -) -from emma_experience_hub.common.docker import ( - create_network_if_not_exists, - is_container_running, - stop_container, -) -from emma_experience_hub.common.system import ( - get_active_display_index, - machine_supports_inference_without_display, -) -from emma_experience_hub.common.torch import is_cuda_available - - -console = Console() - - -def prepare_inference_runner_without_display() -> None: - """Build the TEACh Inference Runner Docker image to run inference without a display.""" - subprocess.run("docker buildx use default", shell=True, check=True) - subprocess.run( - "docker buildx bake -f docker/docker-bake.hcl teach-inference", shell=True, check=True - ) - - -def prepare_inference_runner_with_display(force_reset: bool = False) -> None: - """Prepare to run the inference runner with a display. - - This clones the `alexa/teach` repo locally, creates a virtualenv and installs the dependencies - within it, keeping it isolated from the remainder of the package. 
- """ - # Clean up if `force_reset` is true - if force_reset and TEAChPaths.alexa_teach_repo.exists(): - rmtree(TEAChPaths.alexa_teach_repo.resolve()) - - # Clone the `alexa/teach` repository - if not TEAChPaths.alexa_teach_repo.exists(): - subprocess.run( - f"git clone https://github.com/alexa/teach.git {TEAChPaths.alexa_teach_repo.resolve()}", - shell=True, - check=True, - ) - - # Create a new virtualenv for running TEACh - if not TEAChPaths.alexa_teach_repo_venv.exists(): - create_virtualenv(TEAChPaths.alexa_teach_repo_venv.resolve(), with_pip=True) - - # Install the dependencies - subprocess.run( - f"{TEAChPaths.alexa_teach_repo_python.resolve()} -m pip install -r {TEAChPaths.alexa_teach_repo.joinpath('requirements.txt')}", - shell=True, - check=True, - ) - # Install the alexa teach repo to its own venv - subprocess.run( - f"{TEAChPaths.alexa_teach_repo_python.resolve()} -m pip install -e {TEAChPaths.alexa_teach_repo.resolve()}", - shell=True, - check=True, - ) - - -def launch_inference_runner_with_display( - dataset_split: TEAChDatasetSplit, clear_output_dir: bool = False -) -> None: - """Run the TEACh EDH inference runner locally, without Docker.""" - paths = TEAChPaths() - paths.create_output_dir(clear_output_dir) - - command = [ - f"{TEAChPaths.alexa_teach_repo_python} -m teach_inference", - f"--data_dir {paths.data.resolve()}", - f"--output_dir {paths.output_metadata.resolve()}", - f"--images_dir {paths.output_frames.resolve()}", - f"--split {dataset_split.value}", - f"--metrics_file {paths.output_metadata.resolve()}/metrics", - "--model_module teach.inference.remote_model", - "--model_class RemoteModel", - f"--model_api_host_and_port localhost:{POLICY_API_DEFAULT_PORT}", - ] - - subprocess.run(" ".join(command), shell=True, check=True) - - -def launch_inference_runner_without_display( - dataset_split: TEAChDatasetSplit, clear_output_dir: bool = False -) -> None: - """Launch the inference runner for TEACh using Docker. - - You need to ensure that the X Server is running before running this. - """ - if not machine_supports_inference_without_display(): - raise typer.Abort( - "Your machine does not support using the inference runner without a display." - ) - - # Stop the container if it is running already. - stop_inference_runner_container() - create_network_if_not_exists(DOCKER_NETWORK_NAME) - - # Verify that the inference runner is running - if not is_container_running(API_CONTAINER_NAME): - raise AssertionError( - "The TEACh API is not already running. Run `python -m emma_experience_hub teach launch_api` before running this command." - ) - - if not is_cuda_available(): - raise AssertionError( - "This command only supports running the inference runner on a machine without a display (i.e. 
Linux with an NVIDIA GPU)" - ) - - paths = TEAChPaths() - paths.create_output_dir(clear_output_dir) - - command = [ - "docker", - "run", - "--rm", - "--privileged", - f"--name {INFERENCE_RUNNER_CONTAINER_NAME}", - f"--network {DOCKER_NETWORK_NAME}", - f"-e DISPLAY=:{get_active_display_index()}", - "-e NVIDIA_DRIVER_CAPABILITIES=all", - '--gpus "device=0"', - "-v /tmp/.X11-unix:/tmp/.X11-unix:ro", - f"-v {paths.data.resolve()}:/data:ro", - f"-v {paths.output_frames.resolve()}:/images", - f"-v {paths.output_metadata.resolve()}:/output", - # Run CMD - f"{INFERENCE_RUNNER_IMAGE_NAME} teach_inference", - "--data_dir /data", - "--output_dir /output", - "--images_dir /images", - f"--split {dataset_split.value}", - "--metrics_file /output/metrics", - "--model_module teach.inference.remote_model", - "--model_class RemoteModel", - f"--model_api_host_and_port {API_CONTAINER_NAME}:{POLICY_API_DEFAULT_PORT}", - ] - - subprocess.run(" ".join(command), shell=True, check=True) - - -def prepare_inference_runner( - with_display: bool = typer.Option( - ... if machine_supports_inference_without_display() else True, - help="Whether or not inference will be run with or without a display", - hidden=not machine_supports_inference_without_display(), - ), - force_reset: bool = typer.Option( - default=False, help="Force reset the prepared inference runner and set it up again" - ), -) -> None: - """Prepare the inference runner for TEACh EDH instances.""" - if not with_display and not machine_supports_inference_without_display(): - raise typer.BadParameter( - "Your machine does not support using the inference runner without a display." - ) - - if with_display: - prepare_inference_runner_with_display(force_reset) - else: - prepare_inference_runner_without_display() - - -def launch_inference_runner( - use_display: bool = typer.Option( - ... if machine_supports_inference_without_display() else True, - help="Run inference with or without a display", - hidden=not machine_supports_inference_without_display(), - ), - dataset_split: TEAChDatasetSplit = typer.Option( - ..., help="Choose with split to evaluate on", show_envvar=False - ), - clear_output_dir: bool = typer.Option( - default=False, - help="Clear the output directory to run on all the instances", - show_envvar=False, - ), - limit_instances: Optional[int] = typer.Option( - None, - help="Optionally, randomly select a maximum number of instances to use during inference.", - min=1, - ), -) -> None: - """Launch the inference runner for TEACh EDH instances.""" - if not use_display and not machine_supports_inference_without_display(): - raise typer.BadParameter( - "Your machine does not support using the inference runner without a display." 
- ) - - with console.status("Preparing instances to evaluate on..."): - if TEAChPaths.data_unused_edh_instances.exists(): - restore_unselected_edh_instances() - - if limit_instances: - limit_edh_instances_evaluated(count=limit_instances, dataset_split=dataset_split) - - if use_display: - launch_inference_runner_with_display(dataset_split, clear_output_dir) - else: - launch_inference_runner_without_display(dataset_split, clear_output_dir) - - -def stop_inference_runner_container() -> None: - """Stop the Docker container running inference.""" - return stop_container(INFERENCE_RUNNER_CONTAINER_NAME) diff --git a/src/emma_experience_hub/commands/teach/metrics.py b/src/emma_experience_hub/commands/teach/metrics.py deleted file mode 100644 index 7fb06047..00000000 --- a/src/emma_experience_hub/commands/teach/metrics.py +++ /dev/null @@ -1,187 +0,0 @@ -import itertools - -import typer -from pydantic import BaseModel, Field, parse_file_as -from rich import box -from rich.live import Live -from rich.spinner import Spinner -from rich.table import Table - -from emma_experience_hub.commands.teach.constants import TEAChPaths - - -class EDHInstanceMetrics(BaseModel): - """Metrics for a single EDH instance.""" - - instance_id: str - game_id: str - - # Goal Conditions - completed_goal_conditions: int - total_goal_conditions: int - goal_condition_success: float - - success_spl: float - path_len_weighted_success_spl: float - goal_condition_spl: float - path_len_weighted_goal_condition_spl: float - ground_truth_path_length: float = Field(..., alias="gt_path_len") - - reward: float - success: bool - trajectory_length: int = Field(..., alias="traj_len") - - predicted_stop: bool - num_api_fails: int - error: int - init_success: bool - - -class SummaryMetrics(BaseModel): - """Summary metrics, aggregated from all instances.""" - - # Success Rate - num_successes: int - num_trials: int - - # Goal Conditions - completed_goal_conditions: int - total_goal_conditions: int - - # Path-Length Weighted - plw_success_rate: float - plw_goal_condition_success_rate: float - - @property - def success_rate(self) -> float: - """Get the success rate.""" - if self.num_trials == 0: - return 0 - - return self.num_successes / self.num_trials - - @property - def goal_condition_success_rate(self) -> float: - """Get the success rate.""" - if self.total_goal_conditions == 0: - return 0 - - return self.completed_goal_conditions / self.total_goal_conditions - - @classmethod - def from_all_edh_instances( - cls, all_individual_metrics: list[EDHInstanceMetrics] - ) -> "SummaryMetrics": - """Create a SummaryMetrics from all the individual metrics.""" - num_successes = sum(instance.success for instance in all_individual_metrics) - num_trials = len(all_individual_metrics) - - completed_goal_conditions = sum( - instance.completed_goal_conditions for instance in all_individual_metrics - ) - total_goal_conditions = sum( - instance.total_goal_conditions for instance in all_individual_metrics - ) - - total_path_length = sum( - instance.ground_truth_path_length for instance in all_individual_metrics - ) - - if total_path_length > 0: - # PLW == Path Length Weighted - plw_success_rate = ( - sum(instance.path_len_weighted_success_spl for instance in all_individual_metrics) - / total_path_length - ) - plw_goal_condition_success_rate = ( - sum( - instance.path_len_weighted_goal_condition_spl - for instance in all_individual_metrics - ) - / total_path_length - ) - else: - plw_success_rate = 0 - plw_goal_condition_success_rate = 0 - - return cls( - 
num_successes=num_successes, - num_trials=num_trials, - completed_goal_conditions=completed_goal_conditions, - total_goal_conditions=total_goal_conditions, - # total_path_length=total_path_length, - plw_success_rate=plw_success_rate, - plw_goal_condition_success_rate=plw_goal_condition_success_rate, - ) - - -def render_summary_metrics_table(metrics: SummaryMetrics) -> Table: - """Render the results table from the metrics.""" - table = Table( - expand=True, - box=box.SIMPLE_HEAD, - pad_edge=False, - border_style="bright_yellow", - ) - - table.add_column() - table.add_column("Successful/Total", justify="right") - table.add_column("Average", justify="right") - table.add_column("Path-length Weighted Avg.", justify="right") - - table.add_row( - Spinner("dots", text="Overall Success"), - f"{metrics.num_successes}/{metrics.num_trials}", - f"{metrics.success_rate:.3f}", - f"{metrics.plw_success_rate:.3f}", - ) - - table.add_row( - Spinner("dots", text="Goal Condition"), - f"{metrics.completed_goal_conditions}/{metrics.total_goal_conditions}", - f"{metrics.goal_condition_success_rate:.3f}", - f"{metrics.plw_goal_condition_success_rate:.3f}", - ) - - return table - - -def get_metrics_from_output_dir() -> list[EDHInstanceMetrics]: - """Get all the metrics from the output metrics files.""" - all_metrics_files = ( - file_path - for file_path in TEAChPaths.output_metadata.iterdir() - if file_path.stem.startswith("metrics") - ) - - all_instance_metrics_per_file = ( - parse_file_as(dict[str, EDHInstanceMetrics], metric_file).values() - for metric_file in all_metrics_files - ) - - all_instance_metrics = list(itertools.chain.from_iterable(all_instance_metrics_per_file)) - - return all_instance_metrics - - -def create_summary_metrics_table() -> Table: - """Create the summary metrics table and return the renderable.""" - all_metrics = get_metrics_from_output_dir() - summary_metrics = SummaryMetrics.from_all_edh_instances(all_metrics) - return render_summary_metrics_table(summary_metrics) - - -def compute_metrics( - watch: bool = typer.Option(default=False, help="Watch the metrics for changes") -) -> None: - """Aggregate and compute metrics from all the EDH instances.""" - with Live(create_summary_metrics_table()) as live: - while watch: - table = create_summary_metrics_table() - table.caption = "[i]Press CTRL-C to exit.[/]" - - live.update(table) - - -if __name__ == "__main__": - compute_metrics() diff --git a/src/emma_experience_hub/commands/teach/xserver.py b/src/emma_experience_hub/commands/teach/xserver.py deleted file mode 100644 index deb22c7a..00000000 --- a/src/emma_experience_hub/commands/teach/xserver.py +++ /dev/null @@ -1,127 +0,0 @@ -import os -import platform -import re -import shlex -import subprocess -import tempfile -from typing import Any - -from rich.console import Console - - -console = Console() - - -def pci_records() -> list[dict[str, Any]]: - """Get the PCI records?""" - records = [] - command = shlex.split("lspci -vmm") - output = subprocess.check_output(command).decode() - - for devices in output.strip().split("\n\n"): - record: dict[str, Any] = {} - records.append(record) - for row in devices.split("\n"): - key, device_row_value = row.split("\t") - record[key.split(":")[0]] = device_row_value - - return records - - -def generate_xorg_conf(devices: list[Any]) -> str: - """Generate a config for Xorg.""" - xorg_conf = [] - - device_section = """ -Section "Device" - Identifier "Device{device_id}" - Driver "nvidia" - VendorName "NVIDIA Corporation" - BusID "{bus_id}" -EndSection -""" - 
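# --- Worked example (hypothetical numbers; it illustrates the aggregation in
# SummaryMetrics.from_all_edh_instances from the deleted metrics.py above, not the
# X server configuration code at this point) ---
# Two instances: one success out of two trials, ground-truth path lengths 10 and 30,
# path-length-weighted success SPL values 8.0 and 0.0.
num_successes, num_trials = 1, 2
total_path_length = 10 + 30
success_rate = num_successes / num_trials           # 0.5
plw_success_rate = (8.0 + 0.0) / total_path_length  # 0.2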
server_layout_section = """ -Section "ServerLayout" - Identifier "Layout0" - {screen_records} -EndSection -""" - screen_section = """ -Section "Screen" - Identifier "Screen{screen_id}" - Device "Device{device_id}" - DefaultDepth 24 - Option "AllowEmptyInitialConfiguration" "True" - SubSection "Display" - Depth 24 - Virtual 1250 1250 - EndSubSection -EndSection -""" - screen_records = [] - for i, bus_id in enumerate(devices): - xorg_conf.append(device_section.format(device_id=i, bus_id=bus_id)) - xorg_conf.append(screen_section.format(device_id=i, screen_id=i)) - screen_records.append('Screen {screen_id} "Screen{screen_id}" 0 0'.format(screen_id=i)) - - xorg_conf.append(server_layout_section.format(screen_records="\n ".join(screen_records))) - - output = "\n".join(xorg_conf) - console.print(output) - return output - - -def startx(display: int) -> None: # noqa: WPS231 - """Start the X server.""" - if platform.system() != "Linux": - raise OSError("Can only run startx on linux") - - devices = [] - - for record in pci_records(): - is_valid_vendor = record.get("Vendor", "") == "NVIDIA Corporation" - is_valid_record_class = record["Class"] in {"VGA compatible controller", "3D controller"} - - if is_valid_vendor and is_valid_record_class: - bus_id_list = [ - str(int(slot, 16)) for slot in re.split(r"[:\.]", record["Slot"]) # noqa: WPS432 - ] - bus_id = f"PCI:{':'.join(bus_id_list)}" - devices.append(bus_id) - - if not devices: - raise RuntimeError("No NVIDIA cards found") - - fd, path = tempfile.mkstemp() - with open(path, "w") as f: - f.write(generate_xorg_conf(devices)) - - command = shlex.split( - f"Xorg -noreset +extension GLX +extension RANDR +extension RENDER -config {path} :{display}" - ) - - # Create an environment variable for the display - if not os.environ["DISPLAY"]: - os.environ["DISPLAY"] = str(display) - - try: # noqa: WPS501 - subprocess.call(command) - finally: - os.close(fd) - os.unlink(path) - - # Remove the display environment variable if it exists - if os.environ["DISPLAY"]: - del os.environ["DISPLAY"] # noqa: WPS420 - - -def launch_xserver() -> None: - """Launch the X Server (needed if running inference without a display). - - This just runs a mildly modified version of the one from alexa/teach. 
- """ - display = 0 - # if len(sys.argv) > 1: - # display = int(sys.argv[1]) - console.print(f"Starting X on DISPLAY=:{display}") - startx(display) diff --git a/src/emma_experience_hub/common/settings/simbot.py b/src/emma_experience_hub/common/settings/simbot.py index f0693c20..7e53d59e 100644 --- a/src/emma_experience_hub/common/settings/simbot.py +++ b/src/emma_experience_hub/common/settings/simbot.py @@ -1,14 +1,6 @@ from typing import Any, Optional -from pydantic import ( - AnyHttpUrl, - BaseModel, - BaseSettings, - DirectoryPath, - Field, - root_validator, - validator, -) +from pydantic import AnyHttpUrl, BaseModel, BaseSettings, DirectoryPath, root_validator, validator from emma_experience_hub.datamodels.common import GFHLocationType from emma_experience_hub.datamodels.enums import SearchPlannerType @@ -86,8 +78,6 @@ class SimBotSettings(BaseSettings): client_timeout: Optional[int] = 5 - aws_profile: str = Field(default="TeamProfile", env="aws_profile") - watchtower_log_group_name: str = "simbot_challenge" opensearch_service_name: str = "experience-hub" watchtower_log_stream_name: str = "experience-hub/{machine_name}/{logger_name}/{process_id}" diff --git a/src/emma_experience_hub/datamodels/simbot/payloads/auxiliary_metadata.py b/src/emma_experience_hub/datamodels/simbot/payloads/auxiliary_metadata.py index 2b2ead40..551cf82a 100644 --- a/src/emma_experience_hub/datamodels/simbot/payloads/auxiliary_metadata.py +++ b/src/emma_experience_hub/datamodels/simbot/payloads/auxiliary_metadata.py @@ -25,13 +25,7 @@ class SimBotAuxiliaryMetadataUri(AnyUrl): __slots__ = () def resolve_path(self, game_metadata_efs_dir: Path) -> FilePath: - """Fully resolve the path to the game metadata file. - - This follows the provided example: - - - https://us-east-1.console.aws.amazon.com/codesuite/codecommit/repositories/AlexaSimbotModelInferenceService/browse/refs/heads/main/--/alexa_simbot_action_inference_model_wrapper/service/models/V103.py?region=us-east-1&lines=1479-1481 - """ + """Fully resolve the path to the game metadata file.""" # Perform the replace as they've done (from the example) efs_dir_as_string = str(game_metadata_efs_dir.resolve()) corrected_image_uri = self.replace("efs://", f"{efs_dir_as_string}/") diff --git a/src/emma_experience_hub/pipelines/simbot/request_processing.py b/src/emma_experience_hub/pipelines/simbot/request_processing.py index 19a4e93f..f651975b 100644 --- a/src/emma_experience_hub/pipelines/simbot/request_processing.py +++ b/src/emma_experience_hub/pipelines/simbot/request_processing.py @@ -2,7 +2,7 @@ from loguru import logger -from emma_experience_hub.api.clients.simbot import SimBotSQLLiteClient +from emma_experience_hub.api.clients.simbot import SimBotSessionDbClient from emma_experience_hub.datamodels.simbot import ( SimBotActionStatus, SimBotRequest, @@ -14,7 +14,7 @@ class SimBotRequestProcessingPipeline: """Process the incoming requests and build the session data.""" - def __init__(self, session_db_client: SimBotSQLLiteClient) -> None: + def __init__(self, session_db_client: SimBotSessionDbClient) -> None: self._session_db_client = session_db_client def run(self, request: SimBotRequest) -> SimBotSession:
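Taken together, these hunks replace the DynamoDB-backed session store with the local SQLite client under its new `SimBotSessionDbClient` name and drop the AWS-specific settings and instrumentation. A minimal wiring sketch, using only names that appear in the hunks above (it assumes `session_local_db_file` points at a writable location):

from pathlib import Path

from emma_experience_hub.api.clients.simbot import SimBotSessionDbClient
from emma_experience_hub.common.settings import SimBotSettings
from emma_experience_hub.pipelines.simbot.request_processing import SimBotRequestProcessingPipeline

simbot_settings = SimBotSettings.from_env()

# The client only needs a local SQLite file; no boto3 session or AWS profile is involved.
session_db = SimBotSessionDbClient(db_file=Path(simbot_settings.session_local_db_file))

# The request-processing pipeline receives the same client under its renamed type.
pipeline = SimBotRequestProcessingPipeline(session_db_client=session_db)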