diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml
index c054342..71d3c9b 100644
--- a/.github/workflows/checks.yaml
+++ b/.github/workflows/checks.yaml
@@ -24,6 +24,15 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
+      - name: Cache Docker images
+        uses: ScribeMD/docker-cache@0.4.0
+        with:
+          key: ${{ runner.os }}-docker-${{ hashFiles('**/docker-compose.yaml') }}
+      - name: Cache mypy
+        uses: actions/cache@v4
+        with:
+          path: "**/.mypy_cache"
+          key: ${{ runner.os }}-${{ matrix.python-version }}-mypy-${{ hashFiles('**/poetry.lock') }}
       - name: Install Poetry
         uses: snok/install-poetry@v1
         with:
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 6d37e30..321ae1b 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -12,9 +12,6 @@
       "purpose": ["debug-test"],
       "console": "integratedTerminal",
       "justMyCode": false,
-      "env": {
-        "PYTEST_ADDOPTS": "--no-cov"
-      },
     }
   ]
 }
diff --git a/.vscode/recommended_settings.json b/.vscode/recommended_settings.json
index dfe9c79..1eebca0 100644
--- a/.vscode/recommended_settings.json
+++ b/.vscode/recommended_settings.json
@@ -4,6 +4,7 @@
     "**/.pytest_cache": true,
     "**/.mypy_cache": true,
     "**/.ruff_cache": true,
+    "**/localstack_volume": true,
     "**/htmlcov": true,
   },
diff --git a/README.md b/README.md
index a70f48a..633c361 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,77 @@
 # saritasa-s3-tools
-Tools For S3 Used By Saritasa
+
+![GitHub Workflow Status (with event)](https://img.shields.io/github/actions/workflow/status/saritasa-nest/saritasa-s3-tools/checks.yaml)
+![PyPI](https://img.shields.io/pypi/v/saritasa-s3-tools)
+![PyPI - Status](https://img.shields.io/pypi/status/saritasa-s3-tools)
+![PyPI - Python Version](https://img.shields.io/pypi/pyversions/saritasa-s3-tools)
+![PyPI - License](https://img.shields.io/pypi/l/saritasa-s3-tools)
+![PyPI - Downloads](https://img.shields.io/pypi/dm/saritasa-s3-tools)
+[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
+
+Tools to ease work with S3: sync and async clients, direct-upload configuration, key generation and a pytest plugin
+
+## Installation
+
+```bash
+pip install saritasa-s3-tools
+```
+
+or if you are using [poetry](https://python-poetry.org/)
+
+```bash
+poetry add saritasa-s3-tools
+```
+
+To install all optional dependencies add `[all]`, e.g. `pip install saritasa-s3-tools[all]`
+
+## Features
+
+* `S3Client` and `AsyncS3Client` for interacting with s3 buckets
+* `S3FileTypeConfig` for defining configuration parameters for direct upload to s3
+* `S3Key` for generating unique keys for s3 upload
+* `pytest` plugin with fixtures for `boto3`, `S3Client` and `AsyncS3Client`
+
+## Direct upload example
+
+```python
+import pathlib
+import xml.etree.ElementTree
+
+import httpx
+
+import saritasa_s3_tools
+
+# `boto3_client` is assumed to be a prepared boto3 s3 client
+# (see `saritasa_s3_tools.client.get_boto3_s3_client`)
+# and `s3_bucket` is the name of your bucket.
+s3_client = saritasa_s3_tools.S3Client(
+    boto3_client=boto3_client,
+    default_bucket=s3_bucket,
+)
+s3_params = s3_client.generate_params(
+    filename=__file__.split("/")[-1],
+    config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+    content_type="application/x-python-code",
+    extra_metadata={
+        "test": "123",
+    },
+)
+with (
+    httpx.Client() as client,
+    pathlib.Path(__file__).open("rb") as upload_file,
+):
+    upload_response = client.post(
+        url=s3_params.url,
+        data={
+            key: value
+            for key, value in s3_params.params.items()
+            if value is not None
+        },
+        files={"file": upload_file.read()},
+    )
+parsed_response = xml.etree.ElementTree.fromstring(  # noqa: S314
+    upload_response.content.decode(),
+)
+file_key = parsed_response[2].text
+file_url = parsed_response[0].text
+```
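The `"files"` entry looked up via `S3FileTypeConfig.configs["files"]` in the example above has to be registered at import time before `generate_params` can find it. A minimal sketch of such a registration (the prefix and size limits are illustrative; `tests/conftest.py` below registers the config the same way, without limits):

```python
import saritasa_s3_tools

# Instantiating S3FileTypeConfig registers it in the class-level
# S3FileTypeConfig.configs mapping under its name (see configs.py below),
# so it can later be looked up as S3FileTypeConfig.configs["files"].
saritasa_s3_tools.S3FileTypeConfig(
    name="files",
    # Generated keys will look like "files/{uuid}/{cleaned-filename}"
    key=saritasa_s3_tools.keys.S3KeyWithPrefix("files"),
    # Hypothetical limit: allow uploads between 1 byte and 20 MB
    content_length_range=(1, 20 * 1024 * 1024),
)
```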
diff --git a/poetry.lock b/poetry.lock
index b02b664..21c7c9c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,25 @@
 # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
 
+[[package]]
+name = "anyio"
+version = "4.3.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
+    {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (>=0.23)"]
+
 [[package]]
 name = "asttokens"
 version = "2.4.1"
@@ -20,17 +40,17 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
 
 [[package]]
 name = "boto3"
-version = "1.34.84"
+version = "1.34.85"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.84-py3-none-any.whl", hash = "sha256:7a02f44af32095946587d748ebeb39c3fa15b9d7275307ff612a6760ead47e04"},
-    {file = "boto3-1.34.84.tar.gz", hash = "sha256:91e6343474173e9b82f603076856e1d5b7b68f44247bdd556250857a3f16b37b"},
+    {file = "boto3-1.34.85-py3-none-any.whl", hash = "sha256:135f1358fbc7d7dc89ad1a4346cb8da621fdc2aea69deb7b20c71ffec7cde111"},
+    {file = "boto3-1.34.85.tar.gz", hash = "sha256:de73d0f2dec1819074caf3f0888e18f6e13a9fb75ef5f17b1bdd9d1acc127b33"},
 ]
 
 [package.dependencies]
-botocore = ">=1.34.84,<1.35.0"
+botocore = ">=1.34.85,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -39,13 +59,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "boto3-stubs"
-version = "1.34.84"
-description = "Type annotations for boto3 1.34.84 generated with mypy-boto3-builder 7.23.2"
+version = "1.34.85"
+description = "Type annotations for boto3 1.34.85 generated with mypy-boto3-builder 7.23.2"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3_stubs-1.34.84-py3-none-any.whl", hash = "sha256:dd8b6147297b5aefd52212645179c96c4b5bcb4e514667dca6170485c1d4954a"},
-    {file = "boto3_stubs-1.34.84.tar.gz", hash = "sha256:73bbb509a69c4ac8cce038afb1510686b88398cbd46d5df1e3238fce66df9af5"},
+    {file = "boto3_stubs-1.34.85-py3-none-any.whl", hash = "sha256:1c7d9659fdbac1707ea6114f40c29925cc64b4c5f939c926574d49bacf335b14"},
+    {file = "boto3_stubs-1.34.85.tar.gz", hash = "sha256:aa4f17a0d7bff4112551d47d449928d67ffefc81b9c156f07a259368a7c0e2f9"},
 ]
 
 [package.dependencies]
@@ -97,7 +117,7 @@ bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"]
 bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"]
 bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"]
 billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"]
-boto3 = ["boto3 (==1.34.84)", "botocore (==1.34.84)"]
+boto3 = ["boto3 (==1.34.85)", "botocore (==1.34.85)"]
 braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"]
 budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"]
 ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"]
@@ -443,13 +463,13 @@ xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"]
 
 [[package]]
 name = "botocore"
-version = "1.34.84"
+version = "1.34.85"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.34.84-py3-none-any.whl", hash = "sha256:da1ae0a912e69e10daee2a34dafd6c6c106450d20b8623665feceb2d96c173eb"},
-    {file = "botocore-1.34.84.tar.gz", hash = "sha256:a2b309bf5594f0eb6f63f355ade79ba575ce8bf672e52e91da1a7933caa245e6"},
+    {file = "botocore-1.34.85-py3-none-any.whl", hash = "sha256:9abae3f7925a8cc2b91b6ff3f09e631476c74826d45dc44fb30d1d15960639db"},
+    {file = "botocore-1.34.85.tar.gz", hash = "sha256:18548525d4975bbe982f393f6470ba45249919a93f5dc6a69e37e435dd2cf579"},
 ]
 
 [package.dependencies]
@@ -458,17 +478,17 @@ python-dateutil = ">=2.1,<3.0.0"
 urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
 
 [package.extras]
-crt = ["awscrt (==0.19.19)"]
+crt = ["awscrt (==0.20.9)"]
 
 [[package]]
 name = "botocore-stubs"
-version = "1.34.84"
+version = "1.34.85"
 description = "Type annotations and code completion for botocore"
 optional = false
 python-versions = "<4.0,>=3.8"
 files = [
-    {file = "botocore_stubs-1.34.84-py3-none-any.whl", hash = "sha256:fa6d7e3792ce195d0fd7be12e938df8a26d627c14b15ea52b158819b59318ae7"},
-    {file = "botocore_stubs-1.34.84.tar.gz", hash = "sha256:b7e0fef9d3cb0fb630f86bc1601dc62e392f31eafd59db41e32d0f22a529c1c7"},
+    {file = "botocore_stubs-1.34.85-py3-none-any.whl", hash = "sha256:bd7977f42326e0c0bb739e4f002d4b658711f05e03baee16307edc066c4816f5"},
+    {file = "botocore_stubs-1.34.85.tar.gz", hash = "sha256:04691efc4559b9012ffbae43a37e5712d8ad5d549b547b016b26742402234e24"},
 ]
 
 [package.dependencies]
@@ -477,6 +497,17 @@ types-awscrt = "*"
 
 [package.extras]
 botocore = ["botocore"]
 
+[[package]]
+name = "certifi"
+version = "2024.2.2"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
+    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+]
+
 [[package]]
 name = "cfgv"
 version = "3.4.0"
@@ -615,6 +646,62 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1
 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
 typing = ["typing-extensions (>=4.8)"]
 
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.5"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
+    {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.13,<0.15"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<0.26.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+    {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+
 [[package]]
 name = "identify"
 version = "2.5.35"
@@ -629,6 +716,17 @@ files = [
 ]
 
 [package.extras]
 license = ["ukkonen"]
 
+[[package]]
+name = "idna"
+version = "3.7"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+]
+
 [[package]]
 name = "iniconfig"
 version = "2.0.0"
@@ -1029,24 +1127,6 @@ pluggy = ">=1.4,<2.0"
 
 [package.extras]
 testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
 
-[[package]]
-name = "pytest-cov"
-version = "5.0.0"
-description = "Pytest plugin for measuring coverage."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
-    {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
-]
-
-[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
-pytest = ">=4.6"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
-
 [[package]]
 name = "pytest-sugar"
 version = "1.0.0"
@@ -1209,6 +1289,17 @@ files = [
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]
 
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
 [[package]]
 name = "stack-data"
 version = "0.6.3"
@@ -1309,13 +1400,13 @@ zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
 name = "virtualenv"
-version = "20.25.1"
+version = "20.25.2"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"},
-    {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"},
+    {file = "virtualenv-20.25.2-py3-none-any.whl", hash = "sha256:6e1281a57849c8a54da89ba82e5eb7c8937b9d057ff01aaf5bc9afaa3552e90f"},
+    {file = "virtualenv-20.25.2.tar.gz", hash = "sha256:fa7edb8428620518010928242ec17aa7132ae435319c29c1651d1cf4c4173aad"},
 ]
 
 [package.dependencies]
@@ -1324,7 +1415,7 @@ filelock = ">=3.12.2,<4"
 platformdirs = ">=3.9.1,<5"
 
 [package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
 test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
 
 [[package]]
@@ -1338,7 +1429,11 @@ files = [
     {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
 ]
 
+[extras]
+all = ["anyio"]
+async = ["anyio"]
+
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "c69357df31fc7acdab0ea14e19d5922c6e979619c7d1247dfabdb84652e95ab9"
+content-hash = "0a418f5f7f27943405ee79ba85acfc4175dcf1ff6b69f657e83ef5ce8e489ea6"
diff --git a/pyproject.toml b/pyproject.toml
index de1af5a..d4bf7e8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,6 +22,9 @@ packages = [
   { include = "saritasa_s3_tools" }
 ]
 
+[tool.poetry.plugins.pytest11]
+saritasa_s3_tools_plugin = "saritasa_s3_tools.testing.plugin"
+
 [tool.poetry.dependencies]
 python = "^3.11"
 # The AWS SDK for Python
@@ -30,7 +33,14 @@ boto3 = "*"
 # Mypy stubs for boto3
 # https://pypi.org/project/boto3-stubs/#how-to-install
 boto3-stubs = {extras=["s3"], version = "*"}
+# AnyIO is an asynchronous networking and concurrency library
+# that works on top of either asyncio or trio.
+# https://anyio.readthedocs.io/en/stable/
+anyio = {version= "<5", optional = true}
+
+[tool.poetry.extras]
+async = ["anyio"]
+all = ["anyio"]
 
 [tool.poetry.group.dev.dependencies]
 # Improved REPL
@@ -55,9 +65,14 @@ mypy = "^1.9.0"
 pytest = "^8.1.1"
 # To prettify pytest output
 pytest-sugar = "^1.0.0"
-# Coverage plugin for pytest.
-# https://github.com/pytest-dev/pytest-cov
-pytest-cov = "^5.0.0"
+# Coverage.py is a tool for measuring code coverage of Python programs.
+# It monitors your program, noting which parts of the code have been executed,
+# then analyzes the source to identify code that could have been executed but was not.
+# https://coverage.readthedocs.io/en/latest/
+coverage = "^7.4.4"
+# HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2.
+# https://www.python-httpx.org/
+httpx = {version ="^0.27.0"}
 
 [build-system]
 requires = ["poetry-core"]
@@ -157,13 +172,17 @@ split-on-trailing-comma = true
 section-order = [
     "future",
     "standard-library",
-    "boto3",
     "third-party",
+    "boto3",
    "first-party",
     "local-folder",
 ]
 
 [tool.ruff.lint.isort.sections]
-boto3 = ["boto3"]
+boto3 = [
+    "boto3",
+    "botocore",
+    "mypy_boto3_s3",
+]
 
 [tool.ruff.lint.flake8-pytest-style]
 fixture-parentheses = false
@@ -212,18 +231,25 @@ ignore = [
 addopts = [
     "--capture=no",
     "--ff",
-    "--cov=saritasa_s3_tools",
-    "--cov-report=html",
 ]
 # skip all files inside following dirs
 norecursedirs = [
     "venv",
     ".venv",
 ]
+# Configuration for s3
+s3_endpoint_url = "https://localhost.localstack.cloud:4566"
+s3_region="us-west-1"
+s3_bucket="saritasa-s3-tools-files"
+s3_access_key="root"
+s3_secret_key="rootroot"
 
 [tool.coverage.run]
 omit = []
 
+[tool.coverage.report]
+include = ["saritasa_s3_tools/**"]
+
 # https://docformatter.readthedocs.io/en/latest/configuration.html#
 [tool.docformatter]
 wrap-descriptions=0
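The `[tool.poetry.plugins.pytest11]` entry point above registers `saritasa_s3_tools.testing.plugin` as a pytest plugin, and the `s3_*` ini options feed its fixtures (see `saritasa_s3_tools/testing/plugin.py` below). A hedged sketch of what a consuming test could look like, assuming a `"files"` config is registered and the ini options point at a reachable bucket:

```python
import saritasa_s3_tools


def test_generate_upload_params(
    s3_client: saritasa_s3_tools.S3Client,  # provided by the plugin
) -> None:
    """Generate pre-signed POST params via the plugin's fixture."""
    s3_params = s3_client.generate_params(
        filename="report.pdf",  # illustrative filename
        config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
        content_type="application/pdf",
    )
    assert s3_params.url
    assert s3_params.params
```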
diff --git a/saritasa_s3_tools/__init__.py b/saritasa_s3_tools/__init__.py
index e69de29..79e7cab 100644
--- a/saritasa_s3_tools/__init__.py
+++ b/saritasa_s3_tools/__init__.py
@@ -0,0 +1,19 @@
+import contextlib
+
+from . import keys
+from .client import S3Client
+from .configs import S3FileTypeConfig
+
+with contextlib.suppress(ImportError):
+    from .async_client import AsyncS3Client
+
+with contextlib.suppress(ImportError):
+    from . import testing
+
+__all__ = (
+    "keys",
+    "S3Client",
+    "S3FileTypeConfig",
+    "AsyncS3Client",
+    "testing",
+)
diff --git a/saritasa_s3_tools/async_client/__init__.py b/saritasa_s3_tools/async_client/__init__.py
new file mode 100644
index 0000000..c5e106f
--- /dev/null
+++ b/saritasa_s3_tools/async_client/__init__.py
@@ -0,0 +1 @@
+from .client import AsyncS3Client
diff --git a/saritasa_s3_tools/async_client/client.py b/saritasa_s3_tools/async_client/client.py
new file mode 100644
index 0000000..37695eb
--- /dev/null
+++ b/saritasa_s3_tools/async_client/client.py
@@ -0,0 +1,129 @@
+import collections.abc
+import functools
+import typing
+
+import anyio
+
+import mypy_boto3_s3.type_defs
+
+from .. import client, configs
+
+ReturnT = typing.TypeVar("ReturnT")
+ParamT = typing.ParamSpec("ParamT")
+
+
+class AsyncS3Client(client.S3Client):
+    """Async Client for interacting with s3 based on boto3 client."""
+
+    async def run_sync_as_async(
+        self,
+        func: collections.abc.Callable[ParamT, ReturnT],
+        *args: ParamT.args,
+        **kwargs: ParamT.kwargs,
+    ) -> ReturnT:
+        """Make sync function run in async env."""
+        return await anyio.to_thread.run_sync(  # type: ignore
+            functools.partial(func, *args, **kwargs),
+        )
+
+    async def async_generate_params(
+        self,
+        filename: str,
+        config: configs.S3FileTypeConfig,
+        content_type: str,
+        bucket: str = "",
+        upload_folder: str = "",
+        extra_metadata: dict[str, str] | None = None,
+    ) -> client.S3UploadParams:
+        """Generate params for s3 upload in async env."""
+        return await self.run_sync_as_async(
+            self.generate_params,
+            filename=filename,
+            upload_folder=upload_folder,
+            config=config,
+            bucket=bucket,
+            content_type=content_type,
+            extra_metadata=extra_metadata,
+        )
+
+    async def async_upload_file(
+        self,
+        filename: str,
+        config: configs.S3FileTypeConfig,
+        file_obj: mypy_boto3_s3.type_defs.FileobjTypeDef,
+        bucket: str = "",
+    ) -> str:
+        """Upload file to s3 in async env."""
+        return await self.run_sync_as_async(
+            self.upload_file,
+            filename=filename,
+            config=config,
+            bucket=bucket,
+            file_obj=file_obj,
+        )
+
+    async def async_download_file(
+        self,
+        key: str,
+        file_obj: mypy_boto3_s3.type_defs.FileobjTypeDef,
+        bucket: str = "",
+    ) -> mypy_boto3_s3.type_defs.FileobjTypeDef:
+        """Download file from s3 in async env."""
+        return await self.run_sync_as_async(
+            self.download_file,
+            file_obj=file_obj,
+            bucket=bucket,
+            key=key,
+        )
+
+    async def async_get_file_metadata(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> mypy_boto3_s3.type_defs.HeadObjectOutputTypeDef:
+        """Get file's metadata in async env."""
+        return await self.run_sync_as_async(
+            self.get_file_metadata,
+            bucket=bucket,
+            key=key,
+        )
+
+    async def async_is_file_in_bucket(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> bool:
+        """Check if file is in bucket in async env."""
+        return await self.run_sync_as_async(
+            self.is_file_in_bucket,
+            bucket=bucket,
+            key=key,
+        )
+
+    async def async_copy_object(
+        self,
+        key: str,
+        source_key: str,
+        bucket: str = "",
+        source_bucket: str = "",
+    ) -> None:
+        """Copy file object from copy source to key path in async env."""
+        return await self.run_sync_as_async(
+            self.copy_object,
+            key=key,
+            source_key=source_key,
+            bucket=bucket,
+            source_bucket=source_bucket,
+        )
+
+    async def async_delete_object(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> None:
+        """Delete file object from s3 bucket in async env."""
+        return await self.run_sync_as_async(
+            self.delete_object,
+            key=key,
+            bucket=bucket,
+        )
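Every `async_*` method above simply pushes its sync counterpart onto a worker thread via `anyio.to_thread.run_sync`, so `AsyncS3Client` works from asyncio (or trio) code without blocking the event loop. A minimal usage sketch; the credentials, endpoint and bucket mirror the localstack values from the pytest configuration above and are assumptions, not requirements:

```python
import anyio
import botocore.credentials

import saritasa_s3_tools


async def check_file() -> None:
    # Prepare a boto3 client with the helper from saritasa_s3_tools.client
    boto3_client = saritasa_s3_tools.client.get_boto3_s3_client(
        access_key_getter=lambda: botocore.credentials.Credentials(
            access_key="root",  # localstack-style test credentials
            secret_key="rootroot",
        ),
        s3_endpoint_url_getter=lambda: "https://localhost.localstack.cloud:4566",
        region="us-west-1",
    )
    s3_client = saritasa_s3_tools.AsyncS3Client(
        boto3_client=boto3_client,
        default_bucket="saritasa-s3-tools-files",
    )
    # Each call runs the sync implementation in a worker thread
    if await s3_client.async_is_file_in_bucket(key="files/example.py"):
        print(await s3_client.async_get_file_metadata(key="files/example.py"))


anyio.run(check_file)
```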
diff --git a/saritasa_s3_tools/client.py b/saritasa_s3_tools/client.py
new file mode 100644
index 0000000..f041431
--- /dev/null
+++ b/saritasa_s3_tools/client.py
@@ -0,0 +1,248 @@
+import collections.abc
+import dataclasses
+
+import boto3
+import botocore.client
+import botocore.config
+import botocore.credentials
+import botocore.exceptions
+import botocore.response
+import mypy_boto3_s3
+import mypy_boto3_s3.type_defs
+
+from . import configs
+
+
+def get_boto3_s3_client(
+    access_key_getter: collections.abc.Callable[
+        [],
+        botocore.credentials.Credentials,
+    ],
+    s3_endpoint_url_getter: (
+        collections.abc.Callable[
+            [],
+            str | None,
+        ]
+        | None
+    ) = None,
+    region: str = "",
+    max_pool_connections: int = 100,
+) -> mypy_boto3_s3.S3Client:
+    """Prepare boto3's s3 client for usage."""
+    endpoint_url = None
+    if s3_endpoint_url_getter:
+        endpoint_url = s3_endpoint_url_getter()
+    credentials = access_key_getter()
+    return boto3.client(
+        service_name="s3",  # type: ignore
+        region_name=region,
+        aws_session_token=credentials.token or None,
+        aws_access_key_id=credentials.access_key or None,
+        aws_secret_access_key=credentials.secret_key or None,
+        endpoint_url=endpoint_url,
+        config=botocore.config.Config(
+            # Increase for work in async env
+            max_pool_connections=max_pool_connections,
+        ),
+    )
+
+
+@dataclasses.dataclass
+class S3UploadParams:
+    """Representation of s3 upload params."""
+
+    url: str
+    params: dict[str, str]
+
+
+class S3Client:
+    """Client for interacting with s3 based on boto3 client."""
+
+    def __init__(
+        self,
+        boto3_client: mypy_boto3_s3.S3Client,
+        default_bucket: str,
+        default_download_expiration: int = 3600,
+    ) -> None:
+        self.boto3_client = boto3_client
+        self.default_bucket = default_bucket
+        self.default_download_expiration = default_download_expiration
+
+    def _get_fields(
+        self,
+        config: configs.S3FileTypeConfig,
+        content_type: str,
+        meta_data: dict[str, str],
+    ) -> dict[str, int | str]:
+        """Prepare fields for s3 upload."""
+        fields: dict[str, int | str] = {
+            "success_action_status": config.success_action_status,
+            "Content-Type": content_type,
+        }
+        fields.update(**meta_data)
+        if config.content_disposition:
+            fields["Content-Disposition"] = config.content_disposition
+        return fields
+
+    def _get_conditions(
+        self,
+        config: configs.S3FileTypeConfig,
+        content_type: str,
+        meta_data: dict[str, str],
+    ) -> list[list[str | int] | dict[str, str | int]]:
+        """Prepare conditions for s3 upload."""
+        conditions: list[list[str | int] | dict[str, str | int]] = [
+            {"success_action_status": str(config.success_action_status)},
+            {"Content-Type": content_type},
+        ]
+        if config.content_length_range:
+            conditions.append(
+                [
+                    "content-length-range",
+                    *list(config.content_length_range),
+                ],
+            )
+        if config.content_disposition:
+            conditions.append(
+                {"Content-Disposition": config.content_disposition},
+            )
+        for key, value in meta_data.items():
+            conditions.append({key: value})
+        return conditions
+
+    def generate_params(
+        self,
+        filename: str,
+        config: configs.S3FileTypeConfig,
+        content_type: str,
+        bucket: str = "",
+        upload_folder: str = "",
+        extra_metadata: dict[str, str] | None = None,
+    ) -> S3UploadParams:
+        """Generate params for s3 upload."""
+        meta_data = {
+            "x-amz-meta-config-name": config.name,
+        }
+        for key, value in (extra_metadata or {}).items():
+            meta_data[f"x-amz-meta-{key}"] = value
+        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3/client/generate_presigned_post.html
+        s3_params = self.boto3_client.generate_presigned_post(
+            Bucket=bucket or self.default_bucket,
+            Key="/".join(
+                filter(None, (upload_folder, config.key(filename=filename))),
+            ),
+            Fields=self._get_fields(
+                config=config,
+                content_type=content_type,
+                meta_data=meta_data,
+            ),
+            Conditions=self._get_conditions(
+                config=config,
+                content_type=content_type,
+                meta_data=meta_data,
+            ),
+            ExpiresIn=config.expires_in,
+        )
+        return S3UploadParams(
+            url=s3_params["url"],
+            params=s3_params["fields"],
+        )
+
+    def upload_file(
+        self,
+        filename: str,
+        config: configs.S3FileTypeConfig,
+        file_obj: mypy_boto3_s3.type_defs.FileobjTypeDef,
+        bucket: str = "",
+    ) -> str:
+        """Upload file to s3."""
+        key = config.key(filename=filename)
+        self.boto3_client.upload_fileobj(
+            Fileobj=file_obj,
+            Bucket=bucket or self.default_bucket,
+            Key=key,
+        )
+        return key
+
+    def download_file(
+        self,
+        key: str,
+        file_obj: mypy_boto3_s3.type_defs.FileobjTypeDef,
+        bucket: str = "",
+    ) -> mypy_boto3_s3.type_defs.FileobjTypeDef:
+        """Download file from s3."""
+        self.boto3_client.download_fileobj(
+            Fileobj=file_obj,
+            Bucket=bucket or self.default_bucket,
+            Key=key,
+        )
+        return file_obj
+
+    def generate_presigned_url(
+        self,
+        key: str,
+        bucket: str = "",
+        expiration: int = 0,
+    ) -> str:
+        """Generate url for viewing/downloading file."""
+        return self.boto3_client.generate_presigned_url(
+            ClientMethod="get_object",
+            Params={
+                "Bucket": bucket or self.default_bucket,
+                "Key": key,
+            },
+            ExpiresIn=expiration or self.default_download_expiration,
+        )
+
+    def get_file_metadata(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> mypy_boto3_s3.type_defs.HeadObjectOutputTypeDef:
+        """Get file's metadata."""
+        return self.boto3_client.head_object(
+            Key=key,
+            Bucket=bucket or self.default_bucket,
+        )
+
+    def is_file_in_bucket(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> bool:
+        """Check if file is in bucket."""
+        try:
+            self.get_file_metadata(
+                key=key,
+                bucket=bucket,
+            )
+            return True
+        except botocore.exceptions.ClientError as error:
+            if error.response.get("Error", {}).get("Code") == "404":
+                return False
+            raise error  # pragma: no cover
+
+    def copy_object(
+        self,
+        key: str,
+        source_key: str,
+        bucket: str = "",
+        source_bucket: str = "",
+    ) -> None:
+        """Copy file object from copy source to key path."""
+        self.boto3_client.copy_object(
+            Bucket=bucket or self.default_bucket,
+            CopySource=f"{source_bucket or self.default_bucket}/{source_key}",
+            Key=key,
+        )
+
+    def delete_object(
+        self,
+        key: str,
+        bucket: str = "",
+    ) -> None:
+        """Delete file object from s3 bucket."""
+        self.boto3_client.delete_object(
+            Bucket=bucket or self.default_bucket,
+            Key=key,
+        )
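Putting a few of `S3Client`'s methods together: a round-trip sketch, assuming `s3_client` is constructed as in the README example (the local file name and the expiration are illustrative):

```python
import io
import pathlib

import saritasa_s3_tools

# Upload a file object directly, bypassing the pre-signed POST flow
with pathlib.Path("example.txt").open("rb") as file_obj:  # hypothetical file
    key = s3_client.upload_file(
        filename="example.txt",
        config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
        file_obj=file_obj,
    )

# Download it back into memory and create a 5-minute download link
buffer = s3_client.download_file(key=key, file_obj=io.BytesIO())
link = s3_client.generate_presigned_url(key=key, expiration=300)
```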
diff --git a/saritasa_s3_tools/configs.py b/saritasa_s3_tools/configs.py
new file mode 100644
index 0000000..bd2de58
--- /dev/null
+++ b/saritasa_s3_tools/configs.py
@@ -0,0 +1,44 @@
+import dataclasses
+import typing
+
+from . import keys
+
+
+class S3FileTypeConfigMeta(type):
+    """Meta class for S3FileTypeConfig."""
+
+    def __call__(
+        cls,
+        *args,  # noqa: ANN002
+        **kwargs,
+    ) -> "S3FileTypeConfig":
+        """Update mapping of S3FileTypeConfig instances."""
+        instance: S3FileTypeConfig = super().__call__(*args, **kwargs)
+        if instance.name in S3FileTypeConfig.configs:
+            raise ValueError(f"{instance.name} config is already defined")
+        S3FileTypeConfig.configs[instance.name] = instance
+        return instance
+
+
+@dataclasses.dataclass(frozen=True)
+class S3FileTypeConfig(metaclass=S3FileTypeConfigMeta):
+    """Configuration for S3 file upload."""
+
+    configs: typing.ClassVar[dict[str, "S3FileTypeConfig"]] = {}
+
+    name: str
+    # S3Key is used to generate the file's path
+    key: keys.S3Key
+    # Mime types that are allowed; None means all are allowed
+    allowed: tuple[str, ...] | None = None
+    # Callable to perform checks against user
+    auth: typing.Callable[[typing.Any | None], bool] | None = None
+    # Allowed size limits for the file (in bytes)
+    content_length_range: tuple[int, int] | None = None
+    # In how many seconds the pre-signed URL for upload will expire
+    expires_in: int = 3600
+    success_action_status: int = 201
+    # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition
+    content_disposition: (
+        typing.Literal["attachment"] | typing.Literal["inline"]
+    ) = "attachment"
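Because `S3FileTypeConfigMeta` intercepts instantiation, every config self-registers and duplicate names are rejected (see `tests/test_config.py` below). A small sketch of the registry behavior with a hypothetical config:

```python
import saritasa_s3_tools

avatars = saritasa_s3_tools.S3FileTypeConfig(
    name="avatars",  # hypothetical config name
    key=saritasa_s3_tools.keys.S3KeyWithUUID("avatars"),
    allowed=("image/png", "image/jpeg"),
    content_disposition="inline",
)
# The metaclass stored the instance in the class-level mapping...
assert saritasa_s3_tools.S3FileTypeConfig.configs["avatars"] is avatars
# ...and re-using the name raises ValueError("avatars config is already defined")
```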
diff --git a/saritasa_s3_tools/keys.py b/saritasa_s3_tools/keys.py
new file mode 100644
index 0000000..ef8f799
--- /dev/null
+++ b/saritasa_s3_tools/keys.py
@@ -0,0 +1,94 @@
+import abc
+import pathlib
+import unicodedata
+import uuid
+
+
+class S3Key:
+    """Base class for s3 keys."""
+
+    @abc.abstractmethod
+    def __call__(self, filename: str | None) -> str:
+        """Abstract method for calling keys."""
+
+    def remove_special_characters(self, filename: str) -> str:
+        """Remove characters from filename that are not allowed in some OS."""
+        special_characters = r"<>:\"/\\|?*"
+        return filename.translate({ord(i): None for i in special_characters})
+
+    def normalize_string_value(self, value: str) -> str:
+        """Normalize string value.
+
+        1. Remove leading and trailing whitespaces.
+        2. Replace all space characters with the Space char.
+        3. Normalize Unicode string using `NFKC` form. See the details:
+        https://docs.python.org/3/library/unicodedata.html#unicodedata.normalize
+
+        """
+        cleaned = " ".join(value.strip().split()).strip()
+        return unicodedata.normalize("NFKC", cleaned)
+
+    def clean_filename(self, filename: str) -> str:
+        """Remove `garbage` characters that cause problems with file names."""
+        cleaned = self.remove_special_characters(filename)
+        normalized = self.normalize_string_value(cleaned)
+
+        return normalized
+
+    def get_random_filename(self, filename: str) -> str:
+        """Get random filename.
+
+        Generate a random filename that contains a unique identifier and
+        the filename extension, like: ``photo.jpg``.
+
+        Args:
+        ----
+            filename (str): Name of file.
+
+        Returns:
+        -------
+            new_filename (str): ``9841422d-c041-45a5-b7b3-467179f4f127.ext``.
+
+        """
+        path = str(uuid.uuid4())
+        ext = pathlib.Path(filename).suffix.lower()
+
+        return "".join((path, ext))
+
+
+class S3KeyWithUUID(S3Key):
+    """Prefixed key generator.
+
+    Example:
+    -------
+    prefix/{UUID.extension}
+
+    """
+
+    def __init__(self, prefix: str) -> None:
+        self.prefix = prefix
+
+    def __call__(self, filename: str | None) -> str:
+        """Return prefixed S3 key."""
+        if not filename:
+            return f"{self.prefix}/{uuid.uuid4()}.incorrect"
+        return f"{self.prefix}/{self.get_random_filename(filename)}"
+
+
+class S3KeyWithPrefix(S3Key):
+    """Class to create S3 key for destination.
+
+    Example:
+    -------
+    prefix/{UUID}/filename
+
+    """
+
+    def __init__(self, prefix: str) -> None:
+        self.prefix = prefix
+
+    def __call__(self, filename: str | None) -> str:
+        """Create key for destination using filename."""
+        if not filename:
+            return f"{self.prefix}/{uuid.uuid4()}/{uuid.uuid4()}.incorrect"
+        return f"{self.prefix}/{uuid.uuid4()}/{self.clean_filename(filename)}"
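The two generators differ in how much of the original filename survives; a quick illustration (the UUIDs differ per call, the outputs below are shaped examples):

```python
import saritasa_s3_tools

uuid_key = saritasa_s3_tools.keys.S3KeyWithUUID("files")
prefix_key = saritasa_s3_tools.keys.S3KeyWithPrefix("files")

# S3KeyWithUUID replaces the filename with a UUID, keeping only the extension:
print(uuid_key("Report: 2024.pdf"))    # files/9841422d-....pdf
# S3KeyWithPrefix keeps the cleaned filename under a per-upload UUID folder:
print(prefix_key("Report: 2024.pdf"))  # files/4a7c.../Report 2024.pdf
```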
diff --git a/saritasa_s3_tools/testing/__init__.py b/saritasa_s3_tools/testing/__init__.py
new file mode 100644
index 0000000..65406ea
--- /dev/null
+++ b/saritasa_s3_tools/testing/__init__.py
@@ -0,0 +1 @@
+from .shortcuts import upload_file, upload_file_and_verify
diff --git a/saritasa_s3_tools/testing/plugin.py b/saritasa_s3_tools/testing/plugin.py
new file mode 100644
index 0000000..e8a427e
--- /dev/null
+++ b/saritasa_s3_tools/testing/plugin.py
@@ -0,0 +1,119 @@
+import collections.abc
+
+import pytest
+from _pytest.fixtures import SubRequest
+
+import botocore.credentials
+import mypy_boto3_s3
+
+import saritasa_s3_tools
+
+
+@pytest.fixture
+def access_key_getter(
+    request: SubRequest,
+) -> collections.abc.Callable[
+    [],
+    botocore.credentials.Credentials,
+]:
+    """Set up cred getter."""
+    if (
+        s3_access_key := request.config.inicfg.get(
+            "s3_access_key",
+            "",
+        )
+    ) and (
+        s3_secret_key := request.config.inicfg.get(
+            "s3_secret_key",
+            "",
+        )
+    ):
+        return lambda: botocore.credentials.Credentials(
+            access_key=str(s3_access_key),
+            secret_key=str(s3_secret_key),
+        )
+    raise NotImplementedError(  # pragma: no cover
+        "Please set up `access_key_getter` fixture or "
+        "set `s3_access_key` and `s3_secret_key` in `.ini` file.",
+    )
+
+
+@pytest.fixture
+def s3_endpoint_url_getter(
+    request: SubRequest,
+) -> (
+    collections.abc.Callable[
+        [],
+        str | None,
+    ]
+    | None
+):
+    """Set up url getter."""
+    if s3_endpoint_url := request.config.inicfg.get("s3_endpoint_url", ""):
+        return lambda: str(s3_endpoint_url)
+    return None
+
+
+@pytest.fixture
+def s3_region(
+    request: SubRequest,
+) -> str:
+    """Get s3 region."""
+    return str(request.config.inicfg.get("s3_region", ""))
+
+
+@pytest.fixture
+def boto3_client(
+    access_key_getter: collections.abc.Callable[
+        [],
+        botocore.credentials.Credentials,
+    ],
+    s3_endpoint_url_getter: collections.abc.Callable[
+        [],
+        str | None,
+    ]
+    | None,
+    s3_region: str,
+) -> mypy_boto3_s3.S3Client:
+    """Prepare boto3 client."""
+    return saritasa_s3_tools.client.get_boto3_s3_client(
+        access_key_getter=access_key_getter,
+        s3_endpoint_url_getter=s3_endpoint_url_getter,
+        region=s3_region,
+    )
+
+
+@pytest.fixture
+def s3_bucket(
+    request: SubRequest,
+) -> str:
+    """Get the name of s3 bucket."""
+    if bucket := request.config.inicfg.get("s3_bucket", ""):
+        return str(bucket)
+    raise NotImplementedError(  # pragma: no cover
+        "Please set up `s3_bucket` fixture",
+    )
+
+
+@pytest.fixture
+def s3_client(
+    boto3_client: mypy_boto3_s3.S3Client,
+    s3_bucket: str,
+) -> saritasa_s3_tools.S3Client:
+    """Set up s3 client."""
+    return saritasa_s3_tools.S3Client(
+        boto3_client=boto3_client,
+        default_bucket=s3_bucket,
+    )
+
+
+@pytest.fixture
+def async_s3_client(
+    boto3_client: mypy_boto3_s3.S3Client,
+    s3_bucket: str,
+) -> saritasa_s3_tools.AsyncS3Client:
+    """Set up async s3 client."""
+    return saritasa_s3_tools.AsyncS3Client(
+        boto3_client=boto3_client,
+        default_bucket=s3_bucket,
+    )
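`access_key_getter` raises `NotImplementedError` when the ini options are missing, which is the hook for projects that keep credentials outside pytest ini files: override the fixture in a local `conftest.py`. A sketch of such an override (the environment variable names are assumptions):

```python
import collections.abc
import os

import pytest

import botocore.credentials


@pytest.fixture
def access_key_getter() -> collections.abc.Callable[
    [],
    botocore.credentials.Credentials,
]:
    """Read s3 credentials from environment variables instead of ini options."""
    return lambda: botocore.credentials.Credentials(
        access_key=os.environ["S3_ACCESS_KEY"],  # hypothetical env vars
        secret_key=os.environ["S3_SECRET_KEY"],
    )
```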
diff --git a/saritasa_s3_tools/testing/shortcuts.py b/saritasa_s3_tools/testing/shortcuts.py
new file mode 100644
index 0000000..fcfae5f
--- /dev/null
+++ b/saritasa_s3_tools/testing/shortcuts.py
@@ -0,0 +1,51 @@
+import pathlib
+import xml.etree.ElementTree
+
+import httpx
+
+from .. import client
+
+
+def upload_file(
+    filepath: str,
+    s3_params: client.S3UploadParams,
+) -> httpx.Response:
+    """Upload file to s3."""
+    url = s3_params.url
+    params = s3_params.params
+    # Perform the file upload itself
+    with (
+        httpx.Client() as http_client,
+        pathlib.Path(filepath).open("rb") as file_to_upload,
+    ):
+        upload_response = http_client.post(
+            url=url,
+            data={
+                key: value
+                for key, value in params.items()
+                if value is not None
+            },
+            files={"file": file_to_upload.read()},
+        )
+    # Caller validates that the request was okay and the file url is present
+    return upload_response
+
+
+def upload_file_and_verify(
+    filepath: str,
+    s3_params: client.S3UploadParams,
+) -> tuple[str, str]:
+    """Upload and verify that file is uploaded."""
+    upload_response = upload_file(
+        filepath=filepath,
+        s3_params=s3_params,
+    )
+    assert upload_response.is_success, upload_response.content  # noqa: S101
+    parsed_response = xml.etree.ElementTree.fromstring(  # noqa: S314
+        upload_response.content.decode(),
+    )
+    file_key = parsed_response[2].text
+    file_url = parsed_response[0].text
+    assert file_url, upload_response.content  # noqa: S101
+    assert file_key, upload_response.content  # noqa: S101
+    return file_url, file_key
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..7e7b7f3
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,20 @@
+import pytest
+
+import saritasa_s3_tools
+
+saritasa_s3_tools.S3FileTypeConfig(
+    name="files",
+    key=saritasa_s3_tools.keys.S3KeyWithPrefix("files"),
+)
+
+saritasa_s3_tools.S3FileTypeConfig(
+    name="expires",
+    key=saritasa_s3_tools.keys.S3KeyWithUUID("expires"),
+    expires_in=1,
+)
+
+
+@pytest.fixture
+def anyio_backend() -> str:
+    """Specify async backend."""
+    return "asyncio"
diff --git a/tests/test_async_client.py b/tests/test_async_client.py
new file mode 100644
index 0000000..55d5dbd
--- /dev/null
+++ b/tests/test_async_client.py
@@ -0,0 +1,91 @@
+import io
+import pathlib
+
+import pytest
+
+import saritasa_s3_tools
+
+
+@pytest.mark.usefixtures("anyio_backend")
+async def test_upload(
+    async_s3_client: saritasa_s3_tools.AsyncS3Client,
+) -> None:
+    """Test file upload in async env."""
+    s3_params = await async_s3_client.async_generate_params(
+        filename=__file__.split("/")[-1],
+        config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+        content_type="application/x-python-code",
+        extra_metadata={
+            "test": "123",
+        },
+    )
+    _, file_key = saritasa_s3_tools.testing.upload_file_and_verify(
+        filepath=__file__,
+        s3_params=s3_params,
+    )
+    meta_data = await async_s3_client.async_get_file_metadata(
+        key=file_key,
+    )
+    assert meta_data["Metadata"]["config-name"] == "files"
+    assert meta_data["Metadata"]["test"] == "123"
+    file_data = await async_s3_client.async_download_file(
+        key=file_key,
+        file_obj=io.BytesIO(),
+    )
+    file_data.seek(0)
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        assert file_data.read() == upload_file.read()
+
+
+@pytest.mark.usefixtures("anyio_backend")
+async def test_direct_upload(
+    async_s3_client: saritasa_s3_tools.AsyncS3Client,
+) -> None:
+    """Test direct file upload in async env."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = await async_s3_client.async_upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    assert await async_s3_client.async_is_file_in_bucket(
+        key=upload_key,
+    ), upload_key
+
+
+@pytest.mark.usefixtures("anyio_backend")
+async def test_delete(
+    async_s3_client: saritasa_s3_tools.AsyncS3Client,
+) -> None:
+    """Test file deletion."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = await async_s3_client.async_upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    await async_s3_client.async_delete_object(key=upload_key)
+    assert not await async_s3_client.async_is_file_in_bucket(
+        key=upload_key,
+    ), upload_key
+
+
+@pytest.mark.usefixtures("anyio_backend")
+async def test_copy(
+    async_s3_client: saritasa_s3_tools.AsyncS3Client,
+) -> None:
+    """Test file copy."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = await async_s3_client.async_upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    copy_key = saritasa_s3_tools.keys.S3KeyWithUUID("copy")(None)
+    await async_s3_client.async_copy_object(
+        key=copy_key,
+        source_key=upload_key,
+    )
+    assert await async_s3_client.async_is_file_in_bucket(
+        key=copy_key,
+    ), copy_key
diff --git a/tests/test_client.py b/tests/test_client.py
new file mode 100644
index 0000000..6b9a766
--- /dev/null
+++ b/tests/test_client.py
@@ -0,0 +1,111 @@
+import io
+import pathlib
+import time
+import xml.etree.ElementTree
+
+import httpx
+
+import saritasa_s3_tools
+
+
+def test_upload(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test file upload."""
+    s3_params = s3_client.generate_params(
+        filename=__file__.split("/")[-1],
+        config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+        content_type="application/x-python-code",
+        extra_metadata={
+            "test": "123",
+        },
+    )
+    _, file_key = saritasa_s3_tools.testing.upload_file_and_verify(
+        filepath=__file__,
+        s3_params=s3_params,
+    )
+    meta_data = s3_client.get_file_metadata(key=file_key)
+    assert meta_data["Metadata"]["config-name"] == "files"
+    assert meta_data["Metadata"]["test"] == "123"
+    file_data = s3_client.download_file(
+        key=file_key,
+        file_obj=io.BytesIO(),
+    )
+    file_data.seek(0)
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        assert file_data.read() == upload_file.read()
+
+
+def test_direct_upload(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test direct file upload."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = s3_client.upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    assert s3_client.is_file_in_bucket(key=upload_key), upload_key
+
+
+def test_delete(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test file deletion."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = s3_client.upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    s3_client.delete_object(key=upload_key)
+    assert not s3_client.is_file_in_bucket(key=upload_key), upload_key
+
+
+def test_copy(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test file copy."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = s3_client.upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    copy_key = saritasa_s3_tools.keys.S3KeyWithUUID("copy")(None)
+    s3_client.copy_object(
+        key=copy_key,
+        source_key=upload_key,
+    )
+    assert s3_client.is_file_in_bucket(key=copy_key), copy_key
+
+
+def test_presigned_url(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test generation of presigned url for file."""
+    with pathlib.Path(__file__).open("rb") as upload_file:
+        upload_key = s3_client.upload_file(
+            filename=__file__.split("/")[-1],
+            config=saritasa_s3_tools.S3FileTypeConfig.configs["files"],
+            file_obj=upload_file,
+        )
+    presigned_url = s3_client.generate_presigned_url(key=upload_key)
+    with httpx.Client() as client:
+        response = client.get(presigned_url)
+    assert response.is_success, response.content
+
+
+def test_upload_expiration(s3_client: saritasa_s3_tools.S3Client) -> None:
+    """Test file upload expiration."""
+    s3_params = s3_client.generate_params(
+        filename=__file__.split("/")[-1],
+        config=saritasa_s3_tools.S3FileTypeConfig.configs["expires"],
+        content_type="application/x-python-code",
+    )
+    time.sleep(
+        saritasa_s3_tools.S3FileTypeConfig.configs["expires"].expires_in + 0.1,
+    )
+    response = saritasa_s3_tools.testing.upload_file(
+        filepath=__file__,
+        s3_params=s3_params,
+    )
+    assert not response.is_success, response.content
+    error = xml.etree.ElementTree.fromstring(  # noqa: S314
+        response.content.decode(),
+    )[1].text
+    assert (
+        error == "Invalid according to Policy: Policy expired."
+    ), response.content
diff --git a/tests/test_config.py b/tests/test_config.py
new file mode 100644
index 0000000..75ca35c
--- /dev/null
+++ b/tests/test_config.py
@@ -0,0 +1,17 @@
+import re
+
+import pytest
+
+import saritasa_s3_tools
+
+
+def test_config_duplicate() -> None:
+    """Check that it's impossible to create duplicate of config."""
+    with pytest.raises(
+        ValueError,
+        match=re.escape("files config is already defined"),
+    ):
+        saritasa_s3_tools.S3FileTypeConfig(
+            name="files",
+            key=saritasa_s3_tools.keys.S3KeyWithPrefix("files"),
+        )