diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 6127491..0000000 --- a/.coveragerc +++ /dev/null @@ -1,3 +0,0 @@ -[report] -show_missing = True -omit = aioftp/__main__.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..2f226d3 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,48 @@ +name: build +on: [push, pull_request] + +jobs: + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.11' + - run: | + pip install -e ./[dev] + pre-commit run -a + + tests: + needs: lint + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.11', '3.12'] + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - run: | + pip install -e ./[dev] + pytest + - uses: codecov/codecov-action@v2 + if: github.ref == 'refs/heads/master' + with: + fail_ci_if_error: true + verbose: true + + deploy: + needs: tests + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/master' + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: casperdcl/deploy-pypi@v2 + with: + password: ${{ secrets.PYPI_TOKEN }} + build: true + skip_existing: true diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 8f37e23..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: build -on: [push, pull_request] - -jobs: - - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.x' - - run: | - pip install flake8 - flake8 aioftp tests - - tests: - needs: lint - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - run: | - pip install -e ./[tests] - pytest - - uses: codecov/codecov-action@v2 - if: github.ref == 'refs/heads/master' - with: - fail_ci_if_error: true - verbose: true - - deploy: - needs: tests - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/master' - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: casperdcl/deploy-pypi@v2 - with: - password: ${{ secrets.PYPI_TOKEN }} - build: true - skip_existing: true diff --git a/.gitignore b/.gitignore index 693f0ea..da858d9 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ __pycache__ .vscode .mypy_cache .pytest_cache +.ruff_cache coverage.xml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..db08f11 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,44 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks + +.python-linters: &python-linters + pass_filenames: false + language: system + types: [python] + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-ast + - id: trailing-whitespace + - id: check-toml + - id: end-of-file-fixer + +- repo: https://github.com/asottile/add-trailing-comma + rev: v2.1.0 + hooks: + - id: add-trailing-comma + +- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.1.0 + hooks: + - id: pretty-format-yaml + args: + - --autofix + - --preserve-quotes + - --indent=2 + +- repo: local + hooks: + - <<: *python-linters + id: black + name: Format with Black + entry: 
black + args: ["."] + + - <<: *python-linters + id: ruff + name: Check with ruff + entry: ruff + args: ["check", "--fix", "."] diff --git a/MANIFEST.in b/MANIFEST.in index 375bfab..6670510 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,4 @@ include README.rst include license.txt include history.rst -include doc-requirements.txt -include nose.cfg recursive-include tests * diff --git a/README.rst b/README.rst index c56e91a..3b3a5d0 100644 --- a/README.rst +++ b/README.rst @@ -100,7 +100,9 @@ pyftpdlib 1.5.2 Dependencies ------------ -- Python 3.7+ +- Python 3.11+ + +0.21.4 is the last version which supports python 3.7+ 0.13.0 is the last version which supports python 3.5.3+ diff --git a/doc-requirements.txt b/doc-requirements.txt deleted file mode 100644 index b904a16..0000000 --- a/doc-requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -sphinx -alabaster -docutils < 0.18.0 diff --git a/docs/conf.py b/docs/conf.py index 1408b6a..02bba66 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # # aioftp documentation build configuration file, created by # sphinx-quickstart on Fri Apr 17 16:21:03 2015. @@ -13,51 +12,50 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os -import shlex +import sys import alabaster # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -import aioftp +import aioftp # noqa # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'alabaster', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "alabaster", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'aioftp' -copyright = '2016, pohmelie' -author = 'pohmelie' +project = "aioftp" +copyright = "2016, pohmelie" +author = "pohmelie" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -77,37 +75,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
-exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -117,26 +115,26 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - 'logo': 'logo.png', - 'description': 'ftp client/server for asyncio', - 'github_user': 'pohmelie', - 'github_repo': 'aioftp', - 'github_button': True, - 'github_banner': True, + "logo": "logo.png", + "description": "ftp client/server for asyncio", + "github_user": "pohmelie", + "github_repo": "aioftp", + "github_button": True, + "github_banner": True, # 'travis_button': True, - 'pre_bg': '#FFF6E5', - 'note_bg': '#E5ECD1', - 'note_border': '#BFCF8C', - 'body_text': '#482C0A', - 'sidebar_text': '#49443E', - 'sidebar_header': '#4B4032', - 'page_width': "90%", + "pre_bg": "#FFF6E5", + "note_bg": "#E5ECD1", + "note_border": "#BFCF8C", + "body_text": "#482C0A", + "sidebar_text": "#49443E", + "sidebar_header": "#4B4032", + "page_width": "90%", } # Add any paths that contain custom themes here, relative to this directory. @@ -144,14 +142,14 @@ # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -161,114 +159,116 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
-#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'aioftpdoc' +htmlhelp_basename = "aioftpdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'aioftp.tex', 'aioftp Documentation', - 'pohmelie', 'manual'), + ( + master_doc, + "aioftp.tex", + "aioftp Documentation", + "pohmelie", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. 
-#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -276,12 +276,17 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'aioftp', 'aioftp Documentation', - [author], 1) + ( + master_doc, + "aioftp", + "aioftp Documentation", + [author], + 1, + ), ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -290,22 +295,28 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'aioftp', 'aioftp Documentation', - author, 'aioftp', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "aioftp", + "aioftp Documentation", + author, + "aioftp", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
-#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False intersphinx_mapping = { "python": ("https://docs.python.org/3", None), diff --git a/docs/index.rst b/docs/index.rst index 337c155..43f1a59 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -71,7 +71,9 @@ pyftpdlib 1.5.2 Dependencies ------------ -- Python 3.7+ +- Python 3.11+ + +0.21.4 is the last version which supports python 3.7+ 0.13.0 is the last version which supports python 3.5.3+ diff --git a/ftpbench.py b/ftpbench.py index 309e051..e5a6d2a 100644 --- a/ftpbench.py +++ b/ftpbench.py @@ -128,9 +128,11 @@ def hilite(string, ok=True, bold=False): def print_bench(what, value, unit=""): - s = "%s %s %-8s" % (hilite("%-50s" % what, ok=None, bold=0), - hilite("%8.2f" % value), - unit) + s = "%s %s %-8s" % ( + hilite("%-50s" % what, ok=None, bold=0), + hilite("%8.2f" % value), + unit, + ) if server_memory: s += "%s" % hilite(server_memory.pop()) print(s.strip()) @@ -356,8 +358,11 @@ def bench_multi_connect(): def bench_multi_retr(clients): stor(clients[0]) - with timethis("%s concurrent clients (RETR %s file)" % ( - howmany, bytes2human(FILE_SIZE))): + with timethis( + "%s concurrent clients (RETR %s file)" % ( + howmany, bytes2human(FILE_SIZE), + ), + ): for ftp in clients: ftp.voidcmd('TYPE I') conn = ftp.transfercmd("RETR " + TESTFN) @@ -368,8 +373,11 @@ def bench_multi_retr(clients): ftp.voidresp() def bench_multi_stor(clients): - with timethis("%s concurrent clients (STOR %s file)" % ( - howmany, bytes2human(FILE_SIZE))): + with timethis( + "%s concurrent clients (STOR %s file)" % ( + howmany, bytes2human(FILE_SIZE), + ), + ): for ftp in clients: ftp.voidcmd('TYPE I') conn = ftp.transfercmd("STOR " + TESTFN) @@ -490,35 +498,55 @@ def main(): DEBUG USAGE = "%s -u USERNAME -p PASSWORD [-H] [-P] [-b] [-n] [-s] [-k] " \ "[-t] [-d] [-S]" % (os.path.basename(__file__)) - parser = optparse.OptionParser(usage=USAGE, - epilog=__doc__[__doc__.find('Example'):], - formatter=OptFormatter()) + parser = optparse.OptionParser( + usage=USAGE, + epilog=__doc__[__doc__.find('Example'):], + formatter=OptFormatter(), + ) parser.add_option('-u', '--user', dest='user', help='username') parser.add_option('-p', '--pass', dest='password', help='password') - parser.add_option('-H', '--host', dest='host', default=HOST, - help='hostname') - parser.add_option('-P', '--port', dest='port', default=PORT, help='port', - type=int) - parser.add_option('-b', '--benchmark', dest='benchmark', - default='transfer', - help="benchmark type ('transfer', 'download', 'upload', " - "'concurrence', 'all')") - parser.add_option('-n', '--clients', dest='clients', default=200, - type="int", - help="number of concurrent clients used by " - "'concurrence' benchmark") - parser.add_option('-s', '--filesize', dest='filesize', default="10M", - help="file size used by 'concurrence' benchmark " - "(e.g. 
'10M')") - parser.add_option('-k', '--pid', dest='pid', default=None, type="int", - help="the PID of the FTP server process, to track its " - "memory usage") - parser.add_option('-t', '--timeout', dest='timeout', - default=TIMEOUT, type="int", help="the socket timeout") - parser.add_option('-d', '--debug', action='store_true', dest='debug', - help="whether to print debugging info") - parser.add_option('-S', '--ssl', action='store_true', dest='ssl', - help="whether to use FTPS") + parser.add_option( + '-H', '--host', dest='host', default=HOST, + help='hostname', + ) + parser.add_option( + '-P', '--port', dest='port', default=PORT, help='port', + type=int, + ) + parser.add_option( + '-b', '--benchmark', dest='benchmark', + default='transfer', + help="benchmark type ('transfer', 'download', 'upload', " + "'concurrence', 'all')", + ) + parser.add_option( + '-n', '--clients', dest='clients', default=200, + type="int", + help="number of concurrent clients used by " + "'concurrence' benchmark", + ) + parser.add_option( + '-s', '--filesize', dest='filesize', default="10M", + help="file size used by 'concurrence' benchmark " + "(e.g. '10M')", + ) + parser.add_option( + '-k', '--pid', dest='pid', default=None, type="int", + help="the PID of the FTP server process, to track its " + "memory usage", + ) + parser.add_option( + '-t', '--timeout', dest='timeout', + default=TIMEOUT, type="int", help="the socket timeout", + ) + parser.add_option( + '-d', '--debug', action='store_true', dest='debug', + help="whether to print debugging info", + ) + parser.add_option( + '-S', '--ssl', action='store_true', dest='ssl', + help="whether to use FTPS", + ) options, args = parser.parse_args() if not options.user or not options.password: @@ -554,8 +582,11 @@ def main(): # start benchmark if SERVER_PROC is not None: register_memory() - print("(starting with %s of memory being used)" % ( - hilite(server_memory.pop()))) + print( + "(starting with %s of memory being used)" % ( + hilite(server_memory.pop()) + ), + ) if options.benchmark == 'download': stor() bench_retr() diff --git a/history.rst b/history.rst index 2b7b312..b4c329a 100644 --- a/history.rst +++ b/history.rst @@ -3,6 +3,8 @@ x.x.x (xx-xx-xxxx) 0.22.0 (xx-xx-xxxx) ------------------- - client.list: fix infinite symlink loop for `.` and `..` on FTP servers with UNIX-like filesystem for `client.list(path, recursive=True)` +- project file structure: refactor to use `pyproject.toml` +- minimal python version bumped to 3.11 0.21.4 (13-10-2022) ------------------- diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..4734f73 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,97 @@ +[project] +name = "aioftp" +version = "0.21.4" +description = "ftp client/server for asyncio" +readme = "README.rst" +requires-python = ">= 3.11" +license = {file = "license.txt"} +authors = [ + {name = "pohmelie", email = "multisosnooley@gmail.com"}, + {name = "yieyu"}, + {name = "rsichnyi"}, + {name = "jw4js"}, + {name = "asvetlov", email = "andrew.svetlov@gmail.com"}, + {name = "decaz", email = "decaz89@gmail.com"}, + {name = "oleksandr-kuzmenko"}, + {name = "ndhansen"}, + {name = "janneronkko", email="janne.ronkko@iki.fi"}, + {name = "thirtyseven", email="ted@shlashdot.org"}, + {name = "modelmat"}, + {name = "greut"}, + {name = "ported-pw", email="contact@ported.pw"}, + {name = "PonyPC"}, + {name = "jacobtomlinson"}, + {name = "Olegt0rr", email="t0rr@mail.ru"}, + {name = "michalc", email="michal@charemza.name"}, + {name = "bachya"}, + {name = "ch3pjw", 
email="paul@concertdaw.co.uk"}, + {name = "puddly", email="puddly3@gmail.com"}, + {name = "CrafterKolyan"}, + {name = "jkr78"}, + {name = "AMDmi3", email="amdmi3@amdmi3.ru"}, + {name = "webknjaz", email="webknjaz+github/profile@redhat.com"}, +] +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Development Status :: 5 - Production/Stable", + "Topic :: Internet :: File Transfer Protocol (FTP)", +] + +[project.urls] +Github = "https://github.com/aio-libs/aioftp" +Documentation = "https://aioftp.readthedocs.io" + +[project.optional-dependencies] +socks = [ + "siosocks >= 0.2.0", +] +dev = [ + # tests + "async_timeout >= 4.0.0", + "pytest-asyncio", + "pytest-cov", + "pytest", + "siosocks", + "trustme", + + # linters + "pre-commit", + "black", + "ruff", + + # docs + "sphinx", + "alabaster", + "docutils < 0.18.0", +] + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +packages.find.where = ["src"] + +# tools +[tool.black] +line-length = 120 +target-version = ["py311"] + +[tool.ruff] +line-length = 120 +target-version = "py311" +select = ["E", "W", "F", "Q", "UP", "I", "ASYNC"] + +[tool.coverage] +run.source = ["./src/aioftp"] +run.omit = ["./src/aioftp/__main__.py"] +report.show_missing = true + +[tool.pytest.ini_options] +addopts = "-x --durations 10 -p no:anyio --cov" +testpaths = "tests" +log_format = "%(asctime)s.%(msecs)03d %(name)-20s %(levelname)-8s %(filename)-15s %(lineno)-4d %(message)s" +log_date_format = "%H:%M:%S" +log_level = "DEBUG" +asyncio_mode = "strict" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 2fb6751..0000000 --- a/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -addopts = -x --durations=10 --cov-config=.coveragerc --cov=aioftp --cov-report=xml --cov-report=term --cov-report=term-missing -testpaths = tests -log_format = %(asctime)s.%(msecs)03d %(name)-20s %(levelname)-8s %(filename)-15s %(lineno)-4d %(message)s -log_date_format = %H:%M:%S -log_level = DEBUG -asyncio_mode = strict diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index fba792a..0000000 --- a/setup.cfg +++ /dev/null @@ -1,30 +0,0 @@ -[metadata] -name = aioftp -version = attr: aioftp.__version__ -url = https://github.com/aio-libs/aioftp -author = pohmelie -author_email = multisosnooley@gmail.com -description = ftp client/server for asyncio -long_description = file: README.rst -license = Apache-2.0 -license_file = license.txt -classifiers = - Programming Language :: Python - Programming Language :: Python :: 3 - Development Status :: 5 - Production/Stable - Topic :: Internet :: File Transfer Protocol (FTP) - -[options] -packages = aioftp -python_requires = >= 3.7 - -[options.extras_require] -socks = - siosocks >= 0.2.0 -tests = - pytest - pytest-asyncio - pytest-cov - trustme - async_timeout >= 4.0.0 - siosocks diff --git a/setup.py b/setup.py deleted file mode 100644 index b024da8..0000000 --- a/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -from setuptools import setup - - -setup() diff --git a/aioftp/__init__.py b/src/aioftp/__init__.py similarity index 53% rename from aioftp/__init__.py rename to src/aioftp/__init__.py index 10a5cb0..d992bd8 100644 --- a/aioftp/__init__.py +++ b/src/aioftp/__init__.py @@ -1,20 +1,17 @@ """ftp client/server for asyncio""" # flake8: noqa +import importlib.metadata + from .client import * from .common import * from .errors import * from .pathio import * from .server import * -__version__ = "0.21.4" +__version__ = 
importlib.metadata.version(__package__) version = tuple(map(int, __version__.split("."))) __all__ = ( - client.__all__ + - server.__all__ + - errors.__all__ + - common.__all__ + - pathio.__all__ + - ("version", "__version__") + client.__all__ + server.__all__ + errors.__all__ + common.__all__ + pathio.__all__ + ("version", "__version__") ) diff --git a/aioftp/__main__.py b/src/aioftp/__main__.py similarity index 58% rename from aioftp/__main__.py rename to src/aioftp/__main__.py index 8ee10db..a09b0b0 100644 --- a/aioftp/__main__.py +++ b/src/aioftp/__main__.py @@ -10,24 +10,50 @@ parser = argparse.ArgumentParser( prog="aioftp", usage="%(prog)s [options]", - description="Simple aioftp-based server with one user (anonymous or not)." -) -parser.add_argument("--user", metavar="LOGIN", dest="login", - help="user name to login") -parser.add_argument("--pass", metavar="PASSWORD", dest="password", - help="password to login") -parser.add_argument("-d", metavar="DIRECTORY", dest="home", - help="the directory to share (default current directory)") -parser.add_argument("-q", "--quiet", action="store_true", - help="set logging level to 'ERROR' instead of 'INFO'") + description="Simple aioftp-based server with one user (anonymous or not).", +) +parser.add_argument( + "--user", + metavar="LOGIN", + dest="login", + help="user name to login", +) +parser.add_argument( + "--pass", + metavar="PASSWORD", + dest="password", + help="password to login", +) +parser.add_argument( + "-d", + metavar="DIRECTORY", + dest="home", + help="the directory to share (default current directory)", +) +parser.add_argument( + "-q", + "--quiet", + action="store_true", + help="set logging level to 'ERROR' instead of 'INFO'", +) parser.add_argument("--memory", action="store_true", help="use memory storage") -parser.add_argument("--host", default=None, - help="host for binding [default: %(default)s]") -parser.add_argument("--port", type=int, default=2121, - help="port for binding [default: %(default)s]") -parser.add_argument("--family", choices=("ipv4", "ipv6", "auto"), - default="auto", - help="Socket family [default: %(default)s]") +parser.add_argument( + "--host", + default=None, + help="host for binding [default: %(default)s]", +) +parser.add_argument( + "--port", + type=int, + default=2121, + help="port for binding [default: %(default)s]", +) +parser.add_argument( + "--family", + choices=("ipv4", "ipv6", "auto"), + default="auto", + help="Socket family [default: %(default)s]", +) args = parser.parse_args() print(f"aioftp v{aioftp.__version__}") diff --git a/aioftp/client.py b/src/aioftp/client.py similarity index 93% rename from aioftp/client.py rename to src/aioftp/client.py index 0c3ec85..338f73c 100644 --- a/aioftp/client.py +++ b/src/aioftp/client.py @@ -45,6 +45,7 @@ class Code(str): """ Representation of server status code. """ + def matches(self, mask): """ :param mask: Template for comparision. 
If mask symbol is not digit @@ -75,6 +76,7 @@ class DataConnectionThrottleStreamIO(ThrottleStreamIO): :param **kwargs: keyword arguments passed to :py:class:`aioftp.ThrottleStreamIO` """ + def __init__(self, client, *args, **kwargs): super().__init__(*args, **kwargs) self.client = client @@ -105,20 +107,22 @@ async def __aexit__(self, exc_type, exc, tb): class BaseClient: - - def __init__(self, *, - socket_timeout=None, - connection_timeout=None, - read_speed_limit=None, - write_speed_limit=None, - path_timeout=None, - path_io_factory=pathio.PathIO, - encoding="utf-8", - ssl=None, - parse_list_line_custom=None, - parse_list_line_custom_first=True, - passive_commands=("epsv", "pasv"), - **siosocks_asyncio_kwargs): + def __init__( + self, + *, + socket_timeout=None, + connection_timeout=None, + read_speed_limit=None, + write_speed_limit=None, + path_timeout=None, + path_io_factory=pathio.PathIO, + encoding="utf-8", + ssl=None, + parse_list_line_custom=None, + parse_list_line_custom_first=True, + passive_commands=("epsv", "pasv"), + **siosocks_asyncio_kwargs, + ): self.socket_timeout = socket_timeout self.connection_timeout = connection_timeout self.throttle = StreamThrottle.from_limits( @@ -133,8 +137,7 @@ def __init__(self, *, self.parse_list_line_custom = parse_list_line_custom self.parse_list_line_custom_first = parse_list_line_custom_first self._passive_commands = passive_commands - self._open_connection = partial(open_connection, ssl=self.ssl, - **siosocks_asyncio_kwargs) + self._open_connection = partial(open_connection, ssl=self.ssl, **siosocks_asyncio_kwargs) async def connect(self, host, port=DEFAULT_PORT): self.server_host = host @@ -223,11 +226,13 @@ def check_codes(self, expected_codes, received_code, info): if not any(map(received_code.matches, expected_codes)): raise errors.StatusCodeError(expected_codes, received_code, info) - async def command(self, - command=None, - expected_codes=(), - wait_codes=(), - censor_after=None): + async def command( + self, + command=None, + expected_codes=(), + wait_codes=(), + censor_after=None, + ): """ :py:func:`asyncio.coroutine` @@ -320,10 +325,10 @@ def parse_directory_response(s): directory = "" for ch in s: if not start: - if ch == "\"": + if ch == '"': start = True else: - if ch == "\"": + if ch == '"': seq_quotes += 1 else: if seq_quotes == 1: @@ -400,13 +405,14 @@ def parse_ls_date(cls, s, *, now=None): try: if now is None: now = datetime.datetime.now() - if s.startswith('Feb 29'): + if s.startswith("Feb 29"): # Need to find the nearest previous leap year prev_leap_year = now.year while not calendar.isleap(prev_leap_year): prev_leap_year -= 1 d = datetime.datetime.strptime( - f"{prev_leap_year} {s}", "%Y %b %d %H:%M" + f"{prev_leap_year} {s}", + "%Y %b %d %H:%M", ) # Check if it's next leap year diff = (now - d).total_seconds() @@ -472,9 +478,9 @@ def parse_list_line_unix(self, b): s = s[12:].strip() if info["type"] == "link": i = s.rindex(" -> ") - link_dst = s[i + 4:] + link_dst = s[i + 4 :] link_src = s[:i] - i = -2 if link_dst[-1] == "\'" or link_dst[-1] == "\"" else -1 + i = -2 if link_dst[-1] == "'" or link_dst[-1] == '"' else -1 info["type"] = "dir" if link_dst[i] == "/" else "file" s = link_src return pathlib.PurePosixPath(s), info @@ -491,9 +497,9 @@ def parse_list_line_windows(self, b): """ line = b.decode(encoding=self.encoding).rstrip("\r\n") date_time_end = line.index("M") - date_time_str = line[:date_time_end + 1].strip().split(" ") + date_time_str = line[: date_time_end + 1].strip().split(" ") date_time_str = " ".join([x 
for x in date_time_str if len(x) > 0]) - line = line[date_time_end + 1:].lstrip() + line = line[date_time_end + 1 :].lstrip() with setlocale("C"): strptime = datetime.datetime.strptime date_time = strptime(date_time_str, "%m/%d/%Y %I:%M %p") @@ -612,6 +618,7 @@ class Client(BaseClient): :type parse_list_line_custom_first: :py:class:`bool` :param **siosocks_asyncio_kwargs: siosocks key-word only arguments """ + async def connect(self, host, port=DEFAULT_PORT): """ :py:func:`asyncio.coroutine` @@ -628,8 +635,12 @@ async def connect(self, host, port=DEFAULT_PORT): code, info = await self.command(None, "220", "120") return info - async def login(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD, - account=DEFAULT_ACCOUNT): + async def login( + self, + user=DEFAULT_USER, + password=DEFAULT_PASSWORD, + account=DEFAULT_ACCOUNT, + ): """ :py:func:`asyncio.coroutine` @@ -656,8 +667,11 @@ async def login(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD, cmd = "ACCT " + account else: raise errors.StatusCodeError("33x", code, info) - code, info = await self.command(cmd, ("230", "33x"), - censor_after=censor_after) + code, info = await self.command( + cmd, + ("230", "33x"), + censor_after=censor_after, + ) async def get_current_directory(self): """ @@ -755,6 +769,7 @@ def list(self, path="", *, recursive=False, raw_command=None): >>> stats = await client.list() """ + class AsyncLister(AsyncListerMixin): stream = None @@ -762,8 +777,9 @@ async def _new_stream(cls, local_path): cls.path = local_path cls.parse_line = self.parse_mlsx_line if raw_command not in [None, "MLSD", "LIST"]: - raise ValueError("raw_command must be one of MLSD or " - f"LIST, but got {raw_command}") + raise ValueError( + "raw_command must be one of MLSD or " f"LIST, but got {raw_command}", + ) if raw_command in [None, "MLSD"]: try: command = ("MLSD " + str(cls.path)).strip() @@ -925,7 +941,7 @@ async def remove(self, path): if info["type"] == "file": await self.remove_file(path) elif info["type"] == "dir": - for name, info in (await self.list(path)): + for name, info in await self.list(path): if info["type"] in ("dir", "file"): await self.remove(name) await self.remove_directory(path) @@ -966,8 +982,7 @@ def append_stream(self, destination, *, offset=0): offset=offset, ) - async def upload(self, source, destination="", *, write_into=False, - block_size=DEFAULT_BLOCK_SIZE): + async def upload(self, source, destination="", *, write_into=False, block_size=DEFAULT_BLOCK_SIZE): """ :py:func:`asyncio.coroutine` @@ -994,8 +1009,7 @@ async def upload(self, source, destination="", *, write_into=False, destination = destination / source.name if await self.path_io.is_file(source): await self.make_directory(destination.parent) - async with self.path_io.open(source, mode="rb") as file_in, \ - self.upload_stream(destination) as stream: + async with self.path_io.open(source, mode="rb") as file_in, self.upload_stream(destination) as stream: async for block in file_in.iter_by_block(block_size): await stream.write(block) elif await self.path_io.is_dir(source): @@ -1016,7 +1030,7 @@ async def upload(self, source, destination="", *, write_into=False, path, relative, write_into=True, - block_size=block_size + block_size=block_size, ) def download_stream(self, source, *, offset=0): @@ -1035,8 +1049,7 @@ def download_stream(self, source, *, offset=0): """ return self.get_stream("RETR " + str(source), "1xx", offset=offset) - async def download(self, source, destination="", *, write_into=False, - block_size=DEFAULT_BLOCK_SIZE): + async def download(self, 
source, destination="", *, write_into=False, block_size=DEFAULT_BLOCK_SIZE): """ :py:func:`asyncio.coroutine` @@ -1062,19 +1075,25 @@ async def download(self, source, destination="", *, write_into=False, if not write_into: destination = destination / source.name if await self.is_file(source): - await self.path_io.mkdir(destination.parent, - parents=True, exist_ok=True) - async with self.path_io.open(destination, mode="wb") as file_out, \ - self.download_stream(source) as stream: + await self.path_io.mkdir( + destination.parent, + parents=True, + exist_ok=True, + ) + async with self.path_io.open(destination, mode="wb") as file_out, self.download_stream(source) as stream: async for block in stream.iter_by_block(block_size): await file_out.write(block) elif await self.is_dir(source): await self.path_io.mkdir(destination, parents=True, exist_ok=True) - for name, info in (await self.list(source)): + for name, info in await self.list(source): full = destination / name.relative_to(source) if info["type"] in ("file", "dir"): - await self.download(name, full, write_into=True, - block_size=block_size) + await self.download( + name, + full, + write_into=True, + block_size=block_size, + ) async def quit(self): """ @@ -1095,8 +1114,11 @@ async def _do_pasv(self): ip, port = self.parse_pasv_response(info[-1]) return ip, port - async def get_passive_connection(self, conn_type="I", - commands=None): + async def get_passive_connection( + self, + conn_type="I", + commands=None, + ): """ :py:func:`asyncio.coroutine` @@ -1185,9 +1207,9 @@ async def abort(self, *, wait=True): @classmethod @contextlib.asynccontextmanager - async def context(cls, host, port=DEFAULT_PORT, user=DEFAULT_USER, - password=DEFAULT_PASSWORD, account=DEFAULT_ACCOUNT, - **kwargs): + async def context( + cls, host, port=DEFAULT_PORT, user=DEFAULT_USER, password=DEFAULT_PASSWORD, account=DEFAULT_ACCOUNT, **kwargs + ): """ Classmethod async context manager. This create :py:class:`aioftp.Client`, make async call to diff --git a/aioftp/common.py b/src/aioftp/common.py similarity index 97% rename from aioftp/common.py rename to src/aioftp/common.py index 024d5db..41eec46 100644 --- a/aioftp/common.py +++ b/src/aioftp/common.py @@ -49,7 +49,9 @@ def wrapper(cls, *args, **kwargs): coro = f(cls, *args, **kwargs) timeout = getattr(cls, name) return asyncio.wait_for(coro, timeout) + return wrapper + return decorator @@ -96,7 +98,6 @@ def with_timeout(name): class AsyncStreamIterator: - def __init__(self, read_coro): self.read_coro = read_coro @@ -121,6 +122,7 @@ class AsyncListerMixin: ... ... >>> results = await Context(...) """ + async def _to_list(self): items = [] async for item in self: @@ -159,6 +161,7 @@ class AbstractAsyncLister(AsyncListerMixin, abc.ABC): >>> result [block, block, block, ...] """ + def __init__(self, *, timeout=None): super().__init__() self.timeout = timeout @@ -210,11 +213,10 @@ def async_enterable(f): ... 
# do """ + @functools.wraps(f) def wrapper(*args, **kwargs): - class AsyncEnterableInstance: - async def __aenter__(self): self.context = await f(*args, **kwargs) return await self.context.__aenter__() @@ -257,8 +259,8 @@ class StreamIO: `timeout` :type write_timeout: :py:class:`int`, :py:class:`float` or :py:class:`None` """ - def __init__(self, reader, writer, *, timeout=None, read_timeout=None, - write_timeout=None): + + def __init__(self, reader, writer, *, timeout=None, read_timeout=None, write_timeout=None): self.reader = reader self.writer = writer self.read_timeout = read_timeout or timeout @@ -342,8 +344,7 @@ async def wait(self): Wait until can do IO """ - if self._limit is not None and self._limit > 0 and \ - self._start is not None: + if self._limit is not None and self._limit > 0 and self._start is not None: now = _now() end = self._start + self._sum / self._limit await asyncio.sleep(max(0, end - now)) @@ -393,8 +394,7 @@ def clone(self): return Throttle(limit=self._limit, reset_rate=self.reset_rate) def __repr__(self): - return f"{self.__class__.__name__}(limit={self._limit!r}, " \ - f"reset_rate={self.reset_rate!r})" + return f"{self.__class__.__name__}(limit={self._limit!r}, " f"reset_rate={self.reset_rate!r})" class StreamThrottle(collections.namedtuple("StreamThrottle", "read write")): @@ -407,13 +407,14 @@ class StreamThrottle(collections.namedtuple("StreamThrottle", "read write")): :param write: stream write throttle :type write: :py:class:`aioftp.Throttle` """ + def clone(self): """ Clone throttles without memory """ return StreamThrottle( read=self.read.clone(), - write=self.write.clone() + write=self.write.clone(), ) @classmethod @@ -429,8 +430,10 @@ def from_limits(cls, read_speed_limit=None, write_speed_limit=None): :py:class:`None` for unlimited :type write_speed_limit: :py:class:`int` or :py:class:`None` """ - return cls(read=Throttle(limit=read_speed_limit), - write=Throttle(limit=write_speed_limit)) + return cls( + read=Throttle(limit=read_speed_limit), + write=Throttle(limit=write_speed_limit), + ) class ThrottleStreamIO(StreamIO): diff --git a/aioftp/errors.py b/src/aioftp/errors.py similarity index 94% rename from aioftp/errors.py rename to src/aioftp/errors.py index 2955cc4..25af25c 100644 --- a/aioftp/errors.py +++ b/src/aioftp/errors.py @@ -1,6 +1,5 @@ from . import common - __all__ = ( "AIOFTPException", "StatusCodeError", @@ -41,9 +40,11 @@ class StatusCodeError(AIOFTPException): Exception members are tuples, even for one code. """ + def __init__(self, expected_codes, received_codes, info): - super().__init__(f"Waiting for {expected_codes} but got " - f"{received_codes} {info!r}") + super().__init__( + f"Waiting for {expected_codes} but got " f"{received_codes} {info!r}", + ) self.expected_codes = common.wrap_with_container(expected_codes) self.received_codes = common.wrap_with_container(received_codes) self.info = info @@ -70,6 +71,7 @@ class PathIOError(AIOFTPException): ... elif ... ... # handle """ + def __init__(self, *args, reason=None, **kwargs): super().__init__(*args, **kwargs) self.reason = reason diff --git a/aioftp/pathio.py b/src/aioftp/pathio.py similarity index 98% rename from aioftp/pathio.py rename to src/aioftp/pathio.py index b9252bc..8335ea6 100644 --- a/aioftp/pathio.py +++ b/src/aioftp/pathio.py @@ -46,6 +46,7 @@ class AsyncPathIOContext: ... 
await file.close() """ + def __init__(self, pathio, args, kwargs): self.close = None self.pathio = pathio @@ -77,12 +78,16 @@ def universal_exception(coro): `NotImplementedError`) with universal exception :py:class:`aioftp.PathIOError` """ + @functools.wraps(coro) async def wrapper(*args, **kwargs): try: return await coro(*args, **kwargs) - except (asyncio.CancelledError, NotImplementedError, - StopAsyncIteration): + except ( + asyncio.CancelledError, + NotImplementedError, + StopAsyncIteration, + ): raise except Exception as exc: raise errors.PathIOError(reason=sys.exc_info()) from exc @@ -91,7 +96,6 @@ async def wrapper(*args, **kwargs): class PathIONursery: - def __init__(self, factory): self.factory = factory self.state = None @@ -108,12 +112,15 @@ def defend_file_methods(coro): Decorator. Raises exception when file methods called with wrapped by :py:class:`aioftp.AsyncPathIOContext` file object. """ + @functools.wraps(coro) async def wrapper(self, file, *args, **kwargs): if isinstance(file, AsyncPathIOContext): - raise ValueError("Native path io file methods can not be used " - "with wrapped file object") + raise ValueError( + "Native path io file methods can not be used " "with wrapped file object", + ) return await coro(self, file, *args, **kwargs) + return wrapper @@ -129,6 +136,7 @@ class AbstractPathIO(abc.ABC): :param state: shared pathio state per server """ + def __init__(self, timeout=None, connection=None, state=None): self.timeout = timeout self.connection = connection @@ -405,7 +413,6 @@ async def unlink(self, path): return path.unlink() def list(self, path): - class Lister(AbstractAsyncLister): iter = None @@ -460,6 +467,7 @@ async def wrapper(self, *args, **kwargs): self.executor, functools.partial(f, self, *args, **kwargs), ) + return wrapper @@ -473,6 +481,7 @@ class AsyncPathIO(AbstractPathIO): :param executor: executor for running blocking tasks :type executor: :py:class:`concurrent.futures.Executor` """ + def __init__(self, *args, executor=None, **kwargs): super().__init__(*args, **kwargs) self.executor = executor @@ -514,7 +523,6 @@ def unlink(self, path): return path.unlink() def list(self, path): - class Lister(AbstractAsyncLister): iter = None @@ -586,7 +594,6 @@ def rename(self, source, destination): class Node: - def __init__(self, type, name, ctime=None, mtime=None, *, content): self.type = type self.name = name @@ -595,9 +602,11 @@ def __init__(self, type, name, ctime=None, mtime=None, *, content): self.content = content def __repr__(self): - return f"{self.__class__.__name__}(type={self.type!r}, " \ - f"name={self.name!r}, ctime={self.ctime!r}, " \ - f"mtime={self.mtime!r}, content={self.content!r})" + return ( + f"{self.__class__.__name__}(type={self.type!r}, " + f"name={self.name!r}, ctime={self.ctime!r}, " + f"mtime={self.mtime!r}, content={self.content!r})" + ) class MemoryPathIO(AbstractPathIO): @@ -614,7 +623,7 @@ class MemoryPathIO(AbstractPathIO): "st_mtime", "st_nlink", "st_mode", - ) + ), ) def __init__(self, *args, state=None, cwd=None, **kwargs): @@ -727,7 +736,6 @@ async def unlink(self, path): parent.content.pop(i) def list(self, path): - class Lister(AbstractAsyncLister): iter = None diff --git a/aioftp/server.py b/src/aioftp/server.py similarity index 93% rename from aioftp/server.py rename to src/aioftp/server.py index 723faf0..1830d78 100644 --- a/aioftp/server.py +++ b/src/aioftp/server.py @@ -58,6 +58,7 @@ class Permission: :param writable: is writable :type writable: :py:class:`bool` """ + def __init__(self, path="/", *, readable=True, 
writable=True): self.path = pathlib.PurePosixPath(path) self.readable = readable @@ -71,8 +72,7 @@ def is_parent(self, other): return False def __repr__(self): - return f"{self.__class__.__name__}({self.path!r}, " \ - f"readable={self.readable!r}, writable={self.writable!r})" + return f"{self.__class__.__name__}({self.path!r}, " f"readable={self.readable!r}, writable={self.writable!r})" class User: @@ -113,17 +113,21 @@ class User: connection in bytes per second :type write_speed_limit_per_connection: :py:class:`int` or :py:class:`None` """ - def __init__(self, - login=None, - password=None, *, - base_path=pathlib.Path("."), - home_path=pathlib.PurePosixPath("/"), - permissions=None, - maximum_connections=None, - read_speed_limit=None, - write_speed_limit=None, - read_speed_limit_per_connection=None, - write_speed_limit_per_connection=None): + + def __init__( + self, + login=None, + password=None, + *, + base_path=pathlib.Path("."), + home_path=pathlib.PurePosixPath("/"), + permissions=None, + maximum_connections=None, + read_speed_limit=None, + write_speed_limit=None, + read_speed_limit_per_connection=None, + write_speed_limit_per_connection=None, + ): self.login = login self.password = password self.base_path = pathlib.Path(base_path) @@ -136,8 +140,7 @@ def __init__(self, self.write_speed_limit = write_speed_limit self.read_speed_limit_per_connection = read_speed_limit_per_connection # damn 80 symbols - self.write_speed_limit_per_connection = \ - write_speed_limit_per_connection + self.write_speed_limit_per_connection = write_speed_limit_per_connection async def get_permissions(self, path): """ @@ -158,17 +161,19 @@ async def get_permissions(self, path): return perm def __repr__(self): - return f"{self.__class__.__name__}({self.login!r}, " \ - f"{self.password!r}, base_path={self.base_path!r}, " \ - f"home_path={self.home_path!r}, " \ - f"permissions={self.permissions!r}, " \ - f"maximum_connections={self.maximum_connections!r}, " \ - f"read_speed_limit={self.read_speed_limit!r}, " \ - f"write_speed_limit={self.write_speed_limit!r}, " \ - f"read_speed_limit_per_connection=" \ - f"{self.read_speed_limit_per_connection!r}, " \ - f"write_speed_limit_per_connection=" \ - f"{self.write_speed_limit_per_connection!r})" + return ( + f"{self.__class__.__name__}({self.login!r}, " + f"{self.password!r}, base_path={self.base_path!r}, " + f"home_path={self.home_path!r}, " + f"permissions={self.permissions!r}, " + f"maximum_connections={self.maximum_connections!r}, " + f"read_speed_limit={self.read_speed_limit!r}, " + f"write_speed_limit={self.write_speed_limit!r}, " + f"read_speed_limit_per_connection=" + f"{self.read_speed_limit_per_connection!r}, " + f"write_speed_limit_per_connection=" + f"{self.write_speed_limit_per_connection!r})" + ) class AbstractUserManager(abc.ABC): @@ -181,7 +186,7 @@ class AbstractUserManager(abc.ABC): GetUserResponse = enum.Enum( "UserManagerResponse", - "OK PASSWORD_REQUIRED ERROR" + "OK PASSWORD_REQUIRED ERROR", ) def __init__(self, *, timeout=None): @@ -233,13 +238,11 @@ class MemoryUserManager(AbstractUserManager): :type users: :py:class:`list`, :py:class:`tuple`, etc. 
of :py:class:`aioftp.User` """ + def __init__(self, users, *args, **kwargs): super().__init__(*args, **kwargs) self.users = users or [User()] - self.available_connections = dict( - (user, AvailableConnections(user.maximum_connections)) - for user in self.users - ) + self.available_connections = dict((user, AvailableConnections(user.maximum_connections)) for user in self.users) async def get_user(self, login): user = None @@ -309,7 +312,6 @@ class Connection(collections.defaultdict): __slots__ = ("future",) class Container: - def __init__(self, storage): self.storage = storage @@ -421,11 +423,12 @@ class ConnectionConditions: ... def foo(self, connection, rest): ... ... """ + user_required = ("user", "no user (use USER firstly)") login_required = ("logged", "not logged in") passive_server_started = ( "passive_server", - "no listen socket created (use PASV firstly)" + "no listen socket created (use PASV firstly)", ) data_connection_made = ("data_connection", "no data connection made") rename_from_required = ("rename_from", "no filename (use RNFR firstly)") @@ -437,7 +440,6 @@ def __init__(self, *fields, wait=False, fail_code="503", fail_info=None): self.fail_info = fail_info def __call__(self, f): - @functools.wraps(f) async def wrapper(cls, connection, rest, *args): futures = {connection[name]: msg for name, msg in self.fields} @@ -483,6 +485,7 @@ class PathConditions: ... def foo(self, connection, path): ... ... """ + path_must_exists = ("exists", False, "path does not exists") path_must_not_exists = ("exists", True, "path already exists") path_must_be_dir = ("is_dir", False, "path is not a directory") @@ -492,7 +495,6 @@ def __init__(self, *conditions): self.conditions = conditions def __call__(self, f): - @functools.wraps(f) async def wrapper(cls, connection, rest, *args): real_path, virtual_path = cls.get_paths(connection, rest) @@ -525,6 +527,7 @@ class PathPermissions: ... def foo(self, connection, path): ... ... """ + readable = "readable" writable = "writable" @@ -532,7 +535,6 @@ def __init__(self, *permissions): self.permissions = permissions def __call__(self, f): - @functools.wraps(f) async def wrapper(cls, connection, rest, *args): real_path, virtual_path = cls.get_paths(connection, rest) @@ -560,6 +562,7 @@ def worker(f): ... ... """ + @functools.wraps(f) async def wrapper(cls, connection, rest): try: @@ -637,32 +640,34 @@ class Server: Please look :py:meth:`asyncio.loop.create_server` docs. 
:type ssl: :py:class:`ssl.SSLContext` """ - def __init__(self, - users=None, - *, - block_size=DEFAULT_BLOCK_SIZE, - socket_timeout=None, - idle_timeout=None, - wait_future_timeout=1, - path_timeout=None, - path_io_factory=pathio.PathIO, - maximum_connections=None, - read_speed_limit=None, - write_speed_limit=None, - read_speed_limit_per_connection=None, - write_speed_limit_per_connection=None, - ipv4_pasv_forced_response_address=None, - data_ports=None, - encoding="utf-8", - ssl=None): + + def __init__( + self, + users=None, + *, + block_size=DEFAULT_BLOCK_SIZE, + socket_timeout=None, + idle_timeout=None, + wait_future_timeout=1, + path_timeout=None, + path_io_factory=pathio.PathIO, + maximum_connections=None, + read_speed_limit=None, + write_speed_limit=None, + read_speed_limit_per_connection=None, + write_speed_limit_per_connection=None, + ipv4_pasv_forced_response_address=None, + data_ports=None, + encoding="utf-8", + ssl=None, + ): self.block_size = block_size self.socket_timeout = socket_timeout self.idle_timeout = idle_timeout self.wait_future_timeout = wait_future_timeout self.path_io_factory = pathio.PathIONursery(path_io_factory) self.path_timeout = path_timeout - self.ipv4_pasv_forced_response_address = \ - ipv4_pasv_forced_response_address + self.ipv4_pasv_forced_response_address = ipv4_pasv_forced_response_address if data_ports is not None: self.available_data_ports = asyncio.PriorityQueue() for data_port in data_ports: @@ -901,7 +906,7 @@ async def dispatcher(self, reader, writer): writer, throttles=dict( server_global=self.throttle, - server_per_connection=self.throttle_per_connection.clone() + server_per_connection=self.throttle_per_connection.clone(), ), read_timeout=self.idle_timeout, write_timeout=self.socket_timeout, @@ -926,8 +931,10 @@ async def dispatcher(self, reader, writer): restart_offset=0, _dispatcher=get_current_task(), ) - connection.path_io = self.path_io_factory(timeout=self.path_timeout, - connection=connection) + connection.path_io = self.path_io_factory( + timeout=self.path_timeout, + connection=connection, + ) pending = { asyncio.create_task(self.greeting(connection, "")), asyncio.create_task(self.response_writer(stream, response_queue)), @@ -955,13 +962,13 @@ async def dispatcher(self, reader, writer): # this is parse_command result elif isinstance(result, tuple): pending.add( - asyncio.create_task(self.parse_command(stream)) + asyncio.create_task(self.parse_command(stream)), ) cmd, rest = result f = self.commands_mapping.get(cmd) if f is not None: pending.add( - asyncio.create_task(f(connection, rest)) + asyncio.create_task(f(connection, rest)), ) if cmd not in ("retr", "stor", "appe"): connection.restart_offset = 0 @@ -991,7 +998,7 @@ async def dispatcher(self, reader, writer): self.available_connections.release() if connection.future.user.done(): task = asyncio.create_task( - self.user_manager.notify_logout(connection.user) + self.user_manager.notify_logout(connection.user), ) tasks_to_wait.append(task) self.connections.pop(key) @@ -1076,7 +1083,7 @@ async def user(self, connection, rest): user_per_connection=StreamThrottle.from_limits( connection.user.read_speed_limit_per_connection, connection.user.write_speed_limit_per_connection, - ) + ), ) connection.response(code, info) return True @@ -1099,14 +1106,15 @@ async def quit(self, connection, rest): @ConnectionConditions(ConnectionConditions.login_required) async def pwd(self, connection, rest): - code, info = "257", f"\"{connection.current_directory}\"" + code, info = "257", 
f'"{connection.current_directory}"' connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, - PathConditions.path_must_be_dir) + PathConditions.path_must_be_dir, + ) @PathPermissions(PathPermissions.readable) async def cwd(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) @@ -1130,7 +1138,8 @@ async def mkd(self, connection, rest): @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, - PathConditions.path_must_be_dir) + PathConditions.path_must_be_dir, + ) @PathPermissions(PathPermissions.writable) async def rmd(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) @@ -1170,16 +1179,17 @@ async def build_mlsx_string(self, connection, path): @ConnectionConditions( ConnectionConditions.login_required, - ConnectionConditions.passive_server_started) + ConnectionConditions.passive_server_started, + ) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.readable) async def mlsd(self, connection, rest): - @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", - fail_info="Can't open data connection") + fail_info="Can't open data connection", + ) @worker async def mlsd_worker(self, connection, rest): stream = connection.data_connection @@ -1221,23 +1231,24 @@ async def build_list_string(self, connection, path): "none", str(stats.st_size), mtime, - path.name + path.name, ) s = " ".join(fields) return s @ConnectionConditions( ConnectionConditions.login_required, - ConnectionConditions.passive_server_started) + ConnectionConditions.passive_server_started, + ) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.readable) async def list(self, connection, rest): - @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", - fail_info="Can't open data connection") + fail_info="Can't open data connection", + ) @worker async def list_worker(self, connection, rest): stream = connection.data_connection @@ -1280,7 +1291,8 @@ async def rnfr(self, connection, rest): @ConnectionConditions( ConnectionConditions.login_required, - ConnectionConditions.rename_from_required) + ConnectionConditions.rename_from_required, + ) @PathConditions(PathConditions.path_must_not_exists) @PathPermissions(PathPermissions.writable) async def rnto(self, connection, rest): @@ -1294,7 +1306,8 @@ async def rnto(self, connection, rest): @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, - PathConditions.path_must_be_file) + PathConditions.path_must_be_file, + ) @PathPermissions(PathPermissions.writable) async def dele(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) @@ -1304,15 +1317,16 @@ async def dele(self, connection, rest): @ConnectionConditions( ConnectionConditions.login_required, - ConnectionConditions.passive_server_started) + ConnectionConditions.passive_server_started, + ) @PathPermissions(PathPermissions.writable) async def stor(self, connection, rest, mode="wb"): - @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", - fail_info="Can't open data connection") + fail_info="Can't open data connection", + ) @worker async def stor_worker(self, connection, rest): stream = connection.data_connection @@ -1343,18 +1357,20 @@ async def stor_worker(self, 
connection, rest): @ConnectionConditions( ConnectionConditions.login_required, - ConnectionConditions.passive_server_started) + ConnectionConditions.passive_server_started, + ) @PathConditions( PathConditions.path_must_exists, - PathConditions.path_must_be_file) + PathConditions.path_must_be_file, + ) @PathPermissions(PathPermissions.readable) async def retr(self, connection, rest): - @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", - fail_info="Can't open data connection") + fail_info="Can't open data connection", + ) @worker async def retr_worker(self, connection, rest): stream = connection.data_connection @@ -1435,7 +1451,6 @@ async def _start_passive_server(self, connection, handler_callback): @ConnectionConditions(ConnectionConditions.login_required) async def pasv(self, connection, rest): - async def handler(reader, writer): if connection.future.data_connection.done(): writer.close() @@ -1471,7 +1486,7 @@ async def handler(reader, writer): connection.response("503", ["this server started in ipv6 mode"]) return False - nums = tuple(map(int, host.split("."))) + (port >> 8, port & 0xff) + nums = tuple(map(int, host.split("."))) + (port >> 8, port & 0xFF) info.append(f"({','.join(map(str, nums))})") if connection.future.data_connection.done(): connection.data_connection.close() @@ -1481,7 +1496,6 @@ async def handler(reader, writer): @ConnectionConditions(ConnectionConditions.login_required) async def epsv(self, connection, rest): - async def handler(reader, writer): if connection.future.data_connection.done(): writer.close() diff --git a/tests/conftest.py b/tests/conftest.py index 4c1b743..44562ed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,22 +1,21 @@ -import ssl +import asyncio import collections import contextlib -import tempfile -import asyncio -import math -import time import functools +import math import socket +import ssl +import tempfile +import time from pathlib import Path import pytest import pytest_asyncio import trustme from async_timeout import timeout - -import aioftp from siosocks.io.asyncio import socks_server_handler +import aioftp # No ssl tests since https://bugs.python.org/issue36098 ca = trustme.CA() @@ -54,22 +53,25 @@ def _wrap_with_defaults(kwargs): @pytest.fixture(params=["127.0.0.1", "::1"]) def pair_factory(request): - class Factory: - - def __init__(self, client=None, server=None, *, - connected=True, logged=True, do_quit=True, - host=request.param, - server_factory=aioftp.Server, - client_factory=aioftp.Client): + def __init__( + self, + client=None, + server=None, + *, + connected=True, + logged=True, + do_quit=True, + host=request.param, + server_factory=aioftp.Server, + client_factory=aioftp.Client, + ): if client is None: client = Container() - self.client = client_factory(*client.args, - **_wrap_with_defaults(client.kwargs)) + self.client = client_factory(*client.args, **_wrap_with_defaults(client.kwargs)) if server is None: server = Container() - self.server = server_factory(*server.args, - **_wrap_with_defaults(server.kwargs)) + self.server = server_factory(*server.args, **_wrap_with_defaults(server.kwargs)) self.connected = connected self.logged = logged self.do_quit = do_quit @@ -90,8 +92,11 @@ async def make_client_files(self, *paths, size=None, atom=b"-"): size = aioftp.DEFAULT_BLOCK_SIZE * 3 data = atom * size for p in map(Path, paths): - await self.client.path_io.mkdir(p.parent, parents=True, - exist_ok=True) + await self.client.path_io.mkdir( + p.parent, + parents=True, + 
exist_ok=True, + ) async with self.client.path_io.open(p, mode="wb") as f: await f.write(data) @@ -119,8 +124,10 @@ async def __aenter__(self): await self.timeout.__aenter__() await self.server.start(host=self.host) if self.connected: - await self.client.connect(self.server.server_host, - self.server.server_port) + await self.client.connect( + self.server.server_host, + self.server.server_port, + ) if self.logged: await self.client.login() return self @@ -145,11 +152,17 @@ def context(*codes): assert set(e.received_codes) == set(codes) else: raise RuntimeError("There was no exception") + return context -@pytest.fixture(params=[aioftp.MemoryPathIO, aioftp.PathIO, - aioftp.AsyncPathIO]) +@pytest.fixture( + params=[ + aioftp.MemoryPathIO, + aioftp.PathIO, + aioftp.AsyncPathIO, + ], +) def path_io(request): return request.param() @@ -164,7 +177,6 @@ def temp_dir(path_io): class Sleep: - def __init__(self): self.delay = 0 self.first_sleep = None @@ -179,8 +191,9 @@ async def sleep(self, delay, result=None, **kwargs): def is_close(self, delay, *, rel_tol=0.05, abs_tol=0.5): ok = math.isclose(self.delay, delay, rel_tol=rel_tol, abs_tol=abs_tol) if not ok: - print(f"latest sleep: {self.delay}; expected delay: " - f"{delay}; rel: {rel_tol}") + print( + f"latest sleep: {self.delay}; expected delay: " f"{delay}; rel: {rel_tol}", + ) return ok @@ -192,8 +205,12 @@ def skip_sleep(monkeypatch): yield sleeper -@pytest_asyncio.fixture(params=[("127.0.0.1", socket.AF_INET), - ("::1", socket.AF_INET6)]) +@pytest_asyncio.fixture( + params=[ + ("127.0.0.1", socket.AF_INET), + ("::1", socket.AF_INET6), + ], +) async def socks(request, unused_tcp_port): handler = functools.partial( socks_server_handler, @@ -204,8 +221,12 @@ async def socks(request, unused_tcp_port): Socks = collections.namedtuple("Socks", "host port server") host, family = request.param port = unused_tcp_port - server = await asyncio.start_server(handler, host=host, port=port, - family=family) + server = await asyncio.start_server( + handler, + host=host, + port=port, + family=family, + ) yield Socks(host, port, server) server.close() await server.wait_closed() diff --git a/tests/test_abort.py b/tests/test_abort.py index 8a18778..3199621 100644 --- a/tests/test_abort.py +++ b/tests/test_abort.py @@ -18,7 +18,6 @@ async def test_abort_stor(pair_factory): class SlowReadMemoryPathIO(aioftp.MemoryPathIO): - async def read(self, *args, **kwargs): await asyncio.sleep(0.01) return await super().read(*args, **kwargs) @@ -40,8 +39,11 @@ async def test_abort_retr(pair_factory, Server): @pytest.mark.asyncio -async def test_abort_retr_no_wait(pair_factory, Server, - expect_codes_in_exception): +async def test_abort_retr_no_wait( + pair_factory, + Server, + expect_codes_in_exception, +): s = Server(path_io_factory=SlowReadMemoryPathIO) async with pair_factory(None, s) as pair: await pair.make_server_files("test.txt") @@ -63,14 +65,11 @@ async def test_nothing_to_abort(pair_factory): class SlowListMemoryPathIO(aioftp.MemoryPathIO): - async def is_file(self, *a, **kw): return True def list(self, *args, **kwargs): - class Lister(aioftp.AbstractAsyncLister): - async def __anext__(cls): await asyncio.sleep(0.01) return pathlib.PurePath("/test.txt") @@ -82,6 +81,7 @@ class Stat: st_size = 0 st_mtime = 0 st_ctime = 0 + return Stat diff --git a/tests/test_client_side_socks.py b/tests/test_client_side_socks.py index f1a8b4a..cebf705 100644 --- a/tests/test_client_side_socks.py +++ b/tests/test_client_side_socks.py @@ -4,16 +4,26 @@ @pytest.mark.asyncio async def 
test_socks_success(pair_factory, Client, socks): - client = Client(socks_host=socks.host, socks_port=socks.port, - socks_version=5, username="foo", password="bar") + client = Client( + socks_host=socks.host, + socks_port=socks.port, + socks_version=5, + username="foo", + password="bar", + ) async with pair_factory(client): pass @pytest.mark.asyncio async def test_socks_fail(pair_factory, Client, socks): - client = Client(socks_host=socks.host, socks_port=socks.port, - socks_version=5, username="bar", password="bar") + client = Client( + socks_host=socks.host, + socks_port=socks.port, + socks_version=5, + username="bar", + password="bar", + ) with pytest.raises(SocksException): async with pair_factory(client): pass diff --git a/tests/test_connection.py b/tests/test_connection.py index ac14132..27cb9e5 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -46,7 +46,7 @@ async def test_custom_passive_commands(pair_factory): pair.client._passive_commands = None await pair.client.get_passive_connection( "A", - commands=["pasv", "epsv"] + commands=["pasv", "epsv"], ) @@ -82,8 +82,11 @@ async def test_pasv_connection_ports_not_added(pair_factory): @pytest.mark.asyncio -async def test_pasv_connection_ports(pair_factory, Server, - unused_tcp_port_factory): +async def test_pasv_connection_ports( + pair_factory, + Server, + unused_tcp_port_factory, +): ports = [unused_tcp_port_factory(), unused_tcp_port_factory()] async with pair_factory(None, Server(data_ports=ports)) as pair: r, w = await pair.client.get_passive_connection() @@ -99,8 +102,11 @@ async def test_data_ports_remains_empty(pair_factory, Server): @pytest.mark.asyncio -async def test_pasv_connection_port_reused(pair_factory, Server, - unused_tcp_port): +async def test_pasv_connection_port_reused( + pair_factory, + Server, + unused_tcp_port, +): s = Server(data_ports=[unused_tcp_port]) async with pair_factory(None, s) as pair: r, w = await pair.client.get_passive_connection() @@ -111,8 +117,10 @@ async def test_pasv_connection_port_reused(pair_factory, Server, await pair.client.quit() pair.client.close() assert pair.server.available_data_ports.qsize() == 1 - await pair.client.connect(pair.server.server_host, - pair.server.server_port) + await pair.client.connect( + pair.server.server_host, + pair.server.server_port, + ) await pair.client.login() r, w = await pair.client.get_passive_connection() host, port, *_ = w.transport.get_extra_info("peername") @@ -121,9 +129,11 @@ async def test_pasv_connection_port_reused(pair_factory, Server, @pytest.mark.asyncio -async def test_pasv_connection_pasv_forced_response_address(pair_factory, - Server, - unused_tcp_port): +async def test_pasv_connection_pasv_forced_response_address( + pair_factory, + Server, + unused_tcp_port, +): def ipv4_used(): try: ipaddress.IPv4Address(pair.host) @@ -132,9 +142,9 @@ def ipv4_used(): return False async with pair_factory( - server=Server(ipv4_pasv_forced_response_address='127.0.0.2'), + server=Server(ipv4_pasv_forced_response_address="127.0.0.2"), ) as pair: - assert pair.server.ipv4_pasv_forced_response_address == '127.0.0.2' + assert pair.server.ipv4_pasv_forced_response_address == "127.0.0.2" if ipv4_used(): # The connection fails here because the server starts to listen for @@ -145,19 +155,22 @@ def ipv4_used(): # pair.server.ipv4_pasv_forced_response_address failed to know that # the server returned correct external IP with pytest.raises(OSError): - await pair.client.get_passive_connection(commands=['pasv']) + await 
pair.client.get_passive_connection(commands=["pasv"]) # With epsv the connection should open as that does not use the # external IPv4 address but just tells the client the port to connect # to - await pair.client.get_passive_connection(commands=['epsv']) + await pair.client.get_passive_connection(commands=["epsv"]) @pytest.mark.parametrize("method", ["epsv", "pasv"]) @pytest.mark.asyncio -async def test_pasv_connection_no_free_port(pair_factory, Server, - expect_codes_in_exception, - method): +async def test_pasv_connection_no_free_port( + pair_factory, + Server, + expect_codes_in_exception, + method, +): s = Server(data_ports=[]) async with pair_factory(None, s, do_quit=False, host="127.0.0.1") as pair: assert pair.server.available_data_ports.qsize() == 0 @@ -166,8 +179,11 @@ async def test_pasv_connection_no_free_port(pair_factory, Server, @pytest.mark.asyncio -async def test_pasv_connection_busy_port(pair_factory, Server, - unused_tcp_port_factory): +async def test_pasv_connection_busy_port( + pair_factory, + Server, + unused_tcp_port_factory, +): ports = [unused_tcp_port_factory(), unused_tcp_port_factory()] async with pair_factory(None, Server(data_ports=ports)) as pair: conflicting_server = await asyncio.start_server( @@ -184,9 +200,12 @@ async def test_pasv_connection_busy_port(pair_factory, Server, @pytest.mark.asyncio -async def test_pasv_connection_busy_port2(pair_factory, Server, - unused_tcp_port_factory, - expect_codes_in_exception): +async def test_pasv_connection_busy_port2( + pair_factory, + Server, + unused_tcp_port_factory, + expect_codes_in_exception, +): ports = [unused_tcp_port_factory()] s = Server(data_ports=ports) async with pair_factory(None, s, do_quit=False) as pair: @@ -218,8 +237,10 @@ async def test_client_session_context_manager(pair_factory): @pytest.mark.asyncio -async def test_long_login_sequence_fail(pair_factory, - expect_codes_in_exception): +async def test_long_login_sequence_fail( + pair_factory, + expect_codes_in_exception, +): class CustomServer(aioftp.Server): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -237,8 +258,11 @@ async def acct(self, connection, rest): connection.response("333") return True - factory = pair_factory(logged=False, server_factory=CustomServer, - do_quit=False) + factory = pair_factory( + logged=False, + server_factory=CustomServer, + do_quit=False, + ) async with factory as pair: with expect_codes_in_exception("333"): await pair.client.login() @@ -249,6 +273,7 @@ async def test_bad_sublines_seq(pair_factory, expect_codes_in_exception): class CustomServer(aioftp.Server): async def write_response(self, stream, code, lines="", list=False): import functools + lines = aioftp.wrap_with_container(lines) write = functools.partial(self.write_line, stream) *body, tail = lines @@ -260,6 +285,8 @@ async def write_response(self, stream, code, lines="", list=False): factory = pair_factory(connected=False, server_factory=CustomServer) async with factory as pair: with expect_codes_in_exception("220"): - await pair.client.connect(pair.server.server_host, - pair.server.server_port) + await pair.client.connect( + pair.server.server_host, + pair.server.server_port, + ) await pair.client.login() diff --git a/tests/test_corner_cases.py b/tests/test_corner_cases.py index 5f1c974..b12c8e8 100644 --- a/tests/test_corner_cases.py +++ b/tests/test_corner_cases.py @@ -14,6 +14,7 @@ def __init__(self, *args, **kwargs): async def custom(*args, **kwargs): raise RuntimeError("Test error") + factory = 
pair_factory(server_factory=CustomServer, do_quit=False) async with factory as pair: with pytest.raises(ConnectionResetError): @@ -56,8 +57,11 @@ async def test_epsv_extra_arg(pair_factory, expect_codes_in_exception): @pytest.mark.asyncio -async def test_bad_server_path_io(pair_factory, Server, - expect_codes_in_exception): +async def test_bad_server_path_io( + pair_factory, + Server, + expect_codes_in_exception, +): class BadPathIO(aioftp.MemoryPathIO): async def is_file(*a, **kw): return False diff --git a/tests/test_directory_actions.py b/tests/test_directory_actions.py index 9899aff..d503b2f 100644 --- a/tests/test_directory_actions.py +++ b/tests/test_directory_actions.py @@ -49,8 +49,10 @@ async def test_change_directory(pair_factory): @pytest.mark.asyncio -async def test_change_directory_not_exist(pair_factory, - expect_codes_in_exception): +async def test_change_directory_not_exist( + pair_factory, + expect_codes_in_exception, +): async with pair_factory() as pair: with expect_codes_in_exception("550"): await pair.client.change_directory("bar") @@ -79,9 +81,7 @@ async def test_rename_non_empty_directory(pair_factory): class FakeErrorPathIO(aioftp.MemoryPathIO): - def list(self, path): - class Lister(aioftp.AbstractAsyncLister): @aioftp.pathio.universal_exception async def __anext__(self): @@ -91,8 +91,11 @@ async def __anext__(self): @pytest.mark.asyncio -async def test_exception_in_list(pair_factory, Server, - expect_codes_in_exception): +async def test_exception_in_list( + pair_factory, + Server, + expect_codes_in_exception, +): s = Server(path_io_factory=FakeErrorPathIO) async with pair_factory(None, s) as pair: with expect_codes_in_exception("451"): diff --git a/tests/test_file.py b/tests/test_file.py index eb81ac6..2a5cbb6 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -1,5 +1,5 @@ -import math import datetime as dt +import math from pathlib import PurePosixPath import pytest @@ -201,11 +201,12 @@ async def test_download_file_write_into(pair_factory): @pytest.mark.asyncio -async def test_upload_file_os_error(pair_factory, Server, - expect_codes_in_exception): - +async def test_upload_file_os_error( + pair_factory, + Server, + expect_codes_in_exception, +): class OsErrorPathIO(aioftp.MemoryPathIO): - @aioftp.pathio.universal_exception async def write(self, fout, data): raise OSError("test os error") @@ -218,8 +219,10 @@ async def write(self, fout, data): @pytest.mark.asyncio -async def test_upload_path_unreachable(pair_factory, - expect_codes_in_exception): +async def test_upload_path_unreachable( + pair_factory, + expect_codes_in_exception, +): async with pair_factory() as pair: with expect_codes_in_exception("550"): async with pair.client.upload_stream("foo/bar/foo") as stream: @@ -244,5 +247,8 @@ async def test_stat_mlst(pair_factory): assert info["type"] == "file" for fact in ("modify", "create"): received = dt.datetime.strptime(info[fact], "%Y%m%d%H%M%S") - assert math.isclose(now.timestamp(), received.timestamp(), - abs_tol=10) + assert math.isclose( + now.timestamp(), + received.timestamp(), + abs_tol=10, + ) diff --git a/tests/test_list_fallback.py b/tests/test_list_fallback.py index 0fde577..7bc7429 100644 --- a/tests/test_list_fallback.py +++ b/tests/test_list_fallback.py @@ -1,6 +1,6 @@ +import contextlib import pathlib import textwrap -import contextlib import pytest @@ -53,7 +53,8 @@ async def test_client_list_override_invalid_raw_command(pair_factory): def test_client_list_windows(): - test_str = textwrap.dedent("""\ + test_str = textwrap.dedent( + 
"""\ 11/4/2018 9:09 PM . 8/10/2018 1:02 PM .. 9/23/2018 2:16 PM bin @@ -70,7 +71,8 @@ def test_client_list_windows(): 10/29/2018 10:55 AM 219 win7.sh 6 files 75,978,506,648 bytes 3 directories 22,198,362,112 bytes free - """) + """, + ) test_str = test_str.strip().split("\n") entities = {} parse = aioftp.Client(encoding="utf-8").parse_list_line_windows @@ -79,8 +81,14 @@ def test_client_list_windows(): path, stat = parse(x.encode("utf-8")) entities[path] = stat dirs = ["bin", "Desktop", "dow", "Downloads", "msc", "opt"] - files = ["win10.img", "win10.iso", "win10.sh", "win7.img", - "win7.iso", "win7.sh"] + files = [ + "win10.img", + "win10.iso", + "win10.sh", + "win7.img", + "win7.iso", + "win7.sh", + ] assert len(entities) == len(dirs + files) for d in dirs: p = pathlib.PurePosixPath(d) @@ -100,10 +108,12 @@ async def test_client_list_override_with_custom(pair_factory, Client): def parser(b): import pickle + return pickle.loads(bytes.fromhex(b.decode().rstrip("\r\n"))) async def builder(_, path): import pickle + return pickle.dumps((path, meta)).hex() async with pair_factory(Client(parse_list_line_custom=parser)) as pair: @@ -123,10 +133,12 @@ async def test_client_list_override_with_custom_last(pair_factory, Client): def parser(b): import pickle + return pickle.loads(bytes.fromhex(b.decode().rstrip("\r\n"))) async def builder(_, path): import pickle + return pickle.dumps((path, meta)).hex() client = Client( diff --git a/tests/test_login.py b/tests/test_login.py index 08afb10..bd38960 100644 --- a/tests/test_login.py +++ b/tests/test_login.py @@ -38,7 +38,10 @@ async def test_login_with_login_and_password(pair_factory, Server): @pytest.mark.asyncio async def test_login_with_login_and_password_no_such_user( - pair_factory, Server, expect_codes_in_exception): + pair_factory, + Server, + expect_codes_in_exception, +): s = Server([aioftp.User("foo", "bar")]) async with pair_factory(None, s, logged=False) as pair: with expect_codes_in_exception("530"): @@ -47,7 +50,10 @@ async def test_login_with_login_and_password_no_such_user( @pytest.mark.asyncio async def test_login_with_login_and_password_bad_password( - pair_factory, Server, expect_codes_in_exception): + pair_factory, + Server, + expect_codes_in_exception, +): s = Server([aioftp.User("foo", "bar")]) async with pair_factory(None, s, logged=False) as pair: with expect_codes_in_exception("530"): @@ -55,8 +61,11 @@ async def test_login_with_login_and_password_bad_password( @pytest.mark.asyncio -async def test_pass_after_login(pair_factory, Server, - expect_codes_in_exception): +async def test_pass_after_login( + pair_factory, + Server, + expect_codes_in_exception, +): s = Server([aioftp.User("foo", "bar")]) async with pair_factory(None, s, logged=False) as pair: await pair.client.login("foo", "bar") diff --git a/tests/test_maximum_connections.py b/tests/test_maximum_connections.py index b23548b..8cc4118 100644 --- a/tests/test_maximum_connections.py +++ b/tests/test_maximum_connections.py @@ -7,8 +7,10 @@ @pytest.mark.asyncio async def test_multiply_connections_no_limits(pair_factory): - Client = functools.partial(aioftp.Client, - path_io_factory=aioftp.MemoryPathIO) + Client = functools.partial( + aioftp.Client, + path_io_factory=aioftp.MemoryPathIO, + ) async with pair_factory() as pair: s = pair.server clients = [Client() for _ in range(4)] @@ -20,10 +22,15 @@ async def test_multiply_connections_no_limits(pair_factory): @pytest.mark.asyncio -async def test_multiply_connections_limited_error(pair_factory, Server, - 
expect_codes_in_exception): - Client = functools.partial(aioftp.Client, - path_io_factory=aioftp.MemoryPathIO) +async def test_multiply_connections_limited_error( + pair_factory, + Server, + expect_codes_in_exception, +): + Client = functools.partial( + aioftp.Client, + path_io_factory=aioftp.MemoryPathIO, + ) s = Server(maximum_connections=4) async with pair_factory(None, s) as pair: s = pair.server @@ -47,9 +54,14 @@ async def test_multiply_user_commands(pair_factory, Server): @pytest.mark.asyncio async def test_multiply_connections_with_user_limited_error( - pair_factory, Server, expect_codes_in_exception): - Client = functools.partial(aioftp.Client, - path_io_factory=aioftp.MemoryPathIO) + pair_factory, + Server, + expect_codes_in_exception, +): + Client = functools.partial( + aioftp.Client, + path_io_factory=aioftp.MemoryPathIO, + ) s = Server([aioftp.User("foo", maximum_connections=4)]) async with pair_factory(None, s, connected=False) as pair: s = pair.server @@ -66,9 +78,14 @@ async def test_multiply_connections_with_user_limited_error( @pytest.mark.asyncio async def test_multiply_connections_relogin_balanced( - pair_factory, Server, expect_codes_in_exception): - Client = functools.partial(aioftp.Client, - path_io_factory=aioftp.MemoryPathIO) + pair_factory, + Server, + expect_codes_in_exception, +): + Client = functools.partial( + aioftp.Client, + path_io_factory=aioftp.MemoryPathIO, + ) s = Server(maximum_connections=4) async with pair_factory(None, s, connected=False) as pair: s = pair.server diff --git a/tests/test_passive.py b/tests/test_passive.py index f52de3a..98ab9c3 100644 --- a/tests/test_passive.py +++ b/tests/test_passive.py @@ -14,8 +14,10 @@ async def test_client_fallback_to_pasv_at_list(pair_factory): @pytest.mark.asyncio -async def test_client_fail_fallback_to_pasv_at_list(pair_factory, - expect_codes_in_exception): +async def test_client_fail_fallback_to_pasv_at_list( + pair_factory, + expect_codes_in_exception, +): async with pair_factory(host="127.0.0.1") as pair: pair.server.commands_mapping["epsv"] = not_implemented with expect_codes_in_exception("502"): diff --git a/tests/test_permissions.py b/tests/test_permissions.py index ec143b6..15403ee 100644 --- a/tests/test_permissions.py +++ b/tests/test_permissions.py @@ -4,11 +4,16 @@ @pytest.mark.asyncio -async def test_permission_denied(pair_factory, Server, - expect_codes_in_exception): - s = Server([ - aioftp.User(permissions=[aioftp.Permission(writable=False)]) - ]) +async def test_permission_denied( + pair_factory, + Server, + expect_codes_in_exception, +): + s = Server( + [ + aioftp.User(permissions=[aioftp.Permission(writable=False)]), + ], + ) async with pair_factory(None, s) as pair: with expect_codes_in_exception("550"): await pair.client.make_directory("foo") @@ -16,14 +21,16 @@ async def test_permission_denied(pair_factory, Server, @pytest.mark.asyncio async def test_permission_overriden(pair_factory, Server): - s = Server([ - aioftp.User( - permissions=[ - aioftp.Permission("/", writable=False), - aioftp.Permission("/foo"), - ] - ) - ]) + s = Server( + [ + aioftp.User( + permissions=[ + aioftp.Permission("/", writable=False), + aioftp.Permission("/foo"), + ], + ), + ], + ) async with pair_factory(None, s) as pair: await pair.client.make_directory("foo") await pair.client.remove_directory("foo") diff --git a/tests/test_restart.py b/tests/test_restart.py index 2fe3794..45fd85d 100644 --- a/tests/test_restart.py +++ b/tests/test_restart.py @@ -20,8 +20,7 @@ async def test_restart_stor_appe(pair_factory, 
offset, method): atom = b"foobar" name = "foo.txt" insert = b"123" - expect = atom[:offset] + b"\x00" * (offset - len(atom)) + insert + \ - atom[offset + len(insert):] + expect = atom[:offset] + b"\x00" * (offset - len(atom)) + insert + atom[offset + len(insert) :] await pair.make_server_files(name, size=1, atom=atom) stream_factory = getattr(pair.client, method) async with stream_factory(name, offset=offset) as stream: diff --git a/tests/test_simple_functions.py b/tests/test_simple_functions.py index 5fbc312..49c6cac 100644 --- a/tests/test_simple_functions.py +++ b/tests/test_simple_functions.py @@ -1,7 +1,7 @@ -import pathlib import asyncio import datetime import itertools +import pathlib import pytest @@ -64,6 +64,7 @@ def _c_locale_time(d, format="%b %d %H:%M"): def test_parse_ls_date_of_leap_year(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") + p = aioftp.Client.parse_ls_date # Leap year date to test d = datetime.datetime(year=2000, month=2, day=29) @@ -71,39 +72,39 @@ def date_to_p(d): # 2016 (leap) ( datetime.datetime(year=2016, month=2, day=29), - datetime.datetime(year=2016, month=2, day=29) + datetime.datetime(year=2016, month=2, day=29), ), # 2017 ( datetime.datetime(year=2017, month=2, day=28), - datetime.datetime(year=2016, month=2, day=29) + datetime.datetime(year=2016, month=2, day=29), ), ( datetime.datetime(year=2017, month=3, day=1), - datetime.datetime(year=2016, month=2, day=29) + datetime.datetime(year=2016, month=2, day=29), ), # 2018 ( datetime.datetime(year=2018, month=2, day=28), - datetime.datetime(year=2016, month=2, day=29) + datetime.datetime(year=2016, month=2, day=29), ), ( datetime.datetime(year=2018, month=3, day=1), - datetime.datetime(year=2020, month=2, day=29) + datetime.datetime(year=2020, month=2, day=29), ), # 2019 ( datetime.datetime(year=2019, month=2, day=28), - datetime.datetime(year=2020, month=2, day=29) + datetime.datetime(year=2020, month=2, day=29), ), ( datetime.datetime(year=2019, month=3, day=1), - datetime.datetime(year=2020, month=2, day=29) + datetime.datetime(year=2020, month=2, day=29), ), # 2020 (leap) ( datetime.datetime(year=2020, month=2, day=29), - datetime.datetime(year=2020, month=2, day=29) + datetime.datetime(year=2020, month=2, day=29), ), ) for now, expected in current_and_expected_dates: @@ -113,6 +114,7 @@ def date_to_p(d): def test_parse_ls_date_not_older_than_6_month_format(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") + p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), @@ -128,6 +130,7 @@ def date_to_p(d): def test_parse_ls_date_older_than_6_month_format(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") + p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), @@ -147,6 +150,7 @@ def date_to_p(d): def test_parse_ls_date_short(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") + p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), @@ -170,7 +174,7 @@ def test_parse_list_line_unix(): ], "unknown": [ "Erw-rw-r-- 1 poh poh 6595 Feb 27 04:14 history.rst", - ] + ], } p = aioftp.Client(encoding="utf-8").parse_list_line_unix for t, stack in lines.items(): diff --git a/tests/test_throttle.py b/tests/test_throttle.py index dcc262b..2fcc8ca 100644 --- a/tests/test_throttle.py +++ b/tests/test_throttle.py @@ -20,8 +20,13 @@ async def test_patched_sleep(skip_sleep): @pytest.mark.parametrize("type", ["read", "write"]) @pytest.mark.parametrize("direction", ["download", "upload"]) @pytest.mark.asyncio 
-async def test_client_side_throttle(pair_factory, skip_sleep, times, type,
-                                    direction):
+async def test_client_side_throttle(
+    pair_factory,
+    skip_sleep,
+    times,
+    type,
+    direction,
+):
     async with pair_factory() as pair:
         await pair.make_server_files("foo", size=SIZE)
         await pair.make_client_files("foo", size=SIZE)
@@ -37,20 +42,34 @@ async def test_client_side_throttle(pair_factory, skip_sleep, times, type,
 @pytest.mark.parametrize("users", [1, 2, 3])
 @pytest.mark.parametrize("throttle_direction", ["read", "write"])
 @pytest.mark.parametrize("data_direction", ["download", "upload"])
-@pytest.mark.parametrize("throttle_level", ["throttle",
-                                            "throttle_per_connection"])
+@pytest.mark.parametrize(
+    "throttle_level",
+    [
+        "throttle",
+        "throttle_per_connection",
+    ],
+)
 @pytest.mark.asyncio
-async def test_server_side_throttle(pair_factory, skip_sleep, times, users,
-                                    throttle_direction, data_direction,
-                                    throttle_level):
+async def test_server_side_throttle(
+    pair_factory,
+    skip_sleep,
+    times,
+    users,
+    throttle_direction,
+    data_direction,
+    throttle_level,
+):
     async with pair_factory() as pair:
         names = []
         for i in range(users):
             name = f"foo{i}"
             names.append(name)
             await pair.make_server_files(name, size=SIZE)
-        throttle = reduce(getattr, [throttle_level, throttle_direction],
-                          pair.server)
+        throttle = reduce(
+            getattr,
+            [throttle_level, throttle_direction],
+            pair.server,
+        )
         throttle.limit = SIZE / times
         clients = []
         for name in names:
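The reduce(getattr, ...) call in test_server_side_throttle above is an attribute-path lookup: it starts from pair.server and applies getattr once per name, so ["throttle_per_connection", "read"] resolves to pair.server.throttle_per_connection.read. A minimal standalone sketch of that idiom, using hypothetical SimpleNamespace stand-ins (not real aioftp objects) that only mirror the throttle / throttle_per_connection read/write layout the test relies on:

import functools
from types import SimpleNamespace

# Stand-in object shaped like the server's throttle attributes (illustration only).
server = SimpleNamespace(
    throttle=SimpleNamespace(read=SimpleNamespace(limit=None), write=SimpleNamespace(limit=None)),
    throttle_per_connection=SimpleNamespace(read=SimpleNamespace(limit=None), write=SimpleNamespace(limit=None)),
)

# Equivalent to: server.throttle_per_connection.read
throttle = functools.reduce(getattr, ["throttle_per_connection", "read"], server)
throttle.limit = 1024  # same effect as server.throttle_per_connection.read.limit = 1024
assert server.throttle_per_connection.read.limit == 1024

Because the throttle level and direction arrive as parametrized strings, one test body can set the limit on any of the four throttle objects without an if/else ladder.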