From bb7bfc4b27a4297a2823b06fe89aa6ebcdf2e040 Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 14:09:41 -0500 Subject: [PATCH 01/13] Test new xcookie --- .github/workflows/tests.yml | 260 ++++++++++++++++++------------------ pyproject.toml | 5 +- 2 files changed, 133 insertions(+), 132 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c0ae4529..4276cb26 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -22,10 +22,10 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4.2.2 - - name: Set up Python 3.13 for linting + - name: Set up Python 3.14 for linting uses: actions/setup-python@v5.6.0 with: - python-version: '3.13' + python-version: '3.14' - name: Install dependencies run: |- python -m pip install pip uv -U @@ -34,6 +34,15 @@ jobs: run: |- # stop the build if there are Python syntax errors or undefined names flake8 ./pyhesaff --count --select=E9,F63,F7,F82 --show-source --statistics + - name: Typecheck + run: |- + python -m pip install mypy + pip install -r requirements/runtime.txt + mypy --install-types --non-interactive ./pyhesaff + mypy ./pyhesaff + python -m pip install ty + pip install -r requirements/runtime.txt + ty check ./pyhesaff build_binpy_wheels: ## # Build the binary wheels. 
Note: even though cibuildwheel will test @@ -60,6 +69,11 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4.2.2 + - name: Enable MSVC 64bit + uses: ilammy/msvc-dev-cmd@v1 + if: ${{ startsWith(matrix.os, 'windows-') }} && ${{ contains(matrix.cibw_skip, '*-win32') }} + with: + arch: ${{ contains(matrix.os, 'arm') && 'arm64' || 'x64' }} - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 if: runner.os == 'Linux' && matrix.arch != 'auto' @@ -68,7 +82,7 @@ jobs: - name: Set vcpkg cache paths (Windows) if: runner.os == 'Windows' shell: pwsh - run: | + run: |- "VCPKG_ARCHIVES_DIR=$env:LOCALAPPDATA\vcpkg\archives" >> $env:GITHUB_ENV "VCPKG_DOWNLOADS_DIR=C:\vcpkg\downloads" >> $env:GITHUB_ENV New-Item -ItemType Directory -Force -Path "$env:LOCALAPPDATA\vcpkg\archives" | Out-Null @@ -81,15 +95,38 @@ jobs: path: |- ${{ env.VCPKG_ARCHIVES_DIR }} ${{ env.VCPKG_DOWNLOADS_DIR }} - key: vcpkg-${{ runner.os }}-${{ hashFiles('pyproject.toml', 'CMakeLists.txt', 'setup.py') }} - restore-keys: | - vcpkg-${{ runner.os }}- - - name: Check dumpbin availability (Windows) + key: vcpkg-${{ runner.os }}-${{ hashFiles('pyproject.toml', 'CMakeLists.txt', 'setup.py', 'vcpkg.json', 'vcpkg-configuration.json') }} + restore-keys: vcpkg-${{ runner.os }}- + - name: Ensure vcpkg (Windows) + if: runner.os == 'Windows' + shell: pwsh + run: |- + if (-not (Test-Path "C:\vcpkg")) { + git clone https://github.com/microsoft/vcpkg C:\vcpkg + } + Set-Location C:\vcpkg + .\bootstrap-vcpkg.bat -disableMetrics + "C:\vcpkg" | Out-File -FilePath $env:GITHUB_PATH -Append + - name: Install OpenCV via vcpkg (Windows) + if: runner.os == 'Windows' + shell: pwsh + run: vcpkg install opencv4:x64-windows + - name: Show cibuildwheel Windows env (Windows) if: runner.os == 'Windows' shell: bash + env: + CIBW_ENVIRONMENT_WINDOWS: |- + VCPKG_ROOT=C:/vcpkg + VCPKG_TARGET_TRIPLET=x64-windows + VCPKG_DOWNLOADS=C:/vcpkg/downloads + PATH=C:/vcpkg;C:/vcpkg/installed/x64-windows/bin;{PATH} + 
OpenCV_DIR=C:/vcpkg/installed/x64-windows/share/opencv4 + OpenCV_ROOT=C:/vcpkg/installed/x64-windows + CMAKE_PREFIX_PATH=C:/vcpkg/installed/x64-windows + CMAKE_ARGS=-DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake;-DOpenCV_DIR=C:/vcpkg/installed/x64-windows/share/opencv4 run: |- - where dumpbin || true - dumpbin /? || true + echo "CIBW_ENVIRONMENT_WINDOWS:" + printf '%s\n' "$CIBW_ENVIRONMENT_WINDOWS" - name: Build binary wheels uses: pypa/cibuildwheel@v3.1.2 with: @@ -97,28 +134,22 @@ jobs: config-file: pyproject.toml env: CIBW_SKIP: ${{ matrix.cibw_skip }} + CIBW_TEST_SKIP: '*-win_arm64' CIBW_ARCHS_LINUX: ${{ matrix.arch }} + PYTHONUTF8: '1' + VSCMD_ARG_TGT_ARCH: '' CIBW_ARCHS_WINDOWS: AMD64 - CIBW_ENVIRONMENT_WINDOWS: | + CIBW_ENVIRONMENT_WINDOWS: |- VCPKG_ROOT=C:/vcpkg VCPKG_TARGET_TRIPLET=x64-windows + VCPKG_DOWNLOADS=C:/vcpkg/downloads + PATH=C:/vcpkg;C:/vcpkg/installed/x64-windows/bin;{PATH} OpenCV_DIR=C:/vcpkg/installed/x64-windows/share/opencv4 OpenCV_ROOT=C:/vcpkg/installed/x64-windows CMAKE_PREFIX_PATH=C:/vcpkg/installed/x64-windows CMAKE_ARGS=-DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake;-DOpenCV_DIR=C:/vcpkg/installed/x64-windows/share/opencv4 - VCPKG_BINARY_SOURCES=clear;files,C:/vcpkg-bincache,readwrite - VCPKG_DOWNLOADS=C:/vcpkg/downloads - PATH=C:/vcpkg/installed/x64-windows/bin;{PATH} - VCPKG_TARGET_TRIPLET=x64-windows - PYTHONUTF8: '1' VCPKG_ROOT: C:\vcpkg VCPKG_TARGET_TRIPLET: x64-windows - - name: Show cibuildwheel Windows env (Windows) - if: runner.os == 'Windows' - shell: bash - run: |- - echo "CIBW_ENVIRONMENT_WINDOWS:" - printf '%s\n' "$CIBW_ENVIRONMENT_WINDOWS" - name: Save vcpkg caches (Windows, even on failure) if: runner.os == 'Windows' && always() uses: actions/cache/save@v4 @@ -130,11 +161,11 @@ jobs: - name: Show built files shell: bash run: ls -la wheelhouse - - name: Set up Python 3.13 to combine coverage + - name: Set up Python 3.14 to combine coverage uses: actions/setup-python@v5.6.0 if: 
runner.os == 'Linux' with: - python-version: '3.13' + python-version: '3.14' - name: Combine coverage Linux if: runner.os == 'Linux' run: |- @@ -187,13 +218,25 @@ jobs: # for testing instead of using the more concise matrix notation. include: - python-version: '3.9' - install-extras: tests,runtime,headless + install-extras: tests-strict,runtime-strict,headless-strict os: ubuntu-latest arch: auto - - python-version: '3.13' - install-extras: tests,runtime,optional,headless + - python-version: '3.9' + install-extras: tests-strict,runtime-strict,headless-strict + os: windows-latest + arch: auto + - python-version: '3.14' + install-extras: tests-strict,runtime-strict,optional-strict,headless-strict os: ubuntu-latest arch: auto + - python-version: '3.14' + install-extras: tests-strict,runtime-strict,optional-strict,headless-strict + os: windows-latest + arch: auto + - python-version: '3.14' + install-extras: tests,headless + os: windows-latest + arch: auto - python-version: '3.9' install-extras: tests,optional,headless os: ubuntu-latest @@ -226,6 +269,10 @@ jobs: install-extras: tests,optional,headless os: windows-latest arch: auto + - python-version: '3.11' + install-extras: tests,optional,headless + os: windows-latest + arch: auto - python-version: '3.12' install-extras: tests,optional,headless os: windows-latest @@ -234,9 +281,18 @@ jobs: install-extras: tests,optional,headless os: windows-latest arch: auto + - python-version: '3.14' + install-extras: tests,optional,headless + os: windows-latest + arch: auto steps: - name: Checkout source uses: actions/checkout@v4.2.2 + - name: Enable MSVC 64bit + uses: ilammy/msvc-dev-cmd@v1 + if: ${{ startsWith(matrix.os, 'windows-') }} + with: + arch: ${{ contains(matrix.os, 'arm') && 'arm64' || 'x64' }} - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 if: runner.os == 'Linux' && matrix.arch != 'auto' @@ -252,63 +308,6 @@ jobs: pattern: wheels-* merge-multiple: true path: wheelhouse - - name: Inspect wheel contents 
(Windows) - if: runner.os == 'Windows' - shell: bash - run: |- - python - <<'PY' - import pathlib - import zipfile - dist_dpath = pathlib.Path("wheelhouse") - wheels = sorted(dist_dpath.glob("pyhesaff*-win_amd64.whl")) - if not wheels: - raise SystemExit("No Windows wheels found in wheelhouse") - print("Found Windows wheels:", [w.name for w in wheels]) - for wheel in wheels: - print("===", wheel.name, "===") - with zipfile.ZipFile(wheel) as zf: - members = [m for m in zf.namelist() if "pyhesaff/" in m] - for name in sorted(members): - print(name) - PY - - name: Validate wheel native module (Windows) - if: runner.os == 'Windows' - shell: bash - run: |- - python - <<'PY' - import pathlib - import zipfile - dist_dpath = pathlib.Path("wheelhouse") - wheels = sorted(dist_dpath.glob("pyhesaff*-win_amd64.whl")) - if not wheels: - raise SystemExit("No Windows wheels found in wheelhouse") - for wheel in wheels: - with zipfile.ZipFile(wheel) as zf: - members = set(zf.namelist()) - pyds = [name for name in members if name.startswith("pyhesaff/_hesaff") and name.endswith(".pyd")] - if not pyds: - raise SystemExit(f"{wheel.name} missing pyhesaff/_hesaff*.pyd") - dlls = [name for name in members if name.startswith("pyhesaff.libs/") and name.lower().endswith(".dll")] - if not dlls: - raise SystemExit(f"{wheel.name} missing pyhesaff.libs/*.dll") - print(f"{wheel.name} contains {pyds[0]} and {len(dlls)} bundled dll(s).") - PY - - name: Show wheel DLL dependencies (Windows) - if: runner.os == 'Windows' - shell: bash - run: |- - python -m pip install delvewheel - python - <<'PY' - import pathlib - import subprocess - import sys - wheels = sorted(pathlib.Path("wheelhouse").glob("pyhesaff*-win_amd64.whl")) - if not wheels: - raise SystemExit("No Windows wheels found in wheelhouse") - for wheel in wheels: - print("===", wheel.name, "===") - subprocess.run([sys.executable, "-m", "delvewheel", "show", str(wheel)], check=False) - PY - name: Install wheel ${{ matrix.install-extras }} shell: 
bash env: @@ -327,72 +326,73 @@ jobs: from packaging import tags from packaging.utils import parse_wheel_filename dist_dpath = pathlib.Path('wheelhouse') - candidates = sorted(dist_dpath.glob('pyhesaff*.whl')) - if not candidates: - raise SystemExit('No wheels found in wheelhouse') - sys_tags = set(tags.sys_tags()) - def is_compatible(path): - _, _, _, wheel_tags = parse_wheel_filename(path.name) - return any(tag in sys_tags for tag in wheel_tags) - compatible = [path for path in candidates if is_compatible(path)] - fpath = sorted(compatible or candidates)[-1] + wheels = sorted(dist_dpath.glob('pyhesaff*.whl')) + if wheels: + sys_tags = set(tags.sys_tags()) + matching = [] + for w in wheels: + try: + _, _, _, wheel_tags = parse_wheel_filename(w.name) + except Exception: + continue + if any(t in sys_tags for t in wheel_tags): + matching.append(w) + fpath = sorted(matching or wheels)[-1] + else: + sdists = sorted(dist_dpath.glob('pyhesaff*.tar.gz')) + if not sdists: + raise SystemExit('No wheel artifacts found in wheelhouse') + fpath = sdists[-1] print(str(fpath).replace(chr(92), chr(47))) ") export MOD_VERSION=$(python -c "if 1: - from packaging.utils import parse_wheel_filename + from pkginfo import Wheel, SDist + import pathlib fpath = '$WHEEL_FPATH' - _, version, _, _ = parse_wheel_filename(fpath.split('/')[-1]) - print(version) + cls = Wheel if fpath.endswith('.whl') else SDist + item = cls(fpath) + print(item.version) ") echo "WHEEL_FPATH=$WHEEL_FPATH" echo "INSTALL_EXTRAS=$INSTALL_EXTRAS" echo "UV_RESOLUTION=$UV_RESOLUTION" echo "MOD_VERSION=$MOD_VERSION" - python -m uv pip install --prerelease=allow "pyhesaff[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse + python -m pip install --prefer-binary "pyhesaff[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse echo "Install finished." 
- - name: Windows wheel diagnostics + - name: Smoke test wheel on Windows if: runner.os == 'Windows' shell: bash run: |- python - <<'PY' - import os - import sys - import site - print("site.getsitepackages():", site.getsitepackages()) - try: - import pyhesaff - print("pyhesaff imported from:", pyhesaff.__file__) - except Exception as exc: - print("pyhesaff import failed:", repr(exc)) - for sp in site.getsitepackages(): - pkg_dir = os.path.join(sp, "pyhesaff") - if os.path.isdir(pkg_dir): - print("Contents of", pkg_dir) - for name in sorted(os.listdir(pkg_dir)): - print(" -", name) - PY - - name: Windows smoke test - if: runner.os == 'Windows' - shell: bash - run: |- - python - <<'PY' - import os - import sys - repo_dir = os.path.abspath(os.getcwd()) - sys.path = [ - path for path in sys.path - if path and os.path.abspath(path) != repo_dir - ] - import pyhesaff - print("pyhesaff imported from:", pyhesaff.__file__) - print("cpp_version:", pyhesaff.get_cpp_version()) + import os, sys + import pathlib + + ws = os.environ.get("GITHUB_WORKSPACE") + if ws: + ws_path = pathlib.Path(ws).resolve() + new_sys_path = [] + for entry in sys.path: + if not entry: + new_sys_path.append(entry) + continue + try: + p = pathlib.Path(entry).resolve() + if p.is_relative_to(ws_path): + continue + except Exception: + pass + new_sys_path.append(entry) + sys.path[:] = new_sys_path + + import pyhesaff as mod + print("pyhesaff:", mod.__file__) + PY - name: Test wheel ${{ matrix.install-extras }} shell: bash env: CI_PYTHON_VERSION: py${{ matrix.python-version }} run: |- - python -m pip install pytest pytest-cov xdoctest echo "Creating test sandbox directory" export WORKSPACE_DNAME="testdir_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}" echo "WORKSPACE_DNAME=$WORKSPACE_DNAME" @@ -639,4 +639,4 @@ jobs: # --secret=EROTEMIC_TWINE_USERNAME=$EROTEMIC_TWINE_USERNAME \ # --secret=EROTEMIC_CI_SECRET=$EROTEMIC_CI_SECRET \ # --secret=EROTEMIC_TEST_TWINE_USERNAME=$EROTEMIC_TEST_TWINE_USERNAME \ -# 
--secret=EROTEMIC_TEST_TWINE_PASSWORD=$EROTEMIC_TEST_TWINE_PASSWORD +# --secret=EROTEMIC_TEST_TWINE_PASSWORD=$EROTEMIC_TEST_TWINE_PASSWORD \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index a6209697..f5e6738b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,11 +78,11 @@ ignore_missing_imports = true exclude = ".*" [tool.xcookie] -tags = [ "github", "cv2", "erotemic", "binpy", "nosrcdist"] +tags = [ "github", "cv2", "erotemic", "binpy", "nosrcdist", "vcpkg", "opencv_link", "win_smoke", "ci_debug_windows_env" ] mod_name = "pyhesaff" rel_mod_parent_dpath = "." #os = [ "linux", "osx", "win",] -os = [ "linux" ] +os = [ "linux", "win" ] repo_name = "pyhesaff" min_python = "3.9" author = "Krystian Mikolajczyk, Michal Perdoch, Jon Crall, Avi Weinstock" @@ -91,6 +91,7 @@ author_email = "erotemic@gmail.com" version = "{mod_dpath}/__init__.py::__version__" license = "Apache 2" dev_status = "beta" +use_uv = true description = "Routines for computation of hessian affine keypoints in images." 
From b722f0d04d4f4c7cbb2407d9713abda66f5dfebb Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 15:26:05 -0500 Subject: [PATCH 02/13] regen the run tests command --- requirements/runtime.txt | 11 ---- run_linter.sh | 5 +- run_tests.py | 122 +++++++++++++++++++++++++++------------ 3 files changed, 89 insertions(+), 49 deletions(-) diff --git a/requirements/runtime.txt b/requirements/runtime.txt index 3cc786f4..e69de29b 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1,11 +0,0 @@ -# 1.19.2 was important for some versions of tensorflow -numpy>=2.1.0 ; python_version < '4.0' and python_version >= '3.13' # Python 3.13+ -numpy>=1.26.0 ; python_version < '3.13' and python_version >= '3.12' # Python 3.12 -numpy>=1.23.2 ; python_version < '3.12' and python_version >= '3.11' # Python 3.11 -numpy>=1.21.6 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10 -numpy>=1.19.3 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 -numpy>=1.19.3 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 - - -ubelt >= 1.3.4 -# smc.freeimage diff --git a/run_linter.sh b/run_linter.sh index 7b8b3f27..ac985abd 100755 --- a/run_linter.sh +++ b/run_linter.sh @@ -1,2 +1,3 @@ -#!/bin/bash -flake8 ./pyhesaff --count --select=E9,F63,F7,F82 --show-source --statistics +#!/usr/bin/env bash +flake8 --count --select=E9,F63,F7,F82 --show-source --statistics pyhesaff +flake8 --count --select=E9,F63,F7,F82 --show-source --statistics ./tests \ No newline at end of file diff --git a/run_tests.py b/run_tests.py index 3ed351b4..8179d814 100755 --- a/run_tests.py +++ b/run_tests.py @@ -1,8 +1,11 @@ #!/usr/bin/env python -from os.path import dirname, join, abspath +""" +Based on template in rc/run_tests.binpy.py.in +""" + +import os import sqlite3 import sys -import os import re @@ -11,18 +14,17 @@ def is_cibuildwheel(): return 'CIBUILDWHEEL' in os.environ -def temp_rename_kernprof(repo_dir): - """ - Hacky workaround so kernprof.py 
doesn't get covered twice (installed and local). - This needed to combine the .coverage files, since file paths need to be unique. - - """ - original_path = repo_dir + '/kernprof.py' - tmp_path = original_path + '.tmp' - if os.path.isfile(original_path): - os.rename(original_path, tmp_path) - elif os.path.isfile(tmp_path): - os.rename(tmp_path, original_path) +# def temp_rename_kernprof(repo_dir): +# """ +# Hacky workaround so kernprof.py doesn't get covered twice (installed and local). +# This needed to combine the .coverage files, since file paths need to be unique. +# """ +# original_path = repo_dir + '/kernprof.py' +# tmp_path = original_path + '.tmp' +# if os.path.isfile(original_path): +# os.rename(original_path, tmp_path) +# elif os.path.isfile(tmp_path): +# os.rename(tmp_path, original_path) def replace_docker_path(path, runner_project_dir): @@ -79,13 +81,23 @@ def copy_coverage_cibuildwheel_docker(runner_project_dir): os.rename(coverage_path, '/output/.coverage.{}'.format(env_hash)) -if __name__ == '__main__': - cwd = os.getcwd() - repo_dir = abspath(dirname(__file__)) - test_dir = join(repo_dir, 'tests') - print('cwd = {!r}'.format(cwd)) +def main(): + import pathlib + + orig_cwd = os.getcwd() + repo_dir = pathlib.Path(__file__).parent.absolute() + test_dir = repo_dir / 'tests' + print('[run_tests] cwd = {!r}'.format(orig_cwd)) + + print('[run_tests] Changing dirs to test_dir={!r}'.format(test_dir)) + os.chdir(test_dir) + + testdir_contents = list(pathlib.Path(test_dir).glob('*')) + pyproject_fpath = repo_dir / 'pyproject.toml' - import pytest + print(f'[run_tests] repo_dir = {repo_dir}') + print(f'[run_tests] pyproject_fpath = {pyproject_fpath}') + print(f'[run_tests] test_dir={test_dir}') # Prefer testing the installed version, but fallback to testing the # development version. 
@@ -94,30 +106,58 @@ def copy_coverage_cibuildwheel_docker(runner_project_dir): except ImportError: print('running this test script requires ubelt') raise + + print(f'[run_tests] testdir_contents = {ub.urepr(testdir_contents, nl=1)}') + print(f'[run_tests] sys.path = {ub.urepr(sys.path, nl=1)}') + package_name = 'pyhesaff' # Statically check if ``package_name`` is installed outside of the repo. # To do this, we make a copy of PYTHONPATH, remove the repodir, and use # ubelt to check to see if ``package_name`` can be resolved to a path. - temp_path = list(map(abspath, sys.path)) - if repo_dir in temp_path: - temp_path.remove(repo_dir) - modpath = ub.modname_to_modpath(package_name, sys_path=temp_path) + temp_path = [pathlib.Path(p).resolve() for p in sys.path] + _resolved_repo_dir = repo_dir.resolve() + print(f'[run_tests] Searching for installed version of {package_name}.') + try: + _idx = temp_path.index(_resolved_repo_dir) + except IndexError: + print('[run_tests] Confirmed repo dir is not in sys.path') + else: + print( + f'[run_tests] Removing _resolved_repo_dir={_resolved_repo_dir} from search path' + ) + del temp_path[_idx] + if is_cibuildwheel(): + # Remove from sys.path to prevent the import mechanism from testing + # the source repo rather than the installed wheel. + print( + f'[run_tests] Removing _resolved_repo_dir={_resolved_repo_dir} from sys.path to ensure wheels are tested' + ) + del sys.path[_idx] + print(f'[run_tests] sys.path = {ub.urepr(sys.path, nl=1)}') + + _temp_path = [os.fspath(p) for p in temp_path] + print(f'[run_tests] Search Paths: {ub.urepr(_temp_path, nl=1)}') + modpath = ub.modname_to_modpath(package_name, sys_path=_temp_path) if modpath is not None: # If it does, then import it. This should cause the installed version # to be used on further imports even if the repo_dir is in the path. 
- print(f'Using installed version of {package_name}') - module = ub.import_module_from_path(modpath, index=0) - print('Installed module = {!r}'.format(module)) + print(f'[run_tests] Found installed version of {package_name}') + print(f'[run_tests] modpath={modpath}') + modpath_contents = list(pathlib.Path(modpath).glob('*')) + print( + f'[run_tests] modpath_contents = {ub.urepr(modpath_contents, nl=1)}' + ) + # module = ub.import_module_from_path(modpath, index=0) + # print(f'[run_tests] Installed module = {module!r}') else: - print(f'No installed version of {package_name} found') + print(f'[run_tests] No installed version of {package_name} found') try: - print('Changing dirs to test_dir={!r}'.format(test_dir)) - os.chdir(test_dir) + import pytest pytest_args = [ '--cov-config', - '../pyproject.toml', + os.fspath(pyproject_fpath), '--cov-report', 'html', '--cov-report', @@ -125,19 +165,29 @@ def copy_coverage_cibuildwheel_docker(runner_project_dir): '--cov-report', 'xml', '--cov=' + package_name, - modpath, - '.', + os.fspath(modpath), + os.fspath(test_dir), ] if is_cibuildwheel(): pytest_args.append('--cov-append') pytest_args = pytest_args + sys.argv[1:] - sys.exit(pytest.main(pytest_args)) + print(f'[run_tests] Exec pytest with args={pytest_args}') + retcode = pytest.main(pytest_args) + print(f'[run_tests] pytest returned ret={retcode}') + except Exception as ex: + print(f'[run_tests] pytest exception: {ex}') + retcode = 1 finally: - os.chdir(cwd) + os.chdir(orig_cwd) if is_cibuildwheel(): # for CIBW under linux copy_coverage_cibuildwheel_docker( f'/home/runner/work/{package_name}/{package_name}' ) - print('Restoring cwd = {!r}'.format(cwd)) + print('[run_tests] Restoring cwd = {!r}'.format(orig_cwd)) + return retcode + + +if __name__ == '__main__': + sys.exit(main()) From e6a56b2ea316b8e0e2be18fa0439aac0edf1ac7c Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 17:59:24 -0500 Subject: [PATCH 03/13] wip --- run_tests.py | 18 +++++++++++++++--- 1 
file changed, 15 insertions(+), 3 deletions(-) mode change 100755 => 100644 run_tests.py diff --git a/run_tests.py b/run_tests.py old mode 100755 new mode 100644 index 8179d814..2c39542a --- a/run_tests.py +++ b/run_tests.py @@ -4,6 +4,7 @@ """ import os +import pathlib import sqlite3 import sys import re @@ -14,6 +15,19 @@ def is_cibuildwheel(): return 'CIBUILDWHEEL' in os.environ +def safe_resolve_path(path): + """ + Resolve a path for comparison, but tolerate Windows junctions that are + known to raise PermissionError (e.g. C:\\Documents and Settings). + """ + p = pathlib.Path(path) + try: + return p.resolve() + except (PermissionError, OSError) as ex: + print(f'[run_tests] Unable to resolve sys.path entry {p!r}: {ex!r}') + return p.absolute() + + # def temp_rename_kernprof(repo_dir): # """ # Hacky workaround so kernprof.py doesn't get covered twice (installed and local). @@ -82,8 +96,6 @@ def copy_coverage_cibuildwheel_docker(runner_project_dir): def main(): - import pathlib - orig_cwd = os.getcwd() repo_dir = pathlib.Path(__file__).parent.absolute() test_dir = repo_dir / 'tests' @@ -114,7 +126,7 @@ def main(): # Statically check if ``package_name`` is installed outside of the repo. # To do this, we make a copy of PYTHONPATH, remove the repodir, and use # ubelt to check to see if ``package_name`` can be resolved to a path. 
- temp_path = [pathlib.Path(p).resolve() for p in sys.path] + temp_path = [safe_resolve_path(p) for p in sys.path] _resolved_repo_dir = repo_dir.resolve() print(f'[run_tests] Searching for installed version of {package_name}.') try: From 2c486b63f466cb6ff7ada5fb89ccb00e955574fd Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 18:18:10 -0500 Subject: [PATCH 04/13] wip --- pyproject.toml | 12 +++++++++++- run_tests.py | 4 ++++ 2 files changed, 15 insertions(+), 1 deletion(-) mode change 100644 => 100755 run_tests.py diff --git a/pyproject.toml b/pyproject.toml index f5e6738b..5bdc44d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -142,7 +142,17 @@ repair-wheel-command = "python -m pip install delvewheel && python -m delvewheel [tool.pytest.ini_options] addopts = "-p no:doctest --xdoctest --xdoctest-style=google --ignore-glob=setup.py" -norecursedirs = ".git ignore build __pycache__ dev _skbuild" +norecursedirs = [ + ".git", + "ignore", + "build", + "__pycache__", + "dev", + "_skbuild", + "Documents and Settings", + "System Volume Information", + "$Recycle.Bin", +] filterwarnings = [ "default", "ignore:.*No cfgstr given in Cacher constructor or call.*:Warning", "ignore:.*Define the __nice__ method for.*:Warning", "ignore:.*private pytest class or function.*:Warning",] [tool.coverage.run] diff --git a/run_tests.py b/run_tests.py old mode 100644 new mode 100755 index 2c39542a..10ed75ba --- a/run_tests.py +++ b/run_tests.py @@ -180,6 +180,10 @@ def main(): os.fspath(modpath), os.fspath(test_dir), ] + if os.name == 'nt': + # Legacy Windows junction that can trigger PermissionError during + # pytest collection on CI workers. 
+ pytest_args.append('--ignore=C:\\Documents and Settings') if is_cibuildwheel(): pytest_args.append('--cov-append') From 2d66cde12adabdf1f98be9c9d1bc3504d08ee75a Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 18:34:56 -0500 Subject: [PATCH 05/13] attempt to fix lint job --- .github/workflows/tests.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4276cb26..56979ac6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -38,7 +38,6 @@ jobs: run: |- python -m pip install mypy pip install -r requirements/runtime.txt - mypy --install-types --non-interactive ./pyhesaff mypy ./pyhesaff python -m pip install ty pip install -r requirements/runtime.txt From af5ab17a92f04e796288260dbe9463826f3cee6f Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 18:40:39 -0500 Subject: [PATCH 06/13] Fix version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5bdc44d2..35ec8700 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "scikit_build_core.build" [project] name = "pyhesaff" -version = "2.2.0" +version = "2.2.1" # FIXME: need single source of truth for the version description = "Routines for computation of hessian affine keypoints in images." 
readme = "README.rst" requires-python = ">=3.9" From aa877af47f43958dc58b2b01de2e194d2fb4558c Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 18:40:53 -0500 Subject: [PATCH 07/13] Remove unused code --- pyhesaff/ctypes_interface.py | 344 ----------------------------------- 1 file changed, 344 deletions(-) delete mode 100755 pyhesaff/ctypes_interface.py diff --git a/pyhesaff/ctypes_interface.py b/pyhesaff/ctypes_interface.py deleted file mode 100755 index 430a001b..00000000 --- a/pyhesaff/ctypes_interface.py +++ /dev/null @@ -1,344 +0,0 @@ -""" -This helps find the shared library that contains the compiled subroutines. -Its a bit hacky and could use a cleanup by someone who really understands -how python c-extension libraries are named and placed depending on system. -""" - -from os.path import join, exists, normpath -import sys -import os -import ctypes - - -# ============================ -# general ctypes interface -# ============================ - -__DEBUG_CLIB__ = '--debug' in sys.argv or '--debug-clib' in sys.argv - - -def get_plat_specifier(): - """ - Standard platform specifier used by distutils - """ - try: - import distutils - except ImportError: - return get_plat_specifier2() - try: - plat_name = distutils.util.get_platform() - except AttributeError: - plat_name = distutils.sys.platform - plat_specifier = '.{}-{}'.format(plat_name, sys.version[0:3]) - if hasattr(sys, 'gettotalrefcount'): - plat_specifier += '-pydebug' - return plat_specifier - - -def _py_ver_str(): - """Return 'MAJOR.MINOR' (e.g., '3.12').""" - return f'{sys.version_info.major}.{sys.version_info.minor}' - - -def _norm_arch(): - """Normalize common architecture names to the ones your code expects.""" - import platform - - m = (platform.machine() or '').lower() - if m in {'x86_64', 'amd64'}: - return 'x86_64' - if m in {'i386', 'i686', 'x86'}: - return 'i686' - if m in {'aarch64', 'arm64'}: - return 'arm64' - return m or ('x86_64' if sys.maxsize > 2**32 else 'i686') - - -def 
get_plat_specifier2(): - """ - Standard platform specifier (distutils-free). - Mirrors your existing format: '.-' + optional '-pydebug'. - """ - import sysconfig - - plat_name = sysconfig.get_platform() or sys.platform - plat_specifier = f'.{plat_name}-{_py_ver_str()}' - if hasattr(sys, 'gettotalrefcount'): # CPython debug builds - plat_specifier += '-pydebug' - return plat_specifier - - -def get_candidate_plat_specifiers2(): - """ - Produce a list of plausible platform suffixes without using distutils. - Keeps your legacy candidates and adds a few modern ones (manylinux, macOS). - """ - import sysconfig - - arch = _norm_arch() - py_ver = _py_ver_str() - plat_name = sysconfig.get_platform() or sys.platform - - plat_name_cands = [plat_name] - - if sys.platform.startswith('linux'): - # Keep broad fallbacks and add some manylinux variants that show up in practice. - plat_name_cands += [ - 'linux', - 'manylinux', - 'manylinux1', - 'manylinux2010', - 'manylinux2014', - ] - # Wheel-style tags sometimes include glibc floor; include a couple likely ones. - # (Your filenames use '-' not '_', but we’ll keep your format below.) - if arch: - plat_name_cands += [ - f'manylinux_2_17_{arch}', - f'manylinux_2_5_{arch}', - ] - - elif sys.platform.startswith('darwin'): - # Keep your historical macOS entries; add modern versions and universal2. - plat_name_cands += [ - 'macosx-10.6', - 'macosx-10.7', - 'macosx-10.9', - 'macosx-10.12', - 'macosx-11.0', - 'macosx-12.0', - 'macosx-13.0', - 'macosx-10.6-intel', - 'macosx-10.7-intel', - 'macosx-10.9-intel', - 'macosx-10.12-intel', - 'macosx-11.0-universal2', - 'macosx-12.0-universal2', - ] - - elif sys.platform.startswith('win32'): - # Keep both in case filenames vary. 
- plat_name_cands += ['win-amd64', 'win32'] - - spec_list = [] - for pn in plat_name_cands: - spec_list.extend( - [ - f'.{pn}-{py_ver}', - f'.{pn}-{arch}-{py_ver}', - ] - ) - - # Bare suffix (your original behavior) - spec_list.append('') - return spec_list - - -def get_candidate_plat_specifiers(): - try: - import distutils - except ImportError: - return get_candidate_plat_specifiers2() - if sys.maxsize > 2**32: - arch = 'x86_64' # TODO: get correct arch spec - else: - arch = 'i686' # TODO: get correct arch spec - - py_ver = sys.version[0:3] - - try: - plat_name = distutils.util.get_platform() - except AttributeError: - plat_name = distutils.sys.platform - - plat_name_cands = [plat_name] - if sys.platform.startswith('linux'): - plat_name_cands.append('linux') - plat_name_cands.append('manylinux1') - plat_name_cands.append('manylinux') - elif sys.platform.startswith('darwin'): - # HACK: - # on travis, wheel builds as libhesaff.macosx-10.12-x86_64-2.7.dylib, - # but we seem to want libhesaff.macosx-10.6-intel-2.7.dylib - # TODO: what is the proper way to determine the ABI tag? - plat_name_cands.append('macosx-10.6') - plat_name_cands.append('macosx-10.7') - plat_name_cands.append('macosx-10.9') - plat_name_cands.append('macosx-10.12') - plat_name_cands.append('macosx-10.6-intel') - plat_name_cands.append('macosx-10.7-intel') - plat_name_cands.append('macosx-10.9-intel') - plat_name_cands.append('macosx-10.12-intel') - elif sys.platform.startswith('win32'): - # hack for win32 - plat_name_cands.append('win-amd64') - pass - - spec_list = [] - for plat_name in plat_name_cands: - spec_list.extend( - [ - '.{}-{}'.format(plat_name, sys.version[0:3]), - '.{}-{}-{}'.format(plat_name, arch, py_ver), - ] - ) - spec_list.append('') - return spec_list - - -def get_lib_fname_candidates(libname): - """ - Args: - libname (str): library name (e.g. 
'hesaff', not 'libhesaff') - - Returns: - list: libnames - list of plausible library file names - - CommandLine: - python -m pyhesaff.ctypes_interface get_lib_fname_candidates - - Example: - >>> from pyhesaff.ctypes_interface import * # NOQA - >>> libname = 'hesaff' - >>> libnames = get_lib_fname_candidates(libname) - >>> import ubelt as ub - >>> print('libnames = {}'.format(ub.repr2(libnames))) - """ - spec_list = get_candidate_plat_specifiers() - - prefix_list = ['lib' + libname] - if sys.platform.startswith('win32'): - # windows doesnt start names with lib - prefix_list.append(libname) - ext = '.dll' - elif sys.platform.startswith('darwin'): - ext = '.dylib' - elif sys.platform.startswith('linux'): - ext = '.so' - else: - raise Exception('Unknown operating system: %s' % sys.platform) - # Construct priority ordering of libnames - libnames = [ - ''.join((prefix, spec, ext)) - for spec in spec_list - for prefix in prefix_list - ] - return libnames - - -def get_lib_dpath_list(root_dir): - """ - input : deepest directory to look for a library (dll, so, dylib) - returns : list of plausible directories to look. 
- """ - 'returns possible lib locations' - get_lib_dpath_list = [ - root_dir, - # join(root_dir, 'lib'), - # join(root_dir, 'build'), - # join(root_dir, 'build', 'lib'), - ] - return get_lib_dpath_list - - -def find_lib_fpath(libname, root_dir, verbose=False): - """Search for the library""" - lib_fname_list = get_lib_fname_candidates(libname) - tried_fpaths = [] - - FINAL_LIB_FPATH = None - - for lib_fname in lib_fname_list: - if verbose: - print('--') - curr_dpath = root_dir - # max_depth = 0 - - for lib_dpath in get_lib_dpath_list(curr_dpath): - lib_fpath = normpath(join(lib_dpath, lib_fname)) - tried_fpaths.append(lib_fpath) - flag = exists(lib_fpath) - if verbose: - print('[c] Check: {}, exists={}'.format(lib_fpath, int(flag))) - if flag: - if verbose: - print('using: {}'.format(lib_fpath)) - FINAL_LIB_FPATH = lib_fpath - return lib_fpath - - if FINAL_LIB_FPATH is not None: - return FINAL_LIB_FPATH - else: - contents = os.listdir(root_dir) - msg = ( - '\n[C!] find_lib_fpath(libname={!r}, root_dir={!r})'.format( - libname, root_dir - ) - + '\n[c!] Cannot FIND dynamic library' - ) - print(msg) - print('\n[c!] Checked: '.join(tried_fpaths)) - print('UNABLE TO FIND LIB IN DPATH contents = {!r}'.format(contents)) - raise ImportError(msg) - - -def load_clib(libname, root_dir): - """ - Searches for a library matching libname and loads it - - Args: - libname: library name (e.g. 'hesaff', not 'libhesaff') - - root_dir: the directory that should contain the - library file (dll, dylib, or so). - Returns: - clib: a ctypes object used to interface with the library - """ - ex = None - lib_fpath = find_lib_fpath(libname, root_dir) - try: - if sys.platform.startswith('win32'): - clib = ctypes.windll[lib_fpath] - else: - clib = ctypes.cdll[lib_fpath] - except OSError as ex_: - ex = ex_ - print('[C!] Caught OSError:\n{!r}'.format(ex)) - errsuffix = 'Is there a missing dependency?' - except Exception as ex_: - ex = ex_ - print('[C!] 
Caught Exception:\n{!r}'.format(ex)) - errsuffix = 'Was the library correctly compiled?' - else: - - def def_cfunc(return_type, func_name, arg_type_list): - "Function to define the types that python needs to talk to c" - cfunc = getattr(clib, func_name) - cfunc.restype = return_type - cfunc.argtypes = arg_type_list - - clib.__LIB_FPATH__ = lib_fpath - return clib, def_cfunc, lib_fpath - print('[C!] cwd={!r}'.format(os.getcwd())) - print( - '[C!] load_clib(libname={!r}, root_dir={!r})'.format(libname, root_dir) - ) - print('[C!] lib_fpath = {!r}'.format(lib_fpath)) - errmsg = ( - '[C] Cannot LOAD {!r} dynamic library. Caused by ex={!r}. {}'.format( - libname, ex, errsuffix - ) - ) - print(errmsg) - raise ImportError(errmsg) - - -if __name__ == '__main__': - r""" - CommandLine: - python -m pyhesaff.ctypes_interface - python -m pyhesaff.ctypes_interface --allexamples - """ - import xdoctest - - xdoctest.doctest_module(__file__) From 612583d6e120199e855cbe2ec30c3ce9cc4b16fe Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 19:08:44 -0500 Subject: [PATCH 08/13] type annotations --- pyhesaff/_pyhesaff.py | 88 +++++++++++++++++++++++++------------------ pyhesaff/py.typed | 0 pyproject.toml | 14 +++++++ 3 files changed, 65 insertions(+), 37 deletions(-) create mode 100644 pyhesaff/py.typed diff --git a/pyhesaff/_pyhesaff.py b/pyhesaff/_pyhesaff.py index dc863f79..b24941fa 100755 --- a/pyhesaff/_pyhesaff.py +++ b/pyhesaff/_pyhesaff.py @@ -1,4 +1,6 @@ #!/usr/bin/env python +from __future__ import annotations + """ The python hessian affine keypoint module @@ -8,11 +10,17 @@ python -m pyhesaff detect_feats --show --siftPower=0.5, """ +import os import numpy as np import ubelt as ub from collections import OrderedDict +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Sequence, Tuple from pyhesaff import _hesaff +if TYPE_CHECKING: + ... 
+ # from collections.abc import Iterable + # ============================ # hesaff ctypes interface # ============================ @@ -23,7 +31,7 @@ img_dtype = np.uint8 img32_dtype = np.float32 # THE ORDER OF THIS LIST IS IMPORTANT! -HESAFF_TYPED_PARAMS = [ +HESAFF_TYPED_PARAMS: List[Tuple[type, str, Any]] = [ # Pyramid Params (int, 'numberOfScales', 3), # number of scale per octave ( @@ -80,12 +88,12 @@ (float, 'siftPower', 1.0), ] -HESAFF_PARAM_DICT = OrderedDict( +HESAFF_PARAM_DICT: OrderedDict[str, Any] = OrderedDict( [(key, val) for (type_, key, val) in HESAFF_TYPED_PARAMS] ) -def grab_test_imgpath(p='astro'): +def grab_test_imgpath(p: str = 'astro') -> str: from pyhesaff._demodata import grab_test_image_fpath fpath = grab_test_image_fpath(p) @@ -97,13 +105,15 @@ def grab_test_imgpath(p='astro'): return fpath -def imread(fpath): +def imread(fpath: str | os.PathLike) -> np.ndarray | None: import cv2 - return cv2.imread(fpath) + return cv2.imread(os.fspath(fpath)) -def _build_typed_params_kwargs_docstr_block(typed_params): +def _build_typed_params_kwargs_docstr_block( + typed_params: Sequence[Tuple[type, str, Any]], +) -> str: r""" Args: typed_params (dict): @@ -118,7 +128,7 @@ def _build_typed_params_kwargs_docstr_block(typed_params): >>> result = build_typed_params_docstr(typed_params) >>> print(result) """ - kwargs_lines = [] + kwargs_lines: List[str] = [] for tup in typed_params: type_, name, default = tup typestr = getattr(type_, '__name__', str(type_)) @@ -136,7 +146,7 @@ def _build_typed_params_kwargs_docstr_block(typed_params): ) -def argparse_hesaff_params(): +def argparse_hesaff_params() -> Dict[str, Any]: alias_dict = {'affine_invariance': 'ai'} alias_dict = {'rotation_invariance': 'ri'} default_dict_ = get_hesaff_default_params() @@ -150,8 +160,8 @@ def argparse_hesaff_params(): return hesskw -KPTS_DIM = _hesaff.get_kpts_dim() -DESC_DIM = _hesaff.get_desc_dim() +KPTS_DIM: int = _hesaff.get_kpts_dim() +DESC_DIM: int = _hesaff.get_desc_dim() # 
============================ @@ -159,25 +169,25 @@ def argparse_hesaff_params(): # ============================ -def alloc_patches(nKpts, size=41): +def alloc_patches(nKpts: int, size: int = 41) -> np.ndarray: patches = np.empty((nKpts, size, size), np.float32) return patches -def alloc_vecs(nKpts): +def alloc_vecs(nKpts: int) -> np.ndarray: # array of bytes vecs = np.empty((nKpts, DESC_DIM), vecs_dtype) return vecs -def alloc_kpts(nKpts): +def alloc_kpts(nKpts: int) -> np.ndarray: # array of floats kpts = np.empty((nKpts, KPTS_DIM), kpts_dtype) # kpts = np.zeros((nKpts, KPTS_DIM), kpts_dtype) - 1.0 # array of floats return kpts -def _make_hesaff_cpp_params(kwargs): +def _make_hesaff_cpp_params(kwargs: Mapping[str, Any]) -> OrderedDict[str, Any]: hesaff_params = HESAFF_PARAM_DICT.copy() for key, val in kwargs.items(): if key in hesaff_params: @@ -192,15 +202,15 @@ def _make_hesaff_cpp_params(kwargs): # ============================ -def get_hesaff_default_params(): +def get_hesaff_default_params() -> OrderedDict[str, Any]: return HESAFF_PARAM_DICT.copy() -def get_is_debug_mode(): +def get_is_debug_mode() -> bool: return _hesaff.is_debug_mode() -def get_cpp_version(): +def get_cpp_version() -> int: r""" Returns: int: cpp_version @@ -232,8 +242,11 @@ def get_cpp_version(): def detect_feats( - img_fpath, use_adaptive_scale=False, nogravity_hack=False, **kwargs -): + img_fpath: str | os.PathLike, + use_adaptive_scale: bool = False, + nogravity_hack: bool = False, + **kwargs: Any, +) -> tuple: r""" driver function for detecting hessian affine keypoints from an image path. 
extra parameters can be passed to the hessian affine detector by using @@ -343,7 +356,7 @@ def detect_feats( >>> #pt.show_if_requested() """ # Load image - kpts, vecs = _hesaff.detect_fpath(img_fpath, **kwargs) + kpts, vecs = _hesaff.detect_fpath(os.fspath(img_fpath), **kwargs) if use_adaptive_scale: # Adapt scale if requested kpts, vecs = adapt_scale(img_fpath, kpts) if nogravity_hack: @@ -351,7 +364,7 @@ def detect_feats( return kpts, vecs -def detect_feats2(img_or_fpath, **kwargs): +def detect_feats2(img_or_fpath: str | os.PathLike | np.ndarray, **kwargs: Any): """ General way of detecting from either an fpath or ndarray @@ -361,7 +374,7 @@ def detect_feats2(img_or_fpath, **kwargs): Returns: tuple """ - if isinstance(img_or_fpath, str): + if isinstance(img_or_fpath, (str, os.PathLike)): fpath = img_or_fpath return detect_feats(fpath, **kwargs) else: @@ -369,7 +382,7 @@ def detect_feats2(img_or_fpath, **kwargs): return detect_feats_in_image(img, **kwargs) -def detect_feats_list(image_paths_list, **kwargs): +def detect_feats_list(image_paths_list: Sequence[str | os.PathLike], **kwargs: Any): """ Args: image_paths_list (list): A list of image paths @@ -420,7 +433,7 @@ def detect_feats_list(image_paths_list, **kwargs): return kpts_list, vecs_list -def detect_feats_in_image(img, **kwargs): +def detect_feats_in_image(img: np.ndarray, **kwargs: Any): r""" Takes a preloaded image and detects keypoints and descriptors @@ -451,7 +464,7 @@ def detect_feats_in_image(img, **kwargs): return _hesaff.detect_image(img, **kwargs) -def detect_num_feats_in_image(img, **kwargs): +def detect_num_feats_in_image(img: np.ndarray, **kwargs: Any): r""" Just quickly returns how many keypoints are in the image. Does not attempt to return or store the values. 
@@ -512,7 +525,7 @@ def detect_num_feats_in_image(img, **kwargs): # just extraction -def extract_vecs(img_fpath, kpts, **kwargs): +def extract_vecs(img_fpath: str | os.PathLike | np.ndarray, kpts: np.ndarray, **kwargs: Any): r""" Extract SIFT descriptors at keypoint locations @@ -572,12 +585,12 @@ def extract_vecs(img_fpath, kpts, **kwargs): >>> pt.show_if_requested() """ kpts = np.ascontiguousarray(kpts, dtype=kpts_dtype) - if isinstance(img_fpath, str): - return _hesaff.extract_desc_fpath(img_fpath, kpts, **kwargs) + if isinstance(img_fpath, (str, os.PathLike)): + return _hesaff.extract_desc_fpath(os.fspath(img_fpath), kpts, **kwargs) return _hesaff.extract_desc_image(img_fpath, kpts, **kwargs) -def extract_patches(img_or_fpath, kpts, **kwargs): +def extract_patches(img_or_fpath: str | os.PathLike | np.ndarray, kpts: np.ndarray, **kwargs: Any): r""" Extract patches used to compute SIFT descriptors. @@ -616,12 +629,12 @@ def extract_patches(img_or_fpath, kpts, **kwargs): >>> pt.show_if_requested() """ kpts = np.ascontiguousarray(kpts, dtype=kpts_dtype) - if isinstance(img_or_fpath, str): - return _hesaff.extract_patches_fpath(img_or_fpath, kpts, **kwargs) + if isinstance(img_or_fpath, (str, os.PathLike)): + return _hesaff.extract_patches_fpath(os.fspath(img_or_fpath), kpts, **kwargs) return _hesaff.extract_patches_image(img_or_fpath, kpts, **kwargs) -def extract_desc_from_patches(patch_list): +def extract_desc_from_patches(patch_list: np.ndarray): r""" Careful about the way the patches are extracted here. 
@@ -717,7 +730,7 @@ def extract_desc_from_patches(patch_list): # ============================ -def test_rot_invar(): +def test_rot_invar() -> None: r""" CommandLine: python -m pyhesaff test_rot_invar --show @@ -772,6 +785,7 @@ def test_rot_invar(): # print(vt.kpts_repr(kpts_ripy)) # Verify results plot pt.figure(fnum=fnum, pnum=next_pnum()) + assert imgBGR is not None pt.imshow(imgBGR) # if len(kpts_gv) > 0: # pt.draw_kpts2(kpts_gv, ori=True, ell_color=pt.BLUE, ell_linewidth=10.5) @@ -800,24 +814,24 @@ def test_rot_invar(): pt.show_if_requested() -def vtool_adapt_rotation(img_fpath, kpts): +def vtool_adapt_rotation(img_fpath: str | os.PathLike, kpts: np.ndarray): """rotation invariance in python""" import vtool.patch as ptool import vtool.image as gtool - imgBGR = gtool.imread(img_fpath) + imgBGR = gtool.imread(os.fspath(img_fpath)) kpts2 = ptool.find_kpts_direction(imgBGR, kpts) vecs2 = extract_vecs(img_fpath, kpts2) return kpts2, vecs2 -def adapt_scale(img_fpath, kpts): +def adapt_scale(img_fpath: str | os.PathLike, kpts: np.ndarray): import vtool.ellipse as etool nScales = 16 nSamples = 16 low, high = -1, 2 - kpts2 = etool.adaptive_scale(img_fpath, kpts, nScales, low, high, nSamples) + kpts2 = etool.adaptive_scale(os.fspath(img_fpath), kpts, nScales, low, high, nSamples) # passing in 0 orientation results in gravity vector direction keypoint vecs2 = extract_vecs(img_fpath, kpts2) return kpts2, vecs2 diff --git a/pyhesaff/py.typed b/pyhesaff/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/pyproject.toml b/pyproject.toml index 35ec8700..037b47b0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -182,3 +182,17 @@ indent-style = "space" skip-magic-trailing-comma = false line-ending = "auto" docstring-code-format = false + + +[tool.ty.src] +exclude = [ +] + +[[tool.ty.overrides]] +# Apply the ignore unresolved rules to these files +include = [ + "pyhesaff/_pyhesaff.py", + "pyhesaff/__main__.py", + "pyhesaff/_demodata.py", +] +rules = { 
unresolved-import = "ignore", possibly-missing-attribute = "ignore" } From 98b5c4f9f4661ef89a6af807ae9eede7399de46c Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 19:18:03 -0500 Subject: [PATCH 09/13] Fix pyproject.toml --- pyproject.toml | 154 ++++++++++++++++++++++++++++++++------- requirements/runtime.txt | 11 +++ 2 files changed, 139 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 037b47b0..ccad9a68 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,42 +37,144 @@ classifiers = [ ] [project.optional-dependencies] -nanobind = ["nanobind>=2.0.0"] + +nanobind = [ + "nanobind>=2.0.0", +] + runtime = [ - "numpy>=2.1.0; python_version >= '3.13'", - "numpy>=1.26.0; python_version >= '3.12' and python_version < '3.13'", - "numpy>=1.23.2; python_version >= '3.11' and python_version < '3.12'", - "numpy>=1.21.6; python_version >= '3.10' and python_version < '3.11'", - "numpy>=1.19.3; python_version >= '3.9' and python_version < '3.10'", - "ubelt>=1.3.4", + "numpy>=2.1.0 ; python_version < '4.0' and python_version >= '3.13'", + "numpy>=1.26.0 ; python_version < '3.13' and python_version >= '3.12'", + "numpy>=1.23.2 ; python_version < '3.12' and python_version >= '3.11'", + "numpy>=1.21.6 ; python_version < '3.11' and python_version >= '3.10'", + "numpy>=1.19.3 ; python_version < '3.10' and python_version >= '3.9'", + "numpy>=1.19.3 ; python_version < '3.9' and python_version >= '3.8'", + "ubelt >= 1.3.4", ] + tests = [ - "utool>=2.2.1", - "xdoctest>=1.1.5", - "pytest>=8.1.1; python_version >= '3.11'", - "pytest>=6.2.5; python_version < '3.11'", - "pytest-cov>=3.0.0", - "pytest_timeout>=2.3.1; python_version >= '3.12'", - "pytest_timeout>=1.4.2; python_version < '3.12'", - "coverage>=7.3.0; python_version >= '3.12'", - "coverage>=6.1.1; python_version >= '3.10' and python_version < '3.12'", - "coverage>=5.3.1; python_version < '3.10'", - "requests>=2.27.1", + "utool >= 2.2.1", + "xdoctest >= 1.1.5", + "pytest>=8.1.1 ; 
python_version < '4.0' and python_version >= '3.13'", + "pytest>=8.1.1 ; python_version < '3.13' and python_version >= '3.12'", + "pytest>=8.1.1 ; python_version < '3.12' and python_version >= '3.11'", + "pytest>=6.2.5 ; python_version < '3.11' and python_version >= '3.10'", + "pytest>=6.2.5 ; python_version < '3.10' and python_version >= '3.8'", + "pytest-cov>=3.0.0", + "pytest_timeout>=2.3.1 ; python_version < '4.0' and python_version >= '3.12'", + "pytest_timeout>=1.4.2 ; python_version < '3.12'", + "coverage>=7.3.0 ; python_version < '4.0' and python_version >= '3.12'", + "coverage>=6.1.1 ; python_version < '3.12' and python_version >= '3.10'", + "coverage>=5.3.1 ; python_version < '3.10' and python_version >= '3.9'", + "coverage>=6.1.1 ; python_version < '3.9' and python_version >= '3.8'", + "requests>=2.27.1", ] + headless = [ - "opencv-python-headless>=4.10.0.84; python_version >= '3.13'", - "opencv-python-headless>=4.5.5.64; python_version >= '3.11' and python_version < '3.13'", - "opencv-python-headless>=4.5.4.58; python_version >= '3.10' and python_version < '3.11'", - "opencv-python-headless>=3.4.15.55; python_version >= '3.9' and python_version < '3.10'", + "opencv-python-headless>=4.10.0.84 ; python_version < '4.0' and python_version >= '3.13'", + "opencv-python-headless>=4.5.5.64 ; python_version < '3.13' and python_version >= '3.11'", + "opencv-python-headless>=4.5.4.58 ; python_version < '3.11' and python_version >= '3.10'", + "opencv-python-headless>=3.4.15.55 ; python_version < '3.10' and python_version >= '3.9'", ] + graphics = [ - "opencv-python>=4.10.0.84; python_version >= '3.13'", - "opencv-python>=4.5.5.64; python_version >= '3.11' and python_version < '3.13'", - "opencv-python>=4.5.4.58; python_version >= '3.10' and python_version < '3.11'", - "opencv-python>=3.4.15.55; python_version >= '3.9' and python_version < '3.10'", + "opencv-python>=4.10.0.84 ; python_version < '4.0' and python_version >= '3.13'", + "opencv-python>=4.5.5.64 ; 
python_version < '3.13' and python_version >= '3.11'", + "opencv-python>=4.5.4.58 ; python_version < '3.11' and python_version >= '3.10'", + "opencv-python>=3.4.15.55 ; python_version < '3.10' and python_version >= '3.9'", +] + +docs = [ + "sphinx >= 5.0.1", + "sphinx-autobuild >= 2021.3.14", + "sphinx_rtd_theme >= 1.0.0", + "sphinxcontrib-napoleon >= 0.7", + "sphinx-autoapi >= 1.8.4", + "Pygments >= 2.9.0", + "myst_parser >= 0.18.0", + "sphinx-reredirects >= 0.0.1", +] + +build = [ + "cmake", + "ninja", + "scikit-build", + "setuptools", + "setuptools_scm[toml]", + "wheel", ] + optional = [] +# ---- strict extras: >= -> == (as requested) ---- + +runtime-strict = [ + "numpy==2.1.0 ; python_version < '4.0' and python_version >= '3.13'", + "numpy==1.26.0 ; python_version < '3.13' and python_version >= '3.12'", + "numpy==1.23.2 ; python_version < '3.12' and python_version >= '3.11'", + "numpy==1.21.6 ; python_version < '3.11' and python_version >= '3.10'", + "numpy==1.19.3 ; python_version < '3.10' and python_version >= '3.9'", + "numpy==1.19.3 ; python_version < '3.9' and python_version >= '3.8'", + "ubelt==1.3.4", +] + +tests-strict = [ + "utool==2.2.1", + "xdoctest==1.1.5", + "pytest==8.1.1 ; python_version < '4.0' and python_version >= '3.13'", + "pytest==8.1.1 ; python_version < '3.13' and python_version >= '3.12'", + "pytest==8.1.1 ; python_version < '3.12' and python_version >= '3.11'", + "pytest==6.2.5 ; python_version < '3.11' and python_version >= '3.10'", + "pytest==6.2.5 ; python_version < '3.10' and python_version >= '3.8'", + "pytest-cov==3.0.0", + "pytest_timeout==2.3.1 ; python_version < '4.0' and python_version >= '3.12'", + "pytest_timeout==1.4.2 ; python_version < '3.12'", + "coverage==7.3.0 ; python_version < '4.0' and python_version >= '3.12'", + "coverage==6.1.1 ; python_version < '3.12' and python_version >= '3.10'", + "coverage==5.3.1 ; python_version < '3.10' and python_version >= '3.9'", + "coverage==6.1.1 ; python_version < '3.9' and 
python_version >= '3.8'", + "requests==2.27.1", +] + +headless-strict = [ + "opencv-python-headless==4.10.0.84 ; python_version < '4.0' and python_version >= '3.13'", + "opencv-python-headless==4.5.5.64 ; python_version < '3.13' and python_version >= '3.11'", + "opencv-python-headless==4.5.4.58 ; python_version < '3.11' and python_version >= '3.10'", + "opencv-python-headless==3.4.15.55 ; python_version < '3.10' and python_version >= '3.9'", +] + +graphics-strict = [ + "opencv-python==4.10.0.84 ; python_version < '4.0' and python_version >= '3.13'", + "opencv-python==4.5.5.64 ; python_version < '3.13' and python_version >= '3.11'", + "opencv-python==4.5.4.58 ; python_version < '3.11' and python_version >= '3.10'", + "opencv-python==3.4.15.55 ; python_version < '3.10' and python_version >= '3.9'", +] + +docs-strict = [ + "sphinx==5.0.1", + "sphinx-autobuild==2021.3.14", + "sphinx_rtd_theme==1.0.0", + "sphinxcontrib-napoleon==0.7", + "sphinx-autoapi==1.8.4", + "Pygments==2.9.0", + "myst_parser==0.18.0", + "sphinx-reredirects==0.0.1", +] + +# build.txt has no versions, so strict == non-strict +build-strict = [ + "cmake", + "ninja", + "scikit-build", + "setuptools", + "setuptools_scm[toml]", + "wheel", +] + +optional-strict = [] + + + [tool.mypy] ignore_missing_imports = true exclude = ".*" diff --git a/requirements/runtime.txt b/requirements/runtime.txt index e69de29b..3cc786f4 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -0,0 +1,11 @@ +# 1.19.2 was important for some versions of tensorflow +numpy>=2.1.0 ; python_version < '4.0' and python_version >= '3.13' # Python 3.13+ +numpy>=1.26.0 ; python_version < '3.13' and python_version >= '3.12' # Python 3.12 +numpy>=1.23.2 ; python_version < '3.12' and python_version >= '3.11' # Python 3.11 +numpy>=1.21.6 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10 +numpy>=1.19.3 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 +numpy>=1.19.3 ; python_version < '3.9' 
and python_version >= '3.8' # Python 3.8 + + +ubelt >= 1.3.4 +# smc.freeimage From 7270d18e42b113bc98193a24785e3a710c3d751d Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 19:20:44 -0500 Subject: [PATCH 10/13] wip --- .github/workflows/tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 56979ac6..20d1a013 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -36,9 +36,6 @@ jobs: flake8 ./pyhesaff --count --select=E9,F63,F7,F82 --show-source --statistics - name: Typecheck run: |- - python -m pip install mypy - pip install -r requirements/runtime.txt - mypy ./pyhesaff python -m pip install ty pip install -r requirements/runtime.txt ty check ./pyhesaff From 77dbacda3c259798c6d938ce4979ba8ae5eca385 Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 19:23:16 -0500 Subject: [PATCH 11/13] wip --- pyhesaff/__main__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyhesaff/__main__.py b/pyhesaff/__main__.py index fbbc9e7d..e1d9ed4e 100644 --- a/pyhesaff/__main__.py +++ b/pyhesaff/__main__.py @@ -9,6 +9,7 @@ def detect_feats_main(): import ubelt as ub img_fpath = ub.argval('--fname', default=grab_test_imgpath()) + assert isinstance(img_fpath, str) kwargs = argparse_hesaff_params() print('kwargs = %r' % (kwargs,)) From 48f8f75278905c0c1ede5bb5e89df1234ada943d Mon Sep 17 00:00:00 2001 From: joncrall Date: Tue, 10 Feb 2026 19:44:43 -0500 Subject: [PATCH 12/13] wip --- requirements/runtime.txt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/requirements/runtime.txt b/requirements/runtime.txt index 3cc786f4..3c979f8f 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1,5 +1,6 @@ # 1.19.2 was important for some versions of tensorflow -numpy>=2.1.0 ; python_version < '4.0' and python_version >= '3.13' # Python 3.13+ +numpy>=2.3.4 ; python_version < '4.0' and python_version >= '3.14' # Python 3.14+ +numpy>=2.1.0 ; 
python_version < '3.14' and python_version >= '3.13' # Python 3.13
 numpy>=1.26.0 ; python_version < '3.13' and python_version >= '3.12' # Python 3.12
 numpy>=1.23.2 ; python_version < '3.12' and python_version >= '3.11' # Python 3.11
 numpy>=1.21.6 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10
@@ -7,5 +8,6 @@ numpy>=1.19.3 ; python_version < '3.10' and python_version >= '3.9' # Python
 numpy>=1.19.3 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8
 
 
+
 ubelt >= 1.3.4
 # smc.freeimage

From 6dd50cfb11323fcb6157fcbe678681cece51bab7 Mon Sep 17 00:00:00 2001
From: joncrall
Date: Tue, 10 Feb 2026 20:17:50 -0500
Subject: [PATCH 13/13] wip

---
 pyproject.toml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index ccad9a68..79b60413 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,7 +43,8 @@ nanobind = [
 ]
 
 runtime = [
-    "numpy>=2.1.0 ; python_version < '4.0' and python_version >= '3.13'",
+    "numpy>=2.3.4 ; python_version < '4.0' and python_version >= '3.14'",
+    "numpy>=2.1.0 ; python_version < '3.14' and python_version >= '3.13'",
     "numpy>=1.26.0 ; python_version < '3.13' and python_version >= '3.12'",
     "numpy>=1.23.2 ; python_version < '3.12' and python_version >= '3.11'",
     "numpy>=1.21.6 ; python_version < '3.11' and python_version >= '3.10'",
@@ -109,7 +110,8 @@ optional = []
 # ---- strict extras: >= -> == (as requested) ----
 
 runtime-strict = [
-    "numpy==2.1.0 ; python_version < '4.0' and python_version >= '3.13'",
+    "numpy==2.3.4 ; python_version < '4.0' and python_version >= '3.14'",
+    "numpy==2.1.0 ; python_version < '3.14' and python_version >= '3.13'",
     "numpy==1.26.0 ; python_version < '3.13' and python_version >= '3.12'",
     "numpy==1.23.2 ; python_version < '3.12' and python_version >= '3.11'",
     "numpy==1.21.6 ; python_version < '3.11' and python_version >= '3.10'",