From 261dbb462ea56350fc0420bfc80f0b60a8211141 Mon Sep 17 00:00:00 2001 From: Robert Tuck Date: Wed, 10 Jul 2024 15:44:21 +0100 Subject: [PATCH] (#378) Update to version v2.1.0 of copier template from v1.0.0 --- .copier-answers.yml | 4 +- .devcontainer/devcontainer.json | 54 ++-- .github/CONTRIBUTING.md | 27 ++ .github/CONTRIBUTING.rst | 35 --- .../actions/install_requirements/action.yml | 66 ++--- .github/dependabot.yml | 8 +- .github/pages/index.html | 2 +- .github/pages/make_switcher.py | 29 +-- .github/workflows/_check.yml | 27 ++ .github/workflows/_dist.yml | 36 +++ .github/workflows/{docs.yml => _docs.yml} | 31 ++- .github/workflows/_release.yml | 32 +++ .github/workflows/_test.yml | 62 +++++ .github/workflows/_tox.yml | 22 ++ .github/workflows/ci.yml | 52 ++++ .github/workflows/code.yml | 243 ------------------ .github/workflows/docs_clean.yml | 43 ---- .github/workflows/linkcheck.yml | 29 --- .github/workflows/periodic.yml | 13 + .gitignore | 1 - .pre-commit-config.yaml | 18 +- .vscode/extensions.json | 7 +- .vscode/launch.json | 8 +- .vscode/settings.json | 15 +- .vscode/tasks.json | 2 +- Dockerfile | 28 +- README.md | 39 +++ README.rst | 62 ----- docs/conf.py | 30 +-- docs/developer/explanations/decisions.rst | 17 -- .../0001-record-architecture-decisions.rst | 26 -- .../0002-switched-to-pip-skeleton.rst | 35 --- docs/developer/how-to/build-docs.rst | 38 --- docs/developer/how-to/contribute.rst | 1 - docs/developer/how-to/lint.rst | 39 --- docs/developer/how-to/make-release.rst | 31 --- docs/developer/how-to/pin-requirements.rst | 74 ------ docs/developer/how-to/run-tests.rst | 12 - docs/developer/how-to/static-analysis.rst | 8 - docs/developer/how-to/test-container.rst | 25 -- docs/developer/how-to/update-tools.rst | 16 -- docs/developer/index.rst | 69 ----- docs/developer/tutorials/dev-install.rst | 68 ----- docs/explanations.md | 10 + docs/explanations/decisions.md | 12 + .../0001-record-architecture-decisions.md | 18 ++ ...0002-switched-to-python-copier-template.md | 28 ++ docs/explanations/decisions/COPYME | 19 ++ docs/genindex.md | 3 + docs/genindex.rst | 5 - docs/how-to.md | 10 + docs/how-to/build-docs.md | 39 +++ docs/how-to/contribute.md | 2 + docs/how-to/coverage.md | 8 + .../how-to/create-beamline.rst | 0 docs/how-to/dev-install.md | 56 ++++ docs/how-to/excalidraw.md | 19 ++ docs/how-to/lint.md | 34 +++ docs/how-to/lock-requirements.md | 39 +++ .../how-to/make-new-ophyd-async-device.rst | 0 docs/how-to/make-release.md | 32 +++ docs/{developer => }/how-to/move-code.rst | 0 docs/how-to/pypi.md | 24 ++ docs/how-to/run-tests.md | 20 ++ docs/how-to/static-analysis.md | 7 + docs/how-to/update-template.md | 9 + docs/{developer => }/how-to/zocalo.rst | 0 docs/images/dls-favicon.ico | Bin 99678 -> 0 bytes docs/index.md | 56 ++++ docs/index.rst | 29 --- docs/reference.md | 12 + .../reference/api.rst => reference/api.md} | 19 +- .../reference/device-standards.rst | 0 docs/{developer => }/reference/standards.rst | 14 +- docs/tutorials.md | 10 + docs/{user => }/tutorials/get_started.rst | 0 docs/tutorials/installation.md | 42 +++ docs/user/explanations/docs-structure.rst | 18 -- docs/user/how-to/run-container.rst | 15 -- docs/user/index.rst | 58 ----- docs/user/tutorials/installation.rst | 38 --- pyproject.toml | 27 +- src/dodal/__init__.py | 10 +- tests/conftest.py | 15 +- 84 files changed, 978 insertions(+), 1263 deletions(-) create mode 100644 .github/CONTRIBUTING.md delete mode 100644 .github/CONTRIBUTING.rst create mode 100644 .github/workflows/_check.yml create mode 100644 
.github/workflows/_dist.yml rename .github/workflows/{docs.yml => _docs.yml} (67%) create mode 100644 .github/workflows/_release.yml create mode 100644 .github/workflows/_test.yml create mode 100644 .github/workflows/_tox.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/code.yml delete mode 100644 .github/workflows/docs_clean.yml delete mode 100644 .github/workflows/linkcheck.yml create mode 100644 .github/workflows/periodic.yml create mode 100644 README.md delete mode 100644 README.rst delete mode 100644 docs/developer/explanations/decisions.rst delete mode 100644 docs/developer/explanations/decisions/0001-record-architecture-decisions.rst delete mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst delete mode 100644 docs/developer/how-to/build-docs.rst delete mode 100644 docs/developer/how-to/contribute.rst delete mode 100644 docs/developer/how-to/lint.rst delete mode 100644 docs/developer/how-to/make-release.rst delete mode 100644 docs/developer/how-to/pin-requirements.rst delete mode 100644 docs/developer/how-to/run-tests.rst delete mode 100644 docs/developer/how-to/static-analysis.rst delete mode 100644 docs/developer/how-to/test-container.rst delete mode 100644 docs/developer/how-to/update-tools.rst delete mode 100644 docs/developer/index.rst delete mode 100644 docs/developer/tutorials/dev-install.rst create mode 100644 docs/explanations.md create mode 100644 docs/explanations/decisions.md create mode 100644 docs/explanations/decisions/0001-record-architecture-decisions.md create mode 100644 docs/explanations/decisions/0002-switched-to-python-copier-template.md create mode 100644 docs/explanations/decisions/COPYME create mode 100644 docs/genindex.md delete mode 100644 docs/genindex.rst create mode 100644 docs/how-to.md create mode 100644 docs/how-to/build-docs.md create mode 100644 docs/how-to/contribute.md create mode 100644 docs/how-to/coverage.md rename docs/{developer => }/how-to/create-beamline.rst (100%) create mode 100644 docs/how-to/dev-install.md create mode 100644 docs/how-to/excalidraw.md create mode 100644 docs/how-to/lint.md create mode 100644 docs/how-to/lock-requirements.md rename docs/{developer => }/how-to/make-new-ophyd-async-device.rst (100%) create mode 100644 docs/how-to/make-release.md rename docs/{developer => }/how-to/move-code.rst (100%) create mode 100644 docs/how-to/pypi.md create mode 100644 docs/how-to/run-tests.md create mode 100644 docs/how-to/static-analysis.md create mode 100644 docs/how-to/update-template.md rename docs/{developer => }/how-to/zocalo.rst (100%) delete mode 100644 docs/images/dls-favicon.ico create mode 100644 docs/index.md delete mode 100644 docs/index.rst create mode 100644 docs/reference.md rename docs/{user/reference/api.rst => reference/api.md} (60%) rename docs/{developer => }/reference/device-standards.rst (100%) rename docs/{developer => }/reference/standards.rst (89%) create mode 100644 docs/tutorials.md rename docs/{user => }/tutorials/get_started.rst (100%) create mode 100644 docs/tutorials/installation.md delete mode 100644 docs/user/explanations/docs-structure.rst delete mode 100644 docs/user/how-to/run-container.rst delete mode 100644 docs/user/index.rst delete mode 100644 docs/user/tutorials/installation.rst diff --git a/.copier-answers.yml b/.copier-answers.yml index b60966cd59..bd334579a2 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,5 +1,5 @@ # Changes here will be overwritten by Copier -_commit: 1.0.0 +_commit: 2.1.0 _src_path: 
gh:DiamondLightSource/python-copier-template author_email: dominic.oram@diamond.ac.uk author_name: Dominic Oram @@ -11,4 +11,6 @@ docs_type: sphinx git_platform: github.com github_org: DiamondLightSource package_name: dodal +pypi: false repo_name: dodal +type_checker: pyright diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 44de8d36af..79b85ff41a 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,52 +3,44 @@ "name": "Python 3 Developer Container", "build": { "dockerfile": "../Dockerfile", - "target": "build", - // Only upgrade pip, we will install the project below - "args": { - "PIP_OPTIONS": "--upgrade pip" - } + "target": "developer" }, "remoteEnv": { + // Allow X11 apps to run inside the container "DISPLAY": "${localEnv:DISPLAY}" }, - // Add the URLs of features you want added when the container is built. - "features": { - "ghcr.io/devcontainers/features/common-utils:1": { - "username": "none", - "upgradePackages": false - } - }, - // Set *default* container specific settings.json values on container create. - "settings": { - "python.defaultInterpreterPath": "/venv/bin/python" - }, "customizations": { "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, // Add the IDs of extensions you want installed when the container is created. "extensions": [ "ms-python.python", + "github.vscode-github-actions", "tamasfe.even-better-toml", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "charliermarsh.ruff", + "ms-azuretools.vscode-docker" ] } }, - // Make sure the files we are mapping into the container exist on the host - "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "features": { + // Some default things like git config + "ghcr.io/devcontainers/features/common-utils:2": { + "upgradePackages": false + } + }, "runArgs": [ + // Allow the container to access the host X11 display and EPICS CA "--net=host", - "--security-opt=label=type:container_runtime_t" - ], - "mounts": [ - "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", - "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", - // map in home directory - not strictly necessary but useful - "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + // Make sure SELinux does not disable with access to host filesystems like tmp + "--security-opt=label=disable" ], - // make the workspace folder the same inside and outside of the container - "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", - "workspaceFolder": "${localWorkspaceFolder}", + // Mount the parent as /workspaces so we can pip install peers as editable + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", // After the container is created, install the python project in editable form - "postCreateCommand": "pip install -e '.[dev]'" -} + "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" +} \ No newline at end of file diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..0f26801edf --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# Contribute to the project + +Contributions and issues are most welcome! 
All issues and pull requests are
+handled through [GitHub](https://github.com/DiamondLightSource/dodal/issues). Also, please check for any existing issues before
+filing a new one. If you have a great idea but it involves big changes, please
+file a ticket before making a pull request! We want to make sure you don't spend
+your time coding something that might not fit the scope of the project.
+
+## Issue or Discussion?
+
+GitHub also offers [discussions](https://github.com/DiamondLightSource/dodal/discussions) as a place to ask questions and share ideas. If
+your issue is open ended and it is not obvious when it can be "closed", please
+raise it as a discussion instead.
+
+## Code Coverage
+
+While 100% code coverage does not make a library bug-free, it significantly
+reduces the number of easily caught bugs! Please make sure coverage remains the
+same or is improved by a pull request!
+
+## Developer Information
+
+It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs.
+
+This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects.
+
+For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.1.0/how-to.html).
diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst
deleted file mode 100644
index 450b4b8354..0000000000
--- a/.github/CONTRIBUTING.rst
+++ /dev/null
@@ -1,35 +0,0 @@
-Contributing to the project
-===========================
-
-Contributions and issues are most welcome! All issues and pull requests are
-handled through GitHub_. Also, please check for any existing issues before
-filing a new one. If you have a great idea but it involves big changes, please
-file a ticket before making a pull request! We want to make sure you don't spend
-your time coding something that might not fit the scope of the project.
-
-.. _GitHub: https://github.com/DiamondLightSource/dodal/issues
-
-Issue or Discussion?
---------------------
-
-Github also offers discussions_ as a place to ask questions and share ideas. If
-your issue is open ended and it is not obvious when it can be "closed", please
-raise it as a discussion instead.
-
-.. _discussions: https://github.com/DiamondLightSource/dodal/discussions
-
-Code coverage
--------------
-
-While 100% code coverage does not make a library bug-free, it significantly
-reduces the number of easily caught bugs! Please make sure coverage remains the
-same or is improved by a pull request!
-
-Developer guide
----------------
-
-The `Developer Guide`_ contains information on setting up a development
-environment, running the tests and what standards the code and documentation
-should follow.
-
-.. 
_Developer Guide: https://DiamondLightSource.github.io/dodal/main/developer/how-to/contribute.html diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index 79d1a71eef..d33e080527 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -1,60 +1,34 @@ name: Install requirements -description: Run pip install with requirements and upload resulting requirements +description: Install a version of python then call pip install and report what was installed inputs: - requirements_file: - description: Name of requirements file to use and upload - required: true - install_options: + python-version: + description: Python version to install, default is from Dockerfile + default: "dev" + pip-install: description: Parameters to pass to pip install - required: true - artifact_name: - description: A user friendly name to give the produced artifacts - required: true - python_version: - description: Python version to install - default: "3.x" + default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]" runs: using: composite - steps: + - name: Get version of python + run: | + PYTHON_VERSION="${{ inputs.python-version }}" + if [ $PYTHON_VERSION == "dev" ]; then + PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile) + fi + echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV" + shell: bash + - name: Setup python uses: actions/setup-python@v5 with: - python-version: ${{ inputs.python_version }} + python-version: ${{ env.PYTHON_VERSION }} - - name: Pip install - run: | - touch ${{ inputs.requirements_file }} - # -c uses requirements.txt as constraints, see 'Validate requirements file' - pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + - name: Install packages + run: pip install ${{ inputs.pip-install }} shell: bash - - name: Create lockfile - run: | - mkdir -p lockfiles - pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} - # delete the self referencing line and make sure it isn't blank - sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} - shell: bash - - - name: Upload lockfiles - uses: actions/upload-artifact@v4.0.0 - with: - name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} - path: lockfiles - - # This eliminates the class of problems where the requirements being given no - # longer match what the packages themselves dictate. E.g. In the rare instance - # where I install some-package which used to depend on vulnerable-dependency - # but now uses good-dependency (despite being nominally the same version) - # pip will install both if given a requirements file with -r - - name: If requirements file exists, check it matches pip installed packages - run: | - if [ -s ${{ inputs.requirements_file }} ]; then - if ! 
diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then - echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive" - exit 1 - fi - fi + - name: Report what was installed + run: pip freeze shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2d1af8738d..184ba3631a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,11 +10,15 @@ updates: schedule: interval: "weekly" groups: - github-artifacts: + actions: patterns: - - actions/*-artifact + - "*" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" + groups: + dev-dependencies: + patterns: + - "*" diff --git a/.github/pages/index.html b/.github/pages/index.html index c495f39f2f..80f0a00912 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -8,4 +8,4 @@ - + \ No newline at end of file diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index ae227ab7fd..29f646c3ac 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -3,28 +3,27 @@ from argparse import ArgumentParser from pathlib import Path from subprocess import CalledProcessError, check_output -from typing import List, Optional -def report_output(stdout: bytes, label: str) -> List[str]: +def report_output(stdout: bytes, label: str) -> list[str]: ret = stdout.decode().strip().split("\n") print(f"{label}: {ret}") return ret -def get_branch_contents(ref: str) -> List[str]: +def get_branch_contents(ref: str) -> list[str]: """Get the list of directories in a branch.""" stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref]) return report_output(stdout, "Branch contents") -def get_sorted_tags_list() -> List[str]: +def get_sorted_tags_list() -> list[str]: """Get a list of sorted tags in descending order from the repository.""" stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"]) return report_output(stdout, "Tags list") -def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]: +def get_versions(ref: str, add: str | None) -> list[str]: """Generate the file containing the list of all GitHub Pages builds.""" # Get the directories (i.e. 
builds) from the GitHub Pages branch try: @@ -36,15 +35,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st # Add and remove from the list of builds if add: builds.add(add) - if remove: - assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" - builds.remove(remove) # Get a sorted list of tags tags = get_sorted_tags_list() # Make the sorted versions list from main branches and tags - versions: List[str] = [] + versions: list[str] = [] for version in ["master", "main"] + tags: if version in builds: versions.append(version) @@ -58,9 +54,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st def write_json(path: Path, repository: str, versions: str): org, repo_name = repository.split("/") + pages_url = f"https://{org}.github.io" + if repo_name != f"{org}.github.io": + # Only add the repo name if it isn't the source for the org pages site + pages_url += f"/{repo_name}" struct = [ - {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} - for version in versions + {"version": version, "url": f"{pages_url}/{version}/"} for version in versions ] text = json.dumps(struct, indent=2) print(f"JSON switcher:\n{text}") @@ -69,16 +68,12 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): parser = ArgumentParser( - description="Make a versions.txt file from gh-pages directories" + description="Make a versions.json file from gh-pages directories" ) parser.add_argument( "--add", help="Add this directory to the list of existing directories", ) - parser.add_argument( - "--remove", - help="Remove this directory from the list of existing directories", - ) parser.add_argument( "repository", help="The GitHub org and repository name: ORG/REPO", @@ -91,7 +86,7 @@ def main(args=None): args = parser.parse_args(args) # Write the versions file - versions = get_versions("origin/gh-pages", args.add, args.remove) + versions = get_versions("origin/gh-pages", args.add) write_json(args.output, args.repository, versions) diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml new file mode 100644 index 0000000000..a6139c19fe --- /dev/null +++ b/.github/workflows/_check.yml @@ -0,0 +1,27 @@ +on: + workflow_call: + outputs: + branch-pr: + description: The PR number if the branch is in one + value: ${{ jobs.pr.outputs.branch-pr }} + +jobs: + pr: + runs-on: "ubuntu-latest" + outputs: + branch-pr: ${{ steps.script.outputs.result }} + steps: + - uses: actions/github-script@v7 + id: script + if: github.event_name == 'push' + with: + script: | + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + head: context.repo.owner + ':${{ github.ref_name }}' + }) + if (prs.data.length) { + console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`) + return prs.data[0]["number"] + } diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml new file mode 100644 index 0000000000..b1c4c93c3b --- /dev/null +++ b/.github/workflows/_dist.yml @@ -0,0 +1,36 @@ +on: + workflow_call: + +jobs: + build: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: > + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - 
name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install produced wheel + uses: ./.github/actions/install_requirements + with: + pip-install: dist/*.whl + + - name: Test module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: python -m $(ls --hide='*.egg-info' src | head -1) --version diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml similarity index 67% rename from .github/workflows/docs.yml rename to .github/workflows/_docs.yml index d6e4b0e5f4..40446e332b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/_docs.yml @@ -1,17 +1,13 @@ -name: Docs CI - on: - push: - pull_request: + workflow_call: jobs: - docs: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + build: runs-on: ubuntu-latest steps: - name: Avoid git conflicts when tag and branch pushed at same time - if: startsWith(github.ref, 'refs/tags') + if: github.ref_type == 'tag' run: sleep 60 - name: Checkout @@ -21,20 +17,23 @@ jobs: fetch-depth: 0 - name: Install system packages - # Can delete this if you don't use graphviz in your docs run: sudo apt-get install graphviz - name: Install python packages uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - python_version: "3.11" - artifact_name: docs - name: Build docs run: tox -e docs + - name: Remove environment.pickle + run: rm build/html/.doctrees/environment.pickle + + - name: Upload built docs artifact + uses: actions/upload-artifact@v4 + with: + name: docs + path: build + - name: Sanitize ref name for docs version run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV @@ -45,11 +44,11 @@ jobs: run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 + uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages - keep_files: true + keep_files: true \ No newline at end of file diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml new file mode 100644 index 0000000000..e55efdb37f --- /dev/null +++ b/.github/workflows/_release.yml @@ -0,0 +1,32 @@ +on: + workflow_call: + +jobs: + artifacts: + runs-on: ubuntu-latest + + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Zip up docs + run: | + set -vxeuo pipefail + if [ -d html ]; then + mv html $GITHUB_REF_NAME + zip -r docs.zip $GITHUB_REF_NAME + rm -rf $GITHUB_REF_NAME + fi + + - name: Create GitHub Release + # We pin to the SHA, not the tag, for security reasons. 
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: "*" + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml new file mode 100644 index 0000000000..f652d4145f --- /dev/null +++ b/.github/workflows/_test.yml @@ -0,0 +1,62 @@ +on: + workflow_call: + inputs: + python-version: + type: string + description: The version of python to install + required: true + runs-on: + type: string + description: The runner to run this job on + required: true + secrets: + CODECOV_TOKEN: + required: true + +env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + +jobs: + run: + runs-on: ${{ inputs.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - if: inputs.python-version == 'dev' + name: Install dev versions of python packages + uses: ./.github/actions/install_requirements + + - if: inputs.python-version == 'dev' + name: Write the requirements as an artifact + run: pip freeze --exclude-editable > /tmp/dev-requirements.txt + + - if: inputs.python-version == 'dev' + name: Upload dev-requirements.txt + uses: actions/upload-artifact@v4 + with: + name: dev-requirements + path: /tmp/dev-requirements.txt + + - if: inputs.python-version != 'dev' + name: Install latest versions of python packages + uses: ./.github/actions/install_requirements + with: + python-version: ${{ inputs.python-version }} + pip-install: ".[dev]" + + - name: Run tests + run: tox -e tests + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + name: ${{ inputs.python-version }}/${{ inputs.runs-on }} + files: cov.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml new file mode 100644 index 0000000000..a13536d3a7 --- /dev/null +++ b/.github/workflows/_tox.yml @@ -0,0 +1,22 @@ +on: + workflow_call: + inputs: + tox: + type: string + description: What to run under tox + required: true + + +jobs: + run: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + + - name: Run tox + run: tox -e ${{ inputs.tox }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..ffea7ddd96 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,52 @@ +name: CI + +on: + push: + pull_request: + +jobs: + check: + uses: ./.github/workflows/_check.yml + + lint: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_tox.yml + with: + tox: pre-commit,type-checking + + test: + needs: check + if: needs.check.outputs.branch-pr == '' + strategy: + matrix: + runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest + python-version: ["3.10", "3.11"] + include: + # Include one that runs in the dev environment + - runs-on: "ubuntu-latest" + python-version: "dev" + fail-fast: false + uses: ./.github/workflows/_test.yml + with: + runs-on: ${{ matrix.runs-on }} + python-version: ${{ matrix.python-version }} + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + 
docs: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_docs.yml + + dist: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_dist.yml + + release: + if: github.ref_type == 'tag' + needs: [dist, docs] + uses: ./.github/workflows/_release.yml + permissions: + contents: write diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml deleted file mode 100644 index c0e734d973..0000000000 --- a/.github/workflows/code.yml +++ /dev/null @@ -1,243 +0,0 @@ -name: Code CI - -on: - push: - pull_request: -env: - # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.11" - DIST_WHEEL_PATH: dist-${{ github.sha }} - -jobs: - lint: - # pull requests are a duplicate of a branch push if within the same repo. - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: lint - - - name: Lint - run: tox -e pre-commit,mypy - - test: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.10", "3.11"] - install: ["-e .[dev]"] - # Make one version be non-editable to test both paths of version code - include: - - os: "ubuntu-latest" - python: "3.10" - install: ".[dev]" - - runs-on: ${{ matrix.os }} - env: - # https://github.com/pytest-dev/pytest/issues/2042 - PY_IGNORE_IMPORTMISMATCH: "1" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt - install_options: ${{ matrix.install }} - artifact_name: tests - - - name: List dependency tree - run: pipdeptree - - - name: Run tests - run: tox -e pytest - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - name: ${{ matrix.python }}/${{ matrix.os }} - files: cov.xml - - dist: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: "ubuntu-latest" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Build sdist and wheel - run: | - export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ - pipx run build - - - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4.0.0 - with: - name: ${{ env.DIST_WHEEL_PATH }} - path: dist - - - name: Check for packaging errors - run: pipx run twine check --strict dist/* - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{env.CONTAINER_PYTHON}} - requirements_file: requirements.txt - install_options: dist/*.whl - artifact_name: dist - - - name: Test module --version works using the installed wheel - # If more than one module in src/ replace with module name to test - run: python -m $(ls --hide='*.egg-info' src | head -1) --version - - container: - needs: [lint, dist, test] - runs-on: 
ubuntu-latest - - permissions: - contents: read - packages: write - - env: - TEST_TAG: "testing" - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # image names must be all lower case - - name: Generate image repo name - run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV - - - name: Set lockfile location in environment - run: | - echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV - - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Log in to GitHub Docker Registry - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - - name: Build and export to Docker local cache - uses: docker/build-push-action@v5 - with: - # Note build-args, context, file, and target must all match between this - # step and the later build-push-action, otherwise the second build-push-action - # will attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - load: true - tags: ${{ env.TEST_TAG }} - # If you have a long docker build (2+ minutes), uncomment the - # following to turn on caching. For short build times this - # makes it a little slower - #cache-from: type=gha - #cache-to: type=gha,mode=max - - - name: Test cli works in cached runtime image - run: docker run docker.io/library/${{ env.TEST_TAG }} --version - - - name: Create tags for publishing image - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGE_REPOSITORY }} - tags: | - type=ref,event=tag - type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} - # type=edge,branch=main - # Add line above to generate image for every commit to given branch, - # and uncomment the end of if clause in next step - - - name: Push cached image to container registry - if: github.ref_type == 'tag' # || github.ref_name == 'main' - uses: docker/build-push-action@v5 - # This does not build the image again, it will find the image in the - # Docker cache and publish it - with: - # Note build-args, context, file, and target must all match between this - # step and the previous build-push-action, otherwise this step will - # attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - release: - # upload to PyPI and make a release on every tag - needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} - runs-on: ubuntu-latest - env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} - - steps: - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Fixup blank lockfiles - # Github release artifacts can't be blank - run: for f in ${{ env.DIST_LOCKFILE_PATH }}/*; do [ -s $f ] || echo '# No requirements' >> $f; done - - - name: Github Release - # We pin to the SHA, not the tag, for security reasons. 
- # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 - with: - prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} - files: | - ${{ env.DIST_WHEEL_PATH }}/* - ${{ env.DIST_LOCKFILE_PATH }}/* - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Publish to PyPI - if: ${{ env.HAS_PYPI_TOKEN }} - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml deleted file mode 100644 index e324640e78..0000000000 --- a/.github/workflows/docs_clean.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docs Cleanup CI - -# delete branch documentation when a branch is deleted -# also allow manually deleting a documentation version -on: - delete: - workflow_dispatch: - inputs: - version: - description: "documentation version to DELETE" - required: true - type: string - -jobs: - remove: - if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - - name: removing documentation for branch ${{ github.event.ref }} - if: ${{ github.event_name != 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV - - - name: manually removing documentation version ${{ github.event.inputs.version }} - if: ${{ github.event_name == 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV - - - name: Sanitize ref name for docs version - run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - - - name: update index and push changes - run: | - rm -r $DOCS_VERSION - python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json - git config --global user.name 'GitHub Actions Docs Cleanup CI' - git config --global user.email 'GithubActionsCleanup@noreply.github.com' - git commit -am "Removing redundant docs version $DOCS_VERSION" - git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml deleted file mode 100644 index ed9cf57f3e..0000000000 --- a/.github/workflows/linkcheck.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Link Check - -on: - workflow_dispatch: - schedule: - # Run weekly to check URL links still resolve - - cron: "0 8 * * WED" - -jobs: - docs: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - python_version: "3.11" - artifact_name: link_check - - - name: Check links - run: tox -e docs build -- -b linkcheck - - - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml new file mode 100644 index 0000000000..e2a0fd1b9e --- /dev/null +++ b/.github/workflows/periodic.yml @@ -0,0 +1,13 @@ +name: Periodic + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + linkcheck: + uses: ./.github/workflows/_tox.yml + with: + tox: docs build -- -b linkcheck diff --git a/.gitignore b/.gitignore index 2d6cfd2fe2..6802e489a6 100644 --- a/.gitignore +++ 
b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # Distribution / packaging .Python env/ -.venv build/ develop-eggs/ dist/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 100add9b66..5a4cbf7b41 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-added-large-files - id: check-yaml @@ -8,16 +8,16 @@ repos: - repo: local hooks: - - id: black - name: Run black - stages: [commit] + - id: ruff + name: lint with ruff language: system - entry: black + entry: ruff check --force-exclude types: [python] + require_serial: true - - id: ruff - name: Run ruff - stages: [commit] + - id: ruff-format + name: format with ruff language: system - entry: ruff check + entry: ruff format --force-exclude types: [python] + require_serial: true diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a1227b3482..66ad6324d0 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,10 +1,5 @@ { "recommendations": [ "ms-vscode-remote.remote-containers", - "ms-python.python", - "tamasfe.even-better-toml", - "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters", - "charliermarsh.Ruff" ] -} +} \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 6a06170c13..27c9bb2659 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,7 @@ "configurations": [ { "name": "Debug Unit Test", - "type": "python", + "type": "debugpy", "request": "launch", "justMyCode": false, "program": "${file}", @@ -16,10 +16,8 @@ "console": "integratedTerminal", "preLaunchTask": "load_dials_env", "env": { - // The default config in pyproject.toml's "[tool.pytest.ini_options]" adds coverage. - // Cannot have coverage and debugging at the same time. - // https://github.com/microsoft/vscode-python/issues/693 - "PYTEST_ADDOPTS": "--no-cov" + // Enable break on exception when debugging tests (see: tests/conftest.py) + "PYTEST_RAISE": "1", }, } ] diff --git a/.vscode/settings.json b/.vscode/settings.json index a0b0329bf0..39d49887e1 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,19 +1,12 @@ { - "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": false, - "python.linting.mypyEnabled": true, - "python.linting.enabled": true, - "python.testing.pytestArgs": [ - "--cov=dodal", - "--cov-report", - "xml:cov.xml" - ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, - "python.formatting.provider": "black", - "python.languageServer": "Pylance", "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + }, "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", "editor.rulers": [ 88 ], diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4ac17c12eb..1b21c0fb27 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -21,4 +21,4 @@ "type": "shell" } ] -} +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index a7cf36f3bb..c4404ecabb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,13 @@ -# This file is for use as a devcontainer and a runtime container -# -# The devcontainer should use the build target and run as root with podman +# The devcontainer should use the developer target and run as root with podman # or docker with user namespaces. -# -FROM python:3.11 as build +ARG PYTHON_VERSION=3.11 +FROM python:${PYTHON_VERSION} as developer -ARG PIP_OPTIONS=. 
+# Add any system dependencies for the developer/build environment here
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ graphviz \
+ && rm -rf /var/lib/apt/lists/*
-# Add any system dependencies for the developer/build environment here e.g.
-# RUN apt-get update && apt-get upgrade -y && \
-# apt-get install -y --no-install-recommends \
-# desired-packages \
-# && rm -rf /var/lib/apt/lists/*
-
-# set up a virtual environment and put it in PATH
+# Set up a virtual environment and put it in PATH
RUN python -m venv /venv
ENV PATH=/venv/bin:$PATH
-
-# Copy any required context for the pip install over
-COPY . /context
-WORKDIR /context
-
-# install python package into /venv
-RUN pip install ${PIP_OPTIONS}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..6bb0d1dc8a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,39 @@
+[![CI](https://github.com/DiamondLightSource/dodal/actions/workflows/ci.yml/badge.svg)](https://github.com/DiamondLightSource/dodal/actions/workflows/ci.yml)
+[![Coverage](https://codecov.io/gh/DiamondLightSource/dodal/branch/main/graph/badge.svg)](https://codecov.io/gh/DiamondLightSource/dodal)
+
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+
+# dodal
+
+Ophyd devices and other utils that could be used across DLS beamlines
+
+Source | <https://github.com/DiamondLightSource/dodal>
+:---: | :---:
+Documentation | <https://diamondlightsource.github.io/dodal>
+Releases | <https://github.com/DiamondLightSource/dodal/releases>
+
+Testing Connectivity
+--------------------
+
+You can test your connection to a beamline if its PVs are visible to your machine with:
+
+
+```
+ # On any workstation:
+ dodal connect
+
+ # On a beamline workstation, this should suffice:
+ dodal connect ${BEAMLINE}
+```
+
+
+
+For more options, including a list of valid beamlines, type
+
+```
+ dodal connect --help
+```
+
+
+
+See https://diamondlightsource.github.io/dodal for more detailed documentation.
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 48c5a14b92..0000000000
--- a/README.rst
+++ /dev/null
@@ -1,62 +0,0 @@
-dodal
-=============================================================================
-
-|code_ci| |docs_ci| |coverage| |pypi_version| |license|
-
-
-Ophyd devices and other utils that could be used across DLS beamlines
-
-============== ==============================================================
-PyPI ``pip install dls-dodal``
-Source code https://github.com/DiamondLightSource/dodal
-Documentation https://DiamondLightSource.github.io/dodal
-Releases https://github.com/DiamondLightSource/dodal/releases
-============== ==============================================================
-
-Testing Connectivity
---------------------
-
-You can test your connection to a beamline if it's PVs are visible to your machine with:
-
-.. code:: shell
-
- # On any workstation:
- dodal connect
-
- # On a beamline workstation, this should suffice:
- dodal connect ${BEAMLINE}
-
-
-For more options, including a list of valid beamlines, type
-
-.. code:: shell
-
- dodal connect --help
-
-
-.. |code_ci| image:: https://github.com/DiamondLightSource/dodal/actions/workflows/code.yml/badge.svg?branch=main
- :target: https://github.com/DiamondLightSource/dodal/actions/workflows/code.yml
- :alt: Code CI
-
-.. |docs_ci| image:: https://github.com/DiamondLightSource/dodal/actions/workflows/docs.yml/badge.svg?branch=main
- :target: https://github.com/DiamondLightSource/dodal/actions/workflows/docs.yml
- :alt: Docs CI
-
-.. 
|coverage| image:: https://codecov.io/gh/DiamondLightSource/dodal/branch/main/graph/badge.svg - :target: https://codecov.io/gh/DiamondLightSource/dodal - :alt: Test Coverage - -.. |pypi_version| image:: https://img.shields.io/pypi/v/dls-dodal.svg - :target: https://pypi.org/project/dls-dodal - :alt: Latest PyPI version - -.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg - :target: https://opensource.org/licenses/Apache-2.0 - :alt: Apache License - - -.. - Anything below this line is used when viewing README.rst and will be replaced - when included in index.rst - -See https://DiamondLightSource.github.io/dodal for more detailed documentation. diff --git a/docs/conf.py b/docs/conf.py index f6c463527d..5d29e18465 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,8 +50,13 @@ "sphinxcontrib.mermaid", # Signatures from type hinting "sphinx_autodoc_typehints", + # So we can write markdown files + "myst_parser", ] +# So we can use the ::: syntax +myst_enable_extensions = ["colon_fence"] + # If true, Sphinx will warn about all references where the target cannot # be found. nitpicky = True @@ -114,15 +119,6 @@ # A dictionary of graphviz graph attributes for inheritance diagrams. inheritance_graph_attrs = {"rankdir": "TB"} -# Common links that should be available on every page -rst_epilog = """ -.. _Diamond Light Source: http://www.diamond.ac.uk -.. _black: https://github.com/psf/black -.. _ruff: https://beta.ruff.rs/docs/ -.. _mypy: http://mypy-lang.org/ -.. _pre-commit: https://pre-commit.com/ -""" - # Ignore localhost links for periodic check that links in docs are valid linkcheck_ignore = [r"http://localhost:\d+/"] @@ -153,10 +149,10 @@ # Theme options for pydata_sphinx_theme # We don't check switcher because there are 3 possible states for a repo: # 1. New project, docs are not published so there is no switcher -# 2. Existing project with latest skeleton, switcher exists and works -# 3. Existing project with old skeleton that makes broken switcher, +# 2. Existing project with latest copier template, switcher exists and works +# 3. Existing project with old copier template that makes broken switcher, # switcher exists but is broken -# Point 3 makes checking switcher difficult, because the updated skeleton +# Point 3 makes checking switcher difficult, because the updated copier template # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. html_theme_options = { @@ -178,19 +174,13 @@ }, "check_switcher": False, "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], - "external_links": [ - { - "name": "Release Notes", - "url": f"https://github.com/{github_user}/{github_repo}/releases", - } - ], "navigation_with_keys": False, } # A dictionary of values to pass into the template engine’s context for all pages html_context = { "github_user": github_user, - "github_repo": project, + "github_repo": github_repo, "github_version": version, "doc_path": "docs", } @@ -203,7 +193,7 @@ # Logo html_logo = "images/dls-logo.svg" -html_favicon = "images/dls-favicon.ico" +html_favicon = html_logo templates_path = ["_templates"] diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst deleted file mode 100644 index 5841e6ea07..0000000000 --- a/docs/developer/explanations/decisions.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. 
If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -Architectural Decision Records -============================== - -We record major architectural decisions in Architecture Decision Records (ADRs), -as `described by Michael Nygard -`_. -Below is the list of our current ADRs. - -.. toctree:: - :maxdepth: 1 - :glob: - - decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst deleted file mode 100644 index b2d3d0fe87..0000000000 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ /dev/null @@ -1,26 +0,0 @@ -1. Record architecture decisions -================================ - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We need to record the architectural decisions made on this project. - -Decision --------- - -We will use Architecture Decision Records, as `described by Michael Nygard -`_. - -Consequences ------------- - -See Michael Nygard's article, linked above. To create new ADRs we will copy and -paste from existing ones. diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst deleted file mode 100644 index 33d5698143..0000000000 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ /dev/null @@ -1,35 +0,0 @@ -2. Adopt python_copier_template for project structure -===================================================== - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We should use the following `python_copier_template `_. -The template will ensure consistency in developer -environments and package management. - -Decision --------- - -We have switched to using the skeleton. - -Consequences ------------- - -This module will use a fixed set of tools as developed in python_copier_template -and can pull from this template to update the packaging to the latest techniques. - -As such, the developer environment may have changed, the following could be -different: - -- linting -- formatting -- pip venv setup -- CI/CD diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst deleted file mode 100644 index 11a5e6386d..0000000000 --- a/docs/developer/how-to/build-docs.rst +++ /dev/null @@ -1,38 +0,0 @@ -Build the docs using sphinx -=========================== - -You can build the `sphinx`_ based docs from the project directory by running:: - - $ tox -e docs - -This will build the static docs on the ``docs`` directory, which includes API -docs that pull in docstrings from the code. - -.. seealso:: - - `documentation_standards` - -The docs will be built into the ``build/html`` directory, and can be opened -locally with a web browser:: - - $ firefox build/html/index.html - -Autobuild ---------- - -You can also run an autobuild process, which will watch your ``docs`` -directory for changes and rebuild whenever it sees changes, reloading any -browsers watching the pages:: - - $ tox -e docs autobuild - -You can view the pages at localhost:: - - $ firefox http://localhost:8000 - -If you are making changes to source code too, you can tell it to watch -changes in this directory too:: - - $ tox -e docs autobuild -- --watch src - -.. 
_sphinx: https://www.sphinx-doc.org/ diff --git a/docs/developer/how-to/contribute.rst b/docs/developer/how-to/contribute.rst deleted file mode 100644 index 65b992f08e..0000000000 --- a/docs/developer/how-to/contribute.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../../.github/CONTRIBUTING.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst deleted file mode 100644 index 829d9ce037..0000000000 --- a/docs/developer/how-to/lint.rst +++ /dev/null @@ -1,39 +0,0 @@ -Run linting using pre-commit -============================ - -Code linting is handled by black_ and ruff_ run under pre-commit_. - -Running pre-commit ------------------- - -You can run the above checks on all files with this command:: - - $ tox -e pre-commit - -Or you can install a pre-commit hook that will run each time you do a ``git -commit`` on just the files that have changed:: - - $ pre-commit install - -It is also possible to `automatically enable pre-commit on cloned repositories `_. -This will result in pre-commits being enabled on every repo your user clones from now on. - -Fixing issues -------------- - -If black reports an issue you can tell it to reformat all the files in the -repository:: - - $ black . - -Likewise with ruff:: - - $ ruff check --fix . - -Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. - -VSCode support --------------- - -The ``.vscode/settings.json`` will run black formatting as well as -ruff checking on save. Issues will be highlighted in the editor window. diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst deleted file mode 100644 index 83e1386b4e..0000000000 --- a/docs/developer/how-to/make-release.rst +++ /dev/null @@ -1,31 +0,0 @@ -Make a release -============== - -Releases are created through the Github release interface. - -To make a new release, please follow this checklist: - -- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) (The release version should - look like ``{major}.{minor}.{patch}``). See `Deciding release numbers`_ if you're unsure on what the release version - should be. -- Go to the GitHub release_ page -- Choose ``Draft New Release`` -- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) -- Click ``Generate release notes``, review and edit these notes. Confirm they do not omit anything important and make sense (to a user, not just a developer). -- Choose a title and click ``Publish Release``. This will create a release on ``pypi`` automatically and post to the - ``bluesky`` slack channel. -- Manually confirm that the ``pypi`` version has been updated (after all tests have run) and that slack is notified. - -Note that tagging and pushing to the main branch has the same effect except that -you will not get the option to edit the release notes. - -.. _release: https://github.com/DiamondLightSource/dodal/releases - -Deciding release numbers ------------------------- - -Releases should obviously be versioned higher than the previous latest release. 
Otherwise you should follow this guide: - -* **Major** - Changes that have the potential to break plans -* **Minor** - New features -* **Patch** - Bug fixes diff --git a/docs/developer/how-to/pin-requirements.rst b/docs/developer/how-to/pin-requirements.rst deleted file mode 100644 index 89639623a1..0000000000 --- a/docs/developer/how-to/pin-requirements.rst +++ /dev/null @@ -1,74 +0,0 @@ -Pinning Requirements -==================== - -Introduction ------------- - -By design this project only defines dependencies in one place, i.e. in -the ``requires`` table in ``pyproject.toml``. - -In the ``requires`` table it is possible to pin versions of some dependencies -as needed. For library projects it is best to leave pinning to a minimum so -that your library can be used by the widest range of applications. - -When CI builds the project it will use the latest compatible set of -dependencies available (after applying your pins and any dependencies' pins). - -This approach means that there is a possibility that a future build may -break because an updated release of a dependency has made a breaking change. - -The correct way to fix such an issue is to work out the minimum pinning in -``requires`` that will resolve the problem. However this can be quite hard to -do and may be time consuming when simply trying to release a minor update. - -For this reason we provide a mechanism for locking all dependencies to -the same version as a previous successful release. This is a quick fix that -should guarantee a successful CI build. - -Finding the lock files ----------------------- - -Every release of the project will have a set of requirements files published -as release assets. - -For example take a look at the release page for python3-pip-skeleton-cli here: -https://github.com/DiamondLightSource/python3-pip-skeleton-cli/releases/tag/3.3.0 - -There is a list of requirements*.txt files showing as assets on the release. - -There is one file for each time the CI installed the project into a virtual -environment. There are multiple of these as the CI creates a number of -different environments. - -The files are created using ``pip freeze`` and will contain a full list -of the dependencies and sub-dependencies with pinned versions. - -You can download any of these files by clicking on them. It is best to use -the one that ran with the lowest Python version as this is more likely to -be compatible with all the versions of Python in the test matrix. -i.e. ``requirements-test-ubuntu-latest-3.8.txt`` in this example. - -Applying the lock file ----------------------- - -To apply a lockfile: - -- copy the requirements file you have downloaded to the root of your - repository -- rename it to requirements.txt -- commit it into the repo -- push the changes - -The CI looks for a requirements.txt in the root and will pass it to pip -when installing each of the test environments. pip will then install exactly -the same set of packages as the previous release. - -Removing dependency locking from CI ------------------------------------ - -Once the reasons for locking the build have been resolved it is a good idea -to go back to an unlocked build. This is because you get an early indication -of any incoming problems. - -To restore unlocked builds in CI simply remove requirements.txt from the root -of the repo and push. 
diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst deleted file mode 100644 index d2e03644c2..0000000000 --- a/docs/developer/how-to/run-tests.rst +++ /dev/null @@ -1,12 +0,0 @@ -Run the tests using pytest -========================== - -Testing is done with pytest_. It will find functions in the project that `look -like tests`_, and run them to check for errors. You can run it with:: - - $ tox -e pytest - -It will also report coverage to the commandline and to ``cov.xml``. - -.. _pytest: https://pytest.org/ -.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst deleted file mode 100644 index 065920e1c6..0000000000 --- a/docs/developer/how-to/static-analysis.rst +++ /dev/null @@ -1,8 +0,0 @@ -Run static analysis using mypy -============================== - -Static type analysis is done with mypy_. It checks type definition in source -files without running them, and highlights potential issues where types do not -match. You can run it with:: - - $ tox -e mypy diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst deleted file mode 100644 index a4a43a6ffc..0000000000 --- a/docs/developer/how-to/test-container.rst +++ /dev/null @@ -1,25 +0,0 @@ -Container Local Build and Test -============================== - -CI builds a runtime container for the project. The local tests -checks available via ``tox -p`` do not verify this because not -all developers will have docker installed locally. - -If CI is failing to build the container, then it is best to fix and -test the problem locally. This would require that you have docker -or podman installed on your local workstation. - -In the following examples the command ``docker`` is interchangeable with -``podman`` depending on which container cli you have installed. - -To build the container and call it ``test``:: - - cd - docker build -t test . - -To verify that the container runs:: - - docker run -it test --help - -You can pass any other command line parameters to your application -instead of --help. diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst deleted file mode 100644 index c1075ee8c1..0000000000 --- a/docs/developer/how-to/update-tools.rst +++ /dev/null @@ -1,16 +0,0 @@ -Update the tools -================ - -This module is merged with the python3-pip-skeleton_. This is a generic -Python project structure which provides a means to keep tools and -techniques in sync between multiple Python projects. To update to the -latest version of the skeleton, run:: - - $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton - -Any merge conflicts will indicate an area where something has changed that -conflicts with the setup of the current module. Check the `closed pull requests -`_ -of the skeleton module for more details. - -.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst deleted file mode 100644 index ee3892e5e7..0000000000 --- a/docs/developer/index.rst +++ /dev/null @@ -1,69 +0,0 @@ -Developer Guide -=============== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_run;3em` - - .. 
toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/dev-install - - +++ - - Tutorials for getting up and running as a developer. - - .. grid-item-card:: :material-regular:`task;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/contribute - how-to/move-code - how-to/build-docs - how-to/run-tests - how-to/make-new-ophyd-async-device - how-to/static-analysis - how-to/lint - how-to/update-tools - how-to/make-release - how-to/pin-requirements - how-to/test-container - how-to/create-beamline - how-to/zocalo - - +++ - - Practical step-by-step guides for day-to-day dev tasks. - - .. grid-item-card:: :material-regular:`apartment;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/decisions - - +++ - - Explanations of how and why the architecture is why it is. - - .. grid-item-card:: :material-regular:`description;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/standards - reference/device-standards - - +++ - - Technical reference material on standards in use. diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst deleted file mode 100644 index b2bdf303b8..0000000000 --- a/docs/developer/tutorials/dev-install.rst +++ /dev/null @@ -1,68 +0,0 @@ -Developer install -================= - -These instructions will take you through the minimal steps required to get a dev -environment setup, so you can run the tests locally. - -Clone the repository --------------------- - -First clone the repository locally using `Git -`_:: - - $ git clone git://github.com/DiamondLightSource/dodal.git - -Install dependencies --------------------- - -You can choose to either develop on the host machine using a `venv` (which -requires python 3.8 or later) or to run in a container under `VSCode -`_ - -.. tab-set:: - - .. tab-item:: Local virtualenv - - .. code:: - - $ cd dodal - $ python3 -m venv venv - $ source venv/bin/activate - $ pip install -e '.[dev]' - - .. tab-item:: VSCode devcontainer - - .. code:: - - $ code dodal - # Click on 'Reopen in Container' when prompted - # Open a new terminal - - .. note:: - - See the epics-containers_ documentation for more complex - use cases, such as integration with podman. - -See what was installed ----------------------- - -To see a graph of the python package dependency tree type:: - - $ pipdeptree - -Build and test --------------- - -Now you have a development environment you can run the tests in a terminal:: - - $ tox -p - -This will run in parallel the following checks: - -- `../how-to/build-docs` -- `../how-to/run-tests` -- `../how-to/static-analysis` -- `../how-to/lint` - - -.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/explanations.md b/docs/explanations.md new file mode 100644 index 0000000000..73ab289b60 --- /dev/null +++ b/docs/explanations.md @@ -0,0 +1,10 @@ +# Explanations + +Explanations of how it works and why it works that way. + +```{toctree} +:maxdepth: 1 +:glob: + +explanations/* +``` diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md new file mode 100644 index 0000000000..0533b98d45 --- /dev/null +++ b/docs/explanations/decisions.md @@ -0,0 +1,12 @@ +# Architectural Decision Records + +Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below. 
+ +```{toctree} +:glob: true +:maxdepth: 1 + +decisions/* +``` + +For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md new file mode 100644 index 0000000000..44d234efce --- /dev/null +++ b/docs/explanations/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,18 @@ +# 1. Record architecture decisions + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones. diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md new file mode 100644 index 0000000000..66fe5d8b24 --- /dev/null +++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md @@ -0,0 +1,28 @@ +# 2. Adopt python-copier-template for project structure + +## Status + +Accepted + +## Context + +We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template). +The template will ensure consistency in developer +environments and package management. + +## Decision + +We have switched to using the template. + +## Consequences + +This module will use a fixed set of tools as developed in `python-copier-template` +and can pull from this template to update the packaging to the latest techniques. + +As such, the developer environment may have changed, the following could be +different: + +- linting +- formatting +- pip venv setup +- CI/CD diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME new file mode 100644 index 0000000000..b466c79299 --- /dev/null +++ b/docs/explanations/decisions/COPYME @@ -0,0 +1,19 @@ +# 3. Short descriptive title + +Date: Today's date + +## Status + +Accepted + +## Context + +Background to allow us to make the decision, to show how we arrived at our conclusions. + +## Decision + +What decision we made. + +## Consequences + +What we will do as a result of this decision. diff --git a/docs/genindex.md b/docs/genindex.md new file mode 100644 index 0000000000..73f1191b0a --- /dev/null +++ b/docs/genindex.md @@ -0,0 +1,3 @@ +# Index + + diff --git a/docs/genindex.rst b/docs/genindex.rst deleted file mode 100644 index 93eb8b2940..0000000000 --- a/docs/genindex.rst +++ /dev/null @@ -1,5 +0,0 @@ -API Index -========= - -.. - https://stackoverflow.com/a/42310803 diff --git a/docs/how-to.md b/docs/how-to.md new file mode 100644 index 0000000000..6b16141727 --- /dev/null +++ b/docs/how-to.md @@ -0,0 +1,10 @@ +# How-to Guides + +Practical step-by-step guides for the more experienced user. 
+ +```{toctree} +:maxdepth: 1 +:glob: + +how-to/* +```
diff --git a/docs/how-to/build-docs.md b/docs/how-to/build-docs.md new file mode 100644 index 0000000000..f557337f4c --- /dev/null +++ b/docs/how-to/build-docs.md @@ -0,0 +1,39 @@ +# Build the docs using sphinx + +You can build the [sphinx](https://www.sphinx-doc.org) based docs from the project directory by running: + +``` +$ tox -e docs +``` + +This will build the static docs on the `docs` directory, which includes API docs that pull in docstrings from the code. + +:::{seealso} +[](documentation_standards) +::: + +The docs will be built into the `build/html` directory, and can be opened locally with a web browser: + +``` +$ firefox build/html/index.html +``` + +## Autobuild + +You can also run an autobuild process, which will watch your `docs` directory for changes and rebuild whenever it sees changes, reloading any browsers watching the pages: + +``` +$ tox -e docs autobuild +``` + +You can view the pages at localhost: + +``` +$ firefox http://localhost:8000 +``` + +If you are making changes to source code too, you can tell it to watch changes in this directory too: + +``` +$ tox -e docs autobuild -- --watch src +```
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md new file mode 100644 index 0000000000..6e41979708 --- /dev/null +++ b/docs/how-to/contribute.md @@ -0,0 +1,2 @@ +```{include} ../../.github/CONTRIBUTING.md +```
diff --git a/docs/how-to/coverage.md b/docs/how-to/coverage.md new file mode 100644 index 0000000000..161ffc2c76 --- /dev/null +++ b/docs/how-to/coverage.md @@ -0,0 +1,8 @@ + +# How to check code coverage + +Code coverage is reported to the command line and to a `cov.xml` file by the command `tox -e tests`. The file is uploaded to the Codecov service in CI. + +## Adding a Codecov Token + +If the repo is not hosted in DiamondLightSource, then you need to visit `https://app.codecov.io/account/gh/<org>/org-upload-token` to generate a token for your org, and store it as a secret named `CODECOV_TOKEN` in `https://github.com/organizations/<org>/settings/secrets/actions`
diff --git a/docs/developer/how-to/create-beamline.rst b/docs/how-to/create-beamline.rst similarity index 100% rename from docs/developer/how-to/create-beamline.rst rename to docs/how-to/create-beamline.rst
diff --git a/docs/how-to/dev-install.md b/docs/how-to/dev-install.md new file mode 100644 index 0000000000..3899b55998 --- /dev/null +++ b/docs/how-to/dev-install.md @@ -0,0 +1,56 @@ +# Setup Developer Environment + +These instructions will take you through the minimal steps required to get a dev environment setup, so you can run the tests locally. + +## Clone the repository + +First clone the repository locally using [Git](https://git-scm.com/downloads). There is a link on the GitHub interface to allow you to do this. SSH is recommended if you have set up a key. Enter the directory that it is cloned into to continue.
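As a concrete sketch of the clone step above — the HTTPS URL is taken from the installation instructions elsewhere in this patch, and is an alternative to the SSH form if you have not set up a key:

```
$ git clone https://github.com/DiamondLightSource/dodal.git
$ cd dodal
```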
+ +## Install dependencies + +You can choose to either develop on the host machine using a `venv` (which requires python 3.10 or later) or to run +in a container under [VSCode](https://code.visualstudio.com/). + + +::::{tab-set} + +:::{tab-item} Local virtualenv +``` +python3 -m venv venv +source venv/bin/activate +pip install -e '.[dev]' +``` + +## See what was installed + +To see a graph of the python package dependency tree type: + + $ pipdeptree + +::: + +:::{tab-item} VSCode devcontainer +If you are at DLS, then first [setup podman and its fix for devcontainer features](https://dev-portal.diamond.ac.uk/guide/containers/tutorials/podman/#enable-use-of-vscode-features) + +``` +code . +# Click on 'Reopen in Container' when prompted +# Open a new terminal +``` +::: + +:::: + +## Build and test + +Now you have a development environment you can run the tests in a terminal: + +``` +tox -p +``` + +This will run in parallel the following checks: + +- [](./build-docs) +- [](./run-tests) +- [](./static-analysis) +- [](./lint)
diff --git a/docs/how-to/excalidraw.md b/docs/how-to/excalidraw.md new file mode 100644 index 0000000000..294de96052 --- /dev/null +++ b/docs/how-to/excalidraw.md @@ -0,0 +1,19 @@ +# How to embed Excalidraw diagrams + +Start off by creating your diagram in <https://excalidraw.com> + +```{raw} html +:file: ../images/excalidraw-example.svg +``` + +Click 'Save as image' and make sure the 'Embed scene' checkbox is enabled. This is required for loading your image back into Excalidraw should you wish to make changes later on. Name your file and export to SVG, saving it inside `docs/images`. + +Add the following to embed it inside your documentation: + + ```{raw} html + :file: ../images/my-diagram.excalidraw.svg + ``` + +It is preferred to use the above convention over the image directive in order to retain the font used by Excalidraw. + +Rebuild the docs and open the resulting html inside a browser.
diff --git a/docs/how-to/lint.md b/docs/how-to/lint.md new file mode 100644 index 0000000000..3a706436e9 --- /dev/null +++ b/docs/how-to/lint.md @@ -0,0 +1,34 @@ +# Run linting using pre-commit + +Code linting is handled by [ruff](https://docs.astral.sh/ruff) run under [pre-commit](https://pre-commit.com/). + +## Running pre-commit + +You can run the above checks on all files with this command: + +``` +$ tox -e pre-commit +``` + +Or you can install a pre-commit hook that will run each time you do a `git commit` on just the files that have changed: + +``` +$ pre-commit install +``` + +It is also possible to [automatically enable pre-commit on cloned repositories](https://pre-commit.com/#automatically-enabling-pre-commit-on-repositories). This will result in pre-commits being enabled on every repo your user clones from now on. + +## Fixing issues + +The typical workflow is: + +- Make a code change +- `git add` it +- Try to commit +- Pre-commit will run, and ruff will try and fix any issues it finds +- If anything changes it will be left in your working copy +- Review and commit the results + +## VSCode support + +The `.vscode/settings.json` will run ruff formatters on save, but will not try to auto-fix as that does things like removing unused imports which is too intrusive while editing.
diff --git a/docs/how-to/lock-requirements.md b/docs/how-to/lock-requirements.md new file mode 100644 index 0000000000..b81ceeed5e --- /dev/null +++ b/docs/how-to/lock-requirements.md @@ -0,0 +1,39 @@ +# Lock requirements + +## Introduction + +By design this project only defines dependencies in one place, i.e.
in the `requires` table in `pyproject.toml`. + +In the `requires` table it is possible to pin versions of some dependencies as needed. For library projects it is best to leave pinning to a minimum so that your library can be used by the widest range of applications. + +When CI builds the project it will use the latest compatible set of dependencies available (after applying your pins and any dependencies' pins). + +This approach means that there is a possibility that a future build may break because an updated release of a dependency has made a breaking change. + +The correct way to fix such an issue is to work out the minimum pinning in `requires` that will resolve the problem. However, this can be quite hard to do and may be time consuming when simply trying to release a minor update. + +For this reason we provide a mechanism for locking all dependencies to the same version as a previous successful release. This is a quick fix that should guarantee a successful CI build. + +## Finding the lock files + +Every release of the project will have a set of requirements files published as release assets. + +For example take a look at the release page for python-copier-template [here](https://github.com/DiamondLightSource/python-copier-template/releases/tag/1.1.0). + +There is a single `dev-requirements.txt` file showing as an asset on the release. This has been created using `pip freeze --exclude-editable` on a successful test run using the same version of python as the devcontainer, and will contain a full list of the dependencies and sub-dependencies with pinned versions. You can download this file by clicking on it. + +## Applying the lock file + +To apply a lockfile: + +- copy the requirements file you have downloaded to the root of your repository +- commit it into the repo +- push the changes + +The CI looks for a `dev-requirements.txt` in the root and will pass it to pip as a constraint when installing the dev environment. If a package is required to be installed by `pyproject.toml` then `pip` will use the version specified in `dev-requirements.txt`. + +## Removing dependency locking from CI + +Once the reasons for locking the build have been resolved it is a good idea to go back to an unlocked build. This is because you get an early indication of any incoming problems. + +To restore unlocked builds in CI simply remove `dev-requirements.txt` from the root of the repo and push.
diff --git a/docs/developer/how-to/make-new-ophyd-async-device.rst b/docs/how-to/make-new-ophyd-async-device.rst similarity index 100% rename from docs/developer/how-to/make-new-ophyd-async-device.rst rename to docs/how-to/make-new-ophyd-async-device.rst
diff --git a/docs/how-to/make-release.md b/docs/how-to/make-release.md new file mode 100644 index 0000000000..caa42a4bca --- /dev/null +++ b/docs/how-to/make-release.md @@ -0,0 +1,32 @@ +# Make a release + +Releases are created through the Github release interface. + +To make a new release, please follow this checklist: + +- Ensure that you have previously followed [](./pypi) +- Choose a new PEP440 compliant release number (see <https://peps.python.org/pep-0440/>) (The release version should + look like `{major}.{minor}.{patch}`). See [Deciding release numbers](#deciding-release-numbers) if you're unsure on + what the release version should be. +- Go to the GitHub [release] page +- Choose `Draft New Release` +- Click `Choose Tag` and supply the new tag you chose (click create new tag) +- Click `Generate release notes`, review and edit these notes.
Confirm they do not omit anything important and make sense (to a user, not just a developer). +- Choose a title and click `Publish Release`. This will create a release on `pypi` automatically and post to the + `bluesky` slack channel. +- Manually confirm that the `pypi` version has been updated (after all tests have run) and that slack is notified. + +Note that tagging and pushing to the main branch has the same effect except that +you will not get the option to edit the release notes. + +A new release will be made and the wheel and sdist uploaded to PyPI. + +[release]: https://github.com/DiamondLightSource/python-copier-template/releases + +## Deciding release numbers + +Releases should obviously be versioned higher than the previous latest release. Otherwise you should follow this guide: + +* **Major** - Changes that have the potential to break plans +* **Minor** - New features +* **Patch** - Bug fixes diff --git a/docs/developer/how-to/move-code.rst b/docs/how-to/move-code.rst similarity index 100% rename from docs/developer/how-to/move-code.rst rename to docs/how-to/move-code.rst diff --git a/docs/how-to/pypi.md b/docs/how-to/pypi.md new file mode 100644 index 0000000000..5cc9eaaf73 --- /dev/null +++ b/docs/how-to/pypi.md @@ -0,0 +1,24 @@ +# Setting up PyPI publishing + +To publish your package on PyPI requires a PyPI account and for PyPI to be setup for [Trusted Publisher](https://docs.pypi.org/trusted-publishers/). + +## Gather the information + +You will need the following information: + +- Owner: The GitHub org that the repo is contained in, e.g. `DiamondLightSource` +- Repository name: The GitHub repository name, e.g. `python-copier-template-example` +- PyPI Project Name: The distribution name on PyPI, e.g. `dls-python-copier-template-example` +- Workflow name: The workflow that does publishing, `_pypi.yml` for `python-copier-template` projects +- Environment name: The GitHub environment that publishing is done with, `release` for `python-copier-template` projects + +## If publishing to the DiamondLightSource PyPI organisation + +If you are publishing to the DiamondLightSource PyPI organisation then use the above information and follow the [Developer Portal Guide on PyPI publishing](https://dev-portal.diamond.ac.uk/guide/python/how-tos/pypi/). + +## If publishing the PyPI project to another organisation + +If you are publishing to a different PyPI organisation then use the above information in one of the following guides: + +- [Creating a PyPI project with a trusted publisher](https://docs.pypi.org/trusted-publishers/creating-a-project-through-oidc/) +- [Adding a trusted publisher to an existing PyPI project](https://docs.pypi.org/trusted-publishers/adding-a-publisher/) diff --git a/docs/how-to/run-tests.md b/docs/how-to/run-tests.md new file mode 100644 index 0000000000..60d8f0bc5e --- /dev/null +++ b/docs/how-to/run-tests.md @@ -0,0 +1,20 @@ +(using-pytest)= + +# Run the tests using pytest + +Testing is done with [pytest]. It will find functions in the project that [look like tests][look like tests], and run them to check for errors. You can run it with: + +``` +$ pytest +``` + +When you have some fully working tests then you can run it with coverage: + +``` +$ tox -e tests +``` + +It will also report coverage to the commandline and to `cov.xml`. 
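The `tests` tox environment configured in the `pyproject.toml` changes later in this patch runs pytest with coverage options; an equivalent direct invocation would look roughly like this (the marker and coverage paths are taken from that configuration):

```
$ pytest -m 'not s03' --cov=dodal --cov-report term --cov-report xml:cov.xml
```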
+ +[look like tests]: https://docs.pytest.org/explanation/goodpractices.html#test-discovery +[pytest]: https://pytest.org/ diff --git a/docs/how-to/static-analysis.md b/docs/how-to/static-analysis.md new file mode 100644 index 0000000000..3ece75002f --- /dev/null +++ b/docs/how-to/static-analysis.md @@ -0,0 +1,7 @@ +# Run static analysis using pyright or mypy + +Static type analysis is done with [pyright](https://microsoft.github.io/pyright) or [mypy](https://www.mypy-lang.org) dependent on the settings in `pyproject.toml`. It checks type definition in source files without running them, and highlights potential issues where types do not match. You can run it with: + +``` +$ tox -e type-checking +``` diff --git a/docs/how-to/update-template.md b/docs/how-to/update-template.md new file mode 100644 index 0000000000..a6a2135672 --- /dev/null +++ b/docs/how-to/update-template.md @@ -0,0 +1,9 @@ +# How to update to the latest template structure + +To track changes to the upstream template, run + +``` +copier update +``` + +This will fetch the latest tagged release of the template, and apply any changes to your working copy. It will prompt for answers again, giving your previous answers as the defaults. diff --git a/docs/developer/how-to/zocalo.rst b/docs/how-to/zocalo.rst similarity index 100% rename from docs/developer/how-to/zocalo.rst rename to docs/how-to/zocalo.rst diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico deleted file mode 100644 index 9a11f508ef8aed28f14c5ce0d8408e1ec8b614a1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 99678 zcmeI537lO;m4{y-5^&fxAd7U62#m-odom^>E-11%hzcU;%qW8>qNAV!X%rBR1O<_G zlo>@u83#o{W)z#S%OZm8BQq&!G^+HP(&n3&@W+)-9$hLOTPl^tjT;RAocFi!Zm+$n;Ww8`pBh^#O`bd$ z-t~}DY10X%Qg3fHyy2+Q{%4m;y8?rxKpcFJm&pY|u-6wCRW5(cjPg@`tAm&CdMS9B z-%o#TQRNE0?HvbX^O@z1HkeVq^0H->N|}gPE~^Af__3Vl@}-qP@2*;2sSxMtEoPQq zYs1-$wB*q@dPX_;yP4(Sk(Y_=xW{?7G2ax2xYNn62IKezl`B5Buo8T2aYcCq3)VS_ z2|mxetNC`;i~d2h<| z1L0&p|I2sR_3;k8>*A623f?_wr#*T>B~WUWL3O6z&+%LSv3#@RlJ;qyHRj!$W|xB( zN%WHym4NyQ9$Hfg9(}nIY|8IzDf?2s?L21)2hy%J={F+IpH>IKr=B0mmvt^~WxsY|c^bETWshNJpW zo$@@vv!?nyiT?vrUORpeluB!QN~QiWrBdJegHP`$_({ZLzALWMD6RO+IG)Ko;$Mxr zZTricy>@2#IB>ms%#88_@SR08{a5sSWpQPZ-fcLue2wC4*IyQkE5reRJkK>V)&{E% z92jcH7t#KVy8@nOXuCIU{mHcfy&?D^&(3*~*uKBK5q)ne?R>4thi)5uo^}hZ1Mv;x z{>%rxJDI*_y$&v2R#^*-Y1_{p;)z-Cfk*5Fyhl_f>NJ@C(okN?Q~cX?FFL&S{xv}W zEy8*M*5Bamnd$?A*(yZ;*}=7!GXGstcPv-!+svtxk;n?+nIj;uKAVVKj4>H-SrGs?lGN^-$l0Z(cPHo;nGh{BdY^4mkch_3#He)3d}>zw>nrufYt`-Uf^x z0&5B|PXf01zW6tJ{!nG#y1%>$ZElsJPn55|eJW#CR`+Fi1pKhZlcHdf=jyHClkkUQ zqrSWEz7GCb-8AGnH+@u?ypIFV$T8NAe+YH9E_?Q&d~`VN--Z$Oo4l`~ZtsoyX5P_P zf_YX)5G(v8{mX6>bd}&2yt8G*7f2(%W#B~l|GM@^IHb8--!6QO3C11uTy*|QW9Sjp7Rc)X`oQHj?0=(Pqw3p^ zqu;wTwitIH@~r#a4T~OU)1K`2+ihDPm^AQF*-*m)ZOP**fh8%qAo4#;w8A1NQUC9Xpx)qI~4V-LvBGFZ5~6 zN8Eg(!oXaJejuDzN9Ak3Q$0{mskHb2d@pVuZsVXjPb;^bzkY8;d#JX_*nY9s+)ALi zyq%ZxdoBI!+wiIlUHDnU>YL&Z)ZZ{3#k){OaPrh#XC-N_BJKFB`J}}g3!fCP2JYq5 z=e;}&c-B-O{nooHh;uA)H%WtMzK1-#e@qbcjtVNJ(v)?j(xf$|QqR&-X|sM8#lYW9pmxw^n**Nr$3;l zcor0v@`QQ}{AF*QQ=Y-MKN9Cs;-1hmyS)8uDOB3zz-dcl%G0)-Rlc8gRntMK%}F2P zy7xM=meNp;2k%`Ie1W*HYgIAGYa5>L@vP)Q=NT{`t{k5!LhU6{s`YXJ3w<5~0 z`Kz;>I6s;&zf&peU<4Z8;5#mNRE)L1bNr^ ziwi#~Ou7djVE({*;?^1;lH$gF(|UQMPP*hc_$luzto?4!`1j$Ic#-h;g*Quw+^F*z z!(2SU{RHN87rF1#!WvVggD%R6w@A00maqFA+%Kga{oZ|_7QP-H5#@e|F!5E|gXS}? 
z({hLO#P<4z9p_fk!UMg^fX%>djLD%rN*d1QdsLej5BjV%Kb&gW02myvw&q_aF~5}T z<~rZL0PZt*78%^q{HQknEbVAN%YH#HPLAl;XFB~9S*vbMNoDcv3*f$j=cP2f^*yT1 zt1TcC4x_o&JzS?cck@B64}Qd$Xgi<20Pba;)h^tqu-)cOdlCPSikn4$VyAQ4Q`Wvv z#Xq(E*lk|zMRLELzxx~AlwGCa?>%WRZah2ewx=w80sNQqB=%ps&BwJD8xQ@4uMM+t zU_Cw&f2FhAQYAAGP@? z{t_48e*af%y;}0B{VmIH^razx(mGIF*`f0#jKOv5j0U#WI@Mn6`J4Hc#aF(@-N)Q1 zOBy$hX;0E~xZe~;2K^W^&{k3M+hLBrH7b45JKL`4*VIE&+_Y~oxKz-ih4V1l(OqdU ze7|e`%Q)K(&lgTyd~m+s$p6emPKk?`_q}uo#`Q+nG3147(t-2o27lR5(uV4EoF-mg zU;1d{Bv0gp6O|5JSJ8Ir)(q&&v3w{BM%uec=%tL4{wOWJ&v(hqrtXc8zFPfwnGc+# zxLVUN?tql>Ith;Z4IEdZBiz>DZTqyTFS_ybhS8yhHtZ^cw9MSBOgT-tse-T`YW31rt*8EKy_tFp4YY`A z>N%|V!Tn^D0ny9TY&$Koh;?t7Q{En35Jwcz#9P3rKS;a_0`QfIIX9I*C#A`-3U#GtG{o?b2|G@o|K(!L|MYJQI^=fDLW+S619$izU~?F_!3WB`KnEW zYPr9TFT2E=(>@gR2QEDW>EGg<_Ha1#5A|#jYdgz;aRE=;>VdWM_0R!+8vB6fz5=FGTAv(v!xyZ!W1U0*6zNTUdefw$8COJRUxEhoRLC=mF!L_F<_% zFusO;LUt^1PJ2{MJlW+)KON^3cT9EujI41ldsZ{eAsekF`0_`Q7wTj@tu-alNoCNU z#w=^IGoiPhB&WHz%PZhF(!ZS4X!+vObDqF@*osXxGwBhP#GD{TPZzTVC!>bKf#vz=^sqw==jf$NRz``a*2S=}@T&#P=eU;m8_ zKkhfOe~`or8m$|{AL8=2--Gk-_Z?`g4zPz_kGlN14L9w#c!AcXigt@5`g|HL)WMDK zou9uiAcuZCEsv=0K6`(&)>GcK8p?2q+orRG8CO3NRkpNulKW)uS+tAVkC}#x`A%6* z%2H+%hdJ<@C`Y0`Mtz-l?CBWXQ*{RVB-!BG>$64If%MMWJIwhV!Ewk@+DnBj5-V$) z@@s4a*G%%kM;DgYL!P)@bd>yhP_=xrbK$I>KzpYjW1oH#NSwR6w4}@#BH`Y~tH4pV zQpcm?n+WdMfQI$MJnCNdzop8F$QGYWJHIG5qHRj3`cd2AETmIR8;|lqZxfycZ9=mZ z+3F0O*f|r`bWSUfXlEXj@q#GY?>xJ_DSYz9x3W)N`=Dgo^lfYfbT-Pp2(~&$i?ki@ zgrjVH?gwYdV&7q{MY;p&%lBmeDe}p3)(W?D>wt0cG{Z0BeAY~Y-Kd}UF{jnpTRKxq zYf-8noh`;+)1Bmh&3|-;k@N!Er=vZ1+S|JaxFP?i%Ey%B47>cC%}|0rJ|0)@tnaDA zaBgCs|4~$hXs?BI2Re@}D?V}Ym}AfQ#KIw68a8bE#>LI^Ui1B;-DR}nJh;TAc|H0> z(*}@}FN}+q=Y2EeKkf05%#{b9s5F%MyQciKhex8~wwNO!R-+s}Ys%E!H_$ZrZ3bUIjyIW{+BS?{>OIcmd^K&69YRU{o3OF0RkJ z?cGh!94l5naL?PY(+D|d@<;V~o!=PM-t97&-%)$K)RM5Rifl6`oqY8N zSW2DCD;KEzzU@D%&x^muwRanL^E+y-OmdU?p5{mOhdjK1@~i#NNXyUu?)Le#_HL&& zzjd~$>!hDD-?R8p{Xw{7No(Rz_Ib#`nfF-OeLjxA8`w#H#Cm>kJ4(8wG;!awC&?Zk ze0Tw6e~2;gx;WVOd%Ms3ws#wjeqYL5)^*aOxbd=v?f&4y3nc#_|DBbVkKO0qfB*ZKFZbNA6^ffE(S^as(&mA%~f z)Y&oP=ak11FV@ae@_3Rw#=)e_Ppa&4@PPB<;x*&F)(u@J-E=eZii1g+rwx{# zvm5)%`^3d-#(QM0VYV{h(9-hLrVlpd=Y9(Hfx>ivS?WxCh>eptr1jP;>57O$S)XP- z1Z(Ia#~AmSB4B5Qq4gQ#^6X>Inom?b%KC3ZB_I67-iVE%LGHP5R6a@XZektXIISNg z#Vzt1Wn9X>!Oh+BD~vplDhm~bi`MCl)A?CN!A*lh8PAIAAWjQ+N@kwQN zzp>BygQPEHUCiKN`#RUuxc#XM`&-e!ndcnmmM=?~aq=5Q<6__;e+H%oTw87vrwAW@ zKkV%KEM-@mchXo;oPoKYjzkzIhKCVvC*N+Cy4N>?v`c8N1 z$*!nTI8o`r`8Vu6E9AUpY<{#yxA1nLJwgxXyAL9<&M5oOlg?9(qjl2zcgzcI!Nm^> z^e)Raav;X`}MU^iLoFkDF8COrF-gD0vbpDg>Me?P!iBH}Ok!k<=o%6~~ zYwu}wfgH23=8fRuJ$KgrHOT>{JXwA6dXYSP(O+(whF`0`b63*F(xEg{j|A+;$m2Bb zSm>B?yY-8WONskT_J*$KgYUyhy7dh7uBbkNbs;eKMMvyr*YRQ6#aOMeP=>SMnb%RC zJK90HRoXfo*vvo(EUDrOpWtX74 zL$W$?3V2NJ{B({V_ruHw%!NEV6ETOheH!Rh0DJV)@fO|R!kmZnFiF4W&A^4!joSb=;GoowoT z#sl5WuWEl^9=6RL754Yv%vpH5k+$jmtdla}jKK{#gXUcHqTyXgI`<~8(|Evoa3ZaAwvDe# zvt88vI4S%-G0UG-_eG#5UW?uERL(lwxRYqqEL^Z*pTL~C?hYgMqdYV+6`V94Xk5-g z{t$HB-me_|-k=)#(l6+)R-3=T$7Zs&|1J*CZC2H{748YoSH{rJFJvwjsjrdkyzU{* z>(s-qVa?s%ldL&>Bj@(%-dq=+?k38~@57?0Epmoo9qmm!kUoj_`hE7M{9Rj#RdD9q z<^E>$ruUn2#`(IV+nGCgHwWEKtb1+0k8GfR)~J&`zxGLK?%Cf!`!smyo~^j@oA>B7 zALUN^-3ul|%fZcnmtlW@G;5!kZB84J1xy`xs;@Dhb%a#mLx8JF)ASYjZ#-jXZowmHwlOeVu8?h#m zdakftR{OVPHr=m1Qk>Qk;>LWt+;P8IQ}`WcKXE_TmxS{dE%QTypTg{E2Y@EP;n^1ET?h)=^u z#&sIqh0nx+^0wetH?Mc`_YG^^t`WUJRvI-cp5`Aq7s$8VN%65k>ECy5rKL8}Y3+@( z^xptph0@;CfnUv>IYft0q()z*1Xoc z3zh`yp&R{KBl!EI)qMyur051$^qDtF^@L90X7*dP)GqlM^m^zerX=CjjBh$0z0;l6 zA#^uIGs+(a6B+5^sY_d@CqyrKgs)yN4)?6@wLbKUDz^)q?2WRPtB80SYp{Vop%tS5 z>k>Pmn|`qfytBg4I^HkPop+1Vx*_{VTG|G%rC7=O@gB`=14lhq*#JG%Jz43NH=gJ% 
z9;$VA?x74Gi#a2>liR~Q^w+(de}jEf0KW{FA2+={FeiBQu;t65+DUlirKOL9fG2znk3P-_6XAGmLI5c~&r3g^-`bYA8=xf_-> z(p>uu>^e2SS$FyVpH~(y3t$g_Ao{phOg>3Iyh!6wFp2)Feeh>PU)g4ezRy74h+~31 zYI0;omED8v3%El&(D@lUN9>pc>Vl;DBisk(tE!lGTz%s_o|pT0$K?B6^=;i>+ZK}usKHGew+5dYkr}5 z#(B&)evDi>9r;r85bc3@wQ+Pt~a8i zMjybMLZaQa^qJC2NIxMxh4dBDTink4RCb_2`_}cCqrMI zQJ}q#oLyR*`x_mN>!YuEkK51V!mKGbysj@Dp!AxYwH)d>rSFv9vkx7D^q{Zm0EcL< z{wsTT-G%&9=&Q3+b$0xFsXNwm0_odadisX3)A@ZIz3unhy}2!V-nG8)ed24qQf1P* zh}KF!L0Pp{axLxSPqYu+%OoAsNO985h`x71-|L|71y%aWPDJ;wp<8Xby^z-HS-aiE zrghYB)(_6|p=Gn;YJ5@q&^=w!J9eAXy{7*{yAJtt3+S7L4&04&Q54P1JJxE}qb)w0 z1y(ELXiwM=SRd>br*)84toQoT0G_+x6ARDrjqJN_W!pI@=8oi6 z)m2hHth;}}^mo^%jxS3}+wO1AcZvO9Gw(fVlm^Iw*SU08`1pmD(eQ_XM&UOrz2*|# zG6G;H)v&zYta`+DZ|RZP?YnJ2_8ra2vk17d59$`DdAjzl6;bYHz~DT@!(93!8#e7u zs7A}6{?u)YP_k)jwA{@~kACM;m;T88_cbfOM&N1=xTp)peU~>$|JiCg@T~RB+~ld7 ztaHn`XJ!ld)yrAaw<@0OfW=F@)#>b@?OMDSBnxe1BY5zhW_m7xTV$kC*_Cz za-ehEMvCi1SpXT}Zqc7QF7Z3}U3W=z%=1lSzSglvnv*QRp4pD!1Jv`1ud;6#`;M`! z$CdNYsu^jfjes#fuI+Y`ETA>meK?mBUOS-~bj$;@h%sOLPh|h1b0oEwrcw6@>v)>W z>n{60%c8nL*GaMfXDS?y9C%_LS{0q9(RsecSlO6p{4ls_-SCPA^oD<69nZGCP@j>l zg3nztZgY{X2lMS3jt19u_?+Ky8hXFpcI0j6+2}l9wr|>JYr{0ZS?>sz=9DFOk2$AV zHg!JtNx5yHQ)B_;{)>68GIiB1zmYLtcde)CSZ?&V`_0fw`;e3BLpD1)6FRSk;#Tk$ ze@e=u+27Cu-#|Hj)9ieb;7hlkrw+yMH6~}#t>n=oWxc;%``2~l(VR~zC2k=fLZyyi=%jjuRATn zJq>O?3Tr&@ogcJIFF>1J$r!LOsvOOH=R42$<@YY`;?KVBSnQ5nI9bDa#)Edq0`UG< zcv^avm+zRLMZQm?i_YTmH6hU1Q)zIMzWa^`?N}o~w^42-{e8xKBj4++sHA$%@=fzB z-!r8ex&PP3$!C7hYFR+^ZzccFI_4+obL_hH`K@znvO2Xr*_->oPm1fKFKVSMApXz% zxh4BOvX1$Z@6+@-Np&6fO?&IIx+RDU()Gr{%JZJO&OAS8l`J6n54@T_|I0Gw8-AZf zpOdHleeMC&y$yNt$dV?@co6CZ*jJqeUL$cBj|ZUtU5&s_&l+}Hi^#V72Gs8*af%-|a_7QMy$VHs#d_vJ>OB(Zw(C6gA zSNAVwbvm;+Pach=Ntz!tOBSH-e-8VvgBrm*Ds9x5-)esE;w4!sD+hRYj4g=^vl-#I z`GMA>i=G=%C-1}l^L5O1*A-QksCmBlz0MIUDvvyHkaaSjDHCV+lPBLiY2wC%B4q(+ zUcvq|yhji{;M_cTx@n^3`K^-gU0kBVYKLh~qXc7OTidE|H{*eg@1QJDOh00bUdH{Z z>uV1HbAX$o>dWUH`^xL=_6@&pw~dos2Hne&=CpRJve@a``P-cz%wr*|hWeJ?`Y6o zB2V7FX+DEZSDMo~bG~p}9badHicj5#Jd-DH#{S2CcNw)BuRQrluGaladDf}X`_{&O!vi>9>uq`P=%zFW(^k{mr&uTGrZVNhl`(tR zoe&>dP+1>6Kz|;1-I7M<3Zyyi%^K14XKuUbkolD{rr+B>bAs=73oY~D$zHcWa#NDq zFQ-hE2cLGNVIFa_G<{bT=j;MA%-HD;!rA=>J@yIWOulMiP-#ohyed^8GKuIct* z2A6jDe@ob>r8^0_MV8G|ce3|7;oa~PC$kW|YcExLNQ2z>>nK`By+aqU7kseX4dwF1@rwz2!F9*6FT8GuueE;UzR6Lvj(XQSE6|$o z&D~HoUmTB1*b9EX=bmrhyxSEY-TvKYEGc{41IwD=1ht!X;oPiz51ALw|38~^&v&zM zEefveyrTMf(z~;lj!Yh)`y4mf;{+jN}BgYoCo=(7Vr5kx-O9Sm!PlNxlm%q07IX6DC5j4MVFyf@b-?_3og6* zR^?xGKGO5BC+veUJ>*mW?T&kswHJJ52k!Y!svl_oBHidPn6ce$*{v!Pl+5;Q!U(d%jKk6VGSwZ%6few;k+1x4a2* zNypm`o?`6Qp`9LDpVyoekTGbeCR_J36NvpVNM?Xqx7MCtWf2LmjtXzbk2T5F@zL^8DmH1`vX#m4AM z%on=uOe*C0XTbeyd$ND7C6zUTGdY@b$&eBD)48RrfpzQ|mEbl2j+fEbC%$KXdu*~s za5D&t_Cd}mWqf!W>r3ar7w&|=wrx)m`kI%es{@yB&^`}@=80#kjda?yqkIQ*c0F?A zyXbdkGtS-w-<^xBRrq{TFzMg(C7+U4FY6kI9XL?lq8(*^HP7T4VEuVZc<_O&Kc2uC zd=~Sq%Xw}TzrcSCp79LrWC7vDgcs{K@1EuN<2-ls{@3_dl6DGusuO$q%M&KfE00ai zwL8C>HSl_WU8yw5e$!hjjk3aPRMwuM7kvt^KNME5RH}u6CO65vSUMOUW5Rud;TnL! 
zU=2Vuc@03AyW;c=0_ZpKs{s2gaRpa#C0K@EI0gDSQHvYF!d*T9v+ z4Eu({VTQd!;jqqzf?`SF7EI`=bC)J@7B4nWxB4nWxBIJhqZFnHqXNN)14fopL zLD&u3pH+bR@RU0ADUcJMWYw-x4hz>6j{>^ky5dpbv~Yhteq(&Yef8Ob9ufIP3F}~rn_UC?g+p`-^>mN>ka{Jd5w?8`J;r+SSt^oRbpB;|i5B>Ic z_(@#>VTf+Hu7Ewm`B`0oT>eM6t^fpWh7|Hs3*nI8S_p>x*g`1e*A_xOf@jtEB!w-6 zrYJm=VVIp&Lt%E-01##u1hou$!sJ64Od1T=N>mM+DzAd8Rbhy&;#4u5Wa3u=)PjQm ZYRRh@^bCDh5vs@!z69bV>$COq{{Z);QUw42 diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000..730b3fdc1a --- /dev/null +++ b/docs/index.md @@ -0,0 +1,56 @@ +--- +html_theme.sidebar_secondary.remove: true +--- + +```{include} ../README.md +:end-before: + +::::{grid} 2 +:gutter: 4 + +:::{grid-item-card} {material-regular}`directions_walk;2em` +```{toctree} +:maxdepth: 2 +tutorials +``` ++++ +Tutorials for installation and typical usage. New users start here. +::: + +:::{grid-item-card} {material-regular}`directions;2em` +```{toctree} +:maxdepth: 2 +how-to +``` ++++ +Practical step-by-step guides for the more experienced user. +::: + +:::{grid-item-card} {material-regular}`info;2em` +```{toctree} +:maxdepth: 2 +explanations +``` ++++ +Explanations of how it works and why it works that way. +::: + +:::{grid-item-card} {material-regular}`menu_book;2em` +```{toctree} +:maxdepth: 2 +reference +``` ++++ +Technical reference material including APIs and release notes. +::: + +:::: diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 3cfd8009ee..0000000000 --- a/docs/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -.. include:: ../README.rst - :end-before: when included in index.rst - -How the documentation is structured ------------------------------------ - -The documentation is split into 2 sections: - -.. grid:: 2 - - .. grid-item-card:: :material-regular:`person;4em` - :link: user/index - :link-type: doc - - The User Guide contains documentation on how to install and use dodal. - - .. grid-item-card:: :material-regular:`code;4em` - :link: developer/index - :link-type: doc - - The Developer Guide contains documentation on how to develop and contribute changes back to dodal. - -.. toctree:: - :hidden: - - user/index - developer/index diff --git a/docs/reference.md b/docs/reference.md new file mode 100644 index 0000000000..7362151f5c --- /dev/null +++ b/docs/reference.md @@ -0,0 +1,12 @@ +# Reference + +Technical reference material including APIs and release notes. + +```{toctree} +:maxdepth: 1 +:glob: + +reference/* +genindex +Release Notes +``` diff --git a/docs/user/reference/api.rst b/docs/reference/api.md similarity index 60% rename from docs/user/reference/api.rst rename to docs/reference/api.md index eccd886421..7702b870cb 100644 --- a/docs/user/reference/api.rst +++ b/docs/reference/api.md @@ -1,26 +1,17 @@ -API -=== - -.. autosummary:: - :recursive: - :toctree: generated - - dodal.beamlines - dodal.common - dodal.devices - dodal.plans - - -:ref: `modindex` +# API +```{eval-rst} .. automodule:: dodal ``dodal`` ----------------------------------- +``` This is the internal API reference for dodal +```{eval-rst} .. 
data:: dodal.__version__ :type: str Version number as calculated by https://github.com/pypa/setuptools_scm +``` diff --git a/docs/developer/reference/device-standards.rst b/docs/reference/device-standards.rst similarity index 100% rename from docs/developer/reference/device-standards.rst rename to docs/reference/device-standards.rst diff --git a/docs/developer/reference/standards.rst b/docs/reference/standards.rst similarity index 89% rename from docs/developer/reference/standards.rst rename to docs/reference/standards.rst index c7498b229e..6055ba94c3 100644 --- a/docs/developer/reference/standards.rst +++ b/docs/reference/standards.rst @@ -1,5 +1,5 @@ -General Coding Standards -======================== +Standards +========= This document defines the code and documentation standards used in this repository. @@ -9,9 +9,10 @@ Code Standards The code in this repository conforms to standards set by the following tools: -- black_ for code formatting -- ruff_ for style checks -- mypy_ for static type checking +- ruff_ for code formatting +- flake8_ for style checks +- isort_ for import ordering +- pyright_ for static type checking .. seealso:: @@ -27,7 +28,6 @@ https://numpy.org/neps/nep-0029-deprecation_policy.html. Currently supported versions are: 3.10, 3.11. - Documentation Standards ----------------------- @@ -69,4 +69,4 @@ Docs follow the underlining convention:: .. seealso:: - How-to guide `../how-to/build-docs` + How-to guide `../how-to/build-docs` \ No newline at end of file diff --git a/docs/tutorials.md b/docs/tutorials.md new file mode 100644 index 0000000000..1fe66c541d --- /dev/null +++ b/docs/tutorials.md @@ -0,0 +1,10 @@ +# Tutorials + +Tutorials for installation and typical usage. New users start here. + +```{toctree} +:maxdepth: 1 +:glob: + +tutorials/* +``` diff --git a/docs/user/tutorials/get_started.rst b/docs/tutorials/get_started.rst similarity index 100% rename from docs/user/tutorials/get_started.rst rename to docs/tutorials/get_started.rst diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md new file mode 100644 index 0000000000..2b33095142 --- /dev/null +++ b/docs/tutorials/installation.md @@ -0,0 +1,42 @@ +# Installation + +## Check your version of python + +You will need python 3.10 or later. You can check your version of python by +typing into a terminal: + +``` +$ python3 --version +``` + +## Create a virtual environment + +It is recommended that you install into a “virtual environment” so this +installation will not interfere with any existing Python software: + +``` +$ python3 -m venv /path/to/venv +$ source /path/to/venv/bin/activate +``` + +## Installing the library + +You can now use `pip` to install the library and its dependencies: + +``` +$ python3 -m pip install dls-dodal +``` + +If you require a feature that is not currently released you can also install +from github: + +``` +$ python3 -m pip install git+https://github.com/DiamondLightSource/dodal.git +``` + +The library should now be installed and the commandline interface on your path. 
+You can check the version that has been installed by typing: + +``` +$ dodal --version +``` diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst deleted file mode 100644 index f25a09baad..0000000000 --- a/docs/user/explanations/docs-structure.rst +++ /dev/null @@ -1,18 +0,0 @@ -About the documentation ------------------------ - - :material-regular:`format_quote;2em` - - The Grand Unified Theory of Documentation - - -- David Laing - -There is a secret that needs to be understood in order to write good software -documentation: there isn't one thing called *documentation*, there are four. - -They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. -They represent four different purposes or functions, and require four different -approaches to their creation. Understanding the implications of this will help -improve most documentation - often immensely. - -`More information on this topic. `_ diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst deleted file mode 100644 index 64baca9b3e..0000000000 --- a/docs/user/how-to/run-container.rst +++ /dev/null @@ -1,15 +0,0 @@ -Run in a container -================== - -Pre-built containers with dodal and its dependencies already -installed are available on `Github Container Registry -`_. - -Starting the container ----------------------- - -To pull the container from github container registry and run:: - - $ docker run ghcr.io/DiamondLightSource/dodal:main --version - -To get a released version, use a numbered release instead of ``main``. diff --git a/docs/user/index.rst b/docs/user/index.rst deleted file mode 100644 index 4619b7114b..0000000000 --- a/docs/user/index.rst +++ /dev/null @@ -1,58 +0,0 @@ -User Guide -========== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_walk;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/installation - tutorials/get_started - - +++ - - Tutorials for installation and typical usage. New users start here. - - .. grid-item-card:: :material-regular:`directions;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/run-container - - +++ - - Practical step-by-step guides for the more experienced user. - - .. grid-item-card:: :material-regular:`info;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/docs-structure - - +++ - - Explanations of how the library works and why it works that way. - - .. grid-item-card:: :material-regular:`menu_book;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/api - ../genindex - - +++ - - Technical reference material including APIs and release notes. diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst deleted file mode 100644 index 0cf4d4cbdd..0000000000 --- a/docs/user/tutorials/installation.rst +++ /dev/null @@ -1,38 +0,0 @@ -Installation -============ - -Check your version of python ----------------------------- - -You will need python 3.8 or later. 
You can check your version of python by -typing into a terminal:: - - $ python3 --version - - -Create a virtual environment ----------------------------- - -It is recommended that you install into a “virtual environment” so this -installation will not interfere with any existing Python software:: - - $ python3 -m venv /path/to/venv - $ source /path/to/venv/bin/activate - - -Installing the library ----------------------- - -You can now use ``pip`` to install the library and its dependencies:: - - $ python3 -m pip install dls-dodal - -If you require a feature that is not currently released you can also install -from github:: - - $ python3 -m pip install git+https://github.com/DiamondLightSource/dodal.git - -The library should now be installed and the commandline interface on your path. -You can check the version that has been installed by typing:: - - $ dodal --version diff --git a/pyproject.toml b/pyproject.toml index 5e8e78d383..3204b6e8d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [project] @@ -33,7 +33,7 @@ dependencies = [ dynamic = ["version"] license.file = "LICENSE" -readme = "README.rst" +readme = "README.md" requires-python = ">=3.10" [project.optional-dependencies] @@ -41,10 +41,14 @@ dev = [ "black", "mypy", "mockito", + # Commented out due to dependency version conflict with pydantic 1.x + # "copier", + "myst-parser", "pipdeptree", "pre-commit", "psutil", "pydata-sphinx-theme>=0.12", + "pyright", "pytest", "pytest-asyncio", "pytest-cov", @@ -81,9 +85,8 @@ dodal = ["*.txt"] [tool.setuptools_scm] write_to = "src/dodal/_version.py" -[tool.mypy] -plugins = ["pydantic.mypy"] -ignore_missing_imports = true # Ignore missing stubs in imported modules +[tool.pyright] +reportMissingImports = false # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error @@ -127,24 +130,23 @@ legacy_tox_ini = """ [tox] skipsdist=True -[testenv:{pre-commit,mypy,pytest,docs}] +[testenv:{pre-commit,type-checking,tests,docs}] # Don't create a virtualenv for the command, requires tox-direct plugin direct = True passenv = * allowlist_externals = pytest pre-commit - mypy + pyright sphinx-build sphinx-autobuild commands = - pytest: pytest -m 'not s03' {posargs} - mypy: mypy src tests -v --ignore-missing-imports --show-traceback --no-strict-optional --check-untyped-defs {posargs} - pre-commit: pre-commit run --all-files {posargs} + tests: pytest -m 'not s03' --cov=dodal --cov-report term --cov-report xml:cov.xml {posargs} + type-checking: pyright src tests {posargs} + pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} docs: sphinx-{posargs:build -E} -T docs build/html """ - [tool.ruff] src = ["src", "tests"] line-length = 88 @@ -153,9 +155,12 @@ lint.extend-ignore = [ "F811", # support typing.overload decorator ] lint.select = [ + "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "UP", # pyupgrade - 
https://docs.astral.sh/ruff/rules/#pyupgrade-up "I001", # isort ] diff --git a/src/dodal/__init__.py b/src/dodal/__init__.py index d107f020f1..26d23badb6 100644 --- a/src/dodal/__init__.py +++ b/src/dodal/__init__.py @@ -1,11 +1,3 @@ -import sys - -if sys.version_info < (3, 8): - from importlib_metadata import version # noqa -else: - from importlib.metadata import version # noqa - -__version__ = version("dls-dodal") -del version +from ._version import __version__ __all__ = ["__version__"] diff --git a/tests/conftest.py b/tests/conftest.py index b6d39aea79..a4c8168f25 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,9 +4,10 @@ import os import sys import time +from collections.abc import Mapping from os import environ, getenv from pathlib import Path -from typing import Mapping, cast +from typing import cast from unittest.mock import MagicMock, patch import pytest @@ -33,6 +34,18 @@ "s04": mock_paths, } +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value + def mock_beamline_module_filepaths(bl_name, bl_module): if mock_attributes := mock_attributes_table.get(bl_name):