From 8db90d22d8bfe0c8fcd4b091d031cc0cb9de3e9c Mon Sep 17 00:00:00 2001
From: DiamondJoseph <53935796+DiamondJoseph@users.noreply.github.com>
Date: Thu, 18 Apr 2024 09:27:23 +0100
Subject: [PATCH 1/3] Adopt Copier template (#207)

* Adopt python-copier-template 1.0.0

* Update to python-copier-template 2.1.0

* Ruff and Pyright linting changes

* Adjust link to bluesky autodocs and gitignore

---
 .copier-answers.yml | 17 +++
 .devcontainer/Dockerfile | 37 -----
 .devcontainer/devcontainer.json | 95 +++++++-----
 .git-blame-ignore-revs | 2 +
 .github/CONTRIBUTING.md | 27 ++++
 .github/CONTRIBUTING.rst | 35 -----
 .../actions/install_requirements/action.yml | 64 +++-----
 .github/dependabot.yml | 8 +
 .github/pages/make_switcher.py | 20 +--
 .github/workflows/_check.yml | 27 ++++
 .github/workflows/_dist.yml | 36 +++++
 .github/workflows/{docs.yml => _docs.yml} | 27 ++--
 .github/workflows/_pypi.yml | 17 +++
 .github/workflows/_release.yml | 32 ++++
 .github/workflows/_test.yml | 62 ++++++++
 .github/workflows/_tox.yml | 22 +++
 .github/workflows/ci.yml | 59 ++++++++
 .github/workflows/code.yml | 143 ------------------
 .github/workflows/docs_clean.yml | 43 ------
 .github/workflows/linkcheck.yml | 27 ----
 .github/workflows/periodic.yml | 13 ++
 .gitignore | 8 +-
 .pre-commit-config.yaml | 18 +--
 Dockerfile | 13 ++
 README.md | 18 +++
 README.rst | 72 ---------
 docs/_templates/custom-class-template.rst | 1 +
 docs/conf.py | 101 ++++++++-----
 docs/developer/explanations/decisions.rst | 17 ---
 .../0001-record-architecture-decisions.rst | 26 ----
 .../0002-switched-to-pip-skeleton.rst | 35 -----
 docs/developer/how-to/build-docs.rst | 38 -----
 docs/developer/how-to/contribute.rst | 1 -
 docs/developer/how-to/lint.rst | 41 -----
 docs/developer/how-to/make-release.rst | 16 --
 docs/developer/how-to/pin-requirements.rst | 74 ---------
 docs/developer/how-to/run-tests.rst | 12 --
 docs/developer/how-to/static-analysis.rst | 8 -
 docs/developer/how-to/test-container.rst | 25 ---
 docs/developer/how-to/update-tools.rst | 16 --
 docs/developer/index.rst | 64 --------
 docs/developer/reference/standards.rst | 142 -----------------
 docs/developer/tutorials/dev-install.rst | 60 --------
 docs/{user => }/examples/epics_demo.py | 0
 docs/explanations.md | 10 ++
 docs/explanations/decisions.md | 12 ++
 .../0001-record-architecture-decisions.md | 18 +++
 ...0002-switched-to-python-copier-template.md | 28 ++++
 .../decisions/0003-ophyd-async-migration.rst | 2 +-
 .../decisions/0004-repository-structure.rst | 0
 .../0005-respect-black-line-length.rst | 0
 .../0006-procedural-device-definitions.rst | 0
 docs/explanations/decisions/COPYME | 19 +++
 .../explanations/event-loop-choice.rst | 0
 docs/how-to.md | 10 ++
 docs/{user => }/how-to/compound-devices.rst | 4 +-
 docs/how-to/contribute.md | 2 +
 .../how-to/make-a-simple-device.rst | 34 ++++-
 .../how-to/write-tests-for-devices.rst | 8 +-
 docs/index.md | 56 +++++++
 docs/index.rst | 29 ----
 docs/reference.md | 12 ++
 docs/{user => }/reference/api.rst | 2 +-
 docs/tutorials.md | 10 ++
 docs/tutorials/installation.md | 42 +++++
 .../tutorials/using-existing-devices.rst | 18 ++-
 docs/user/explanations/docs-structure.rst | 18 ---
 docs/user/how-to/run-container.rst | 15 --
 docs/user/index.rst | 67 --------
 docs/user/tutorials/installation.rst | 43 ------
 pyproject.toml | 55 +++----
 src/ophyd_async/__init__.py | 5 +-
 src/ophyd_async/core/sim_signal_backend.py | 8 +-
 src/ophyd_async/core/utils.py | 2 +-
 src/ophyd_async/epics/_backend/_aioca.py | 16 +-
 src/ophyd_async/epics/_backend/_p4p.py | 26 ++--
src/ophyd_async/epics/_backend/common.py | 4 +- .../epics/areadetector/writers/_hdffile.py | 8 +- src/ophyd_async/epics/pvi/pvi.py | 7 +- src/ophyd_async/panda/trigger.py | 1 - src/ophyd_async/panda/writers/hdf_writer.py | 4 +- .../panda/writers/panda_hdf_file.py | 8 +- .../planstubs/prepare_trigger_and_dets.py | 1 - tests/conftest.py | 13 ++ tests/core/test_device.py | 4 +- tests/core/test_flyer.py | 8 +- tests/core/test_sim.py | 10 +- tests/core/test_utils.py | 9 +- tests/epics/test_signals.py | 12 +- tests/panda/test_panda.py | 2 - tests/panda/test_panda_utils.py | 1 - tests/panda/test_writer.py | 8 +- tests/test_flyer_with_panda.py | 8 +- 93 files changed, 898 insertions(+), 1400 deletions(-) create mode 100644 .copier-answers.yml delete mode 100644 .devcontainer/Dockerfile create mode 100644 .github/CONTRIBUTING.md delete mode 100644 .github/CONTRIBUTING.rst create mode 100644 .github/workflows/_check.yml create mode 100644 .github/workflows/_dist.yml rename .github/workflows/{docs.yml => _docs.yml} (74%) create mode 100644 .github/workflows/_pypi.yml create mode 100644 .github/workflows/_release.yml create mode 100644 .github/workflows/_test.yml create mode 100644 .github/workflows/_tox.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/code.yml delete mode 100644 .github/workflows/docs_clean.yml delete mode 100644 .github/workflows/linkcheck.yml create mode 100644 .github/workflows/periodic.yml create mode 100644 Dockerfile create mode 100644 README.md delete mode 100644 README.rst delete mode 100644 docs/developer/explanations/decisions.rst delete mode 100644 docs/developer/explanations/decisions/0001-record-architecture-decisions.rst delete mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst delete mode 100644 docs/developer/how-to/build-docs.rst delete mode 100644 docs/developer/how-to/contribute.rst delete mode 100644 docs/developer/how-to/lint.rst delete mode 100644 docs/developer/how-to/make-release.rst delete mode 100644 docs/developer/how-to/pin-requirements.rst delete mode 100644 docs/developer/how-to/run-tests.rst delete mode 100644 docs/developer/how-to/static-analysis.rst delete mode 100644 docs/developer/how-to/test-container.rst delete mode 100644 docs/developer/how-to/update-tools.rst delete mode 100644 docs/developer/index.rst delete mode 100644 docs/developer/reference/standards.rst delete mode 100644 docs/developer/tutorials/dev-install.rst rename docs/{user => }/examples/epics_demo.py (100%) create mode 100644 docs/explanations.md create mode 100644 docs/explanations/decisions.md create mode 100644 docs/explanations/decisions/0001-record-architecture-decisions.md create mode 100644 docs/explanations/decisions/0002-switched-to-python-copier-template.md rename docs/{developer => }/explanations/decisions/0003-ophyd-async-migration.rst (95%) rename docs/{developer => }/explanations/decisions/0004-repository-structure.rst (100%) rename docs/{developer => }/explanations/decisions/0005-respect-black-line-length.rst (100%) rename docs/{developer => }/explanations/decisions/0006-procedural-device-definitions.rst (100%) create mode 100644 docs/explanations/decisions/COPYME rename docs/{user => }/explanations/event-loop-choice.rst (100%) create mode 100644 docs/how-to.md rename docs/{user => }/how-to/compound-devices.rst (92%) create mode 100644 docs/how-to/contribute.md rename docs/{user => }/how-to/make-a-simple-device.rst (66%) rename docs/{user => }/how-to/write-tests-for-devices.rst (82%) create mode 
100644 docs/index.md delete mode 100644 docs/index.rst create mode 100644 docs/reference.md rename docs/{user => }/reference/api.rst (98%) create mode 100644 docs/tutorials.md create mode 100644 docs/tutorials/installation.md rename docs/{user => }/tutorials/using-existing-devices.rst (91%) delete mode 100644 docs/user/explanations/docs-structure.rst delete mode 100644 docs/user/how-to/run-container.rst delete mode 100644 docs/user/index.rst delete mode 100644 docs/user/tutorials/installation.rst diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 0000000000..878df0ca3f --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,17 @@ +# Changes here will be overwritten by Copier +_commit: 2.1.0 +_src_path: gh:DiamondLightSource/python-copier-template +author_email: tom.cobb@diamond.ac.uk +author_name: Tom Cobb +component_owner: '' +description: Asynchronous Bluesky hardware abstraction code, compatible with control + systems like EPICS and Tango +distribution_name: ophyd-async +docker: false +docs_type: sphinx +git_platform: github.com +github_org: bluesky +package_name: ophyd_async +pypi: true +repo_name: ophyd-async +type_checker: pyright diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index 901909949d..0000000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,37 +0,0 @@ -# This file is for use as a devcontainer and a runtime container -# -# The devcontainer should use the build target and run as root with podman -# or docker with user namespaces. -# -FROM python:3.10 as build - -ARG PIP_OPTIONS - -# Add any system dependencies for the developer/build environment here e.g. -# RUN apt-get update && apt-get upgrade -y && \ -# apt-get install -y --no-install-recommends \ -# desired-packages \ -# && rm -rf /var/lib/apt/lists/* - -# set up a virtual environment and put it in PATH -RUN python -m venv /venv -ENV PATH=/venv/bin:$PATH - -# Copy any required context for the pip install over -COPY . /context -WORKDIR /context - -# install python package into /venv -RUN pip install ${PIP_OPTIONS} - -FROM python:3.10-slim as runtime - -# Add apt-get system dependecies for runtime here if needed - -# copy the virtual environment from the build stage and put it in PATH -COPY --from=build /venv/ /venv/ -ENV PATH=/venv/bin:$PATH - -# change this entrypoint if it is not the same as the repo -ENTRYPOINT ["python", "-m", "ophyd_epics_devices"] -CMD ["--version"] diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 3536f89164..a8f81fbc27 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,44 +1,55 @@ // For format details, see https://containers.dev/implementors/json_reference/ { - "name": "Python 3 Developer Container", - "build": { - "dockerfile": "Dockerfile", - "target": "build", - // Only upgrade pip, we will install the project below - "args": { - "PIP_OPTIONS": "--upgrade pip" - } - }, - "remoteEnv": { - "DISPLAY": "${localEnv:DISPLAY}" - }, - "customizations": { - "vscode": { - // Set *default* container specific settings.json values on container create. - // "settings": { - // "python.defaultInterpreterPath": "/venv/bin/python" - // }, - // Add the IDs of extensions you want installed when the container is created. 
- "extensions": [ - "ms-python.python", - "tamasfe.even-better-toml", - "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" - ] - } - }, - // Make sure the files we are mapping into the container exist on the host - "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", - "runArgs": ["--net=host", "--security-opt=label=type:container_runtime_t"], - "mounts": [ - "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", - "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", - // map in home directory - not strictly necessary but useful - "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" - ], - // make the workspace folder the same inside and outside of the container - "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", - "workspaceFolder": "${localWorkspaceFolder}", - // After the container is created, install the python project in editable form - "postCreateCommand": "pip install -e .[dev] --config-settings editable_mode=compat" -} + "name": "Python 3 Developer Container", + "build": { + "dockerfile": "../Dockerfile", + "target": "developer" + }, + "remoteEnv": { + // Allow X11 apps to run inside the container + "DISPLAY": "${localEnv:DISPLAY}" + }, + "customizations": { + "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python", + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + }, + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff" + } + }, + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "ms-python.python", + "github.vscode-github-actions", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters", + "charliermarsh.ruff", + "ms-azuretools.vscode-docker" + ] + } + }, + "features": { + // Some default things like git config + "ghcr.io/devcontainers/features/common-utils:2": { + "upgradePackages": false + } + }, + "runArgs": [ + // Allow the container to access the host X11 display and EPICS CA + "--net=host", + // Make sure SELinux does not disable with access to host filesystems like tmp + "--security-opt=label=disable" + ], + // Mount the parent as /workspaces so we can pip install peers as editable + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", + // After the container is created, install the python project in editable form + "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" +} \ No newline at end of file diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index ed7090ba61..43c7642f3c 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -6,3 +6,5 @@ 20e1b14b541a0331109158c77bc319248be28989 # Use isort to sort imports 881a35b43584103ca572b6f4e472dd8b6fd6ea87 +# Replace flake8 and mypy with ruff and pyrite +e2f8317e7584e4de788c2b39e5b5edaa98c1bc9e \ No newline at end of file diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..1d6f7ce3ba --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# Contribute to the project + +Contributions and issues are most welcome! 
All issues and pull requests are +handled through [GitHub](https://github.com/bluesky/ophyd-async/issues). Also, please check for any existing issues before +filing a new one. If you have a great idea but it involves big changes, please +file a ticket before making a pull request! We want to make sure you don't spend +your time coding something that might not fit the scope of the project. + +## Issue or Discussion? + +Github also offers [discussions](https://github.com/bluesky/ophyd-async/discussions) as a place to ask questions and share ideas. If +your issue is open ended and it is not obvious when it can be "closed", please +raise it as a discussion instead. + +## Code Coverage + +While 100% code coverage does not make a library bug-free, it significantly +reduces the number of easily caught bugs! Please make sure coverage remains the +same or is improved by a pull request! + +## Developer Information + +It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs. + +This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. + +For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.1.0/how-to.html). diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst deleted file mode 100644 index 36ea1e20b3..0000000000 --- a/.github/CONTRIBUTING.rst +++ /dev/null @@ -1,35 +0,0 @@ -Contributing to the project -=========================== - -Contributions and issues are most welcome! All issues and pull requests are -handled through GitHub_. Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. - -.. _GitHub: https://github.com/bluesky/ophyd-async/issues - -Issue or Discussion? --------------------- - -Github also offers discussions_ as a place to ask questions and share ideas. If -your issue is open ended and it is not obvious when it can be "closed", please -raise it as a discussion instead. - -.. _discussions: https://github.com/bluesky/ophyd-async/discussions - -Code coverage -------------- - -While 100% code coverage does not make a library bug-free, it significantly -reduces the number of easily caught bugs! Please make sure coverage remains the -same or is improved by a pull request! - -Developer guide ---------------- - -The `Developer Guide`_ contains information on setting up a development -environment, running the tests and what standards the code and documentation -should follow. - -.. 
_Developer Guide: https://blueskyproject.io/ophyd-async/main/developer/how-to/contribute.html diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index 84be1b910f..d33e080527 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -1,58 +1,34 @@ name: Install requirements -description: Run pip install with requirements and upload resulting requirements +description: Install a version of python then call pip install and report what was installed inputs: - requirements_file: - description: Name of requirements file to use and upload - required: true - install_options: + python-version: + description: Python version to install, default is from Dockerfile + default: "dev" + pip-install: description: Parameters to pass to pip install - required: true - python_version: - description: Python version to install - default: "3.10" + default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]" runs: using: composite - steps: - - name: Setup python - uses: actions/setup-python@v4 - with: - python-version: ${{ inputs.python_version }} - - - name: Pip install - run: | - touch ${{ inputs.requirements_file }} - # -c uses requirements.txt as constraints, see 'Validate requirements file' - pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} - shell: bash - - - name: Create lockfile + - name: Get version of python run: | - mkdir -p lockfiles - pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} - # delete the self referencing line and make sure it isn't blank - sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} + PYTHON_VERSION="${{ inputs.python-version }}" + if [ $PYTHON_VERSION == "dev" ]; then + PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile) + fi + echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV" shell: bash - - name: Upload lockfiles - uses: actions/upload-artifact@v3 + - name: Setup python + uses: actions/setup-python@v5 with: - name: lockfiles - path: lockfiles + python-version: ${{ env.PYTHON_VERSION }} - # This eliminates the class of problems where the requirements being given no - # longer match what the packages themselves dictate. E.g. In the rare instance - # where I install some-package which used to depend on vulnerable-dependency - # but now uses good-dependency (despite being nominally the same version) - # pip will install both if given a requirements file with -r - - name: If requirements file exists, check it matches pip installed packages - run: | - if [ -s ${{ inputs.requirements_file }} ]; then - if ! 
diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then - echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive" - exit 1 - fi - fi + - name: Install packages + run: pip install ${{ inputs.pip-install }} shell: bash + - name: Report what was installed + run: pip freeze + shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fb7c6ee671..184ba3631a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -9,8 +9,16 @@ updates: directory: "/" schedule: interval: "weekly" + groups: + actions: + patterns: + - "*" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" + groups: + dev-dependencies: + patterns: + - "*" diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index d70367aec5..0babd3c6bb 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -24,7 +24,7 @@ def get_sorted_tags_list() -> List[str]: return report_output(stdout, "Tags list") -def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]: +def get_versions(ref: str, add: Optional[str]) -> List[str]: """Generate the file containing the list of all GitHub Pages builds.""" # Get the directories (i.e. builds) from the GitHub Pages branch try: @@ -36,9 +36,6 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st # Add and remove from the list of builds if add: builds.add(add) - if remove: - assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" - builds.remove(remove) # Get a sorted list of tags tags = get_sorted_tags_list() @@ -58,9 +55,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st def write_json(path: Path, repository: str, versions: str): org, repo_name = repository.split("/") + pages_url = f"https://{org}.github.io" + if repo_name != f"{org}.github.io": + # Only add the repo name if it isn't the source for the org pages site + pages_url += f"/{repo_name}" struct = [ - dict(version=version, url=f"https://{org}.github.io/{repo_name}/{version}/") - for version in versions + {"version": version, "url": f"{pages_url}/{version}/"} for version in versions ] text = json.dumps(struct, indent=2) print(f"JSON switcher:\n{text}") @@ -69,16 +69,12 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): parser = ArgumentParser( - description="Make a versions.txt file from gh-pages directories" + description="Make a versions.json file from gh-pages directories" ) parser.add_argument( "--add", help="Add this directory to the list of existing directories", ) - parser.add_argument( - "--remove", - help="Remove this directory from the list of existing directories", - ) parser.add_argument( "repository", help="The GitHub org and repository name: ORG/REPO", @@ -91,7 +87,7 @@ def main(args=None): args = parser.parse_args(args) # Write the versions file - versions = get_versions("origin/gh-pages", args.add, args.remove) + versions = get_versions("origin/gh-pages", args.add) write_json(args.output, args.repository, versions) diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml new file mode 100644 index 0000000000..a6139c19fe --- /dev/null +++ b/.github/workflows/_check.yml @@ -0,0 +1,27 @@ +on: + workflow_call: + outputs: + branch-pr: + description: The PR number if the branch is in one + value: ${{ jobs.pr.outputs.branch-pr }} + +jobs: + pr: + runs-on: "ubuntu-latest" + outputs: + branch-pr: ${{ steps.script.outputs.result }} + 
steps: + - uses: actions/github-script@v7 + id: script + if: github.event_name == 'push' + with: + script: | + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + head: context.repo.owner + ':${{ github.ref_name }}' + }) + if (prs.data.length) { + console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`) + return prs.data[0]["number"] + } diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml new file mode 100644 index 0000000000..b1c4c93c3b --- /dev/null +++ b/.github/workflows/_dist.yml @@ -0,0 +1,36 @@ +on: + workflow_call: + +jobs: + build: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: > + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install produced wheel + uses: ./.github/actions/install_requirements + with: + pip-install: dist/*.whl + + - name: Test module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: python -m $(ls --hide='*.egg-info' src | head -1) --version diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml similarity index 74% rename from .github/workflows/docs.yml rename to .github/workflows/_docs.yml index 1d8a687a98..40446e332b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/_docs.yml @@ -1,17 +1,13 @@ -name: Docs CI - on: - push: - pull_request: + workflow_call: jobs: - docs: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + build: runs-on: ubuntu-latest steps: - name: Avoid git conflicts when tag and branch pushed at same time - if: startsWith(github.ref, 'refs/tags') + if: github.ref_type == 'tag' run: sleep 60 - name: Checkout @@ -21,18 +17,23 @@ jobs: fetch-depth: 0 - name: Install system packages - # Can delete this if you don't use graphviz in your docs run: sudo apt-get install graphviz - name: Install python packages uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - name: Build docs run: tox -e docs + - name: Remove environment.pickle + run: rm build/html/.doctrees/environment.pickle + + - name: Upload built docs artifact + uses: actions/upload-artifact@v4 + with: + name: docs + path: build + - name: Sanitize ref name for docs version run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV @@ -43,11 +44,11 @@ jobs: run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. 
# https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages - keep_files: true + keep_files: true \ No newline at end of file diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml new file mode 100644 index 0000000000..0c5258dbee --- /dev/null +++ b/.github/workflows/_pypi.yml @@ -0,0 +1,17 @@ +on: + workflow_call: + +jobs: + upload: + runs-on: ubuntu-latest + environment: release + + steps: + - name: Download dist artifact + uses: actions/download-artifact@v4 + with: + name: dist + path: dist + + - name: Publish to PyPI using trusted publishing + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml new file mode 100644 index 0000000000..e55efdb37f --- /dev/null +++ b/.github/workflows/_release.yml @@ -0,0 +1,32 @@ +on: + workflow_call: + +jobs: + artifacts: + runs-on: ubuntu-latest + + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Zip up docs + run: | + set -vxeuo pipefail + if [ -d html ]; then + mv html $GITHUB_REF_NAME + zip -r docs.zip $GITHUB_REF_NAME + rm -rf $GITHUB_REF_NAME + fi + + - name: Create GitHub Release + # We pin to the SHA, not the tag, for security reasons. + # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: "*" + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml new file mode 100644 index 0000000000..f652d4145f --- /dev/null +++ b/.github/workflows/_test.yml @@ -0,0 +1,62 @@ +on: + workflow_call: + inputs: + python-version: + type: string + description: The version of python to install + required: true + runs-on: + type: string + description: The runner to run this job on + required: true + secrets: + CODECOV_TOKEN: + required: true + +env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + +jobs: + run: + runs-on: ${{ inputs.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - if: inputs.python-version == 'dev' + name: Install dev versions of python packages + uses: ./.github/actions/install_requirements + + - if: inputs.python-version == 'dev' + name: Write the requirements as an artifact + run: pip freeze --exclude-editable > /tmp/dev-requirements.txt + + - if: inputs.python-version == 'dev' + name: Upload dev-requirements.txt + uses: actions/upload-artifact@v4 + with: + name: dev-requirements + path: /tmp/dev-requirements.txt + + - if: inputs.python-version != 'dev' + name: Install latest versions of python packages + uses: ./.github/actions/install_requirements + with: + python-version: ${{ inputs.python-version }} + pip-install: ".[dev]" + + - name: Run tests + run: tox -e tests + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + name: ${{ inputs.python-version }}/${{ inputs.runs-on }} + files: cov.xml + env: + CODECOV_TOKEN: ${{ 
secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml new file mode 100644 index 0000000000..a13536d3a7 --- /dev/null +++ b/.github/workflows/_tox.yml @@ -0,0 +1,22 @@ +on: + workflow_call: + inputs: + tox: + type: string + description: What to run under tox + required: true + + +jobs: + run: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + + - name: Run tox + run: tox -e ${{ inputs.tox }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..cf5bc80bbc --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,59 @@ +name: CI + +on: + push: + pull_request: + +jobs: + check: + uses: ./.github/workflows/_check.yml + + lint: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_tox.yml + with: + tox: pre-commit,type-checking + + test: + needs: check + if: needs.check.outputs.branch-pr == '' + strategy: + matrix: + runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest + python-version: ["3.10","3.11"] # 3.12 should be added when p4p is updated + include: + # Include one that runs in the dev environment + - runs-on: "ubuntu-latest" + python-version: "dev" + fail-fast: false + uses: ./.github/workflows/_test.yml + with: + runs-on: ${{ matrix.runs-on }} + python-version: ${{ matrix.python-version }} + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + docs: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_docs.yml + + dist: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_dist.yml + + pypi: + if: github.ref_type == 'tag' + needs: dist + uses: ./.github/workflows/_pypi.yml + permissions: + id-token: write + + release: + if: github.ref_type == 'tag' + needs: [dist, docs] + uses: ./.github/workflows/_release.yml + permissions: + contents: write diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml deleted file mode 100644 index 41381549fb..0000000000 --- a/.github/workflows/code.yml +++ /dev/null @@ -1,143 +0,0 @@ -name: Code CI - -on: - push: - pull_request: - -jobs: - lint: - # pull requests are a duplicate of a branch push if within the same repo. 
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - - - name: Lint - run: tox -e pre-commit,mypy - - test: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.10","3.11"] # 3.12 should be added when p4p is updated - install: ["-e .[dev]"] - # Make one version be non-editable to test both paths of version code - include: - - os: "ubuntu-latest" - python: "3.10" - install: ".[dev]" - - runs-on: ${{ matrix.os }} - env: - # https://github.com/pytest-dev/pytest/issues/2042 - PY_IGNORE_IMPORTMISMATCH: "1" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt - install_options: ${{ matrix.install }} - - - name: List dependency tree - run: pipdeptree - - - name: Run tests - run: tox -e pytest - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 - with: - name: ${{ matrix.python }}/${{ matrix.os }} - files: cov.xml - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - - dist: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: "ubuntu-latest" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Build sdist and wheel - run: | - export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ - pipx run build - - - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v3 - with: - name: dist - path: dist - - - name: Check for packaging errors - run: pipx run twine check --strict dist/* - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: "3.11" - requirements_file: requirements.txt - install_options: dist/*.whl - - - name: Test module --version works using the installed wheel - # If more than one module in src/ replace with module name to test - run: python -m $(ls src | head -1) --version - - release: - # upload to PyPI and make a release on every tag - needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} - runs-on: ubuntu-latest - permissions: - id-token: write - contents: write - env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} - - steps: - - uses: actions/download-artifact@v3 - - - name: Fixup blank lockfiles - # Github release artifacts can't be blank - run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done - - - name: Github Release - # We pin to the SHA, not the tag, for security reasons. 
- # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 - with: - prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} - files: | - dist/* - lockfiles/* - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Publish wheels to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - packages-dir: ./dist/ diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml deleted file mode 100644 index e324640e78..0000000000 --- a/.github/workflows/docs_clean.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docs Cleanup CI - -# delete branch documentation when a branch is deleted -# also allow manually deleting a documentation version -on: - delete: - workflow_dispatch: - inputs: - version: - description: "documentation version to DELETE" - required: true - type: string - -jobs: - remove: - if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - - name: removing documentation for branch ${{ github.event.ref }} - if: ${{ github.event_name != 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV - - - name: manually removing documentation version ${{ github.event.inputs.version }} - if: ${{ github.event_name == 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV - - - name: Sanitize ref name for docs version - run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - - - name: update index and push changes - run: | - rm -r $DOCS_VERSION - python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json - git config --global user.name 'GitHub Actions Docs Cleanup CI' - git config --global user.email 'GithubActionsCleanup@noreply.github.com' - git commit -am "Removing redundant docs version $DOCS_VERSION" - git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml deleted file mode 100644 index 3b24af5584..0000000000 --- a/.github/workflows/linkcheck.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Link Check - -on: - workflow_dispatch: - schedule: - # Run weekly to check URL links still resolve - - cron: "0 8 * * WED" - -jobs: - docs: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - - - name: Check links - run: tox -e docs build -- -b linkcheck - - - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 \ No newline at end of file diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml new file mode 100644 index 0000000000..e2a0fd1b9e --- /dev/null +++ b/.github/workflows/periodic.yml @@ -0,0 +1,13 @@ +name: Periodic + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + linkcheck: + uses: ./.github/workflows/_tox.yml + with: + tox: docs build -- -b linkcheck diff --git a/.gitignore b/.gitignore index 1ce3f7a583..0992cd727a 100644 --- a/.gitignore +++ b/.gitignore @@ -66,7 +66,7 @@ venv* # further build artifacts lockfiles/ -# Origional File +# Original File 
+# Original File
 *.pyc
 *.swp
 *.bak
@@ -80,8 +80,12 @@
 .ipynb_checkpoints
 
 # generated docs
-docs/*/generated
+docs/generated
 docs/savefig
 
 # generated version number
 ophyd_async/_version.py
+
+
+# ruff cache
+.ruff_cache/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aa2a4cb2c8..5a4cbf7b41 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-added-large-files
      - id: check-yaml
@@ -8,16 +8,16 @@ repos:
 
   - repo: local
    hooks:
-      - id: black
-        name: Run black
-        stages: [commit]
+      - id: ruff
+        name: lint with ruff
         language: system
-        entry: black --check --diff
+        entry: ruff check --force-exclude
         types: [python]
+        require_serial: true
 
-      - id: flake8
-        name: Run flake8
-        stages: [commit]
+      - id: ruff-format
+        name: format with ruff
         language: system
-        entry: flake8
+        entry: ruff format --force-exclude
         types: [python]
+        require_serial: true
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000..c4404ecabb
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,13 @@
+# The devcontainer should use the developer target and run as root with podman
+# or docker with user namespaces.
+ARG PYTHON_VERSION=3.11
+FROM python:${PYTHON_VERSION} as developer
+
+# Add any system dependencies for the developer/build environment here
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    graphviz \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set up a virtual environment and put it in PATH
+RUN python -m venv /venv
+ENV PATH=/venv/bin:$PATH
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..3eb3456249
--- /dev/null
+++ b/README.md
@@ -0,0 +1,18 @@
+[![CI](https://github.com/bluesky/ophyd-async/actions/workflows/ci.yml/badge.svg)](https://github.com/bluesky/ophyd-async/actions/workflows/ci.yml)
+[![Coverage](https://codecov.io/gh/bluesky/ophyd-async/branch/main/graph/badge.svg)](https://codecov.io/gh/bluesky/ophyd-async)
+[![PyPI](https://img.shields.io/pypi/v/ophyd-async.svg)](https://pypi.org/project/ophyd-async)
+[![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
+
+# ophyd_async
+
+Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
+
+| Source        | <https://github.com/bluesky/ophyd-async>          |
+| :-----------: | :-----------------------------------------------: |
+| PyPI          | `pip install ophyd-async`                         |
+| Documentation | <https://bluesky.github.io/ophyd-async>           |
+| Releases      | <https://github.com/bluesky/ophyd-async/releases> |
+
+<!-- README only content. Anything below this line won't be included in index.md -->
+
+See https://bluesky.github.io/ophyd-async for more detailed documentation.
diff --git a/README.rst b/README.rst
deleted file mode 100644
index fdb3d76136..0000000000
--- a/README.rst
+++ /dev/null
@@ -1,72 +0,0 @@
-Ophyd Async
-===========
-
-|code_ci| |docs_ci| |coverage| |pypi_version| |license|
-
-Asynchronous device abstraction framework, building on `Ophyd`_.
-
-============== ==============================================================
-PyPI           ``pip install ophyd-async``
-Source code    https://github.com/bluesky/ophyd-async
-Documentation  https://blueskyproject.io/ophyd-async
-============== ==============================================================
-
-Python library for asynchronously interfacing with hardware, intended to
-be used as an abstraction layer that enables experiment orchestration and data
-acquisition code to operate above the specifics of particular devices and control
-systems.
- -Both ophyd and ophyd-async are typically used with the `Bluesky Run Engine`_ for -experiment orchestration and data acquisition. However, these libraries are -able to be used in a stand-alone fashion. For an example of how a facility defines -and uses ophyd-async devices, see `dls-dodal`_, which is currently using a -mixture of ophyd and ophyd-async devices. - -While `EPICS`_ is the most common control system layer that ophyd-async can -interface with, other control systems like `Tango`_ are used by some facilities -also. In addition to the abstractions provided by ophyd, ophyd-async allows: - -* Asynchronous signal access, opening the possibility for hardware-triggered - scanning (also known as fly-scanning) -* Simpler instantiation of devices (groupings of signals) with less reliance - upon complex class hierarchies - -NOTE: ophyd-async is included on a provisional basis until the v1.0 release. - -See the tutorials for usage examples. - -.. |code_ci| image:: https://github.com/bluesky/ophyd-async/actions/workflows/code.yml/badge.svg?branch=main - :target: https://github.com/bluesky/ophyd-async/actions/workflows/code.yml - :alt: Code CI - -.. |docs_ci| image:: https://github.com/bluesky/ophyd-async/actions/workflows/docs.yml/badge.svg?branch=main - :target: https://github.com/bluesky/ophyd-async/actions/workflows/docs.yml - :alt: Docs CI - -.. |coverage| image:: https://codecov.io/gh/bluesky/ophyd-async/branch/master/graph/badge.svg - :target: https://codecov.io/gh/bluesky/ophyd-async - :alt: Test Coverage - -.. |pypi_version| image:: https://img.shields.io/pypi/v/ophyd-async.svg - :target: https://pypi.org/project/ophyd-async - :alt: Latest PyPI version - -.. |license| image:: https://img.shields.io/badge/License-BSD%203--Clause-blue.svg - :target: https://opensource.org/licenses/BSD-3-Clause - :alt: BSD 3-Clause License - -.. _Bluesky Run Engine: http://blueskyproject.io/bluesky - -.. _Ophyd: http://blueskyproject.io/ophyd - -.. _dls-dodal: https://github.com/DiamondLightSource/dodal - -.. _EPICS: http://www.aps.anl.gov/epics/ - -.. _Tango: https://www.tango-controls.org/ - -.. - Anything below this line is used when viewing README.rst and will be replaced - when included in index.rst - -See https://blueskyproject.io/ophyd-async for more detailed documentation. diff --git a/docs/_templates/custom-class-template.rst b/docs/_templates/custom-class-template.rst index 1be188ca53..236b77063c 100644 --- a/docs/_templates/custom-class-template.rst +++ b/docs/_templates/custom-class-template.rst @@ -9,6 +9,7 @@ .. autoclass:: {{ objname }} :members: + :undoc-members: :show-inheritance: :inherited-members: :special-members: __call__, __add__, __mul__ diff --git a/docs/conf.py b/docs/conf.py index 4a0b72fb7f..856fd9482c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,9 +14,7 @@ import ophyd_async # -- General configuration ------------------------------------------------ -# Source code dir relative to this file sys.path.insert(0, os.path.abspath("../../src")) - # General information about the project. project = "ophyd-async" copyright = "2014, Brookhaven National Lab" @@ -71,6 +69,7 @@ # domain name if present. Example entries would be ('py:func', 'int') or # ('envvar', 'LD_LIBRARY_PATH'). 
nitpick_ignore = [ + # builtins ("py:class", "NoneType"), ("py:class", "'str'"), ("py:class", "'float'"), @@ -78,6 +77,7 @@ ("py:class", "'bool'"), ("py:class", "'object'"), ("py:class", "'id'"), + # typing ("py:class", "typing_extensions.Literal"), ] @@ -118,21 +118,20 @@ # docs in the python documentation. intersphinx_mapping = { "python": ("https://docs.python.org/3", None), - "bluesky": ("https://blueskyproject.io/bluesky/", None), + "bluesky": ("https://blueskyproject.io/bluesky/main", None), "numpy": ("https://numpy.org/devdocs/", None), "databroker": ("https://blueskyproject.io/databroker/", None), "event-model": ("https://blueskyproject.io/event-model/main", None), } # A dictionary of graphviz graph attributes for inheritance diagrams. -inheritance_graph_attrs = dict(rankdir="TB") +inheritance_graph_attrs = {"rankdir": "TB"} # Common links that should be available on every page rst_epilog = """ .. _NSLS: https://www.bnl.gov/nsls2 .. _black: https://github.com/psf/black -.. _flake8: https://flake8.pycqa.org/en/latest/ -.. _isort: https://github.com/PyCQA/isort +.. _ruff: https://beta.ruff.rs/docs/ .. _mypy: http://mypy-lang.org/ .. _pre-commit: https://pre-commit.com/ """ @@ -151,12 +150,11 @@ # a list of builtin themes. # html_theme = "pydata_sphinx_theme" -github_repo = project +github_repo = "ophyd-async" github_user = "bluesky" switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" -# Don't check switcher if it doesn't exist, but warn in a non-failing way -check_switcher = requests.get(switcher_json).ok -if not check_switcher: +switcher_exists = requests.get(switcher_json).ok +if not switcher_exists: print( "*** Can't read version switcher, is GitHub pages enabled? \n" " Once Docs CI job has successfully run once, set the " @@ -166,41 +164,58 @@ ) # Theme options for pydata_sphinx_theme -html_theme_options = dict( - use_edit_page_button=True, - github_url=f"https://github.com/{github_user}/{github_repo}", - icon_links=[ - dict( - name="PyPI", - url=f"https://pypi.org/project/{project}", - icon="fas fa-cube", - ), - dict( - name="Gitter", - url="https://gitter.im/NSLS-II/DAMA", - icon="fas fa-person-circle-question", - ), +# We don't check switcher because there are 3 possible states for a repo: +# 1. New project, docs are not published so there is no switcher +# 2. Existing project with latest skeleton, switcher exists and works +# 3. Existing project with old skeleton that makes broken switcher, +# switcher exists but is broken +# Point 3 makes checking switcher difficult, because the updated skeleton +# will fix the switcher at the end of the docs workflow, but never gets a chance +# to complete as the docs build warns and fails. 
+html_theme_options = { + "logo": { + "text": project, + }, + "use_edit_page_button": True, + "github_url": f"https://github.com/{github_user}/{github_repo}", + "icon_links": [ + { + "name": "PyPI", + "url": f"https://pypi.org/project/{project}", + "icon": "fas fa-cube", + }, + { + "name": "Gitter", + "url": "https://gitter.im/NSLS-II/DAMA", + "icon": "fas fa-person-circle-question", + }, ], - external_links=[ - dict( - name="Bluesky Project", - url="https://blueskyproject.io", - ) + "switcher": { + "json_url": switcher_json, + "version_match": version, + }, + "check_switcher": False, + "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], + "external_links": [ + { + "name": "Bluesky Project", + "url": "https://blueskyproject.io", + }, + { + "name": "Release Notes", + "url": f"https://github.com/{github_user}/{github_repo}/releases", + }, ], - navigation_with_keys=False, -) - + "navigation_with_keys": False, +} # A dictionary of values to pass into the template engine’s context for all pages -html_context = dict( - github_user=github_user, - github_repo=project, - github_version="master", - doc_path="docs", -) - -html_logo = "images/bluesky_ophyd_logo.svg" -html_favicon = "images/ophyd_favicon.svg" +html_context = { + "github_user": github_user, + "github_repo": project, + "github_version": version, + "doc_path": "docs", +} # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. html_show_sphinx = False @@ -208,6 +223,10 @@ # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. html_show_copyright = False +# Logo +html_logo = "images/bluesky_ophyd_logo.svg" +html_favicon = "images/ophyd_favicon.svg" + # If False and a module has the __all__ attribute set, autosummary documents # every member listed in __all__ and no others. Default is True autosummary_ignore_module_all = False diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst deleted file mode 100644 index 5841e6ea07..0000000000 --- a/docs/developer/explanations/decisions.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -Architectural Decision Records -============================== - -We record major architectural decisions in Architecture Decision Records (ADRs), -as `described by Michael Nygard -`_. -Below is the list of our current ADRs. - -.. toctree:: - :maxdepth: 1 - :glob: - - decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst deleted file mode 100644 index b2d3d0fe87..0000000000 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ /dev/null @@ -1,26 +0,0 @@ -1. Record architecture decisions -================================ - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We need to record the architectural decisions made on this project. - -Decision --------- - -We will use Architecture Decision Records, as `described by Michael Nygard -`_. - -Consequences ------------- - -See Michael Nygard's article, linked above. To create new ADRs we will copy and -paste from existing ones. 
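For context on the version switcher that the new ``docs/conf.py`` above configures:
``.github/pages/make_switcher.py`` publishes a ``switcher.json`` to the gh-pages
branch, and the ``switcher`` entry in ``html_theme_options`` tells
``pydata_sphinx_theme`` where to fetch it. A minimal sketch of that contract,
assuming builds named ``main`` and ``1.0`` already exist on gh-pages (the real
list is computed from the gh-pages directories and the repository tags)::

    import json
    from pathlib import Path

    # Mirror the struct built by write_json() in make_switcher.py
    pages_url = "https://bluesky.github.io/ophyd-async"
    struct = [
        {"version": version, "url": f"{pages_url}/{version}/"}
        for version in ["main", "1.0"]
    ]
    Path("switcher.json").write_text(json.dumps(struct, indent=2))

The theme renders one dropdown entry per element, matching the current build
against the ``version_match`` value set in ``html_theme_options["switcher"]``.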
diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst deleted file mode 100644 index d0cd738402..0000000000 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ /dev/null @@ -1,35 +0,0 @@ -2. Adopt ophyd-async for project structure -========================================== - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We should use the following `pip-skeleton `_. -The skeleton will ensure consistency in developer -environments and package management. - -Decision --------- - -We have switched to using the skeleton. - -Consequences ------------- - -This module will use a fixed set of tools as developed in ophyd-async -and can pull from this skeleton to update the packaging to the latest techniques. - -As such, the developer environment may have changed, the following could be -different: - -- linting -- formatting -- pip venv setup -- CI/CD diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst deleted file mode 100644 index 0174fc82dd..0000000000 --- a/docs/developer/how-to/build-docs.rst +++ /dev/null @@ -1,38 +0,0 @@ -Build the docs using sphinx -=========================== - -You can build the `sphinx`_ based docs from the project directory by running:: - - $ tox -e docs - -This will build the static docs on the ``docs`` directory, which includes API -docs that pull in docstrings from the code. - -.. seealso:: - - `documentation_standards` - -The docs will be built into the ``build/html`` directory, and can be opened -locally with a web browser:: - - $ firefox build/html/index.html - -Autobuild ---------- - -You can also run an autobuild process, which will watch your ``docs`` -directory for changes and rebuild whenever it sees changes, reloading any -browsers watching the pages:: - - $ tox -e docs autobuild - -You can view the pages at localhost:: - - $ firefox http://localhost:8000 - -If you are making changes to source code too, you can tell it to watch -changes in this directory too:: - - $ tox -e docs autobuild -- --watch src - -.. _sphinx: https://www.sphinx-doc.org/ \ No newline at end of file diff --git a/docs/developer/how-to/contribute.rst b/docs/developer/how-to/contribute.rst deleted file mode 100644 index 65b992f08e..0000000000 --- a/docs/developer/how-to/contribute.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../../.github/CONTRIBUTING.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst deleted file mode 100644 index 8f4e92dbbf..0000000000 --- a/docs/developer/how-to/lint.rst +++ /dev/null @@ -1,41 +0,0 @@ -Run linting using pre-commit -============================ - -Code linting is handled by black_, flake8_ and isort_ run under pre-commit_. - -Running pre-commit ------------------- - -You can run the above checks on all files with this command:: - - $ tox -e pre-commit - -Or you can install a pre-commit hook that will run each time you do a ``git -commit`` on just the files that have changed:: - - $ pre-commit install - -It is also possible to `automatically enable pre-commit on cloned repositories `_. -This will result in pre-commits being enabled on every repo your user clones from now on. - -Fixing issues -------------- - -If black reports an issue you can tell it to reformat all the files in the -repository:: - - $ black . - -Likewise with isort:: - - $ isort . - -If you get any flake8 issues you will have to fix those manually. 
- -VSCode support --------------- - -The ``.vscode/settings.json`` will run black and isort formatters as well as -flake8 checking on save. Issues will be highlighted in the editor window. - - diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst deleted file mode 100644 index 29953f9074..0000000000 --- a/docs/developer/how-to/make-release.rst +++ /dev/null @@ -1,16 +0,0 @@ -Make a release -============== - -To make a new release, please follow this checklist: - -- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) -- Go to the GitHub release_ page -- Choose ``Draft New Release`` -- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) -- Click ``Generate release notes``, review and edit these notes -- Choose a title and click ``Publish Release`` - -Note that tagging and pushing to the main branch has the same effect except that -you will not get the option to edit the release notes. - -.. _release: https://github.com/bluesky/ophyd-async/releases diff --git a/docs/developer/how-to/pin-requirements.rst b/docs/developer/how-to/pin-requirements.rst deleted file mode 100644 index 278bdeed18..0000000000 --- a/docs/developer/how-to/pin-requirements.rst +++ /dev/null @@ -1,74 +0,0 @@ -Pinning Requirements -==================== - -Introduction ------------- - -By design this project only defines dependencies in one place, i.e. in -the ``requires`` table in ``pyproject.toml``. - -In the ``requires`` table it is possible to pin versions of some dependencies -as needed. For library projects it is best to leave pinning to a minimum so -that your library can be used by the widest range of applications. - -When CI builds the project it will use the latest compatible set of -dependencies available (after applying your pins and any dependencies' pins). - -This approach means that there is a possibility that a future build may -break because an updated release of a dependency has made a breaking change. - -The correct way to fix such an issue is to work out the minimum pinning in -``requires`` that will resolve the problem. However this can be quite hard to -do and may be time consuming when simply trying to release a minor update. - -For this reason we provide a mechanism for locking all dependencies to -the same version as a previous successful release. This is a quick fix that -should guarantee a successful CI build. - -Finding the lock files ----------------------- - -Every release of the project will have a set of requirements files published -as release assets. - -For example take a look at the release page for python3-pip-skeleton-cli here: -https://github.com/bluesky/python3-pip-skeleton-cli/releases/tag/3.3.0 - -There is a list of requirements*.txt files showing as assets on the release. - -There is one file for each time the CI installed the project into a virtual -environment. There are multiple of these as the CI creates a number of -different environments. - -The files are created using ``pip freeze`` and will contain a full list -of the dependencies and sub-dependencies with pinned versions. - -You can download any of these files by clicking on them. It is best to use -the one that ran with the lowest Python version as this is more likely to -be compatible with all the versions of Python in the test matrix. -i.e. ``requirements-test-ubuntu-latest-3.10.txt`` in this example. 
- -Applying the lock file ----------------------- - -To apply a lockfile: - -- copy the requirements file you have downloaded to the root of your - repository -- rename it to requirements.txt -- commit it into the repo -- push the changes - -The CI looks for a requirements.txt in the root and will pass it to pip -when installing each of the test environments. pip will then install exactly -the same set of packages as the previous release. - -Removing dependency locking from CI ------------------------------------ - -Once the reasons for locking the build have been resolved it is a good idea -to go back to an unlocked build. This is because you get an early indication -of any incoming problems. - -To restore unlocked builds in CI simply remove requirements.txt from the root -of the repo and push. diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst deleted file mode 100644 index d2e03644c2..0000000000 --- a/docs/developer/how-to/run-tests.rst +++ /dev/null @@ -1,12 +0,0 @@ -Run the tests using pytest -========================== - -Testing is done with pytest_. It will find functions in the project that `look -like tests`_, and run them to check for errors. You can run it with:: - - $ tox -e pytest - -It will also report coverage to the commandline and to ``cov.xml``. - -.. _pytest: https://pytest.org/ -.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst deleted file mode 100644 index 065920e1c6..0000000000 --- a/docs/developer/how-to/static-analysis.rst +++ /dev/null @@ -1,8 +0,0 @@ -Run static analysis using mypy -============================== - -Static type analysis is done with mypy_. It checks type definition in source -files without running them, and highlights potential issues where types do not -match. You can run it with:: - - $ tox -e mypy diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst deleted file mode 100644 index a4a43a6ffc..0000000000 --- a/docs/developer/how-to/test-container.rst +++ /dev/null @@ -1,25 +0,0 @@ -Container Local Build and Test -============================== - -CI builds a runtime container for the project. The local tests -checks available via ``tox -p`` do not verify this because not -all developers will have docker installed locally. - -If CI is failing to build the container, then it is best to fix and -test the problem locally. This would require that you have docker -or podman installed on your local workstation. - -In the following examples the command ``docker`` is interchangeable with -``podman`` depending on which container cli you have installed. - -To build the container and call it ``test``:: - - cd - docker build -t test . - -To verify that the container runs:: - - docker run -it test --help - -You can pass any other command line parameters to your application -instead of --help. diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst deleted file mode 100644 index 7c78f94a65..0000000000 --- a/docs/developer/how-to/update-tools.rst +++ /dev/null @@ -1,16 +0,0 @@ -Update the tools -================ - -This module is merged with the python3-pip-skeleton_. This is a generic -Python project structure which provides a means to keep tools and -techniques in sync between multiple Python projects. 
To update to the -latest version of the skeleton, run:: - - $ git pull --rebase=false https://github.com/bluesky/python3-pip-skeleton - -Any merge conflicts will indicate an area where something has changed that -conflicts with the setup of the current module. Check the `closed pull requests -`_ -of the skeleton module for more details. - -.. _python3-pip-skeleton: https://blueskyproject.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst deleted file mode 100644 index 8a6369b9cd..0000000000 --- a/docs/developer/index.rst +++ /dev/null @@ -1,64 +0,0 @@ -Developer Guide -=============== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_run;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/dev-install - - +++ - - Tutorials for getting up and running as a developer. - - .. grid-item-card:: :material-regular:`task;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/contribute - how-to/build-docs - how-to/run-tests - how-to/static-analysis - how-to/lint - how-to/update-tools - how-to/make-release - how-to/pin-requirements - how-to/test-container - - +++ - - Practical step-by-step guides for day-to-day dev tasks. - - .. grid-item-card:: :material-regular:`apartment;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/decisions - - +++ - - Explanations of how and why the architecture is why it is. - - .. grid-item-card:: :material-regular:`description;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/standards - - +++ - - Technical reference material on standards in use. diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst deleted file mode 100644 index a5eb405ffb..0000000000 --- a/docs/developer/reference/standards.rst +++ /dev/null @@ -1,142 +0,0 @@ -Standards -========= - -This document defines the code and documentation standards used in this -repository. - -Code Standards --------------- - -The code in this repository conforms to standards set by the following tools: - -- black_ for code formatting -- flake8_ for style checks -- isort_ for import ordering -- mypy_ for static type checking - -.. seealso:: - - How-to guides `../how-to/lint` and `../how-to/static-analysis` - -.. _documentation_standards: - -Documentation Standards ------------------------ - -Docstrings are pre-processed using the Sphinx Napoleon extension. As such, -numpydoc-style_ is considered as standard for this repository. Please use type -hints in the function signature for types. For example: - -.. code:: python - - def foo(var1, var2, *args, long_var_name="hi", only_seldom_used_keyword=0, **kwargs): - r"""Summarize the function in one line. - - Several sentences providing an extended description. Refer to - variables using back-ticks, e.g. `var`. - - Parameters - ---------- - var1 : array_like - Array_like means all those objects -- lists, nested lists, etc. -- - that can be converted to an array. We can also refer to - variables like `var1`. - var2 : int - The type above can either refer to an actual Python type - (e.g. ``int``), or describe the type of the variable in more - detail, e.g. ``(N,) ndarray`` or ``array_like``. - *args : iterable - Other arguments. - long_var_name : {'hi', 'ho'}, optional - Choices in brackets, default first when optional. 
- - Returns - ------- - type - Explanation of anonymous return value of type ``type``. - describe : type - Explanation of return value named `describe`. - out : type - Explanation of `out`. - type_without_description - - Other Parameters - ---------------- - only_seldom_used_keyword : int, optional - Infrequently used parameters can be described under this optional - section to prevent cluttering the Parameters section. - **kwargs : dict - Other infrequently used keyword arguments. Note that all keyword - arguments appearing after the first parameter specified under the - Other Parameters section, should also be described under this - section. - - Raises - ------ - BadException - Because you shouldn't have done that. - - See Also - -------- - numpy.array : Relationship (optional). - numpy.ndarray : Relationship (optional), which could be fairly long, in - which case the line wraps here. - numpy.dot, numpy.linalg.norm, numpy.eye - - Notes - ----- - Notes about the implementation algorithm (if needed). - - This can have multiple paragraphs. - - You may include some math: - - .. math:: X(e^{j\omega } ) = x(n)e^{ - j\omega n} - - And even use a Greek symbol like :math:`\omega` inline. - - References - ---------- - Cite the relevant literature, e.g. [1]_. You may also cite these - references in the notes section above. - - .. [1] O. McNoleg, "The integration of GIS, remote sensing, - expert systems and adaptive co-kriging for environmental habitat - modelling of the Highland Haggis using object-oriented, fuzzy-logic - and neural-network techniques," Computers & Geosciences, vol. 22, - pp. 585-588, 1996. - - Examples - -------- - These are written in doctest format, and should illustrate how to - use the function. - - >>> a = [1, 2, 3] - >>> print([x + 3 for x in a]) - [4, 5, 6] - >>> print("a\nb") - a - b - - """ - pass - -.. _numpydoc-style: https://numpydoc.readthedocs.io/en/latest/format.html - -Documentation is contained in the ``docs`` directory and extracted from -docstrings of the API. - -Docs follow the underlining convention:: - - Headling 1 (page title) - ======================= - - Heading 2 - --------- - - Heading 3 - ~~~~~~~~~ - -.. seealso:: - - How-to guide `../how-to/build-docs` \ No newline at end of file diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst deleted file mode 100644 index 0a32bd19a2..0000000000 --- a/docs/developer/tutorials/dev-install.rst +++ /dev/null @@ -1,60 +0,0 @@ -Developer install -================= - -These instructions will take you through the minimal steps required to get a dev -environment setup, so you can run the tests locally. - -Clone the repository --------------------- - -First clone the repository locally using `Git -`_:: - - $ git clone git://github.com/bluesky/ophyd-async.git - -Install dependencies --------------------- - -You can choose to either develop on the host machine using a `venv` (which -requires python 3.10 or later) or to run in a container under `VSCode -`_ - -.. tab-set:: - - .. tab-item:: Local virtualenv - - .. code:: - - $ cd ophyd-async - $ python3 -m venv venv - $ source venv/bin/activate - $ pip install -e '.[dev]' - - .. tab-item:: VSCode devcontainer - - .. 
code:: - - $ vscode ophyd-async - # Click on 'Reopen in Container' when prompted - # Open a new terminal - -See what was installed ---------------------- - -To see a graph of the python package dependency tree type:: - - $ pipdeptree - -Build and test -------------- - -Now you have a development environment you can run the tests in a terminal:: - - $ tox -p - -This will run in parallel the following checks: - -- `../how-to/build-docs` -- `../how-to/run-tests` -- `../how-to/static-analysis` -- `../how-to/lint`
diff --git a/docs/user/examples/epics_demo.py b/docs/examples/epics_demo.py similarity index 100% rename from docs/user/examples/epics_demo.py rename to docs/examples/epics_demo.py
diff --git a/docs/explanations.md b/docs/explanations.md new file mode 100644 index 0000000000..73ab289b60 --- /dev/null +++ b/docs/explanations.md @@ -0,0 +1,10 @@ +# Explanations + +Explanations of how it works and why it works that way. + +```{toctree} +:maxdepth: 1 +:glob: + +explanations/* +```
diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md new file mode 100644 index 0000000000..0533b98d45 --- /dev/null +++ b/docs/explanations/decisions.md @@ -0,0 +1,12 @@ +# Architectural Decision Records + +Architectural decisions are made throughout a project's lifetime. To keep track of them, we record each one in an Architecture Decision Record (ADR), listed below. + +```{toctree} +:glob: true +:maxdepth: 1 + +decisions/* +``` + +For more information on ADRs, see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md new file mode 100644 index 0000000000..44d234efce --- /dev/null +++ b/docs/explanations/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,18 @@ +# 1. Record architecture decisions + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones.
diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md new file mode 100644 index 0000000000..66fe5d8b24 --- /dev/null +++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md @@ -0,0 +1,28 @@ +# 2. Adopt python-copier-template for project structure + +## Status + +Accepted + +## Context + +We should use the [python-copier-template](https://github.com/DiamondLightSource/python-copier-template). +The template will ensure consistency in developer +environments and package management. + +## Decision + +We have switched to using the template. + +## Consequences + +This module will use a fixed set of tools as developed in `python-copier-template` +and can pull from this template to update the packaging to the latest techniques, as sketched below.
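A sketch of how such a template update is typically pulled in (assuming the standard `copier` CLI; this invocation is an illustration for this ADR, not part of the patch):

```
# Re-apply the answers recorded in .copier-answers.yml, pulling the
# latest template changes into the working tree for review
$ pip install copier
$ copier update .
```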
+ +As such, the developer environment may have changed; the following could be different: + +- linting +- formatting +- pip venv setup +- CI/CD
diff --git a/docs/developer/explanations/decisions/0003-ophyd-async-migration.rst b/docs/explanations/decisions/0003-ophyd-async-migration.rst similarity index 95% rename from docs/developer/explanations/decisions/0003-ophyd-async-migration.rst rename to docs/explanations/decisions/0003-ophyd-async-migration.rst index b023bcab5a..2c33c132db 100644 --- a/docs/developer/explanations/decisions/0003-ophyd-async-migration.rst +++ b/docs/explanations/decisions/0003-ophyd-async-migration.rst @@ -47,4 +47,4 @@ Consequences ------------ This will require changing the repository structure of Ophyd Async; see -the decision on repository structure `./0004-repository-structure` for details. \ No newline at end of file +the decision on repository structure :doc:`0004-repository-structure` for details. \ No newline at end of file
diff --git a/docs/developer/explanations/decisions/0004-repository-structure.rst b/docs/explanations/decisions/0004-repository-structure.rst similarity index 100% rename from docs/developer/explanations/decisions/0004-repository-structure.rst rename to docs/explanations/decisions/0004-repository-structure.rst
diff --git a/docs/developer/explanations/decisions/0005-respect-black-line-length.rst b/docs/explanations/decisions/0005-respect-black-line-length.rst similarity index 100% rename from docs/developer/explanations/decisions/0005-respect-black-line-length.rst rename to docs/explanations/decisions/0005-respect-black-line-length.rst
diff --git a/docs/developer/explanations/decisions/0006-procedural-device-definitions.rst b/docs/explanations/decisions/0006-procedural-device-definitions.rst similarity index 100% rename from docs/developer/explanations/decisions/0006-procedural-device-definitions.rst rename to docs/explanations/decisions/0006-procedural-device-definitions.rst
diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME new file mode 100644 index 0000000000..b466c79299 --- /dev/null +++ b/docs/explanations/decisions/COPYME @@ -0,0 +1,19 @@ +# 3. Short descriptive title + +Date: Today's date + +## Status + +Accepted + +## Context + +Background to allow us to make the decision, to show how we arrived at our conclusions. + +## Decision + +What decision we made. + +## Consequences + +What we will do as a result of this decision.
diff --git a/docs/user/explanations/event-loop-choice.rst b/docs/explanations/event-loop-choice.rst similarity index 100% rename from docs/user/explanations/event-loop-choice.rst rename to docs/explanations/event-loop-choice.rst
diff --git a/docs/how-to.md b/docs/how-to.md new file mode 100644 index 0000000000..6b16141727 --- /dev/null +++ b/docs/how-to.md @@ -0,0 +1,10 @@ +# How-to Guides + +Practical step-by-step guides for the more experienced user. + +```{toctree} +:maxdepth: 1 +:glob: + +how-to/* +```
diff --git a/docs/user/how-to/compound-devices.rst b/docs/how-to/compound-devices.rst similarity index 92% rename from docs/user/how-to/compound-devices.rst rename to docs/how-to/compound-devices.rst index 4722b58f9a..e4a2d7d92a 100644 --- a/docs/user/how-to/compound-devices.rst +++ b/docs/how-to/compound-devices.rst @@ -11,7 +11,7 @@ Assembly Compound assemblies can be used to group Devices into larger logical Devices: -.. literalinclude:: ../../../src/ophyd_async/epics/demo/__init__.py +..
literalinclude:: ../../src/ophyd_async/epics/demo/__init__.py :pyobject: SampleStage This applies prefixes on construction: @@ -35,7 +35,7 @@ Grouping by Index Sometimes, it makes sense to group devices by number, say an array of sensors: -.. literalinclude:: ../../../src/ophyd_async/epics/demo/__init__.py +.. literalinclude:: ../../src/ophyd_async/epics/demo/__init__.py :pyobject: SensorGroup :class:`~ophyd-async.core.DeviceVector` allows writing maintainable, arbitrary-length device groups instead of fixed classes for each possible grouping. A :class:`~ophyd-async.core.DeviceVector` can be accessed via indices, for example: ``my_sensor_group.sensors[2]``. Here ``sensors`` is a dictionary with integer indices rather than a list so that the most semantically sensible indices may be used; the sensor group above may be 1-indexed, for example, because the sensors' datasheet calls them "sensor 1", "sensor 2", etc.
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md new file mode 100644 index 0000000000..f9c4ca1d75 --- /dev/null +++ b/docs/how-to/contribute.md @@ -0,0 +1,2 @@ +```{include} ../../.github/CONTRIBUTING.md +``` \ No newline at end of file
diff --git a/docs/user/how-to/make-a-simple-device.rst b/docs/how-to/make-a-simple-device.rst similarity index 66% rename from docs/user/how-to/make-a-simple-device.rst rename to docs/how-to/make-a-simple-device.rst index dcad73bb99..01f12c3e53 100644 --- a/docs/user/how-to/make-a-simple-device.rst +++ b/docs/how-to/make-a-simple-device.rst @@ -11,18 +11,18 @@ Make a Simple Device To make a simple device, you need to subclass from the `StandardReadable` class, create some `Signal` instances, and optionally implement other suitable Bluesky `Protocols ` like -:class:`~bluesky.protocols.Movable`. +:external+bluesky:py:class:`bluesky.protocols.Movable`. The rest of this guide will show examples from ``src/ophyd_async/epics/demo/__init__.py`` Readable -------- -For a simple :class:`~bluesky.protocols.Readable` object like a `Sensor`, you need to +For a simple :external+bluesky:py:class:`bluesky.protocols.Readable` object like a `Sensor`, you need to define some signals, then tell the superclass which signals should contribute to ``read()`` and ``read_configuration()``: -.. literalinclude:: ../../../src/ophyd_async/epics/demo/__init__.py +.. literalinclude:: ../../src/ophyd_async/epics/demo/__init__.py :pyobject: Sensor First some Signals are constructed and stored on the Device. Each one is passed @@ -53,10 +53,10 @@ Movable For a more complicated device like a `Mover`, you can still use `StandardReadable` and implement some additional protocols: -.. literalinclude:: ../../../src/ophyd_async/epics/demo/__init__.py +.. literalinclude:: ../../src/ophyd_async/epics/demo/__init__.py :pyobject: Mover -The ``set()`` method implements :class:`~bluesky.protocols.Movable`. This +The ``set()`` method implements :external+bluesky:py:class:`bluesky.protocols.Movable`. This creates a `coroutine` ``do_set()`` which gets the old position, units and precision in parallel, sets the setpoint, then observes the readback value, informing watchers of the progress. When it gets to the requested value it @@ -64,3 +64,27 @@ completes. This coroutine is wrapped in a timeout handler, and passed to an `AsyncStatus` which will start executing it as soon as the Run Engine adds a callback to it. The ``stop()`` method then pokes a PV if the move needs to be interrupted.
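A condensed, hypothetical sketch of that ``set()``/`AsyncStatus` pattern (the signal names, tolerance check, and superclass call here are illustrative assumptions; the canonical implementation is the ``Mover`` in ``src/ophyd_async/epics/demo/__init__.py``):

```python
import asyncio
from typing import Optional

from ophyd_async.core import AsyncStatus, StandardReadable, observe_value
from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw


class SketchMover(StandardReadable):
    """Cut-down illustration of the Mover pattern, not the real device."""

    def __init__(self, prefix: str, name: str = "") -> None:
        self.setpoint = epics_signal_rw(float, prefix + "Setpoint")
        self.readback = epics_signal_r(float, prefix + "Readback")
        self.precision = epics_signal_r(int, prefix + "Readback.PREC")
        super().__init__(name=name)

    def set(self, new_position: float, timeout: Optional[float] = None) -> AsyncStatus:
        # Wrap the move coroutine in a timeout handler and hand it to an
        # AsyncStatus; it starts executing once the RunEngine adds a callback
        return AsyncStatus(asyncio.wait_for(self._move(new_position), timeout))

    async def _move(self, new_position: float) -> None:
        # Fetch metadata in parallel before starting the move; a full
        # implementation would use these to report progress to watchers
        old_position, precision = await asyncio.gather(
            self.setpoint.get_value(), self.precision.get_value()
        )
        await self.setpoint.set(new_position, wait=False)
        # Observe the readback until it reaches the requested value
        async for current in observe_value(self.readback):
            if abs(current - new_position) < 10**-precision:
                break
```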
+ +Assembly +-------- + +Compound assemblies can be used to group Devices into larger logical Devices: + +.. literalinclude:: ../../src/ophyd_async/epics/demo/__init__.py + :pyobject: SampleStage + +This applies prefixes on construction: + +- SampleStage is passed a prefix like ``DEVICE:`` +- SampleStage.x will append its prefix ``X:`` to get ``DEVICE:X:`` +- SampleStage.x.velocity will append its suffix ``Velocity`` to get + ``DEVICE:X:Velocity`` + +If SampleStage is further nested in another Device, another layer of prefix +nesting would occur. + +.. note:: + + SampleStage does not pass any signals into its superclass init, so its + ``read()`` method will return an empty dictionary. This means you + can ``rd sample_stage.x``, but not ``rd sample_stage``.
diff --git a/docs/user/how-to/write-tests-for-devices.rst b/docs/how-to/write-tests-for-devices.rst similarity index 82% rename from docs/user/how-to/write-tests-for-devices.rst rename to docs/how-to/write-tests-for-devices.rst index 258d195d9a..c1e5ca1c43 100644 --- a/docs/user/how-to/write-tests-for-devices.rst +++ b/docs/how-to/write-tests-for-devices.rst @@ -24,9 +24,9 @@ Async Tests Sim Backend ----------- -Ophyd devices initialized with a sim backend behave in a similar way to mocks, without requiring you to mock out all the dependencies and internals. The :class:`~ophyd-async.core.DeviceCollector` can initialize any number of devices, and their signals and sub-devices (recursively), with a sim backend. +Ophyd devices initialized with a sim backend behave in a similar way to mocks, without requiring you to mock out all the dependencies and internals. The `DeviceCollector` can initialize any number of devices, and their signals and sub-devices (recursively), with a sim backend. -.. literalinclude:: ../../../tests/epics/demo/test_demo.py +.. literalinclude:: ../../tests/epics/demo/test_demo.py :pyobject: sim_sensor @@ -35,11 +35,11 @@ Sim Utility Functions Sim signals behave as simply as possible, holding a sensible default value when initialized and retaining any value (in memory) to which they are set. This model breaks down in the case of read-only signals, which cannot be set because there is an expectation of some external device setting them in the real world. There is a utility function, ``set_sim_value``, to mock-set values for sim signals, including read-only ones. -.. literalinclude:: ../../../tests/epics/demo/test_demo.py +.. literalinclude:: ../../tests/epics/demo/test_demo.py :pyobject: test_sensor_reading_shows_value There is another utility function, ``set_sim_callback``, for hooking in logic when a sim value changes (e.g. because someone puts to it). -.. literalinclude:: ../../../tests/epics/demo/test_demo.py +.. literalinclude:: ../../tests/epics/demo/test_demo.py :pyobject: test_mover_stopped
diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000..730b3fdc1a --- /dev/null +++ b/docs/index.md @@ -0,0 +1,56 @@ +--- +html_theme.sidebar_secondary.remove: true +--- + +```{include} ../README.md +:end-before: <!-- README only content +``` + +::::{grid} 2 +:gutter: 4 + +:::{grid-item-card} {material-regular}`directions_walk;2em` +```{toctree} +:maxdepth: 2 +tutorials +``` ++++ +Tutorials for installation and typical usage. New users start here. +::: + +:::{grid-item-card} {material-regular}`directions;2em` +```{toctree} +:maxdepth: 2 +how-to +``` ++++ +Practical step-by-step guides for the more experienced user.
+::: + +:::{grid-item-card} {material-regular}`info;2em` +```{toctree} +:maxdepth: 2 +explanations +``` ++++ +Explanations of how it works and why it works that way. +::: + +:::{grid-item-card} {material-regular}`menu_book;2em` +```{toctree} +:maxdepth: 2 +reference +``` ++++ +Technical reference material including APIs and release notes. +::: + +::::
diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 054db2b377..0000000000 --- a/docs/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -.. include:: ../README.rst - :end-before: when included in index.rst - -How the documentation is structured ----------------------------------- - -The documentation is split into 2 sections: - -.. grid:: 2 - - .. grid-item-card:: :material-regular:`person;4em` - :link: user/index - :link-type: doc - - The User Guide contains documentation on how to install and use ophyd-async. - - .. grid-item-card:: :material-regular:`code;4em` - :link: developer/index - :link-type: doc - - The Developer Guide contains documentation on how to develop and contribute changes back to ophyd-async. - -.. toctree:: - :hidden: - - user/index - developer/index
diff --git a/docs/reference.md b/docs/reference.md new file mode 100644 index 0000000000..b9e0e3b834 --- /dev/null +++ b/docs/reference.md @@ -0,0 +1,12 @@ +# Reference + +Technical reference material including APIs and release notes. + +```{toctree} +:maxdepth: 1 +:glob: + +reference/* +genindex +Release Notes <https://github.com/bluesky/ophyd-async/releases> +```
diff --git a/docs/user/reference/api.rst b/docs/reference/api.rst similarity index 98% rename from docs/user/reference/api.rst rename to docs/reference/api.rst index 362154c8d1..66747af774 100644 --- a/docs/user/reference/api.rst +++ b/docs/reference/api.rst @@ -26,4 +26,4 @@ This is the internal API reference for ophyd_async core epics - panda + panda \ No newline at end of file
diff --git a/docs/tutorials.md b/docs/tutorials.md new file mode 100644 index 0000000000..1fe66c541d --- /dev/null +++ b/docs/tutorials.md @@ -0,0 +1,10 @@ +# Tutorials + +Tutorials for installation and typical usage. New users start here. + +```{toctree} +:maxdepth: 1 +:glob: + +tutorials/* +```
diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md new file mode 100644 index 0000000000..a55b96b935 --- /dev/null +++ b/docs/tutorials/installation.md @@ -0,0 +1,42 @@ +# Installation + +## Check your version of Python + +You will need Python 3.10 or later. You can check your version of Python by +typing into a terminal: + +``` +$ python3 --version +``` + +## Create a virtual environment + +It is recommended that you install into a “virtual environment” so this +installation will not interfere with any existing Python software: + +``` +$ python3 -m venv /path/to/venv +$ source /path/to/venv/bin/activate +``` + +## Installing the library + +You can now use `pip` to install the library and its dependencies: + +``` +$ python3 -m pip install ophyd-async +``` + +If you require a feature that is not currently released, you can also install +from GitHub: + +``` +$ python3 -m pip install git+https://github.com/bluesky/ophyd-async.git +``` + +The library should now be installed and the command-line interface on your path.
+You can check the version that has been installed by typing: + +``` +$ ophyd-async --version +``` diff --git a/docs/user/tutorials/using-existing-devices.rst b/docs/tutorials/using-existing-devices.rst similarity index 91% rename from docs/user/tutorials/using-existing-devices.rst rename to docs/tutorials/using-existing-devices.rst index 067d01ca99..6ac638eefb 100644 --- a/docs/user/tutorials/using-existing-devices.rst +++ b/docs/tutorials/using-existing-devices.rst @@ -82,10 +82,11 @@ You can now run ipython with this startup file:: .. ipython:: python :suppress: + :okexcept: import sys from pathlib import Path - sys.path.append(str(Path(".").absolute()/"docs/user/examples")) + sys.path.append(str(Path(".").absolute()/"docs/examples")) from epics_demo import * # Turn off progressbar and table RE.waiting_hook = None @@ -103,6 +104,7 @@ can be used in plans. We can move the ``samp.x`` mover to 100mm using `bluesky.plan_stubs.mv`: .. ipython:: + :okexcept: In [1]: RE(mov(samp.x, 100)) @@ -111,6 +113,7 @@ If this is too verbose to write, we registered a shorthand with ``RE(my_plan(args))``. The command above can also be run as: .. ipython:: + :okexcept: In [1]: `_ diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst deleted file mode 100644 index 2639fb919e..0000000000 --- a/docs/user/how-to/run-container.rst +++ /dev/null @@ -1,15 +0,0 @@ -Run in a container -================== - -Pre-built containers with ophyd-epics-devices and its dependencies already -installed are available on `Github Container Registry -`_. - -Starting the container ----------------------- - -To pull the container from github container registry and run:: - - $ docker run ghcr.io/bluesky/ophyd-async:main --version - -To get a released version, use a numbered release instead of ``main``. diff --git a/docs/user/index.rst b/docs/user/index.rst deleted file mode 100644 index bc6f84e684..0000000000 --- a/docs/user/index.rst +++ /dev/null @@ -1,67 +0,0 @@ -.. note:: - - Ophyd async is included on a provisional basis until the v1.0 release and - may change API on minor release numbers before then - -User Guide -========== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_walk;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/installation - tutorials/using-existing-devices - - +++ - - Tutorials for installation and typical usage. New users start here. - - .. grid-item-card:: :material-regular:`directions;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/make-a-simple-device - how-to/compound-devices - how-to/write-tests-for-devices - how-to/run-container - - +++ - - Practical step-by-step guides for the more experienced user. - - .. grid-item-card:: :material-regular:`info;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/docs-structure - explanations/event-loop-choice - - +++ - - Explanations of how the library works and why it works that way. - - .. grid-item-card:: :material-regular:`menu_book;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/api - ../genindex - - +++ - - Technical reference material including APIs and release notes. diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst deleted file mode 100644 index 73874b60cf..0000000000 --- a/docs/user/tutorials/installation.rst +++ /dev/null @@ -1,43 +0,0 @@ -.. 
note:: - - Ophyd async is included on a provisional basis until the v1.0 release and - may change API on minor release numbers before then - -Installation -============ - -Check your version of python ----------------------------- - -You will need python 3.10 or later. You can check your version of python by -typing into a terminal:: - - $ python3 --version - - -Create a virtual environment ----------------------------- - -It is recommended that you install into a “virtual environment” so this -installation will not interfere with any existing Python software:: - - $ python3 -m venv /path/to/venv - $ source /path/to/venv/bin/activate - - -Installing the library ----------------------- - -You can now use ``pip`` to install the library and its dependencies:: - - $ python3 -m pip install ophyd-async - -If you require a feature that is not currently released you can also install -from github:: - - $ python3 -m pip install git+https://github.com/bluesky/ophyd-async.git - -The library should now be installed and the commandline interface on your path. -You can check the version that has been installed by typing:: - - $ ophyd-async --version diff --git a/pyproject.toml b/pyproject.toml index e007a4435c..3238f5ce0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,6 @@ classifiers = [ ] description = "Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango" dependencies = [ - "typing-extensions;python_version<'3.8'", "networkx>=2.0", "numpy", "packaging", @@ -22,10 +21,9 @@ dependencies = [ "p4p", "pyyaml", ] - dynamic = ["version"] license.file = "LICENSE" -readme = "README.rst" +readme = "README.md" requires-python = ">=3.10" [project.optional-dependencies] @@ -43,7 +41,6 @@ dev = [ "ipython", "ipywidgets", "matplotlib", - "mypy", "myst-parser", "numpydoc", "ophyd", @@ -59,6 +56,7 @@ dev = [ "pytest-faulthandler", "pytest-rerunfailures", "pytest-timeout", + "ruff", "sphinx-autobuild", "sphinx-copybutton", "sphinx-design", @@ -81,34 +79,15 @@ name = "Tom Cobb" [tool.setuptools_scm] write_to = "src/ophyd_async/_version.py" -[tool.mypy] -ignore_missing_imports = true # Ignore missing stubs in imported modules -plugins = ["numpy.typing.mypy_plugin"] - -[tool.isort] -float_to_top = true -profile = "black" - -[tool.flake8] -extend-ignore = [ - "E203", # See https://github.com/PyCQA/pycodestyle/issues/373 - "F811", # support typing.overload decorator - "F722", # allow Annotated[typ, some_func("some string")] - "W504", - "W503", -] -max-line-length = 88 # Respect black's line length (default 88), -exclude = [".tox", "venv"] - [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error addopts = """ - --tb=native -vv --strict-markers --doctest-modules --doctest-glob="*.rst" - --ignore=docs/user/examples --ignore=docs/examples + --tb=native -vv --strict-markers --doctest-modules + --doctest-glob="*.rst" --doctest-glob="*.md" --ignore=docs/examples --cov=src/ophyd_async --cov-report term --cov-report xml:cov.xml -""" + """ # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings -filterwarnings = ["error", "ignore::DeprecationWarning:pkg_resources"] +filterwarnings = "error" # Doctest python code in docs, python code in src docstrings, test functions in tests testpaths = "docs src tests" log_format = "%(asctime)s,%(msecs)03d %(levelname)s (%(threadName)s) %(message)s" @@ -134,19 +113,31 @@ legacy_tox_ini = """ [tox] skipsdist=True -[testenv:{pre-commit,mypy,pytest,docs}] 
+[testenv:{pre-commit,type-checking,tests,docs}] # Don't create a virtualenv for the command; requires the tox-direct plugin direct = True passenv = * -allowlist_externals = - pytest +allowlist_externals = + pytest pre-commit -    mypy +    ruff sphinx-build sphinx-autobuild commands = - pytest: pytest --cov=ophyd_async --cov-report term --cov-report xml:cov.xml {posargs} - mypy: mypy src tests {posargs} + tests: pytest --cov=ophyd_async --cov-report term --cov-report xml:cov.xml {posargs} + type-checking: ruff check src tests {posargs} pre-commit: pre-commit run --all-files {posargs} docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ + + +[tool.ruff] +src = ["src", "tests"] +line-length = 88 +select = [ + "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e + "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "I001", # isort +]
diff --git a/src/ophyd_async/__init__.py b/src/ophyd_async/__init__.py index 4100754a36..26d23badb6 100644 --- a/src/ophyd_async/__init__.py +++ b/src/ophyd_async/__init__.py @@ -1,6 +1,3 @@ -from importlib.metadata import version # noqa - -__version__ = version("ophyd-async") -del version +from ._version import __version__ __all__ = ["__version__"]
diff --git a/src/ophyd_async/core/sim_signal_backend.py b/src/ophyd_async/core/sim_signal_backend.py index a2575dc9b8..b0190344bb 100644 --- a/src/ophyd_async/core/sim_signal_backend.py +++ b/src/ophyd_async/core/sim_signal_backend.py @@ -41,7 +41,7 @@ def descriptor(self, source: str, value) -> Descriptor: type(value) in primitive_dtypes ), f"invalid converter for value of type {type(value)}" dtype = primitive_dtypes[type(value)] - return dict(source=source, dtype=dtype, shape=[]) + return {"source": source, "dtype": dtype, "shape": []} def make_initial_value(self, datatype: Optional[Type[T]]) -> T: if datatype is None: @@ -52,7 +52,7 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T: class SimArrayConverter(SimConverter): def descriptor(self, source: str, value) -> Descriptor: - return dict(source=source, dtype="array", shape=[len(value)]) + return {"source": source, "dtype": "array", "shape": [len(value)]} def make_initial_value(self, datatype: Optional[Type[T]]) -> T: if datatype is None: @@ -76,9 +76,7 @@ def write_value(self, value: Union[Enum, str]) -> Enum: def descriptor(self, source: str, value) -> Descriptor: choices = [e.value for e in self.enum_class] - return dict( - source=source, dtype="string", shape=[], choices=choices - ) # type: ignore + return {"source": source, "dtype": "string", "shape": [], "choices": choices} # type: ignore def make_initial_value(self, datatype: Optional[Type[T]]) -> T: if datatype is None:
diff --git a/src/ophyd_async/core/utils.py b/src/ophyd_async/core/utils.py index 6863c1d1f2..ad70bcb62e 100644 --- a/src/ophyd_async/core/utils.py +++ b/src/ophyd_async/core/utils.py @@ -132,7 +132,7 @@ def get_unique(values: Dict[str, T], types: str) -> T: async def merge_gathered_dicts( - coros: Iterable[Awaitable[Dict[str, T]]] + coros: Iterable[Awaitable[Dict[str, T]]], ) -> Dict[str, T]: """Merge dictionaries produced by a sequence of coroutines.
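Most of the source changes that follow are mechanical rewrites driven by the Ruff rules selected above: the flake8-comprehensions rule C408 flags `dict(...)` calls with keyword arguments and prefers dict literals. A representative before/after (illustrative values, not a line taken from the diff):

```python
# Before: flagged by Ruff as C408 (unnecessary dict call)
reading = dict(value=42, timestamp=1.0, alarm_severity=0)

# After: a dict literal, the form applied throughout the hunks below
reading = {"value": 42, "timestamp": 1.0, "alarm_severity": 0}
```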
diff --git a/src/ophyd_async/epics/_backend/_aioca.py b/src/ophyd_async/epics/_backend/_aioca.py index 00cb0cbfc3..db8180641b 100644 --- a/src/ophyd_async/epics/_backend/_aioca.py +++ b/src/ophyd_async/epics/_backend/_aioca.py @@ -52,14 +52,14 @@ def value(self, value: AugmentedValue): return value def reading(self, value: AugmentedValue): - return dict( - value=self.value(value), - timestamp=value.timestamp, - alarm_severity=-1 if value.severity > 2 else value.severity, - ) + return { + "value": self.value(value), + "timestamp": value.timestamp, + "alarm_severity": -1 if value.severity > 2 else value.severity, + } def descriptor(self, source: str, value: AugmentedValue) -> Descriptor: - return dict(source=source, dtype=dbr_to_dtype[value.datatype], shape=[]) + return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []} class CaLongStrConverter(CaConverter): @@ -74,7 +74,7 @@ def write_value(self, value: str): class CaArrayConverter(CaConverter): def descriptor(self, source: str, value: AugmentedValue) -> Descriptor: - return dict(source=source, dtype="array", shape=[len(value)]) + return {"source": source, "dtype": "array", "shape": [len(value)]} @dataclass @@ -92,7 +92,7 @@ def value(self, value: AugmentedValue): def descriptor(self, source: str, value: AugmentedValue) -> Descriptor: choices = [e.value for e in self.enum_class] - return dict(source=source, dtype="string", shape=[], choices=choices) + return {"source": source, "dtype": "string", "shape": [], "choices": choices} class DisconnectedCaConverter(CaConverter): diff --git a/src/ophyd_async/epics/_backend/_p4p.py b/src/ophyd_async/epics/_backend/_p4p.py index 0507ff4d32..759d86b7bb 100644 --- a/src/ophyd_async/epics/_backend/_p4p.py +++ b/src/ophyd_async/epics/_backend/_p4p.py @@ -49,15 +49,15 @@ def value(self, value): def reading(self, value): ts = value["timeStamp"] sv = value["alarm"]["severity"] - return dict( - value=self.value(value), - timestamp=ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9, - alarm_severity=-1 if sv > 2 else sv, - ) + return { + "value": self.value(value), + "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9, + "alarm_severity": -1 if sv > 2 else sv, + } def descriptor(self, source: str, value) -> Descriptor: dtype = specifier_to_dtype[value.type().aspy("value")] - return dict(source=source, dtype=dtype, shape=[]) + return {"source": source, "dtype": dtype, "shape": []} def metadata_fields(self) -> List[str]: """ @@ -74,7 +74,7 @@ def value_fields(self) -> List[str]: class PvaArrayConverter(PvaConverter): def descriptor(self, source: str, value) -> Descriptor: - return dict(source=source, dtype="array", shape=[len(value["value"])]) + return {"source": source, "dtype": "array", "shape": [len(value["value"])]} class PvaNDArrayConverter(PvaConverter): @@ -98,7 +98,7 @@ def value(self, value): def descriptor(self, source: str, value) -> Descriptor: dims = self._get_dimensions(value) - return dict(source=source, dtype="array", shape=dims) + return {"source": source, "dtype": "array", "shape": dims} def write_value(self, value): # No clear use-case for writing directly to an NDArray, and some @@ -122,7 +122,7 @@ def value(self, value): def descriptor(self, source: str, value) -> Descriptor: choices = [e.value for e in self.enum_class] - return dict(source=source, dtype="string", shape=[], choices=choices) + return {"source": source, "dtype": "string", "shape": [], "choices": choices} class PvaEnumBoolConverter(PvaConverter): @@ -130,7 +130,7 @@ def value(self, value): return 
value["value"]["index"] def descriptor(self, source: str, value) -> Descriptor: - return dict(source=source, dtype="integer", shape=[]) + return {"source": source, "dtype": "integer", "shape": []} class PvaTableConverter(PvaConverter): @@ -139,7 +139,7 @@ def value(self, value): def descriptor(self, source: str, value) -> Descriptor: # This is wrong, but defer until we know how to actually describe a table - return dict(source=source, dtype="object", shape=[]) # type: ignore + return {"source": source, "dtype": "object", "shape": []} # type: ignore class PvaDictConverter(PvaConverter): @@ -147,7 +147,7 @@ def reading(self, value): ts = time.time() value = value.todict() # Alarm severity is vacuously 0 for a table - return dict(value=value, timestamp=ts, alarm_severity=0) + return {"value": value, "timestamp": ts, "alarm_severity": 0} def value(self, value: Value): return value.todict() @@ -279,7 +279,7 @@ async def put(self, value: Optional[T], wait=True, timeout=None): write_value = self.initial_values[self.write_pv] else: write_value = self.converter.write_value(value) - coro = self.ctxt.put(self.write_pv, dict(value=write_value), wait=wait) + coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait) try: await asyncio.wait_for(coro, timeout) except asyncio.TimeoutError as exc: diff --git a/src/ophyd_async/epics/_backend/common.py b/src/ophyd_async/epics/_backend/common.py index 964d385ca7..eaa6691926 100644 --- a/src/ophyd_async/epics/_backend/common.py +++ b/src/ophyd_async/epics/_backend/common.py @@ -15,6 +15,4 @@ def get_supported_enum_class( choices = tuple(v.value for v in datatype) if set(choices).difference(pv_choices): raise TypeError(f"{pv} has choices {pv_choices}: not all in {choices}") - return Enum( - "GeneratedChoices", {x or "_": x for x in pv_choices}, type=str - ) # type: ignore + return Enum("GeneratedChoices", {x or "_": x for x in pv_choices}, type=str) # type: ignore diff --git a/src/ophyd_async/epics/areadetector/writers/_hdffile.py b/src/ophyd_async/epics/areadetector/writers/_hdffile.py index 474d15e097..19f5a0c4ad 100644 --- a/src/ophyd_async/epics/areadetector/writers/_hdffile.py +++ b/src/ophyd_async/epics/areadetector/writers/_hdffile.py @@ -44,10 +44,10 @@ def stream_resources(self) -> Iterator[StreamResource]: def stream_data(self, indices_written: int) -> Iterator[StreamDatum]: # Indices are relative to resource if indices_written > self._last_emitted: - indices = dict( - start=self._last_emitted, - stop=indices_written, - ) + indices = { + "start": self._last_emitted, + "stop": indices_written, + } self._last_emitted = indices_written for bundle in self._bundles: yield bundle.compose_stream_datum(indices) diff --git a/src/ophyd_async/epics/pvi/pvi.py b/src/ophyd_async/epics/pvi/pvi.py index 37dc70c5ed..ea20656261 100644 --- a/src/ophyd_async/epics/pvi/pvi.py +++ b/src/ophyd_async/epics/pvi/pvi.py @@ -101,7 +101,8 @@ def _verify_common_blocks(entry: PVIEntry, common_device: Type[Device]): _verify_common_blocks(sub_sub_entry, sub_device) # type: ignore else: _verify_common_blocks( - entry.sub_entries[sub_name], sub_device # type: ignore + entry.sub_entries[sub_name], + sub_device, # type: ignore ) @@ -234,9 +235,7 @@ async def _get_pvi_entries(entry: PVIEntry, timeout=DEFAULT_TIMEOUT): sub_number_split = 1 if sub_number_split is None else sub_number_split if sub_name_split not in entry.sub_entries: entry.sub_entries[sub_name_split] = {} - entry.sub_entries[sub_name_split][ - sub_number_split - ] = sub_entry # type: ignore + 
entry.sub_entries[sub_name_split][sub_number_split] = sub_entry # type: ignore else: entry.sub_entries[sub_name] = sub_entry diff --git a/src/ophyd_async/panda/trigger.py b/src/ophyd_async/panda/trigger.py index ef9251b7f5..6e4d056f61 100644 --- a/src/ophyd_async/panda/trigger.py +++ b/src/ophyd_async/panda/trigger.py @@ -13,7 +13,6 @@ class SeqTableInfo: class StaticSeqTableTriggerLogic(TriggerLogic[SeqTableInfo]): - def __init__(self, seq: SeqBlock) -> None: self.seq = seq diff --git a/src/ophyd_async/panda/writers/hdf_writer.py b/src/ophyd_async/panda/writers/hdf_writer.py index 513d9c21e0..dc58fce101 100644 --- a/src/ophyd_async/panda/writers/hdf_writer.py +++ b/src/ophyd_async/panda/writers/hdf_writer.py @@ -63,9 +63,8 @@ class CaptureSignalWrapper: # This should return a dictionary which contains a dict, containing the Capture # signal object, and the value of that signal async def get_signals_marked_for_capture( - capture_signals: Dict[str, SignalR] + capture_signals: Dict[str, SignalR], ) -> Dict[str, CaptureSignalWrapper]: - # Read signals to see if they should be captured do_read = [signal.get_value() for signal in capture_signals.values()] @@ -79,7 +78,6 @@ async def get_signals_marked_for_capture( for signal_path, signal_object, signal_value in zip( capture_signals.keys(), capture_signals.values(), signal_values ): - signal_path = signal_path.replace("_capture", "") if (signal_value.value in iter(Capture)) and (signal_value.value != Capture.No): signals_to_capture[signal_path] = CaptureSignalWrapper( diff --git a/src/ophyd_async/panda/writers/panda_hdf_file.py b/src/ophyd_async/panda/writers/panda_hdf_file.py index 615ae85a94..3b5b77449d 100644 --- a/src/ophyd_async/panda/writers/panda_hdf_file.py +++ b/src/ophyd_async/panda/writers/panda_hdf_file.py @@ -49,10 +49,10 @@ def stream_resources(self) -> Iterator[StreamResource]: def stream_data(self, indices_written: int) -> Iterator[StreamDatum]: # Indices are relative to resource if indices_written > self._last_emitted: - indices = dict( - start=self._last_emitted, - stop=indices_written, - ) + indices = { + "start": self._last_emitted, + "stop": indices_written, + } self._last_emitted = indices_written for bundle in self._bundles: yield bundle.compose_stream_datum(indices) diff --git a/src/ophyd_async/planstubs/prepare_trigger_and_dets.py b/src/ophyd_async/planstubs/prepare_trigger_and_dets.py index ad86ef0a92..08e481b74e 100644 --- a/src/ophyd_async/planstubs/prepare_trigger_and_dets.py +++ b/src/ophyd_async/planstubs/prepare_trigger_and_dets.py @@ -19,7 +19,6 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger( repeats: int = 1, period: float = 0.0, ): - trigger_info = TriggerInfo( num=num * repeats, trigger=DetectorTrigger.constant_gate, diff --git a/tests/conftest.py b/tests/conftest.py index f21d0094e1..9a82fe2fd1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import asyncio +import os import subprocess import sys import time @@ -17,6 +18,18 @@ Path(__file__).parent / "panda" / "db" / "extra_blocks_panda.db" ) +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value + @pytest.fixture(scope="function") def RE(request): diff --git 
a/tests/core/test_device.py b/tests/core/test_device.py index a2d29dd5ad..482666c18c 100644 --- a/tests/core/test_device.py +++ b/tests/core/test_device.py @@ -51,9 +51,7 @@ def test_device_children(parent: DummyDeviceGroup): def test_device_vector_children(): parent = DummyDeviceGroup("root") - device_vector_children = [ - (name, child) for name, child in parent.dict_with_children.children() - ] + device_vector_children = list(parent.dict_with_children.children()) assert device_vector_children == [("123", parent.dict_with_children[123])] diff --git a/tests/core/test_flyer.py b/tests/core/test_flyer.py index 747850f846..53ff7313a7 100644 --- a/tests/core/test_flyer.py +++ b/tests/core/test_flyer.py @@ -97,10 +97,10 @@ async def collect_stream_docs( yield "stream_resource", self._file.stream_resource_doc if indices_written >= self._last_emitted: - indices = dict( - start=self._last_emitted, - stop=indices_written, - ) + indices = { + "start": self._last_emitted, + "stop": indices_written, + } self._last_emitted = indices_written self._last_flush = time.monotonic() yield "stream_datum", self._file.compose_stream_datum(indices) diff --git a/tests/core/test_sim.py b/tests/core/test_sim.py index a7459b9429..baefc850ee 100644 --- a/tests/core/test_sim.py +++ b/tests/core/test_sim.py @@ -18,23 +18,23 @@ class MyEnum(str, Enum): def integer_d(value): - return dict(dtype="integer", shape=[]) + return {"dtype": "integer", "shape": []} def number_d(value): - return dict(dtype="number", shape=[]) + return {"dtype": "number", "shape": []} def string_d(value): - return dict(dtype="string", shape=[]) + return {"dtype": "string", "shape": []} def enum_d(value): - return dict(dtype="string", shape=[], choices=["Aaa", "Bbb", "Ccc"]) + return {"dtype": "string", "shape": [], "choices": ["Aaa", "Bbb", "Ccc"]} def waveform_d(value): - return dict(dtype="array", shape=[len(value)]) + return {"dtype": "array", "shape": [len(value)]} class MonitorQueue: diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py index 59dd8cd3f0..6a02bff77f 100644 --- a/tests/core/test_utils.py +++ b/tests/core/test_utils.py @@ -139,9 +139,11 @@ async def test_error_handling_value_errors(caplog): # This should fail since the error is a ValueError with pytest.raises(NotConnected) as e: - await dummy_device_two_working_one_timeout_two_value_error.connect( - timeout=0.01 - ), + ( + await dummy_device_two_working_one_timeout_two_value_error.connect( + timeout=0.01 + ), + ) assert str(e.value) == str(TWO_WORKING_TWO_TIMEOUT_TWO_VALUE_ERROR_OUTPUT) logs = caplog.get_records("call") @@ -213,7 +215,6 @@ def test_not_connected_error_output(): async def test_combining_top_level_signal_and_child_device(): - dummy_device1 = DummyDeviceCombiningTopLevelSignalAndSubDevice() with pytest.raises(NotConnected) as e: await dummy_device1.connect(timeout=0.01) diff --git a/tests/epics/test_signals.py b/tests/epics/test_signals.py index 2c00bde508..1ab05142b5 100644 --- a/tests/epics/test_signals.py +++ b/tests/epics/test_signals.py @@ -157,23 +157,23 @@ class MyEnum(str, Enum): def integer_d(value): - return dict(dtype="integer", shape=[]) + return {"dtype": "integer", "shape": []} def number_d(value): - return dict(dtype="number", shape=[]) + return {"dtype": "number", "shape": []} def string_d(value): - return dict(dtype="string", shape=[]) + return {"dtype": "string", "shape": []} def enum_d(value): - return dict(dtype="string", shape=[], choices=["Aaa", "Bbb", "Ccc"]) + return {"dtype": "string", "shape": [], "choices": ["Aaa", "Bbb", 
"Ccc"]} def waveform_d(value): - return dict(dtype="array", shape=[len(value)]) + return {"dtype": "array", "shape": [len(value)]} ls1 = "a string that is just longer than forty characters" @@ -389,7 +389,7 @@ async def test_pva_table(ioc: IOC) -> None: enum=[MyEnum.c, MyEnum.b], ) # TODO: what should this be for a variable length table? - descriptor = dict(dtype="object", shape=[]) + descriptor = {"dtype": "object", "shape": []} # Make and connect the backend for t, i, p in [(MyTable, initial, put), (None, put, initial)]: backend = await ioc.make_backend(t, "table") diff --git a/tests/panda/test_panda.py b/tests/panda/test_panda.py index ba9e753e63..1259f199ab 100644 --- a/tests/panda/test_panda.py +++ b/tests/panda/test_panda.py @@ -62,7 +62,6 @@ def __init__(self, prefix: str, name: str = "") -> None: async def connect( self, sim: bool = False, timeout: float = DEFAULT_TIMEOUT ) -> None: - await fill_pvi_entries(self, self._prefix + "PVI", timeout=timeout, sim=sim) await super().connect(sim) @@ -125,7 +124,6 @@ async def test_panda_with_missing_blocks(panda_pva): async def test_panda_with_extra_blocks_and_signals(panda_pva): - panda = PandANoDataBlock("PANDAQSRV:") await panda.connect() assert panda.extra # type: ignore diff --git a/tests/panda/test_panda_utils.py b/tests/panda/test_panda_utils.py index c636b62e01..c0b67a40f7 100644 --- a/tests/panda/test_panda_utils.py +++ b/tests/panda/test_panda_utils.py @@ -37,7 +37,6 @@ async def test_save_panda(mock_save_to_yaml, sim_panda, RE: RunEngine): "data.hdf_file_name": "", "data.num_capture": 0, "pcap.arm": False, - "pcap.arm": False, "pulse.1.delay": 0.0, "pulse.1.width": 0.0, "pulse.2.delay": 0.0, diff --git a/tests/panda/test_writer.py b/tests/panda/test_writer.py index 87238738da..c80893823c 100644 --- a/tests/panda/test_writer.py +++ b/tests/panda/test_writer.py @@ -43,11 +43,13 @@ async def sim_panda() -> PandA: ) set_sim_value( - sim_panda.block1.test_capture, Capture.MinMaxMean # type: ignore[attr-defined] + sim_panda.block1.test_capture, + Capture.MinMaxMean, # type: ignore[attr-defined] ) set_sim_value( - sim_panda.block2.test_capture, Capture.No # type: ignore[attr-defined] + sim_panda.block2.test_capture, + Capture.No, # type: ignore[attr-defined] ) return sim_panda @@ -90,7 +92,6 @@ async def test_get_capture_signals_gets_all_signals(sim_panda): async def test_get_signals_marked_for_capture(sim_panda): - capture_signals = { "block1.test_capture": sim_panda.block1.test_capture, "block2.test_capture": sim_panda.block2.test_capture, @@ -177,7 +178,6 @@ async def test_collect_stream_docs(sim_writer: PandaHDFWriter): async def test_numeric_blocks_correctly_formated(sim_writer: PandaHDFWriter): - async def get_numeric_signal(_): return { "device.block.1": CaptureSignalWrapper( diff --git a/tests/test_flyer_with_panda.py b/tests/test_flyer_with_panda.py index 573177e701..f680ad0395 100644 --- a/tests/test_flyer_with_panda.py +++ b/tests/test_flyer_with_panda.py @@ -74,10 +74,10 @@ async def collect_stream_docs( yield "stream_resource", self._file.stream_resource_doc if indices_written >= self._last_emitted: - indices = dict( - start=self._last_emitted, - stop=indices_written, - ) + indices = { + "start": self._last_emitted, + "stop": indices_written, + } self._last_emitted = indices_written self._last_flush = time.monotonic() yield "stream_datum", self._file.compose_stream_datum(indices) From 8d7ac48153e03dbd932d3f0a8ed2a555676578f0 Mon Sep 17 00:00:00 2001 From: DiamondJoseph <53935796+DiamondJoseph@users.noreply.github.com> Date: 
Thu, 18 Apr 2024 09:33:07 +0100 Subject: [PATCH 2/3] Update Pilatus Controller, Driver to match development of ADAravis, TetrAMM (#191) * Update PilatusDriver, Controller to match Aravis patterns * Add facility generic PilatusDetector --- .../epics/areadetector/__init__.py | 2 + .../controllers/pilatus_controller.py | 60 ++++++---- .../areadetector/drivers/pilatus_driver.py | 8 +- src/ophyd_async/epics/areadetector/pilatus.py | 51 ++++++++ tests/conftest.py | 7 ++ tests/epics/areadetector/test_controllers.py | 8 +- tests/epics/areadetector/test_pilatus.py | 112 ++++++++++++++++++ 7 files changed, 215 insertions(+), 33 deletions(-) create mode 100644 src/ophyd_async/epics/areadetector/pilatus.py create mode 100644 tests/epics/areadetector/test_pilatus.py
diff --git a/src/ophyd_async/epics/areadetector/__init__.py b/src/ophyd_async/epics/areadetector/__init__.py index 69383d6741..9464936536 100644 --- a/src/ophyd_async/epics/areadetector/__init__.py +++ b/src/ophyd_async/epics/areadetector/__init__.py @@ -1,3 +1,4 @@ +from .pilatus import PilatusDetector from .single_trigger_det import SingleTriggerDet from .utils import ( FileWriteMode, @@ -16,4 +17,5 @@ "ad_rw", "NDAttributeDataType", "NDAttributesXML", + "PilatusDetector", ]
diff --git a/src/ophyd_async/epics/areadetector/controllers/pilatus_controller.py b/src/ophyd_async/epics/areadetector/controllers/pilatus_controller.py index 1a238267df..13d52aca36 100644 --- a/src/ophyd_async/epics/areadetector/controllers/pilatus_controller.py +++ b/src/ophyd_async/epics/areadetector/controllers/pilatus_controller.py @@ -1,34 +1,36 @@ import asyncio -from typing import Optional, Set +from typing import Optional -from ophyd_async.core import AsyncStatus, DetectorControl, DetectorTrigger +from ophyd_async.core.async_status import AsyncStatus +from ophyd_async.core.detector import DetectorControl, DetectorTrigger from ophyd_async.epics.areadetector.drivers.ad_base import ( - DEFAULT_GOOD_STATES, - DetectorState, start_acquiring_driver_and_ensure_status, ) - -from ..drivers.pilatus_driver import PilatusDriver, TriggerMode -from ..utils import ImageMode, stop_busy_record - -TRIGGER_MODE = { - DetectorTrigger.internal: TriggerMode.internal, - DetectorTrigger.constant_gate: TriggerMode.ext_enable, - DetectorTrigger.variable_gate: TriggerMode.ext_enable, -} +from ophyd_async.epics.areadetector.drivers.pilatus_driver import ( + PilatusDriver, + PilatusTriggerMode, +) +from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record class PilatusController(DetectorControl): + _supported_trigger_types = { + DetectorTrigger.internal: PilatusTriggerMode.internal, + DetectorTrigger.constant_gate: PilatusTriggerMode.ext_enable, + DetectorTrigger.variable_gate: PilatusTriggerMode.ext_enable, + } + def __init__( self, driver: PilatusDriver, - good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES), ) -> None: - self.driver = driver - self.good_states = good_states + self._drv = driver def get_deadtime(self, exposure: float) -> float: - return 0.001 + """The required minimum time difference between ExpPeriod and ExpTime + (readout time) is 2.28 ms. + + Source: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf + """ + return 2.28e-3 async def arm( self, @@ -36,14 +38,24 @@ trigger: DetectorTrigger = DetectorTrigger.internal, exposure: Optional[float] = None, ) -> AsyncStatus: + if exposure is not None: + await self._drv.acquire_time.set(exposure) await asyncio.gather( - self.driver.trigger_mode.set(TRIGGER_MODE[trigger]), -
-            self.driver.num_images.set(999_999 if num == 0 else num),
-            self.driver.image_mode.set(ImageMode.multiple),
-        )
-        return await start_acquiring_driver_and_ensure_status(
-            self.driver, good_states=self.good_states
+            self._drv.trigger_mode.set(self._get_trigger_mode(trigger)),
+            self._drv.num_images.set(999_999 if num == 0 else num),
+            self._drv.image_mode.set(ImageMode.multiple),
         )
+        return await start_acquiring_driver_and_ensure_status(self._drv)
+
+    @classmethod
+    def _get_trigger_mode(cls, trigger: DetectorTrigger) -> PilatusTriggerMode:
+        if trigger not in cls._supported_trigger_types.keys():
+            raise ValueError(
+                f"{cls.__name__} only supports the following trigger "
+                f"types: {cls._supported_trigger_types.keys()} but was asked to "
+                f"use {trigger}"
+            )
+        return cls._supported_trigger_types[trigger]
 
     async def disarm(self):
-        await stop_busy_record(self.driver.acquire, False, timeout=1)
+        await stop_busy_record(self._drv.acquire, False, timeout=1)
diff --git a/src/ophyd_async/epics/areadetector/drivers/pilatus_driver.py b/src/ophyd_async/epics/areadetector/drivers/pilatus_driver.py
index 8bbfba94be..c0ffeffdfc 100644
--- a/src/ophyd_async/epics/areadetector/drivers/pilatus_driver.py
+++ b/src/ophyd_async/epics/areadetector/drivers/pilatus_driver.py
@@ -4,7 +4,7 @@
 from .ad_base import ADBase
 
 
-class TriggerMode(str, Enum):
+class PilatusTriggerMode(str, Enum):
     internal = "Internal"
     ext_enable = "Ext. Enable"
     ext_trigger = "Ext. Trigger"
@@ -13,6 +13,6 @@
 
 
 class PilatusDriver(ADBase):
-    def __init__(self, prefix: str) -> None:
-        self.trigger_mode = ad_rw(TriggerMode, prefix + "TriggerMode")
-        super().__init__(prefix)
+    def __init__(self, prefix: str, name: str = "") -> None:
+        self.trigger_mode = ad_rw(PilatusTriggerMode, prefix + "TriggerMode")
+        super().__init__(prefix, name)
diff --git a/src/ophyd_async/epics/areadetector/pilatus.py b/src/ophyd_async/epics/areadetector/pilatus.py
new file mode 100644
index 0000000000..539726cdf0
--- /dev/null
+++ b/src/ophyd_async/epics/areadetector/pilatus.py
@@ -0,0 +1,51 @@
+from typing import Optional, Sequence
+
+from bluesky.protocols import Hints
+
+from ophyd_async.core import DirectoryProvider
+from ophyd_async.core.detector import StandardDetector
+from ophyd_async.core.signal import SignalR
+from ophyd_async.epics.areadetector.controllers.pilatus_controller import (
+    PilatusController,
+)
+from ophyd_async.epics.areadetector.drivers.ad_base import ADBaseShapeProvider
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import PilatusDriver
+from ophyd_async.epics.areadetector.writers.hdf_writer import HDFWriter
+from ophyd_async.epics.areadetector.writers.nd_file_hdf import NDFileHDF
+
+
+class PilatusDetector(StandardDetector):
+    """A Pilatus StandardDetector writing HDF files"""
+
+    _controller: PilatusController
+    _writer: HDFWriter
+
+    def __init__(
+        self,
+        prefix: str,
+        name: str,
+        directory_provider: DirectoryProvider,
+        driver: PilatusDriver,
+        hdf: NDFileHDF,
+        config_sigs: Optional[Sequence[SignalR]] = None,
+        **scalar_sigs: str,
+    ):
+        self.drv = driver
+        self.hdf = hdf
+
+        super().__init__(
+            PilatusController(self.drv),
+            HDFWriter(
+                self.hdf,
+                directory_provider,
+                lambda: self.name,
+                ADBaseShapeProvider(self.drv),
+                **scalar_sigs,
+            ),
+            config_sigs=config_sigs or (self.drv.acquire_time,),
+            name=name,
+        )
+
+    @property
+    def hints(self) -> Hints:
+        return self._writer.hints
diff --git a/tests/conftest.py b/tests/conftest.py
index 9a82fe2fd1..9dada83b57 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -9,6 +9,8 @@
 import pytest
 from bluesky.run_engine import RunEngine, TransitionError
 
+from ophyd_async.core import StaticDirectoryProvider
+
 PANDA_RECORD = str(Path(__file__).parent / "panda" / "db" / "panda.db")
 INCOMPLETE_BLOCK_RECORD = str(
     Path(__file__).parent / "panda" / "db" / "incomplete_block_panda.db"
@@ -102,3 +104,8 @@ async def inner_coroutine():
         raise ValueError()
 
     return inner_coroutine
+
+
+@pytest.fixture
+def static_directory_provider(tmp_path: Path):
+    return StaticDirectoryProvider(directory_path=tmp_path)
diff --git a/tests/epics/areadetector/test_controllers.py b/tests/epics/areadetector/test_controllers.py
index 7127ff469a..faa5df5d15 100644
--- a/tests/epics/areadetector/test_controllers.py
+++ b/tests/epics/areadetector/test_controllers.py
@@ -8,9 +8,7 @@
     PilatusController,
 )
 from ophyd_async.epics.areadetector.drivers import ADBase, PilatusDriver
-from ophyd_async.epics.areadetector.drivers.pilatus_driver import (
-    TriggerMode as PilatusTrigger,
-)
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import PilatusTriggerMode
 from ophyd_async.epics.areadetector.utils import ImageMode
 
 
@@ -53,10 +51,10 @@ async def test_pilatus_controller(RE, pilatus: PilatusController):
     with patch("ophyd_async.core.signal.wait_for_value", return_value=None):
         await pilatus.arm(num=1, trigger=DetectorTrigger.constant_gate)
 
-    driver = pilatus.driver
+    driver = pilatus._drv
     assert await driver.num_images.get_value() == 1
     assert await driver.image_mode.get_value() == ImageMode.multiple
-    assert await driver.trigger_mode.get_value() == PilatusTrigger.ext_enable
+    assert await driver.trigger_mode.get_value() == PilatusTriggerMode.ext_enable
     assert await driver.acquire.get_value() is True
 
     with patch(
diff --git a/tests/epics/areadetector/test_pilatus.py b/tests/epics/areadetector/test_pilatus.py
new file mode 100644
index 0000000000..e3c3c03cbb
--- /dev/null
+++ b/tests/epics/areadetector/test_pilatus.py
@@ -0,0 +1,112 @@
+import pytest
+from bluesky.run_engine import RunEngine
+
+from ophyd_async.core import (
+    DetectorTrigger,
+    DeviceCollector,
+    DirectoryProvider,
+    TriggerInfo,
+    set_sim_value,
+)
+from ophyd_async.epics.areadetector.controllers.pilatus_controller import (
+    PilatusController,
+)
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import (
+    PilatusDriver,
+    PilatusTriggerMode,
+)
+from ophyd_async.epics.areadetector.pilatus import PilatusDetector
+from ophyd_async.epics.areadetector.writers.nd_file_hdf import NDFileHDF
+
+
+@pytest.fixture
+async def pilatus_driver(RE: RunEngine) -> PilatusDriver:
+    async with DeviceCollector(sim=True):
+        driver = PilatusDriver("DRV:")
+
+    return driver
+
+
+@pytest.fixture
+async def pilatus_controller(
+    RE: RunEngine, pilatus_driver: PilatusDriver
+) -> PilatusController:
+    async with DeviceCollector(sim=True):
+        controller = PilatusController(pilatus_driver)
+
+    return controller
+
+
+@pytest.fixture
+async def hdf(RE: RunEngine) -> NDFileHDF:
+    async with DeviceCollector(sim=True):
+        hdf = NDFileHDF("HDF:")
+
+    return hdf
+
+
+@pytest.fixture
+async def pilatus(
+    RE: RunEngine,
+    static_directory_provider: DirectoryProvider,
+    pilatus_driver: PilatusDriver,
+    hdf: NDFileHDF,
+) -> PilatusDetector:
+    async with DeviceCollector(sim=True):
+        pilatus = PilatusDetector(
+            "PILATUS:",
+            "pilatus",
+            static_directory_provider,
+            driver=pilatus_driver,
+            hdf=hdf,
+        )
+
+    return pilatus
+
+
+async def test_deadtime_invariant(
+    pilatus_controller: PilatusController,
+):
+    # deadtime invariant with exposure time
+    assert pilatus_controller.get_deadtime(0) == 2.28e-3
+    assert pilatus_controller.get_deadtime(500) == 2.28e-3
+
+
+@pytest.mark.parametrize(
+    "detector_trigger,expected_trigger_mode",
+    [
+        (DetectorTrigger.internal, PilatusTriggerMode.internal),
+        (DetectorTrigger.constant_gate, PilatusTriggerMode.ext_enable),
+        (DetectorTrigger.variable_gate, PilatusTriggerMode.ext_enable),
+    ],
+)
+async def test_trigger_mode_set(
+    pilatus: PilatusDetector,
+    detector_trigger: DetectorTrigger,
+    expected_trigger_mode: PilatusTriggerMode,
+):
+    async def trigger_and_complete():
+        await pilatus.controller.arm(num=1, trigger=detector_trigger)
+        # Prevent timeouts
+        set_sim_value(pilatus.controller._drv.acquire, True)
+
+    # Default TriggerMode
+    assert (await pilatus.drv.trigger_mode.get_value()) == PilatusTriggerMode.internal
+
+    await trigger_and_complete()
+
+    # TriggerSource changes
+    assert (await pilatus.drv.trigger_mode.get_value()) == expected_trigger_mode
+
+
+async def test_hints_from_hdf_writer(pilatus: PilatusDetector):
+    assert pilatus.hints == {"fields": ["pilatus"]}
+
+
+async def test_unsupported_trigger_excepts(pilatus: PilatusDetector):
+    with pytest.raises(
+        ValueError,
+        # str(EnumClass.value) handling changed in Python 3.11
+        match=r"PilatusController only supports the following trigger types: .* but",
+    ):
+        await pilatus.prepare(TriggerInfo(1, DetectorTrigger.edge_trigger, 1, 1))

From 440bdb130073b8846aef9ac73ff7f3e738f0a548 Mon Sep 17 00:00:00 2001
From: Eva Lott
Date: Thu, 18 Apr 2024 10:13:14 +0100
Subject: [PATCH 3/3] slight change to fix linting (#234)

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3238f5ce0a..141bc08ec5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -134,7 +134,7 @@ commands =
 [tool.ruff]
 src = ["src", "tests"]
 line-length = 88
-select = [
+lint.select = [
    "C4",   # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
    "E",    # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e
    "F",    # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f
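
Note for readers (not part of the commits above): a minimal sketch of how the
PilatusDetector introduced in PATCH 2/3 might be constructed in simulation
mode, inferred only from the signatures visible in this series. The PV
prefixes and the data directory used here are illustrative assumptions, not
values taken from the patches.

from pathlib import Path

from ophyd_async.core import DeviceCollector, StaticDirectoryProvider
from ophyd_async.epics.areadetector.drivers.pilatus_driver import PilatusDriver
from ophyd_async.epics.areadetector.pilatus import PilatusDetector
from ophyd_async.epics.areadetector.writers.nd_file_hdf import NDFileHDF


async def make_sim_pilatus() -> PilatusDetector:
    # Assumed prefixes and path; a real deployment would use site-specific PVs
    # and a facility DirectoryProvider instead of this static one.
    directory_provider = StaticDirectoryProvider(directory_path=Path("/tmp/pilatus"))
    async with DeviceCollector(sim=True):
        driver = PilatusDriver("PILATUS:cam1:")
        hdf = NDFileHDF("PILATUS:HDF1:")
        detector = PilatusDetector(
            "PILATUS:",
            "pilatus",
            directory_provider,
            driver=driver,
            hdf=hdf,
        )
    return detector

As in the new test_pilatus.py fixtures, DeviceCollector(sim=True) connects the
devices in simulation; dropping sim=True would attempt real PV connections.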