diff --git a/.copier-answers.yml b/.copier-answers.yml index c499717c..91699d76 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,15 +1,16 @@ # Changes here will be overwritten by Copier -_commit: 1.2.0 +_commit: 2.3.0 _src_path: gh:DiamondLightSource/python-copier-template author_email: tom.cobb@diamond.ac.uk author_name: Tom Cobb -component_owner: group:default/sscc description: Specify step and flyscan paths in a serializable, efficient and Pythonic way distribution_name: scanspec docker: true docs_type: sphinx git_platform: github.com -github_org: dls-controls +github_org: bluesky package_name: scanspec +pypi: true repo_name: scanspec +type_checker: pyright diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 79b85ff4..b8781ba4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -13,7 +13,8 @@ "vscode": { // Set *default* container specific settings.json values on container create. "settings": { - "python.defaultInterpreterPath": "/venv/bin/python" + "python.defaultInterpreterPath": "/venv/bin/python", + "remote.autoForwardPorts": false }, // Add the IDs of extensions you want installed when the container is created. "extensions": [ @@ -43,4 +44,4 @@ "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", // After the container is created, install the python project in editable form "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" -} \ No newline at end of file +} diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 58db3d99..328eb421 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,14 +1,14 @@ # Contribute to the project Contributions and issues are most welcome! All issues and pull requests are -handled through [GitHub](https://github.com/dls-controls/scanspec/issues). Also, please check for any existing issues before +handled through [GitHub](https://github.com/bluesky/scanspec/issues). Also, please check for any existing issues before filing a new one. If you have a great idea but it involves big changes, please file a ticket before making a pull request! We want to make sure you don't spend your time coding something that might not fit the scope of the project. ## Issue or Discussion? -Github also offers [discussions](https://github.com/dls-controls/scanspec/discussions) as a place to ask questions and share ideas. If +Github also offers [discussions](https://github.com/bluesky/scanspec/discussions) as a place to ask questions and share ideas. If your issue is open ended and it is not obvious when it can be "closed", please raise it as a discussion instead. @@ -24,4 +24,4 @@ It is recommended that developers use a [vscode devcontainer](https://code.visua This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. -For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/1.2.0/how-to.html). +For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.2.0/how-to.html). 
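For context on the `.copier-answers.yml` hunk above: this file is what `copier update` reads to decide which template version (`_commit`) and project answers the repository was generated with, and as its header says it is overwritten by Copier rather than edited by hand. A minimal sketch, not part of the diff and assuming PyYAML is available, of inspecting the recorded answers:

```python
# Illustrative only: read the answers file that the copier template maintains.
from pathlib import Path

import yaml  # assumption: PyYAML is installed alongside copier

answers = yaml.safe_load(Path(".copier-answers.yml").read_text())
print(answers["_commit"])           # "2.3.0" after this change
print(answers["github_org"])        # "bluesky"
print(answers.get("type_checker"))  # "pyright", newly recorded by the 2.x template
```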
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..aa65892f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,21 @@ +--- +name: Bug Report +about: The template to use for reporting bugs and usability issues +title: " " +labels: 'bug' +assignees: '' + +--- + +Describe the bug, including a clear and concise description of the expected behavior, the actual behavior and the context in which you encountered it (ideally include details of your environment). + +## Steps To Reproduce +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md new file mode 100644 index 00000000..52c84dd8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -0,0 +1,13 @@ +--- +name: Issue +about: The standard template to use for feature requests, design discussions and tasks +title: " " +labels: '' +assignees: '' + +--- + +A brief description of the issue, including specific stakeholders and the business case where appropriate + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 00000000..8200afe5 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,8 @@ +Fixes #ISSUE + +### Instructions to reviewer on how to test: +1. Do thing x +2. Confirm thing y happens + +### Checks for reviewer +- [ ] Would the PR title make sense to a user on a set of release notes diff --git a/.github/pages/index.html b/.github/pages/index.html index 80f0a009..c495f39f 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -8,4 +8,4 @@ - \ No newline at end of file + diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index 6d90f490..c06813af 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -1,3 +1,5 @@ +"""Make switcher.json to allow docs to switch between different versions.""" + import json import logging from argparse import ArgumentParser @@ -6,6 +8,7 @@ def report_output(stdout: bytes, label: str) -> list[str]: + """Print and return something received frm stdout.""" ret = stdout.decode().strip().split("\n") print(f"{label}: {ret}") return ret @@ -52,7 +55,8 @@ def get_versions(ref: str, add: str | None) -> list[str]: return versions -def write_json(path: Path, repository: str, versions: str): +def write_json(path: Path, repository: str, versions: list[str]): + """Write the JSON switcher to path.""" org, repo_name = repository.split("/") struct = [ {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} @@ -64,6 +68,7 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): + """Parse args and write switcher.""" parser = ArgumentParser( description="Make a versions.json file from gh-pages directories" ) diff --git a/.github/workflows/_container.yml b/.github/workflows/_container.yml index 4857ee9e..da5e4936 100644 --- a/.github/workflows/_container.yml +++ b/.github/workflows/_container.yml @@ -25,7 +25,9 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and export to Docker local cache - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 + 
env: + DOCKER_BUILD_RECORD_UPLOAD: false with: context: . # Need load and tags so we can test it below @@ -46,7 +48,9 @@ jobs: - name: Push cached image to container registry if: github.ref_type == 'tag' - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 + env: + DOCKER_BUILD_RECORD_UPLOAD: false # This does not build the image again, it will find the image in the # Docker cache and publish it with: diff --git a/.github/workflows/_docs.yml b/.github/workflows/_docs.yml index 40446e33..a1cafcae 100644 --- a/.github/workflows/_docs.yml +++ b/.github/workflows/_docs.yml @@ -47,8 +47,8 @@ jobs: if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages - keep_files: true \ No newline at end of file + keep_files: true diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml index f2ead1bc..0c5258db 100644 --- a/.github/workflows/_pypi.yml +++ b/.github/workflows/_pypi.yml @@ -1,8 +1,5 @@ on: workflow_call: - secrets: - PYPI_TOKEN: - required: true jobs: upload: @@ -18,5 +15,3 @@ jobs: - name: Publish to PyPI using trusted publishing uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml index b49fa7dc..10d8ed87 100644 --- a/.github/workflows/_release.yml +++ b/.github/workflows/_release.yml @@ -23,7 +23,7 @@ jobs: - name: Create GitHub Release # We pin to the SHA, not the tag, for security reasons. 
# https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 + uses: softprops/action-gh-release@c062e08bd532815e2082a85e87e3ef29c3e6d191 # v2.0.8 with: prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} files: "*" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce814ba1..e1c0a71d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: strategy: matrix: runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] include: # Include one that runs in the dev environment - runs-on: "ubuntu-latest" @@ -39,6 +39,7 @@ jobs: if: needs.check.outputs.branch-pr == '' uses: ./.github/workflows/_container.yml permissions: + contents: read packages: write docs: @@ -50,16 +51,14 @@ jobs: needs: check if: needs.check.outputs.branch-pr == '' uses: ./.github/workflows/_dist.yml - + pypi: if: github.ref_type == 'tag' needs: dist uses: ./.github/workflows/_pypi.yml permissions: id-token: write - secrets: - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - + release: if: github.ref_type == 'tag' needs: [dist, docs] diff --git a/.gitignore b/.gitignore index 2593ec75..0f33bf29 100644 --- a/.gitignore +++ b/.gitignore @@ -55,6 +55,7 @@ cov.xml # Sphinx documentation docs/_build/ +docs/_api # PyBuilder target/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5a4cbf7b..60fc23f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,6 +5,7 @@ repos: - id: check-added-large-files - id: check-yaml - id: check-merge-conflict + - id: end-of-file-fixer - repo: local hooks: diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 66ad6324..933c580c 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -2,4 +2,4 @@ "recommendations": [ "ms-vscode-remote.remote-containers", ] -} \ No newline at end of file +} diff --git a/.vscode/launch.json b/.vscode/launch.json index df954ee5..2c5f015e 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,7 @@ "configurations": [ { "name": "Debug Unit Test", - "type": "python", + "type": "debugpy", "request": "launch", "justMyCode": false, "program": "${file}", @@ -14,6 +14,10 @@ "debug-test" ], "console": "integratedTerminal", + "env": { + // Enable break on exception when debugging tests (see: tests/conftest.py) + "PYTEST_RAISE": "1", + }, }, { "name": "Python (Current File)", @@ -35,4 +39,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index c129d991..101c75fa 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -5,7 +5,8 @@ "editor.codeActionsOnSave": { "source.organizeImports": "explicit" }, + "files.insertFinalNewline": true, "[python]": { "editor.defaultFormatter": "charliermarsh.ruff", }, -} \ No newline at end of file +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 946e69d4..c999e864 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -13,4 +13,4 @@ "problemMatcher": [], } ] -} \ No newline at end of file +} diff --git a/Dockerfile b/Dockerfile index 5be05df7..e0e38655 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,7 +16,7 @@ ENV PATH=/venv/bin:$PATH FROM developer as build COPY . /context WORKDIR /context -RUN pip install . 
+RUN touch dev-requirements.txt && pip install -c dev-requirements.txt . # The runtime stage copies the built venv into a slim runtime container FROM python:${PYTHON_VERSION}-slim as runtime diff --git a/README.md b/README.md index d2ab2de9..451a67ed 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ can be produced and expanded Paths created to consume chunk by chunk. Source | :---: | :---: PyPI | `pip install scanspec` +Docker | `docker run ghcr.io/bluesky/scanspec:latest` Documentation | Releases | diff --git a/catalog-info.yaml b/catalog-info.yaml deleted file mode 100644 index 2c93efd0..00000000 --- a/catalog-info.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: backstage.io/v1alpha1 -kind: Component -metadata: - name: scanspec - title: scanspec - description: Specify step and flyscan paths in a serializable, efficient and Pythonic way -spec: - type: documentation - lifecycle: experimental - owner: group:default/sscc \ No newline at end of file diff --git a/docs/_api.rst b/docs/_api.rst new file mode 100644 index 00000000..b4b0a6c9 --- /dev/null +++ b/docs/_api.rst @@ -0,0 +1,16 @@ +:orphan: + +.. + This page is not included in the TOC tree, but must exist so that the + autosummary pages are generated for scanspec and all its + subpackages + +API +=== + +.. autosummary:: + :toctree: _api + :template: custom-module-template.rst + :recursive: + + scanspec diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst new file mode 100644 index 00000000..9aeca540 --- /dev/null +++ b/docs/_templates/custom-module-template.rst @@ -0,0 +1,37 @@ +{{ ('``' + fullname + '``') | underline }} + +{%- set filtered_members = [] %} +{%- for item in members %} + {%- if item in functions + classes + exceptions + attributes %} + {% set _ = filtered_members.append(item) %} + {%- endif %} +{%- endfor %} + +.. automodule:: {{ fullname }} + :members: + + {% block modules %} + {% if modules %} + .. rubric:: Submodules + + .. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + {% for item in modules %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block members %} + {% if filtered_members %} + .. rubric:: Members + + .. autosummary:: + :nosignatures: + {% for item in filtered_members %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index 33b043e4..acfda3be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,8 +1,9 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html +"""Configuration file for the Sphinx documentation builder. + +This file only contains a selection of the most common options. For a full +list see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" import sys from pathlib import Path @@ -32,6 +33,10 @@ extensions = [ # Use this for generating API docs "sphinx.ext.autodoc", + # and making summary tables at the top of API docs + "sphinx.ext.autosummary", + # With an extension for pydantic models + "sphinxcontrib.autodoc_pydantic", # This can parse google style docstrings "sphinx.ext.napoleon", # For linking to external sphinx documentation @@ -66,22 +71,8 @@ # domain name if present. Example entries would be ('py:func', 'int') or # ('envvar', 'LD_LIBRARY_PATH'). 
nitpick_ignore = [ - ("py:func", "int"), - ("py:class", "Axis"), - ("py:class", "~Axis"), - ("py:class", "scanspec.core.Axis"), - ("py:class", "AxesPoints"), - ("py:class", "np.ndarray"), - ("py:class", "NoneType"), - ("py:class", "'str'"), - ("py:class", "'float'"), - ("py:class", "'int'"), - ("py:class", "'bool'"), - ("py:class", "'object'"), - ("py:class", "'id'"), - ("py:class", "typing_extensions.Literal"), - ("py:class", "pydantic.config.BaseConfig"), - ("py:class", "starlette.responses.JSONResponse"), + ("py:class", "scanspec.core.C"), + ("py:class", "pydantic.config.ConfigDict"), ] # Both the class’ and the __init__ method’s docstring are concatenated and @@ -91,19 +82,24 @@ # Order the members by the order they appear in the source code autodoc_member_order = "bysource" -# Don't inherit docstrings from baseclasses -autodoc_inherit_docstrings = False +# For autodoc we want to document some additional optional modules +scanspec.__all__ += ["plot"] -# Insert inheritance links -autodoc_default_options = {"show-inheritance": True} +# Don't show config summary as it's not relevant +autodoc_pydantic_model_show_config_summary = False -# A dictionary for users defined type aliases that maps a type name to the -# full-qualified object name. -autodoc_type_aliases = {"AxesPoints": "scanspec.core.AxesPoints"} +# Show the fields in source order +autodoc_pydantic_model_summary_list_order = "bysource" # Include source in plot directive by default plot_include_source = True +# Document only what is in __all__ +autosummary_ignore_module_all = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + # Output graphviz directive produced images in a scalable format graphviz_output_format = "svg" @@ -162,10 +158,10 @@ # Theme options for pydata_sphinx_theme # We don't check switcher because there are 3 possible states for a repo: # 1. New project, docs are not published so there is no switcher -# 2. Existing project with latest skeleton, switcher exists and works -# 3. Existing project with old skeleton that makes broken switcher, +# 2. Existing project with latest copier template, switcher exists and works +# 3. Existing project with old copier template that makes broken switcher, # switcher exists but is broken -# Point 3 makes checking switcher difficult, because the updated skeleton +# Point 3 makes checking switcher difficult, because the updated copier template # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. html_theme_options = { @@ -193,7 +189,7 @@ # A dictionary of values to pass into the template engine’s context for all pages html_context = { "github_user": github_user, - "github_repo": project, + "github_repo": github_repo, "github_version": version, "doc_path": "docs", } diff --git a/docs/explanations/why-stack-frames.rst b/docs/explanations/why-stack-frames.rst index 48ef7930..815c93b9 100644 --- a/docs/explanations/why-stack-frames.rst +++ b/docs/explanations/why-stack-frames.rst @@ -1,12 +1,12 @@ Why create a stack of Frames? ============================= -If a `Spec` tells you the parameters of a scan, `Frames` gives you the `Points` -that will let you actually execute the scan. A stack of Frames is interpreted as -nested from slowest moving to fastest moving, so each faster Frames object will -iterate once per position of the slower Frames object. 
When fly-scanning the -axis will traverse lower-midpoint-upper on the fastest Frames object for each -point in the scan. +If a `Spec` tells you the parameters of a scan, `Frames` gives you the `Points +` that will let you actually execute the scan. A stack of Frames is +interpreted as nested from slowest moving to fastest moving, so each faster +Frames object will iterate once per position of the slower Frames object. When +fly-scanning the axis will traverse lower-midpoint-upper on the fastest Frames +object for each point in the scan. An Example ---------- @@ -63,4 +63,3 @@ which point it destroys the performance of the VDS. For this reason, it is advisable to `Squash` any snaking Specs with the first non-snaking axis above it so that the HDF Dimension will not be snaking. See `./why-squash-can-change-path` for some details on this. - diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md index f9c4ca1d..6e419797 100644 --- a/docs/how-to/contribute.md +++ b/docs/how-to/contribute.md @@ -1,2 +1,2 @@ ```{include} ../../.github/CONTRIBUTING.md -``` \ No newline at end of file +``` diff --git a/docs/how-to/run-container.md b/docs/how-to/run-container.md index ab104335..8737adee 100644 --- a/docs/how-to/run-container.md +++ b/docs/how-to/run-container.md @@ -8,7 +8,7 @@ installed are available on [Github Container Registry](https://ghcr.io/bluesky/s To pull the container from github container registry and run: ``` -$ docker run ghcr.io/bluesky/scanspec:main --version +$ docker run ghcr.io/bluesky/scanspec:latest --version ``` -To get a released version, use a numbered release instead of `main`. +To get a released version, use a numbered release instead of `latest`. diff --git a/docs/reference.md b/docs/reference.md index ff85f567..e2ef404a 100644 --- a/docs/reference.md +++ b/docs/reference.md @@ -6,6 +6,7 @@ Technical reference material including APIs and release notes. :maxdepth: 1 :glob: +API <_api/scanspec> reference/* genindex Release Notes diff --git a/docs/reference/api.md b/docs/reference/api.md deleted file mode 100644 index c4bab762..00000000 --- a/docs/reference/api.md +++ /dev/null @@ -1,73 +0,0 @@ -# API - -```{eval-rst} -.. automodule:: scanspec - - ``scanspec`` - ------------ - -``` - -The top level scanspec module contains a number of packages that can be used -from code: - -- [](#scanspec.core): Core classes like [](#Frames) and [](#Path) -- [](#scanspec.specs): [](#Spec) and its subclasses -- [](#scanspec.regions): [](#Region) and its subclasses -- [](#scanspec.plot): [](#plot_spec) to visualize a scan -- [](#scanspec.service): Defines queries and field structure in REST such as [](#MidpointsResponse) - -```{eval-rst} -.. data:: scanspec.__version__ - :type: str - - Version number as calculated by https://github.com/bluesky/versiongit -``` - -```{eval-rst} -.. automodule:: scanspec.core - :members: - - ``scanspec.core`` - ----------------- -``` - -```{eval-rst} -.. automodule:: scanspec.specs - :members: - - ``scanspec.specs`` - ------------------ - - .. inheritance-diagram:: scanspec.specs - :top-classes: scanspec.specs.Spec - :parts: 1 -``` - -```{eval-rst} -.. automodule:: scanspec.regions - :members: - - ``scanspec.regions`` - -------------------- - - .. inheritance-diagram:: scanspec.regions - :top-classes: scanspec.regions.Region - :parts: 1 -``` - -```{eval-rst} -.. automodule:: scanspec.plot - :members: - - ``scanspec.plot`` - ----------------- -``` - -```{eval-rst} -.. 
automodule:: scanspec.service - :members: - - ``scanspec.service`` - -------------------- -``` diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md index 592c9da3..b9c85bd1 100644 --- a/docs/tutorials/installation.md +++ b/docs/tutorials/installation.md @@ -2,7 +2,7 @@ ## Check your version of python -You will need python 3.8 or later. You can check your version of python by +You will need python 3.10 or later. You can check your version of python by typing into a terminal: ``` diff --git a/pyproject.toml b/pyproject.toml index c5b48619..b0bf50e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=8"] build-backend = "setuptools.build_meta" [project] @@ -9,6 +9,7 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] description = "Specify step and flyscan paths in a serializable, efficient and Pythonic way" dependencies = ["numpy", "click>=8.1", "pydantic>=2.0"] @@ -28,7 +29,9 @@ dev = [ # https://github.com/pypa/pip/issues/10393 "scanspec[plotting]", "scanspec[service]", + "autodoc_pydantic @ git+https://github.com/coretl/autodoc_pydantic.git@0b95311d8d10fce67a9ecd5830330364e31fa49c", "copier", + "httpx", "myst-parser", "pipdeptree", "pre-commit", @@ -43,8 +46,6 @@ dev = [ "sphinxcontrib-openapi", "tox-direct", "types-mock", - "httpx", - "myst-parser", ] [project.scripts] @@ -59,11 +60,11 @@ name = "Tom Cobb" [tool.setuptools_scm] -write_to = "src/scanspec/_version.py" +version_file = "src/scanspec/_version.py" [tool.pyright] -# strict = ["src", "tests"] -reportMissingImports = false # Ignore missing stubs in imported modules +typeCheckingMode = "standard" +reportMissingImports = false # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error @@ -100,7 +101,7 @@ allowlist_externals = sphinx-build sphinx-autobuild commands = - pre-commit: pre-commit run --all-files {posargs} + pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} type-checking: pyright src tests {posargs} tests: pytest --cov=scanspec --cov-report term --cov-report xml:cov.xml {posargs} docs: sphinx-{posargs:build -E --keep-going} -T docs build/html @@ -114,6 +115,7 @@ line-length = 88 extend-select = [ "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b "C4", # flake8-comprehensions - https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "D", # pydocstyle - https://docs.astral.sh/ruff/rules/#pydocstyle-d "E", # pycodestyle errors - https://docs.astral.sh/ruff/rules/#error-e "F", # pyflakes rules - https://docs.astral.sh/ruff/rules/#pyflakes-f "W", # pycodestyle warnings - https://docs.astral.sh/ruff/rules/#warning-w @@ -123,10 +125,19 @@ extend-select = [ ] ignore = [ "B008", # We use function calls in service arguments + "D105", # Don't document magic methods as they don't appear in sphinx autodoc pages + "D107", # We document the class, not the __init__ method ] +[tool.ruff.lint.pydocstyle] +convention = "google" + [tool.ruff.lint.per-file-ignores] -# By default, private member access is allowed in tests -# See https://github.com/DiamondLightSource/python-copier-template/issues/154 -# Remove this line to forbid private member access in tests -"tests/**/*" = 
["SLF001"] + +"tests/**/*" = [ + # By default, private member access is allowed in tests + # See https://github.com/DiamondLightSource/python-copier-template/issues/154 + # Remove this line to forbid private member access in tests + "SLF001", + "D", # Don't check docstrings in tests +] diff --git a/schema.json b/schema.json index 30a5ae8e..c58c4fef 100644 --- a/schema.json +++ b/schema.json @@ -14,12 +14,7 @@ "content": { "application/json": { "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], - "title": "Spec", + "$ref": "#/components/schemas/Spec-Input", "examples": [ { "outer": { @@ -77,12 +72,7 @@ "content": { "application/json": { "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/PointsRequest" - } - ], - "title": "Request", + "$ref": "#/components/schemas/PointsRequest", "examples": [ { "spec": { @@ -144,12 +134,7 @@ "content": { "application/json": { "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/PointsRequest" - } - ], - "title": "Request", + "$ref": "#/components/schemas/PointsRequest", "examples": [ { "spec": { @@ -205,18 +190,13 @@ "/gap": { "post": { "summary": "Gap", - "description": "Generate gaps from a scanspec.\n\nA scanspec may indicate if there is a gap between two frames.\nThe array returned corresponds to whether or not there is a gap\nafter each frame.\n\nArgs:\n request: Scanspec and formatting info.\n\nReturns:\n GapResponse: Bounds of the scan", + "description": "Generate gaps from a scanspec.\n\nA scanspec may indicate if there is a gap between two frames.\nThe array returned corresponds to whether or not there is a gap\nafter each frame.\n\nArgs:\n spec: Scanspec and formatting info.\n\nReturns:\n GapResponse: Bounds of the scan", "operationId": "gap_gap_post", "requestBody": { "content": { "application/json": { "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], - "title": "Spec", + "$ref": "#/components/schemas/Spec-Input", "examples": [ { "outer": { @@ -274,12 +254,7 @@ "content": { "application/json": { "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], - "title": "Spec", + "$ref": "#/components/schemas/Spec-Input", "examples": [ { "outer": { @@ -344,11 +319,7 @@ "description": "Total of number of frames in this response, may be less than total_frames due to downsampling etc." 
}, "format": { - "allOf": [ - { - "$ref": "#/components/schemas/PointsFormat" - } - ], + "$ref": "#/components/schemas/PointsFormat", "description": "Format of returned point data" }, "lower": { @@ -450,19 +421,11 @@ "CombinationOf-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The right-hand Region to combine" }, "type": { @@ -487,19 +450,11 @@ "CombinationOf-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The right-hand Region to combine" }, "type": { @@ -524,19 +479,11 @@ "Concat-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The left-hand Spec to Concat, midpoints will appear earlier" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The right-hand Spec to Concat, midpoints will appear later" }, "gap": { @@ -573,19 +520,11 @@ "Concat-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The left-hand Spec to Concat, midpoints will appear earlier" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The right-hand Spec to Concat, midpoints will appear later" }, "gap": { @@ -622,19 +561,11 @@ "DifferenceOf-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The right-hand Region to combine" }, "type": { @@ -659,19 +590,11 @@ "DifferenceOf-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The right-hand Region to combine" }, "type": { @@ -788,19 +711,11 @@ "IntersectionOf-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The right-hand Region to combine" }, "type": { @@ -825,19 +740,11 @@ "IntersectionOf-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": 
"The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The right-hand Region to combine" }, "type": { @@ -905,19 +812,11 @@ "Mask-Input": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The Spec containing the source midpoints" }, "region": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The Region that midpoints will be inside" }, "check_path_changes": { @@ -948,19 +847,11 @@ "Mask-Output": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The Spec containing the source midpoints" }, "region": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The Region that midpoints will be inside" }, "check_path_changes": { @@ -1001,11 +892,7 @@ "description": "Total of number of frames in this response, may be less than total_frames due to downsampling etc." }, "format": { - "allOf": [ - { - "$ref": "#/components/schemas/PointsFormat" - } - ], + "$ref": "#/components/schemas/PointsFormat", "description": "Format of returned point data" }, "midpoints": { @@ -1050,11 +937,7 @@ "PointsRequest": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The spec from which to generate points" }, "max_frames": { @@ -1071,11 +954,7 @@ "default": 100000 }, "format": { - "allOf": [ - { - "$ref": "#/components/schemas/PointsFormat" - } - ], + "$ref": "#/components/schemas/PointsFormat", "description": "The format in which to output the points data", "default": "FLOAT_LIST" } @@ -1139,19 +1018,11 @@ "Product-Input": { "properties": { "outer": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "Will be executed once" }, "inner": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "Will be executed len(outer) times" }, "type": { @@ -1176,19 +1047,11 @@ "Product-Output": { "properties": { "outer": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "Will be executed once" }, "inner": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "Will be executed len(outer) times" }, "type": { @@ -1462,11 +1325,7 @@ "Snake-Input": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The Spec to run in reverse every other iteration" }, "type": { @@ -1490,11 +1349,7 @@ "Snake-Output": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The Spec to run in reverse every other iteration" }, "type": { @@ -1682,11 +1537,7 @@ "Squash-Input": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", 
"description": "The Spec to squash the dimensions of" }, "check_path_changes": { @@ -1716,11 +1567,7 @@ "Squash-Output": { "properties": { "spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The Spec to squash the dimensions of" }, "check_path_changes": { @@ -1787,19 +1634,11 @@ "SymmetricDifferenceOf-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The right-hand Region to combine" }, "type": { @@ -1824,19 +1663,11 @@ "SymmetricDifferenceOf-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The right-hand Region to combine" }, "type": { @@ -1861,19 +1692,11 @@ "UnionOf-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Input" - } - ], + "$ref": "#/components/schemas/Region-Input", "description": "The right-hand Region to combine" }, "type": { @@ -1898,19 +1721,11 @@ "UnionOf-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The left-hand Region to combine" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Region-Output" - } - ], + "$ref": "#/components/schemas/Region-Output", "description": "The right-hand Region to combine" }, "type": { @@ -1935,19 +1750,11 @@ "ValidResponse": { "properties": { "input_spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The input scanspec" }, "valid_spec": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The validated version of the spec" } }, @@ -1995,19 +1802,11 @@ "Zip-Input": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The left-hand Spec to Zip, will appear earlier in axes" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Input" - } - ], + "$ref": "#/components/schemas/Spec-Input", "description": "The right-hand Spec to Zip, will appear later in axes" }, "type": { @@ -2032,19 +1831,11 @@ "Zip-Output": { "properties": { "left": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The left-hand Spec to Zip, will appear earlier in axes" }, "right": { - "allOf": [ - { - "$ref": "#/components/schemas/Spec-Output" - } - ], + "$ref": "#/components/schemas/Spec-Output", "description": "The right-hand Spec to Zip, will appear later in axes" }, "type": { @@ -2068,4 +1859,4 @@ } } } -} \ No newline at end of file +} diff --git 
a/src/scanspec/__init__.py b/src/scanspec/__init__.py index b99257eb..5546758d 100644 --- a/src/scanspec/__init__.py +++ b/src/scanspec/__init__.py @@ -1,4 +1,12 @@ -from . import regions, specs +"""Top level API. + +.. data:: __version__ + :type: str + + Version number as calculated by https://github.com/pypa/setuptools_scm +""" + +from . import core, regions, specs from ._version import __version__ -__all__ = ["__version__", "specs", "regions"] +__all__ = ["__version__", "core", "specs", "regions"] diff --git a/src/scanspec/__main__.py b/src/scanspec/__main__.py index cd9cfea2..a8da08d1 100644 --- a/src/scanspec/__main__.py +++ b/src/scanspec/__main__.py @@ -1,5 +1,6 @@ +"""Interface for ``python -m scanspec``.""" + from scanspec import cli -# test with: python -m scanspec if __name__ == "__main__": cli.cli() diff --git a/src/scanspec/cli.py b/src/scanspec/cli.py index 33f56520..82a57cce 100644 --- a/src/scanspec/cli.py +++ b/src/scanspec/cli.py @@ -1,3 +1,5 @@ +"""Interface for ``python -m scanspec``.""" + import logging import string diff --git a/src/scanspec/core.py b/src/scanspec/core.py index a0ea6169..a15b644c 100644 --- a/src/scanspec/core.py +++ b/src/scanspec/core.py @@ -1,3 +1,5 @@ +"""Core classes like `Frames` and `Path`.""" + from __future__ import annotations from collections.abc import Callable, Iterable, Iterator, Sequence @@ -25,11 +27,10 @@ "StrictConfig", ] - +#: Used to ensure pydantic dataclasses error if given extra arguments StrictConfig: ConfigDict = {"extra": "forbid"} C = TypeVar("C") -T = TypeVar("T", type, Callable) def discriminated_union_of_subclasses( @@ -44,8 +45,7 @@ def discriminated_union_of_subclasses( Subclasses that extend this class must be Pydantic dataclasses, and types that need their schema to be updated when a new type that extends super_cls is - created must be either Pydantic dataclasses or BaseModels, and must be decorated - with @uses_tagged_union. + created must be either Pydantic dataclasses or BaseModels. 
Example:: @@ -106,6 +106,7 @@ def calculate(self) -> int: Returns: Type: decorated superclass with handling for subclasses to be added to its discriminated union for deserialization + """ tagged_union = _TaggedUnion(super_cls, discriminator) _tagged_unions[super_cls] = tagged_union @@ -217,6 +218,7 @@ class Frames(Generic[Axis]): See Also: `technical-terms` + """ def __init__( @@ -282,6 +284,7 @@ def extract(self, indices: np.ndarray, calculate_gap=True) -> Frames[Axis]: >>> frames = Frames({"x": np.array([1, 2, 3])}) >>> frames.extract(np.array([1, 0, 1])).midpoints {'x': array([2, 1, 2])} + """ dim_indices = indices % len(self) @@ -312,6 +315,7 @@ def concat(self, other: Frames[Axis], gap: bool = False) -> Frames[Axis]: >>> frames2 = Frames({"y": np.array([3, 2, 1]), "x": np.array([4, 5, 6])}) >>> frames.concat(frames2).midpoints {'x': array([1, 2, 3, 4, 5, 6]), 'y': array([6, 5, 4, 3, 2, 1])} + """ assert set(self.axes()) == set( other.axes() @@ -411,6 +415,7 @@ def extract(self, indices: np.ndarray, calculate_gap=True) -> Frames[Axis]: >>> frames = SnakedFrames({"x": np.array([1, 2, 3])}) >>> frames.extract(np.array([0, 1, 2, 3, 4, 5])).midpoints {'x': array([1, 2, 3, 3, 2, 1])} + """ # Calculate the indices # E.g for len = 4 @@ -470,6 +475,7 @@ def squash_frames(stack: list[Frames[Axis]], check_path_changes=True) -> Frames[ >>> fy = Frames({"y": np.array([3, 4])}) >>> squash_frames([fy, fx]).midpoints {'y': array([3, 3, 4, 4]), 'x': array([1, 2, 2, 1])} + """ path = Path(stack) # Consuming a Path through these Frames performs the squash @@ -517,6 +523,7 @@ class Path(Generic[Axis]): See Also: `iterate-a-spec` + """ def __init__( @@ -607,6 +614,7 @@ class Midpoints(Generic[Axis]): {'y': np.int64(3), 'x': np.int64(2)} {'y': np.int64(4), 'x': np.int64(2)} {'y': np.int64(4), 'x': np.int64(1)} + """ def __init__(self, stack: list[Frames[Axis]]): diff --git a/src/scanspec/plot.py b/src/scanspec/plot.py index e4fc1e8c..f81c3447 100644 --- a/src/scanspec/plot.py +++ b/src/scanspec/plot.py @@ -1,3 +1,5 @@ +"""`plot_spec` to visualize a scan.""" + from collections.abc import Iterator from itertools import cycle from typing import Any diff --git a/src/scanspec/regions.py b/src/scanspec/regions.py index e60a3494..0f8e6872 100644 --- a/src/scanspec/regions.py +++ b/src/scanspec/regions.py @@ -1,3 +1,10 @@ +"""`Region` and its subclasses. + +.. 
inheritance-diagram:: scanspec.regions + :top-classes: scanspec.regions.Region + :parts: 1 +""" + from __future__ import annotations from collections.abc import Iterator, Mapping @@ -45,11 +52,11 @@ class Region(Generic[Axis]): - ``^``: `SymmetricDifferenceOf` two Regions, midpoints present in one not both """ - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 """Produce the non-overlapping sets of axes this region spans.""" raise NotImplementedError(self) - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 """Produce a mask of which points are in the region.""" raise NotImplementedError(self) @@ -111,7 +118,7 @@ class CombinationOf(Region[Axis]): left: Region[Axis] = Field(description="The left-hand Region to combine") right: Region[Axis] = Field(description="The right-hand Region to combine") - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 axis_sets = list( _merge_axis_sets(self.left.axis_sets() + self.right.axis_sets()) ) @@ -130,7 +137,7 @@ class UnionOf(CombinationOf[Axis]): array([False, True, True, True, False]) """ - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 mask = get_mask(self.left, points) | get_mask(self.right, points) return mask @@ -146,7 +153,7 @@ class IntersectionOf(CombinationOf[Axis]): array([False, False, True, False, False]) """ - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 mask = get_mask(self.left, points) & get_mask(self.right, points) return mask @@ -162,7 +169,7 @@ class DifferenceOf(CombinationOf[Axis]): array([False, True, False, False, False]) """ - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 left_mask = get_mask(self.left, points) # Return the xor restricted to the left region mask = left_mask ^ get_mask(self.right, points) & left_mask @@ -180,7 +187,7 @@ class SymmetricDifferenceOf(CombinationOf[Axis]): array([False, True, False, True, False]) """ - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 mask = get_mask(self.left, points) ^ get_mask(self.right, points) return mask @@ -198,10 +205,10 @@ class Range(Region[Axis]): min: float = Field(description="The minimum inclusive value in the region") max: float = Field(description="The minimum inclusive value in the region") - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 return [{self.axis}] - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 v = points[self.axis] mask = np.bitwise_and(v >= self.min, v <= self.max) return mask @@ -230,10 +237,10 @@ class Rectangle(Region[Axis]): description="Clockwise rotation angle of the rectangle", default=0.0 ) - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 return [{self.x_axis, self.y_axis}] - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 x = points[self.x_axis] - self.x_min y = points[self.y_axis] - self.y_min if self.angle != 0: @@ -270,10 +277,10 @@ class Polygon(Region[Axis]): description="The Nx1 y coordinates of the polygons 
vertices", min_length=3 ) - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 return [{self.x_axis, self.y_axis}] - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 x = points[self.x_axis] y = points[self.y_axis] v1x, v1y = self.x_verts[-1], self.y_verts[-1] @@ -310,10 +317,10 @@ class Circle(Region[Axis]): y_middle: float = Field(description="The central y point of the circle") radius: float = Field(description="Radius of the circle", gt=0) - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 return [{self.x_axis, self.y_axis}] - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 x = points[self.x_axis] - self.x_middle y = points[self.y_axis] - self.y_middle mask = x * x + y * y <= (self.radius * self.radius) @@ -345,10 +352,10 @@ class Ellipse(Region[Axis]): ) angle: float = Field(description="The angle of the ellipse (degrees)", default=0.0) - def axis_sets(self) -> list[set[Axis]]: + def axis_sets(self) -> list[set[Axis]]: # noqa: D102 return [{self.x_axis, self.y_axis}] - def mask(self, points: AxesPoints[Axis]) -> np.ndarray: + def mask(self, points: AxesPoints[Axis]) -> np.ndarray: # noqa: D102 x = points[self.x_axis] - self.x_middle y = points[self.y_axis] - self.y_middle if self.angle != 0: @@ -362,7 +369,7 @@ def mask(self, points: AxesPoints[Axis]) -> np.ndarray: return mask -def find_regions(obj) -> Iterator[Region[Axis]]: +def find_regions(obj) -> Iterator[Region]: """Recursively yield Regions from obj and its children.""" if ( hasattr(obj, "__pydantic_model__") @@ -372,5 +379,5 @@ def find_regions(obj) -> Iterator[Region[Axis]]: if isinstance(obj, Region): yield obj for name in obj.__dict__.keys(): - regions: Iterator[Region[Axis]] = find_regions(getattr(obj, name)) + regions: Iterator[Region] = find_regions(getattr(obj, name)) yield from regions diff --git a/src/scanspec/service.py b/src/scanspec/service.py index 52121833..ce628de5 100644 --- a/src/scanspec/service.py +++ b/src/scanspec/service.py @@ -1,3 +1,5 @@ +"""FastAPI service to query information about Specs.""" + import base64 import json from collections.abc import Mapping @@ -133,6 +135,7 @@ def valid( Returns: ValidResponse: A canonical version of the spec if it is valid. An error otherwise. + """ valid_spec = Spec.deserialize(spec.serialize()) return ValidResponse(spec, valid_spec) @@ -156,6 +159,7 @@ def midpoints( Returns: MidpointsResponse: Midpoints of the scan + """ chunk, total_frames = _to_chunk(request) return MidpointsResponse( @@ -182,6 +186,7 @@ def bounds( Returns: BoundsResponse: Bounds of the scan + """ chunk, total_frames = _to_chunk(request) return BoundsResponse( @@ -207,10 +212,11 @@ def gap( after each frame. Args: - request: Scanspec and formatting info. + spec: Scanspec and formatting info. Returns: GapResponse: Bounds of the scan + """ dims = spec.calculate() # Grab dimensions from spec path = Path(dims) # Convert to a path @@ -231,6 +237,7 @@ def smallest_step( Returns: SmallestStepResponse: A description of the smallest steps in the spec + """ dims = spec.calculate() # Grab dimensions from spec path = Path(dims) # Convert to a path @@ -281,6 +288,7 @@ def _format_axes_points( Returns: Mapping[str, Points]: A mapping of axis to formatted points. 
+ """ if format is PointsFormat.FLOAT_LIST: return {axis: list(points) for axis, points in axes_points.items()} @@ -301,6 +309,7 @@ def _reduce_frames(stack: list[Frames[str]], max_frames: int) -> Path: Args: stack: A stack of Frames created by a spec max_frames: The maximum number of frames the user wishes to be returned + """ # Calculate the total number of frames num_frames = 1 @@ -320,6 +329,7 @@ def _sub_sample(frames: Frames[str], ratio: float) -> Frames: Args: frames: the Frames object to be reduced ratio: the reduction ratio of the dimension + """ num_indexes = int(len(frames) / ratio) indexes = np.linspace(0, len(frames) - 1, num_indexes, dtype=np.int32) @@ -344,6 +354,7 @@ def _abs_diffs(array: np.ndarray) -> np.ndarray: Returns: A newly constucted array of absolute differences + """ # [array[1] - array[0], array[2] - array[1], ...] adjacent_diffs = array[1:] - array[:-1] @@ -371,6 +382,7 @@ def scanspec_schema_text() -> str: Returns: str: The OpenAPI schema + """ return json.dumps( get_openapi( diff --git a/src/scanspec/specs.py b/src/scanspec/specs.py index a9e4d648..adfec1a7 100644 --- a/src/scanspec/specs.py +++ b/src/scanspec/specs.py @@ -1,3 +1,10 @@ +"""`Spec` and its subclasses. + +.. inheritance-diagram:: scanspec.specs + :top-classes: scanspec.specs.Spec + :parts: 1 +""" + from __future__ import annotations from collections.abc import Callable, Mapping @@ -58,14 +65,14 @@ class Spec(Generic[Axis]): - ``~``: `Snake` the Spec, reversing every other iteration of it """ - def axes(self) -> list[Axis]: + def axes(self) -> list[Axis]: # noqa: D102 """Return the list of axes that are present in the scan. Ordered from slowest moving to fastest moving. """ raise NotImplementedError(self) - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 """Produce a stack of nested `Frames` that form the scan. Ordered from slowest moving to fastest moving. 
@@ -130,10 +137,10 @@ class Product(Spec[Axis]): outer: Spec[Axis] = Field(description="Will be executed once") inner: Spec[Axis] = Field(description="Will be executed len(outer) times") - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return self.outer.axes() + self.inner.axes() - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 frames_outer = self.outer.calculate(bounds=False, nested=nested) frames_inner = self.inner.calculate(bounds, nested=True) return frames_outer + frames_inner @@ -169,10 +176,10 @@ class Repeat(Spec[Axis]): default=True, ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return [] - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 return [Frames({}, gap=np.full(self.num, self.gap))] @@ -206,10 +213,10 @@ class Zip(Spec[Axis]): description="The right-hand Spec to Zip, will appear later in axes" ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return self.left.axes() + self.right.axes() - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 frames_left = self.left.calculate(bounds, nested) frames_right = self.right.calculate(bounds, nested) assert len(frames_left) >= len( @@ -274,10 +281,10 @@ class Mask(Spec[Axis]): default=True, ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return self.spec.axes() - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 frames = self.spec.calculate(bounds, nested) for axis_set in self.region.axis_sets(): # Find the start and end index of any dimensions containing these axes @@ -332,10 +339,10 @@ class Snake(Spec[Axis]): description="The Spec to run in reverse every other iteration" ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return self.spec.axes() - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 return [ SnakedFrames.from_frames(segment) for segment in self.spec.calculate(bounds, nested) @@ -371,14 +378,14 @@ class Concat(Spec[Axis]): default=True, ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 left_axes, right_axes = self.left.axes(), self.right.axes() # Assuming the axes are the same, the order does not matter, we inherit the # order from the left-hand side. See also scanspec.core.concat. 
assert set(left_axes) == set(right_axes), f"axes {left_axes} != {right_axes}" return left_axes - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 dim_left = squash_frames( self.left.calculate(bounds, nested), nested and self.check_path_changes ) @@ -401,6 +408,7 @@ class Squash(Spec[Axis]): from scanspec.specs import Line, Squash spec = Squash(Line("y", 1, 2, 3) * Line("x", 0, 1, 4)) + """ spec: Spec[Axis] = Field(description="The Spec to squash the dimensions of") @@ -409,10 +417,10 @@ class Squash(Spec[Axis]): default=True, ) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return self.spec.axes() - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 dims = self.spec.calculate(bounds, nested) dim = squash_frames(dims, nested and self.check_path_changes) return [dim] @@ -461,7 +469,7 @@ class Line(Spec[Axis]): stop: float = Field(description="Midpoint of the last point of the line") num: int = Field(ge=1, description="Number of frames to produce") - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return [self.axis] def _line_from_indexes(self, indexes: np.ndarray) -> dict[Axis, np.ndarray]: @@ -476,7 +484,7 @@ def _line_from_indexes(self, indexes: np.ndarray) -> dict[Axis, np.ndarray]: first = self.start - step / 2 return {self.axis: indexes * step + first} - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 return _dimensions_from_indexes( self._line_from_indexes, self.axes(), self.num, bounds ) @@ -547,13 +555,13 @@ def duration( """ return cls(DURATION, duration, num) - def axes(self) -> list: + def axes(self) -> list[Axis]: # noqa: D102 return [self.axis] def _repeats_from_indexes(self, indexes: np.ndarray) -> dict[Axis, np.ndarray]: return {self.axis: np.full(len(indexes), self.value)} - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 return _dimensions_from_indexes( self._repeats_from_indexes, self.axes(), self.num, bounds ) @@ -589,7 +597,7 @@ class Spiral(Spec[Axis]): description="How much to rotate the angle of the spiral", default=0.0 ) - def axes(self) -> list[Axis]: + def axes(self) -> list[Axis]: # noqa: D102 # TODO: reversed from __init__ args, a good idea? 
return [self.y_axis, self.x_axis] @@ -610,7 +618,7 @@ def _spiral_from_indexes(self, indexes: np.ndarray) -> dict[Axis, np.ndarray]: self.x_axis: self.x_start + x_scale * phi * np.sin(phi + self.rotate), } - def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: + def calculate(self, bounds=True, nested=False) -> list[Frames[Axis]]: # noqa: D102 return _dimensions_from_indexes( self._spiral_from_indexes, self.axes(), self.num, bounds ) @@ -662,6 +670,7 @@ def fly(spec: Spec[Axis], duration: float) -> Spec[Axis]: from scanspec.specs import Line, fly spec = fly(Line("x", 1, 2, 3), 0.1) + """ return spec.zip(Static.duration(duration)) @@ -680,13 +689,13 @@ def step(spec: Spec[Axis], duration: float, num: int = 1) -> Spec[Axis]: from scanspec.specs import Line, step spec = step(Line("x", 1, 2, 3), 0.1) + """ return spec * Static.duration(duration, num) def get_constant_duration(frames: list[Frames]) -> float | None: - """ - Returns the duration of a number of ScanSpec frames, if known and consistent. + """Returns the duration of a number of ScanSpec frames, if known and consistent. Args: frames (List[Frames]): A number of Frame objects diff --git a/src/scanspec/sphinxext.py b/src/scanspec/sphinxext.py index 6a1e2630..69500422 100644 --- a/src/scanspec/sphinxext.py +++ b/src/scanspec/sphinxext.py @@ -1,3 +1,5 @@ +"""An example_spec directive.""" + from contextlib import contextmanager from docutils.statemachine import StringList @@ -26,6 +28,7 @@ class ExampleSpecDirective(plot_directive.PlotDirective): """Runs `plot_spec` on the ``spec`` definied in the content.""" def run(self): + """Run the directive.""" self.content = StringList( ["# Example Spec", "", "from scanspec.plot import plot_spec"] + [str(x) for x in self.content] diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..ebe9c10f --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,21 @@ +import os +from typing import Any + +import pytest + +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call: pytest.CallInfo[Any]): + if call.excinfo is not None: + raise call.excinfo.value + else: + raise RuntimeError( + f"{call} has no exception data, an unknown error has occurred" + ) + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo: pytest.ExceptionInfo[Any]): + raise excinfo.value diff --git a/tests/test_errors.py b/tests/test_errors.py index a7ff923c..80144234 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -14,7 +14,7 @@ def test_not_implemented() -> None: with pytest.raises(NotImplementedError): Spec().calculate() with pytest.raises(TypeError): - Spec() * Region() + Spec() * Region() # type: ignore def test_non_snake_not_allowed_inside_snaking_dim() -> None:
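As a companion to the `fly` and `get_constant_duration` docstrings touched in the specs.py hunk above, a short sketch, not part of the diff, with approximate output noted in comments and assuming scanspec is importable:

```python
# Illustrative only: fly() zips a constant duration onto every frame of a Spec.
from scanspec.specs import Line, fly, get_constant_duration

spec = fly(Line("x", 1, 2, 3), 0.1)   # 3 frames on x, each 0.1 s long
stack = spec.calculate()
print(spec.axes())                    # ['x', 'DURATION']
print(get_constant_duration(stack))   # 0.1, since every frame shares the same duration
```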