diff --git a/.github/workflows/check_external_links.yml b/.github/workflows/check_external_links.yml new file mode 100644 index 0000000..9dd1a84 --- /dev/null +++ b/.github/workflows/check_external_links.yml @@ -0,0 +1,32 @@ +name: Check Sphinx external links +on: + push: + schedule: + - cron: '0 5 * * 0' # once every Sunday at midnight ET + workflow_dispatch: + +jobs: + check-external-links: + name: Check for broken Sphinx external links + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install Sphinx dependencies and package + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + python -m pip install . + + - name: Check Sphinx external links + run: | + cd docs # run_doc_autogen assumes spec is found in ../spec/ + sphinx-build -b linkcheck ./source ./test_build diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 0000000..314b085 --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,14 @@ +name: Codespell +on: + push: + workflow_dispatch: + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 0000000..9b4f05d --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,14 @@ +name: Ruff +on: + push: + workflow_dispatch: + +jobs: + ruff: + name: Check for style errors and common problems + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Ruff + uses: chartboost/ruff-action@v1 diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml new file mode 100644 
index 0000000..913e6f9 --- /dev/null +++ b/.github/workflows/run_all_tests.yml @@ -0,0 +1,178 @@ +name: Run all tests +on: + push: + schedule: + - cron: '0 5 * * 0' # once every Sunday at midnight ET + workflow_dispatch: + +jobs: + run-all-tests: + name: ${{ matrix.name }} + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} + cancel-in-progress: true + strategy: + fail-fast: false + matrix: + include: + - { name: linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: linux-python3.11 , requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: windows-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.8 , requirements: pinned , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.9 , requirements: pinned , python-ver: "3.9" , os: windows-latest } + - { name: windows-python3.10 , requirements: pinned , python-ver: "3.10", os: windows-latest } + - { name: windows-python3.11 , requirements: pinned , python-ver: "3.11", os: windows-latest } + - { name: windows-python3.12 , requirements: pinned , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: windows-latest } + - { name: macos-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.8 , 
requirements: pinned , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.9 , requirements: pinned , python-ver: "3.9" , os: macos-latest } + - { name: macos-python3.10 , requirements: pinned , python-ver: "3.10", os: macos-latest } + - { name: macos-python3.11 , requirements: pinned , python-ver: "3.11", os: macos-latest } + - { name: macos-python3.12 , requirements: pinned , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: macos-latest } + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-ver }} + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + python -m pip list + python -m pip check + + - name: Install run requirements (minimum) + if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -U -e . 
+ + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + if: ${{ matrix.os != 'windows-latest' }} + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_extracellular_channels" + + - name: Test installation from a wheel (windows) + if: ${{ matrix.os == 'windows-latest' }} + run: | + python -m venv test-wheel-env + test-wheel-env/Scripts/activate.bat + python -m pip install dist/*-none-any.whl + python -c "import ndx_extracellular_channels" + + run-all-tests-on-conda: + name: ${{ matrix.name }} + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} # needed for conda environment to work + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} + cancel-in-progress: true + strategy: + fail-fast: false + matrix: + include: + - { name: conda-linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: conda-linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: conda-linux-python3.11 , requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + auto-activate-base: 
true + activate-environment: true + python-version: ${{ matrix.python-ver }} + + - name: Install build dependencies + run: | + conda config --set always_yes yes --set changeps1 no + conda info + conda config --show-sources + conda list --show-channel-urls + + - name: Install run requirements (minimum) + if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -U -e . + + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_extracellular_channels" diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml new file mode 100644 index 0000000..c36064a --- /dev/null +++ b/.github/workflows/run_coverage.yml @@ -0,0 +1,56 @@ +name: Run code coverage +on: + push: + workflow_dispatch: + +jobs: + run-coverage: + name: ${{ matrix.os }} + runs-on: ${{ matrix.os }} + # TODO handle forks + # run pipeline on either a push event or a PR event on a fork + # if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name + defaults: + run: + shell: bash + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.os }} + cancel-in-progress: true + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + 
env: # used by codecov-action + OS: ${{ matrix.os }} + PYTHON: '3.12' + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + + - name: Install package + run: | + python -m pip install -e . # must install in editable mode for coverage to find sources + python -m pip list + + - name: Run tests and generate coverage report + run: | + pytest --cov + python -m coverage xml # codecov uploader requires xml format + python -m coverage report -m + + # TODO uncomment after setting up repo on codecov.io + # - name: Upload coverage to Codecov + # uses: codecov/codecov-action@v3 + # with: + # fail_ci_if_error: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fac0f30 --- /dev/null +++ b/.gitignore @@ -0,0 +1,170 @@ +# output NWB files +*.nwb + +# generated docs +docs/source/_format_auto_docs + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ +.ruff_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Mac finder +.DS_Store diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..1a99664 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog for ndx-extracellular-channels diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..6b6f34a --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2024, Alessio Buccino, Kyu Hyun Lee, Ramon Heberto Mayorquin, Cody Baker, Matt Avaylon, Ryan Ly, Ben Dichter, Oliver Ruebel +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/NEXTSTEPS.md b/NEXTSTEPS.md new file mode 100644 index 0000000..fc54254 --- /dev/null +++ b/NEXTSTEPS.md @@ -0,0 +1,182 @@ + + +# Next Steps for ndx-extracellular-channels Extension for NWB + +## Creating Your Extension + +1. In a terminal, change directory into the new ndx-extracellular-channels directory: `cd ndx-extracellular-channels` + +2. Add any packages required by your extension to the `dependencies` key in `pyproject.toml`. + +3. Run `python -m pip install -e .` to install your new extension Python package +and any other packages required to develop, document, and run your extension. + +4. Modify `src/spec/create_extension_spec.py` to define your extension. + +5. Run `python src/spec/create_extension_spec.py` to generate the +`spec/ndx-extracellular-channels.namespace.yaml` and +`spec/ndx-extracellular-channels.extensions.yaml` files. + +6. Define API classes for your new extension data types. 
+ + - As a starting point, `src/pynwb/__init__.py` includes an example for how to use + the `pynwb.get_class` to generate a basic Python class for your new extension data + type. This class contains a constructor and properties for the new data type. + - Instead of using `pynwb.get_class`, you can define your own custom class for the + new type, which will allow you to customize the class methods, customize the + object mapping, and create convenience functions. See the + [Extending NWB tutorial](https://pynwb.readthedocs.io/en/stable/tutorials/general/extensions.html) + for more details. + +7. Define tests for your new extension data types in `src/pynwb/tests` or `src/matnwb/tests`. +A test for the example `TetrodeSeries` data type is provided as a reference and should be +replaced or removed. + + - Python tests should be runnable by executing [`pytest`](https://docs.pytest.org/en/latest/) + from the root of the extension directory. Use of PyNWB testing infrastructure from + `pynwb.testing` is encouraged (see + [documentation](https://pynwb.readthedocs.io/en/stable/pynwb.testing.html)). + - Creating both **unit tests** (e.g., testing initialization of new data type classes and + new functions) and **integration tests** (e.g., write the new data types to file, read + the file, and confirm the read data types are equal to the written data types) is + highly encouraged. + - By default, to aid with debugging, the project is configured NOT to run code coverage as + part of the tests. + Code coverage reporting is useful to help with creation of tests and report test coverage. + However, with this option enabled, breakpoints for debugging with pdb are being ignored. + To enable this option for code coverage reporting, uncomment out the following line in + your `pyproject.toml`: [line](https://github.com/nwb-extensions/ndx-template/blob/11ae225b3fd3934fa3c56e6e7b563081793b3b43/%7B%7B%20cookiecutter.namespace%20%7D%7D/pyproject.toml#L82-L83 +) + +7. 
(Optional) Define custom visualization widgets for your new extension data types in +`src/pynwb/widgets` so that the visualizations can be displayed with +[nwbwidgets](https://github.com/NeurodataWithoutBorders/nwbwidgets). +You will also need to update the `vis_spec` dictionary in `__init__.py` so that +nwbwidgets can find your custom visualizations. + +8. You may need to modify `pyproject.toml` and re-run `python -m pip install -e .` if you +use any dependencies. + +9. Update the `CHANGELOG.md` regularly to document changes to your extension. + + +## Documenting and Publishing Your Extension to the Community + +1. Install the latest release of hdmf_docutils: `python -m pip install hdmf-docutils` + +2. Start a git repository for your extension directory ndx-extracellular-channels + and push it to GitHub. You will need a GitHub account. + - Follow these directions: + https://help.github.com/en/articles/adding-an-existing-project-to-github-using-the-command-line + +3. Change directory into `docs`. + +4. Run `make html` to generate documentation for your extension based on the YAML files. + +5. Read `docs/README.md` for instructions on how to customize documentation for +your extension. + +6. Modify `README.md` to describe this extension for interested developers. + +7. Add a license file. Permissive licenses should be used if possible. **A [BSD license](https://opensource.org/licenses/BSD-3-Clause) is recommended.** + +8. Update the `CHANGELOG.md` to document changes to your extension. + +8. Push your repository to GitHub. A default set of GitHub Actions workflows is set up to +test your code on Linux, Windows, Mac OS, and Linux using conda; upload code coverage +stats to codecov.io; check for spelling errors; check for style errors; and check for broken +links in the documentation. For the code coverage workflow to work, you will need to +set up the repo on codecov.io and uncomment the "Upload coverage to Codecov" step +in `.github/workflows/run_coverage.yml`. 
+ +8. Make a release for the extension on GitHub with the version number specified. e.g. if version is 0.1.0, then this page should exist: https://github.com/catalystneuro/ndx-extracellular-channels/releases/tag/0.1.0 . For instructions on how to make a release on GitHub see [here](https://help.github.com/en/github/administering-a-repository/creating-releases). + +9. Publish your updated extension on [PyPI](https://pypi.org/). + - Follow these directions: https://packaging.python.org/en/latest/tutorials/packaging-projects/ + - You may need to modify `pyproject.toml` + - If your extension version is 0.1.0, then this page should exist: https://pypi.org/project/ndx-extracellular-channels/0.1.0 + + Once your GitHub release and `pyproject.toml` are ready, publishing on PyPI: + ```bash + python -m pip install --upgrade build twine + python -m build + twine upload dist/* + ``` + +10. Go to https://github.com/nwb-extensions/staged-extensions and fork the +repository. + +11. Clone the fork onto your local filesystem. + +12. Copy the directory `staged-extensions/example` to a new directory +`staged-extensions/ndx-extracellular-channels`: + + ```bash + cp -r staged-extensions/example staged-extensions/ndx-extracellular-channels + ``` + +13. Edit `staged-extensions/ndx-extracellular-channels/ndx-meta.yaml` +with information on where to find your NWB extension. + - The YAML file MUST contain a dict with the following keys: + - name: extension namespace name + - version: extension version + - src: URL for the main page of the public repository (e.g. 
on GitHub, BitBucket, GitLab) that contains the sources of the extension + - pip: URL for the main page of the extension on PyPI + - license: name of the license of the extension + - maintainers: list of GitHub usernames of those who will reliably maintain the extension + - You may copy and modify the following YAML that was auto-generated: + + ```yaml + name: ndx-extracellular-channels + version: 0.1.0 + src: https://github.com/catalystneuro/ndx-extracellular-channels + pip: https://pypi.org/project/ndx-extracellular-channels/ + license: BSD-3 + maintainers: + - alejoe91 + - khl02007 + - h-mayorquin + - CodyCBakerPhD + - mavaylon1 + - rly + - bendichter + ``` + +14. Edit `staged-extensions/ndx-extracellular-channels/README.md` +to add information about your extension. You may copy it from +`ndx-extracellular-channels/README.md`. + + ```bash + cp ndx-extracellular-channels/README.md staged-extensions/ndx-extracellular-channels/README.md + ``` + +15. Add and commit your changes to Git and push your changes to GitHub. +``` +cd staged-extensions +git add ndx-extracellular-channels +git commit -m "Add new catalog entry for ndx-extracellular-channels" . +git push +``` + +16. Open a pull request. Building of your extension will be tested on Windows, +Mac, and Linux. The technical team will review your extension shortly after +and provide feedback and request changes, if any. + +17. When your pull request is merged, a new repository, called +ndx-extracellular-channels-record will be created in the nwb-extensions +GitHub organization and you will be added as a maintainer for that repository. + + +## Updating Your Published Extension + +1. Update your ndx-extracellular-channels GitHub repository. + +2. Publish your updated extension on PyPI. + +3. Fork the ndx-extracellular-channels-record repository on GitHub. + +4. Open a pull request to test the changes automatically. 
The technical team +will review your changes shortly after and provide feedback and request changes, +if any. + +5. Your updated extension is approved. diff --git a/README.md b/README.md index 928eb2c..49e23fe 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,72 @@ -# ndx-extracellular-channels +# ndx-extracellular-channels Extension for NWB +`ndx-extracellular-channels` is an extension of the NWB format to formally define information about neural probes as data types in NWB files. It comes with helper functions to easily construct `ndx_extracellular_channels.Probe` from `probeinterface.Probe` and vice versa. +It provides a new version of [`ElectricalSeries`](https://nwb-schema.readthedocs.io/en/latest/format.html#electricalseries) called `ExtracellularSeries`. Each `ExtracellularSeries` is associated with its own `ChannelsTable` that contains information about the channels and probe used to record the data. Each channel is mapped to contacts on the probe. + +Use cases supported: +- Storing the manufacturer, model, and serial number of the probe +- Storing the size, shape, shank ID, and relative position of the contacts +- Associating a channel with a contact on a probe +- Storing multiple data streams (channels) from the same contacts on a probe, e.g., AP and LF bands from a Neuropixels probe +- Storing a selection of channels from a probe, e.g., the probe model has 960 contacts and only 384 are used +- Bipolar reference schemes, e.g., channel 1 is defined as the signal from contact 2 referenced to contact 1 +- Storing precise information about the probe insertion position and orientation, separate from the targeted position and reconstructed position +- Storing separate channel tables for different data streams instead of a global channel table, which avoids creating columns that apply only to some streams +- Multiple instances of the same probe in the same file, e.g., a Neuropixels probe in the left and right hemispheres + +It encompasses [SpikeInterface's 
`ndx-probeinterface` extension](https://github.com/SpikeInterface/ndx-probeinterface) and started originally as [@D1o0g9s's ndx-probe-interface extension](https://github.com/D1o0g9s/ndx-probe-interface). + +## Installation + +(TODO publish to PyPI) +```bash +pip install ndx_extracellular_channels +``` + +## Usage + +### Going from a `probeinterface.Probe`/`ProbeGroup` object to a `ndx_extracellular_channels.Probe` object +```python +import ndx_extracellular_channels + +pi_probe = probeinterface.Probe(...) +pi_probegroup = probeinterface.ProbeGroup() + +# from_probeinterface always returns a list of ndx_extracellular_channels.Probe devices +ndx_probes1 = ndx_extracellular_channels.from_probeinterface(pi_probe) +ndx_probes2 = ndx_extracellular_channels.from_probeinterface(pi_probegroup) + +ndx_probes = ndx_probes1 + ndx_probes2 + +nwbfile = pynwb.NWBFile(...) + +# add Probe as NWB Devices +for ndx_probe in ndx_probes: + nwbfile.add_device(ndx_probe) +``` + +### Going from a `ndx_extracellular_channels.Probe` object to a `probeinterface.Probe` object +```python +import ndx_extracellular_channels + +# load ndx_extracellular_channels.Probe objects from NWB file +io = pynwb.NWBHDF5IO(file_path, "r") +nwbfile = io.read() + +ndx_probes = [] +for device in nwbfile.devices.values(): + if isinstance(device, ndx_extracellular_channels.Probe): + ndx_probes.append(device) + +# convert to probeinterface.Probe objects +pi_probes = [] +for ndx_probe in ndx_probes: + pi_probe = ndx_extracellular_channels.to_probeinterface(ndx_probe) + pi_probes.append(pi_probe) +``` + +See `src/pynwb/tests/test_example_usage_probeinterface.py` for a full example.
## Diagram @@ -13,72 +79,86 @@ classDiagram direction LR class ExtracellularSeries { - <> + <> + data : numeric + --> unit : str = "microvolts" channels : DynamicTableRegion --> target : ChannelsTable + channel_conversion : List[float], optional + --> axis : int = 1 } - class ChannelsTable{ + class ChannelsTable { <> -------------------------------------- attributes -------------------------------------- name : str description : str - probe : ProbeModel - probe_insertion : ProbeInsertion, optional - contacts : DynamicTableRegion, optional? - --> target : ContactsTable - reference_contact : DynamicTableRegion, optional - --> target : ContactsTable - reference_mode : Literal["external wire", ...], optional + probe : Probe + position_reference : str, optional + electrical_reference_description : str, optional + ground : str, optional + position_confirmation_method : str, optional -------------------------------------- columns -------------------------------------- - id : int - filter : VectorData, optional - ---> Values strings such as "Bandpass 0-300 Hz". - contact_position [x, y, z] : VectorData, optional - ---> Each value is length 3 tuple of floats. - brain_area : VectorData, optional - --> data : str - ----> Plays the role of the old 'location'. - ... Any other custom columns, such analong frontend e.g. ADC information + id : VectorData[int] + contact : DynamicTableRegion + --> target : ContactsTable + reference_contact : DynamicTableRegion, optional + --> target : ContactsTable + filter : VectorData[str], optional + ---> Strings such as "Bandpass 0-300 Hz". 
+ estimated_position_ap_in_mm : VectorData[float], optional + estimated_position_ml_in_mm : VectorData[float], optional + estimated_position_dv_in_mm : VectorData[float], optional + estimated_brain_area : VectorData[str], optional + confirmed_position_ap_in_mm : VectorData[float], optional + confirmed_position_ml_in_mm : VectorData[float], optional + confirmed_position_dv_in_mm : VectorData[float], optional + confirmed_brain_area : VectorData[str], optional + ... Any other custom columns, e.g., ADC information } class ProbeInsertion { <> - - insertion_position : Tuple[float, float, float], optional - ----> Stereotactic coordinates on surface. + insertion_position_ap_in_mm : float, optional + insertion_position_ml_in_mm : float, optional + insertion_position_dv_in_mm : float, optional + position_reference : str, optional + hemisphere : Literal["left", "right"], optional + insertion_angle_pitch_in_deg : float, optional + insertion_angle_roll_in_deg : float, optional + insertion_angle_yaw_in_deg : float, optional depth_in_um : float, optional - insertion_angle : Tuple[float, float, float], optional - ----> The pitch/roll/yaw relative to the position on the surface. 
} - namespace ProbeInterface{ + namespace ProbeInterface { class Probe { <> identifier : str --> Usually the serial number probe_model : ProbeModel + probe_insertion : ProbeInsertion, optional } class ProbeModel { - <> + <> name : str - manufactuer : str + manufacturer : str model : str - contour : List[Tuple[float, float], Tuple[float, float, float]] - contact_table : ContactsTable + ndim : int, optional + planar_contour_in_um : List[Tuple[float, float], Tuple[float, float, float]], optional + contacts_table : ContactsTable } - class ContactTable { + class ContactsTable { <> -------------------------------------- @@ -86,27 +166,37 @@ classDiagram -------------------------------------- name : str description : str - + -------------------------------------- columns -------------------------------------- - id : int - shape : str, optional - size : str, optional - shank_id : str, optional - relative_position : List[Tuple[float, float], Tuple[float, float, float]], optional + id : VectorData[int] + relative_position_in_um : List[Tuple[float, float], Tuple[float, float, float]] + contact_id : VectorData[str], optional + shank_id : VectorData[str], optional + plane_axes : List[Tuple[int, int], Tuple[int, int, int]], optional + shape : VectorData[str], optional + radius_in_um : VectorData[float], optional + width_in_um : VectorData[float], optional + height_in_um : VectorData[float], optional } } - + Probe *..> ProbeModel : links to probe_model + Probe *--> ProbeInsertion: might contain ProbeInsertion + ProbeModel *--> ContactsTable : contains + ExtracellularSeries ..> ChannelsTable : links to channels + ChannelsTable *..> Probe : links to probe + ChannelsTable ..> ContactsTable : row reference to contact + note for ChannelsTable "ChannelsTable is no longer global" +``` - ExtracellularSeries ..> ChannelsTable : links with channels - ProbeModel *--> ContactTable : contains - Probe *..> ProbeModel : links with probe_model - ChannelsTable *..> Probe : links with probe +## 
Ongoing work +- Publish on PyPI +- Incorporate this NDX into the core NWB schema via [NWBEP002](https://docs.google.com/document/d/1q-haFEEHEgZpRoCzzQsuSWCKN4QfMsTzLnlptLaf-yw/edit) - ChannelsTable ..> ContactTable : links with contacts +## Future plans +- Add information about the headstage used for data acquisition - ChannelsTable *--> ProbeInsertion: might contain ProbeInsertion - note for ChannelsTable "ChannelsTable is no longer global" -``` \ No newline at end of file +--- +This extension was created using [ndx-template](https://github.com/nwb-extensions/ndx-template). diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..54e6545 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,179 @@ + +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXAPIDOC = sphinx-apidoc +PAPER = +BUILDDIR = build +SRCDIR = ../src +RSTDIR = source +CONFDIR = $(PWD)/source + + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext fulldoc allclean + +help: + @echo "To update documentation sources from the format specification please use \`make apidoc'" + @echo "" + @echo "To build the documentation please use \`make <target>' where <target> is one of" + @echo " fulldoc to rebuild the apidoc, html, and latexpdf all at once" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " apidoc to build RST from source code" + @echo " clean to clean all documents built by Sphinx in _build" + @echo " allclean to clean all autogenerated documents both from Sphinx and apidoc" + +allclean: + -rm -rf $(BUILDDIR)/* $(RSTDIR)/modules.rst + -rm $(RSTDIR)/_format_auto_docs/*.png + -rm $(RSTDIR)/_format_auto_docs/*.pdf + -rm $(RSTDIR)/_format_auto_docs/*.rst + -rm $(RSTDIR)/_format_auto_docs/*.inc + +clean: + -rm -rf $(BUILDDIR)/* $(RSTDIR)/modules.rst + +html: + $(SPHINXBUILD) -b html 
$(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sample.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sample.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/sample" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sample" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" "(use \`make latexpdf' here to do that automatically)." 
+ +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " "results in $(BUILDDIR)/doctest/output.txt." + +apidoc: + PYTHONPATH=$(CONFDIR):$(PYTHONPATH) hdmf_generate_format_docs + @echo + @echo "Generate rst source files from NWB spec." 
+ +fulldoc: + $(MAKE) allclean + @echo + @echo "Rebuilding apidoc, html, latexpdf" + $(MAKE) apidoc + $(MAKE) html + $(MAKE) latexpdf diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..9a3a30d --- /dev/null +++ b/docs/README.md @@ -0,0 +1,121 @@ + +# Getting started + +## Generate Documentation + +* To generate the HTML version of your documentation run ``make html``. +* The [hdmf-docutils](https://pypi.org/project/hdmf-docutils/) package must be installed. + +## Customize Your Extension Documentation + +* **extension description** + * Edit ``source/description.rst`` to describe your extension. + +* **release notes** + * Edit ``source/release_notes.rst`` to document improvements and fixes of your extension. + +* **documentation build settings** + * Edit ``source/conf.py`` to customize your extension documentation configuration. + * Edit ``source/conf_doc_autogen.py`` to customize the format documentation auto-generation based on + the YAML specification files. + + +# Overview + +The specification documentation uses Sphinx [http://www.sphinx-doc.org/en/stable/index.html](http://www.sphinx-doc.org/en/stable/index.html) + +## Rebuilding All + +To rebuild the full documentation in html, latex, and PDF simply run: + +``` +make fulldoc +``` + +This is a convenience function that is equivalent to: + +``` +make allclean +make apidoc +make html +make latexpdf +``` + +## Generating the format documentation from the format spec + +The format documentation is auto-generated from the format specification (YAML) sources via: + +``` +make apidoc +``` + +This will invoke the executable: + +``` +hdmf_generate_format_docs +``` + +The script automatically generates a series of .rst, .png, and .pdf files that are stored in the folder `source/_format_auto_docs`. The generated .rst files are included in `source/format.rst` and the png and pdf files are used as figures in the autogenerated docs. 
+ + The folder `source/_format_auto_docs` is reserved for autogenerated files, i.e., files in the folder should not be added or edited by hand as they will be deleted and rebuilt during the full build of the documentation. + + By default the Sphinx configuration is set up to always regenerate the sources whenever the docs are being built (see next section). This behavior can be customized via the `spec_doc_rebuild_always` parameter in `source/conf.py` + +## Building a specific document type + +To build the documentation, run: + +``` +make <doctype> +``` + +where `<doctype>` is, e.g., `latexpdf`, `html`, `singlehtml`, or `man`. For a complete list of supported doc-types, see: + +``` +make help +``` + +## Cleaning up + +`make clean` cleans up all builds of the documentation located in `_build`. + +`make allclean` cleans up all builds of the documentation located in `_build` as well as all autogenerated sources stored in `source/_format_auto_docs`. + +## Configuration + +The build of the documentation can be customized via a broad range of Sphinx options in: + +`source/conf_doc_autogen.py` + +In addition to standard Sphinx options, there are a number of additional options used to customize the content and structure of the autogenerated documents, e.g.: + +* `spec_show_yaml_src` - Boolean indicating whether the YAML sources should be included for the different Neurodata types +* `spec_generate_src_file` - Boolean indicating whether the YAML sources of the neurodata_types should be rendered in a separate section (True) or in the same location as the main documentation +* `spec_show_hierarchy_plots` - Boolean indicating whether we should generate and show figures of the hierarchy defined by the specifications as part of the documentation +* `spec_file_per_type` - Boolean indicating whether we should generate separate .inc reStructuredText for each neurodata_type (True) +or should all text be added to the main file (False) +* `spec_show_subgroups_in_tables` - Should subgroups of the main groups 
be rendered in the table as well. Usually this is disabled since groups are rendered as separate sections in the text +* `spec_appreviate_main_object_doc_in_tables` - Abbreviate the documentation of the main object for which a table is rendered in the table. This is commonly set to True as doc of the main object is already rendered as the main intro for the section describing the object +* `spec_show_title_for_tables` - Add a title for the table showing the specifications. +* `spec_show_subgroups_in_seperate_table` - Should top-level subgroups be listed in a separate table or as part of the main dataset and attributes table +* `spec_table_depth_char` - Char to be used as prefix to indicate the depth of an object in the specification hierarchy. NOTE: The char used should be supported by LaTeX. +* `spec_add_latex_clearpage_after_ndt_sections` - Add a LaTeX clearpage after each main section describing a neurodata_type. This helps in LaTeX to keep the ordering of figures, tables, and code blocks consistent in particular when the hierarchy_plots are included. 
+* `spec_resolve_type_inc` - Resolve includes to always show the full list of objects that are part of a type (True) or to show only the parts that are actually new to a current type while only linking to base types (False) + +In addition, the location of the input format specification can be customized as follows: + +* `spec_input_spec_dir` - Directory where the YAML files for the namespace to be documented are located +* `spec_input_namespace_filename` - Name of the YAML file with the specification of the Namespace to be documented +* `spec_input_default_namespace` - Name of the default namespace in the file + +Finally, the name and location of output files can be customized as follows: + +* `spec_output_dir` - Directory where the autogenerated files should be stored +* `spec_output_master_filename` - Name of the master .rst file that includes all the autogenerated docs +* `spec_output_doc_filename` - Name of the file where the main documentation goes +* `spec_output_src_filename` - Name of the file where the sources of the format spec go. NOTE: This file is only generated if `spec_generate_src_file` is enabled +* `spec_output_doc_type_hierarchy_filename` - Name of the file containing the type hierarchy. (Included in `spec_output_doc_filename`) + +In the regular Sphinx `source/conf.py` file, we can then also set: + +* `spec_doc_rebuild_always` - Boolean to define whether to always rebuild the source docs from YAML when doing a regular build of the sources (e.g., via `make html`) even if the folder with the source files already exists diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..747ffb7 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 0000000..63ee6cc --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,13 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..55b7755 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,113 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = 'ndx-extracellular-channels' +copyright = '2024, Alessio Buccino, Kyu Hyun Lee, Ramon Heberto Mayorquin, Cody Baker, Matt Avaylon, Ryan Ly, Ben Dichter, Oliver Ruebel' +author = 'Alessio Buccino, Kyu Hyun Lee, Ramon Heberto Mayorquin, Cody Baker, Matt Avaylon, Ryan Ly, Ben Dichter, Oliver Ruebel' + +version = '0.1.0' +release = 'alpha' + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + 'sphinx.ext.ifconfig', + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', +] + +templates_path = ['_templates'] +exclude_patterns = [] + +language = 'en' + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = 'alabaster' +html_static_path = ['_static'] + +# -- Options for intersphinx extension --------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), +} + + +############################################################################ +# CUSTOM CONFIGURATIONS ADDED BY THE NWB TOOL FOR GENERATING FORMAT DOCS +########################################################################### + +import textwrap # noqa: E402 + +import sphinx_rtd_theme # noqa: E402 + +# -- Options for intersphinx --------------------------------------------- +intersphinx_mapping.update({ + 'core': ('https://nwb-schema.readthedocs.io/en/latest/', None), + 'hdmf-common': 
('https://hdmf-common-schema.readthedocs.io/en/latest/', None), +}) + +# -- Generate sources from YAML--------------------------------------------------- +# Always rebuild the source docs from YAML even if the folder with the source files already exists +spec_doc_rebuild_always = True + + +def run_doc_autogen(_): + # Execute the autogeneration of Sphinx format docs from the YAML sources + import os + import sys + conf_file_dir = os.path.dirname(os.path.abspath(__file__)) + sys.path.append(conf_file_dir) # Need so that generate format docs can find the conf_doc_autogen file + from conf_doc_autogen import spec_output_dir + + if spec_doc_rebuild_always or not os.path.exists(spec_output_dir): + sys.path.append('./docs') # needed to enable import of generate_format docs + from hdmf_docutils.generate_format_docs import main as generate_docs + generate_docs() + + +def setup(app): + app.connect('builder-inited', run_doc_autogen) + # overrides for wide tables in RTD theme + try: + app.add_css_file("theme_overrides.css") # Used by newer Sphinx versions + except AttributeError: + app.add_stylesheet("theme_overrides.css") # Used by older version of Sphinx + +# -- Customize sphinx settings +numfig = True +autoclass_content = 'both' +autodoc_docstring_signature = True +autodoc_member_order = 'bysource' +add_function_parentheses = False + + +# -- HTML sphinx options +html_theme = "sphinx_rtd_theme" +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# LaTeX Sphinx options +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. 
+ 'preamble': textwrap.dedent( + ''' + \\setcounter{tocdepth}{3} + \\setcounter{secnumdepth}{6} + \\usepackage{enumitem} + \\setlistdepth{100} + '''), +} diff --git a/docs/source/conf_doc_autogen.py b/docs/source/conf_doc_autogen.py new file mode 100644 index 0000000..f99869d --- /dev/null +++ b/docs/source/conf_doc_autogen.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Configuration file for generating sources for the format documentation from the YAML specification files + +import os + +# -- Input options for the specification files to be used ----------------------- + +# Directory where the YAML files for the namespace to be documented are located +spec_input_spec_dir = '../spec' + +# Name of the YAML file with the specification of the Namespace to be documented +spec_input_namespace_filename = 'ndx-extracellular-channels.namespace.yaml' + +# Name of the default namespace in the file +spec_input_default_namespace = 'ndx-extracellular-channels' + + +# -- Options for customizing the locations of output files + +# Directory where the autogenerated files should be stored +spec_output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_format_auto_docs") + +# Name of the master rst file that includes all the autogenerated docs +spec_output_master_filename = 'format_spec_main.inc' + +# Name of the file where the main documentation goes +spec_output_doc_filename = 'format_spec_doc.inc' + +# Name of the file where the sources of the format spec go. NOTE: This file is only generated if +# spec_generate_src_file is enabled +spec_output_src_filename = 'format_spec_sources.inc' + +# Name of the file containing the type hierarchy. 
(Included in spec_output_doc_filename) +spec_output_doc_type_hierarchy_filename = 'format_spec_type_hierarchy.inc' + +# Clean up the output directory before we build if the git hash is out of date +spec_clean_output_dir_if_old_git_hash = True + +# Do not rebuild the format sources if we have previously built the sources and the git hash matches +spec_skip_doc_autogen_if_current_git_hash = False + + +# -- Options for the generation of the documentation from source ---------------- + +# Should the YAML sources be included for the different modules +spec_show_yaml_src = True + +# Show figure of the hierarchy of objects defined by the spec +spec_show_hierarchy_plots = True + +# Should the sources of the neurodata_types (YAML) be rendered in a separate section (True) or +# in the same location as the base documentation +spec_generate_src_file = True + +# Should separate .inc reStructuredText files be generated for each neurodata_type (True) +# or should all text be added to the main file +spec_file_per_type = True + +# Should top-level subgroups be listed in a separate table or as part of the main dataset and attributes table +spec_show_subgroups_in_seperate_table = True + +# Abbreviate the documentation of the main object for which a table is rendered in the table. +# This is commonly set to True as doc of the main object is already rendered as the main intro for the +# section describing the object +spec_appreviate_main_object_doc_in_tables = True + +# Show a title for the tables +spec_show_title_for_tables = True + +# Char to be used as prefix to indicate the depth of an object in the specification hierarchy +spec_table_depth_char = '.' # '→' '.' + +# Add a LaTeX clearpage after each main section describing a neurodata_type. 
This helps in LaTeX to keep the ordering +# of figures, tables, and code blocks consistent in particular when the hierarchy_plots are included +spec_add_latex_clearpage_after_ndt_sections = True + +# Resolve includes to always show the full list of objects that are part of a type (True) +# or to show only the parts that are actually new to a current type while only linking to base types +spec_resolve_type_inc = False + +# Default type map to be used. This is the type map where dependent namespaces are stored. In the case of +# NWB this is spec_default_type_map = pynwb.get_type_map() +import pynwb # noqa: E402 + +spec_default_type_map = pynwb.get_type_map() + +# Default specification classes for groups datasets and namespaces. In the case of NWB these are the NWB-specific +# spec classes. In the general cases these are the spec classes from HDMF +spec_group_spec_cls = pynwb.spec.NWBGroupSpec +spec_dataset_spec_cls = pynwb.spec.NWBDatasetSpec +spec_namespace_spec_cls = pynwb.spec.NWBNamespace diff --git a/docs/source/credits.rst b/docs/source/credits.rst new file mode 100644 index 0000000..da5cda1 --- /dev/null +++ b/docs/source/credits.rst @@ -0,0 +1,21 @@ +******* +Credits +******* + +.. note:: + Add the credits for your extension here + +Acknowledgments +=============== + + +Authors +======= + + +***** +Legal +***** + +License +======= diff --git a/docs/source/description.rst b/docs/source/description.rst new file mode 100644 index 0000000..6f8553e --- /dev/null +++ b/docs/source/description.rst @@ -0,0 +1,5 @@ +Overview +======== + +.. note:: + Add the description of your extension here diff --git a/docs/source/format.rst b/docs/source/format.rst new file mode 100644 index 0000000..8e1dcb9 --- /dev/null +++ b/docs/source/format.rst @@ -0,0 +1,12 @@ + +.. _ndx-extracellular-channels: + +************************** +ndx-extracellular-channels +************************** + +Version |release| |today| + +.. .. contents:: + +.. 
include:: _format_auto_docs/format_spec_main.inc diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..173bb4c --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,30 @@ +Specification for the ndx-extracellular-channels extension +========================================================== + +.. toctree:: + :numbered: + :maxdepth: 8 + :caption: Table of Contents + + description + +.. toctree:: + :numbered: + :maxdepth: 3 + :caption: Extension Specification + + format + +.. toctree:: + :maxdepth: 2 + :caption: History & Legal + + release_notes + credits + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst new file mode 100644 index 0000000..39ccd1c --- /dev/null +++ b/docs/source/release_notes.rst @@ -0,0 +1,5 @@ +Release Notes +============= + +.. note:: + Add the release notes of your extension here diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..466d8a9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,122 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ndx-extracellular-channels" +version = "0.1.0" +authors = [ + { name="Alessio Buccino", email="alessio.buccino@alleninstitute.org" }, + { name="Kyu Hyun Lee", email="kyuhyun.lee@ucsf.edu" }, + { name="Ramon Heberto Mayorquin", email="ramon.mayorquin@catalystneuro.com" }, + { name="Cody Baker", email="cody.baker@catalystneuro.com" }, + { name="Matt Avaylon", email="mavaylon@lbl.gov" }, + { name="Ryan Ly", email="rly@lbl.gov" }, + { name="Ben Dichter", email="ben.dichter@catalystneuro.com" }, + { name="Oliver Ruebel", email="oruebel@lbl.gov" }, +] +description = "NWB extension for storing extracellular probe and channels metadata" +readme = "README.md" +requires-python = ">=3.8" +license = {text = "BSD-3"} +classifiers = [ + "Programming Language :: Python", + "Programming 
Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", +] +keywords = [ + 'NeurodataWithoutBorders', + 'NWB', + 'nwb-extension', + 'ndx-extension', +] +dependencies = [ + "pynwb>=2.6.0", + "hdmf>=3.13.0", +] + +# TODO: add URLs before release +[project.urls] +"Homepage" = "https://github.com/catalystneuro/ndx-extracellular-channels" +# "Documentation" = "https://package.readthedocs.io/" +"Bug Tracker" = "https://github.com/catalystneuro/ndx-extracellular-channels/issues" +# "Discussions" = "https://github.com/organization/package/discussions" +"Changelog" = "https://github.com/catalystneuro/ndx-extracellular-channels/blob/main/CHANGELOG.md" + +[tool.hatch.build] +include = [ + "src/pynwb", + "spec/ndx-extracellular-channels.extensions.yaml", + "spec/ndx-extracellular-channels.namespace.yaml", +] +exclude = [ + "src/pynwb/tests", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/pynwb/ndx_extracellular_channels", + "spec" +] + +[tool.hatch.build.targets.wheel.sources] +"spec" = "ndx_extracellular_channels/spec" + +[tool.hatch.build.targets.sdist] +include = [ + "src/pynwb", + "spec/ndx-extracellular-channels.extensions.yaml", + "spec/ndx-extracellular-channels.namespace.yaml", + "docs", +] +exclude = [] + +[tool.pytest.ini_options] +# uncomment below to run pytest with code coverage reporting. 
NOTE: breakpoints may not work +# addopts = "--cov --cov-report html" + +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" + +[tool.coverage.run] +branch = true +source = ["src/pynwb"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.black] +line-length = 120 +preview = true +exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" + +[tool.ruff] +lint.select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", + "docs/source/conf.py", +] +line-length = 120 + +[tool.ruff.lint.per-file-ignores] +"src/pynwb/ndx_extracellular_channels/__init__.py" = ["E402", "F401"] +"src/spec/create_extension_spec.py" = ["T201"] +"src/pynwb/tests/test_example_usage_all.py" = ["T201"] +"src/pynwb/tests/test_example_usage_probeinterface.py" = ["T201"] + +[tool.ruff.lint.mccabe] +max-complexity = 17 diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..581cbf2 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,17 @@ +# pinned dependencies to reproduce an entire development environment to +# run tests, check code style, and generate documentation +black==24.3.0 +codespell==2.2.6 +coverage==7.4.4 +hdmf==3.13.0 +hdmf-docutils==0.4.7 +nwbwidgets==0.11.3 +pre-commit==3.5.0 +probeinterface==0.2.21 +pynwb==2.6.0 +pytest==8.1.1 +pytest-cov==5.0.0 +pytest-subtests==0.12.1 +python-dateutil==2.8.2 +ruff==0.3.4 +tox==4.14.2 diff --git a/requirements-min.txt b/requirements-min.txt new file mode 100644 index 0000000..c96cd67 --- /dev/null +++ b/requirements-min.txt @@ -0,0 +1,5 @@ +# minimum versions of package dependencies for installation +# these should match the minimum versions specified in pyproject.toml +# NOTE: it may be possible to relax these minimum requirements +pynwb==2.6.0 +hdmf==3.13.0 diff --git 
a/spec/ndx-extracellular-channels.extensions.yaml b/spec/ndx-extracellular-channels.extensions.yaml new file mode 100644 index 0000000..1176e8e --- /dev/null +++ b/spec/ndx-extracellular-channels.extensions.yaml @@ -0,0 +1,338 @@ +groups: +- neurodata_type_def: ContactsTable + neurodata_type_inc: DynamicTable + default_name: contacts_table + doc: Metadata about the contacts of a probe, compatible with the ProbeInterface + specification. + datasets: + - name: relative_position_in_um + neurodata_type_inc: VectorData + dtype: float + dims: + - - num_contacts + - x, y + - - num_contacts + - x, y, z + shape: + - - null + - 2 + - - null + - 3 + doc: Relative position of the contact in micrometers, relative to `reference`. + - name: contact_id + neurodata_type_inc: VectorData + dtype: text + doc: Unique ID of the contact + quantity: '?' + - name: shank_id + neurodata_type_inc: VectorData + dtype: text + doc: Shank ID of the contact + quantity: '?' + - name: plane_axes + neurodata_type_inc: VectorData + dtype: float + dims: + - - num_contacts + - v1, v2 + - x, y + - - num_contacts + - v1, v2 + - x, y, z + shape: + - - null + - 2 + - 2 + - - null + - 2 + - 3 + doc: The axes defining the contact plane for each contact. It can be used for + contact-wise rotations. For 2D probes, provide two points (v1x, v1y) and (v2x, + v2y). Axis 1 is defined by the vector from (0, 0) to (v1x, v1y). Axis 2 is defined + by the vector from (0, 0) to (v2x, v2y). So for one contact, a 45 degree rotation + would be [[1 / sqrt(2), 1 / sqrt(2)], [-1 / sqrt(2), 1 / sqrt(2)]]. The default + is [[1, 0], [0, 1]]. For 3D probes, provide two points (v1x, v1y, v1z), (v2x, + v2y, v2z). See 'contact_plane_axes' in https://probeinterface.readthedocs.io/en/main/format_spec.html + for more details. + quantity: '?' + - name: shape + neurodata_type_inc: VectorData + dtype: text + doc: Shape of the contact; e.g. 'circle' + quantity: '?' 
+ - name: radius_in_um + neurodata_type_inc: VectorData + dtype: float + doc: Radius of a circular contact, in micrometers. + quantity: '?' + - name: width_in_um + neurodata_type_inc: VectorData + dtype: float + doc: Width of a rectangular or square contact, in micrometers. + quantity: '?' + - name: height_in_um + neurodata_type_inc: VectorData + dtype: float + doc: Height of a rectangular contact, in micrometers. + quantity: '?' +- neurodata_type_def: ProbeModel + neurodata_type_inc: Device + doc: Neural probe object, compatible with the ProbeInterface specification. The + name of the object should be the model name of the probe, e.g., "Neuropixels 1.0". + attributes: + - name: ndim + dtype: int + default_value: 2 + doc: dimension of the probe + required: false + - name: model + dtype: text + doc: Name of the model of the probe, e.g., "Neuropixels 1.0". + - name: planar_contour_in_um + dtype: float + dims: + - - num_points + - x, y + - - num_points + - x, y, z + shape: + - - null + - 2 + - - null + - 3 + doc: The coordinates of the nodes of the polygon that describe the shape (contour) + of the probe, in micrometers. The first and last points are connected to close + the polygon. e.g., [(-20., -30.), (20., -110.), (60., -30.), (60., 190.), (-20., + 190.)]. Coordinates can be in 2D or 3D. See 'probe_planar_contour' in https://probeinterface.readthedocs.io/en/main/format_spec.html + for more details. + required: false + groups: + - name: contacts_table + neurodata_type_inc: ContactsTable + doc: Neural probe contacts, compatible with the ProbeInterface specification +- neurodata_type_def: Probe + neurodata_type_inc: Device + doc: Specific instance of a neural probe object. + attributes: + - name: identifier + dtype: text + doc: Identifier of the probe, usually the serial number. + required: false + groups: + - name: probe_insertion + neurodata_type_inc: ProbeInsertion + doc: Information about the insertion of a probe into the brain. + quantity: '?' 
+ links: + - name: probe_model + target_type: ProbeModel + doc: The model of the probe used to record the data. +- neurodata_type_def: ProbeInsertion + neurodata_type_inc: NWBContainer + default_name: probe_insertion + doc: Metadata about the insertion of a probe into the brain, which can be used to + determine the location of the probe in the brain. + attributes: + - name: insertion_position_ap_in_mm + dtype: float + doc: Anteroposterior (AP) stereotactic coordinate of where the probe was inserted, + in millimeters. + is anterior. Coordinate is relative to the zero-point described + in `position_reference`. + required: false + - name: insertion_position_ml_in_mm + dtype: float + doc: Mediolateral (ML) stereotactic coordinate of where the probe was inserted, + in millimeters. + is right. Coordinate is relative to the zero-point described + in `position_reference`. + required: false + - name: insertion_position_dv_in_mm + dtype: float + doc: Dorsoventral (DV) stereotactic coordinate of where the probe was inserted, + in millimeters. + is up. Coordinate is relative to the zero-point described + in `position_reference`. The zero-point is typically the surface of the brain, + so this value is typically 0. + required: false + - name: depth_in_mm + dtype: float + doc: Depth that the probe was driven along `insertion_angle` starting from `insertion_position_{X}_in_mm`, + in millimeters. + required: false + - name: position_reference + dtype: text + doc: Location of the origin (0, 0, 0) for `insertion_position_{X}_in_mm` coordinates, + e.g., "(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface". + required: false + - name: hemisphere + dtype: text + doc: The hemisphere ("left" or "right") of the targeted insertion site. + Should be consistent with `insertion_position_ml_in_mm` coordinate + (left = ml < 0, right = ml > 0).
+ required: false + - name: insertion_angle_yaw_in_deg + dtype: float + doc: The yaw angle of the probe at the time of insertion, in degrees. Yaw = rotation + around dorsal-ventral axis, like shaking (+ is rotating the nose rightward). + Zero is defined as the probe being parallel to an sagittal slice of the brain. + The order of rotations is yaw, pitch, roll. + required: false + - name: insertion_angle_pitch_in_deg + dtype: float + doc: The pitch angle of the probe at the time of insertion, in degrees. Pitch + = rotation around left-right axis, like nodding (+ is rotating the nose upward). + Zero is defined as the probe being parallel to an axial slice of the brain. + The order of rotations is yaw, pitch, roll. + required: false + - name: insertion_angle_roll_in_deg + dtype: float + doc: The roll angle of the probe at the time of insertion, in degrees. Roll = + rotation around anterior-posterior axis, like tilting (+ is rotating the right + side downward). Zero is defined as the probe being parallel to a coronal slice + of the brain. The order of rotations is yaw, pitch, roll. + required: false +- neurodata_type_def: ChannelsTable + neurodata_type_inc: DynamicTable + default_name: ChannelsTable + doc: Metadata about the channels used in an extracellular recording from a single + probe. + attributes: + - name: position_reference + dtype: text + doc: Location of the origin (0, 0, 0) for `{X}_position_{Y}_in_mm` coordinates, + e.g., "(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface". + required: false + - name: electrical_reference_description + dtype: text + doc: The electrical reference used for the recording; e.g., "common average reference", + "probe tip". + required: false + - name: ground + dtype: text + doc: The ground used for the recording; e.g., "external wire in CSF", "skull screw + over frontal cortex". 
+ required: false + - name: position_confirmation_method + dtype: text + doc: Description of the method used to confirm the position of the contacts or + brain area, e.g., "histology", "MRI". + required: false + datasets: + - name: contact + neurodata_type_inc: DynamicTableRegion + doc: The row in a ContactsTable that represents the contact used as a channel. + - name: reference_contact + neurodata_type_inc: DynamicTableRegion + doc: The row in a ContactsTable that represents the contact used as a reference. + This is useful for differential or bipolar recordings. The data in the `ExtracellularSeries` + corresponding to each channel (row) of this table is equal to the voltage from + `contact` minus the voltage from `reference_contact`. + quantity: '?' + - name: filter + neurodata_type_inc: VectorData + dtype: text + doc: The filter used on the raw (wideband) voltage data from this contact, including + the filter name and frequency cutoffs, e.g., "High-pass filter at 300 Hz." + quantity: '?' + - name: estimated_position_ap_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Anteroposterior (AP) stereotactic coordinate of the estimated contact position, + in millimeters. + is anterior. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' + - name: estimated_position_ml_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Mediolateral (ML) stereotactic coordinate of the estimated contact position, + in millimeters. + is right. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' + - name: estimated_position_dv_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Dorsoventral (DV) stereotactic coordinate of the estimated contact position, + in millimeters. + is up. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' 
+ - name: estimated_brain_area + neurodata_type_inc: VectorData + dtype: text + doc: The brain area of the estimated contact position, e.g., "CA1". + quantity: '?' + - name: confirmed_position_ap_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Anteroposterior (AP) stereotactic coordinate of the confirmed contact position, + in millimeters. + is anterior. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' + - name: confirmed_position_ml_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Mediolateral (ML) stereotactic coordinate of the confirmed contact position, + in millimeters. + is right. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' + - name: confirmed_position_dv_in_mm + neurodata_type_inc: VectorData + dtype: float + doc: Dorsoventral (DV) stereotactic coordinate of the confirmed contact position, + in millimeters. + is up. Coordinate is relative to the zero-point described + in `position_reference`. + quantity: '?' + - name: confirmed_brain_area + neurodata_type_inc: VectorData + dtype: text + doc: The brain area of the actual contact position, e.g., "CA1". + quantity: '?' + links: + - name: probe + target_type: Probe + doc: The probe that the channels belongs to. +- neurodata_type_def: ExtracellularSeries + neurodata_type_inc: TimeSeries + doc: Extracellular recordings from a single probe. Create multiple instances of + this class for different probes. + datasets: + - name: data + dtype: numeric + dims: + - num_times + - num_channels + shape: + - null + - null + doc: Recorded voltage data. + attributes: + - name: unit + dtype: text + value: microvolts + doc: Base unit of measurement for working with the data. This value is fixed + to 'microvolts'. Actual stored values are not necessarily stored in these + units. 
To access the data in these units, multiply 'data' by 'conversion', + followed by 'channel_conversion' (if present), and then add 'offset'. + - name: channels + neurodata_type_inc: DynamicTableRegion + doc: DynamicTableRegion pointer to rows in a ChannelsTable that represent the + channels used to collect the data in this recording. + - name: channel_conversion + dtype: float + dims: + - num_channels + shape: + - null + doc: Channel-specific conversion factor. Multiply the data in the 'data' dataset + by these values along the channel axis (as indicated by axis attribute) AND + by the global conversion factor in the 'conversion' attribute of 'data' to get + the data values in microvolts, i.e., data in microvolts = data * data.conversion + * channel_conversion. This approach allows for both global and per-channel data + conversion factors needed to support the storage of electrical recordings as + native values generated by data acquisition systems. If this dataset is not + present, then there is no channel-specific conversion factor, i.e. it is 1 for + all channels. + quantity: '?' + attributes: + - name: axis + dtype: int + value: 1 + doc: The zero-indexed axis of the 'data' dataset that the channel-specific conversion + factor applies to. This value is fixed to 1.
diff --git a/spec/ndx-extracellular-channels.namespace.yaml b/spec/ndx-extracellular-channels.namespace.yaml new file mode 100644 index 0000000..ae3a39e --- /dev/null +++ b/spec/ndx-extracellular-channels.namespace.yaml @@ -0,0 +1,27 @@ +namespaces: +- author: + - Alessio Buccino + - Kyu Hyun Lee + - Ramon Heberto Mayorquin + - Cody Baker + - Matt Avaylon + - Ryan Ly + - Ben Dichter + - Oliver Ruebel + - Geeling Chau + contact: + - alessio.buccino@alleninstitute.org + - kyuhyun.lee@ucsf.edu + - ramon.mayorquin@catalystneuro.com + - cody.baker@catalystneuro.com + - mavaylon@lbl.gov + - rly@lbl.gov + - ben.dichter@catalystneuro.com + - oruebel@lbl.gov + - gchau@caltech.edu + doc: NWB extension for storing extracellular probe and channels metadata + name: ndx-extracellular-channels + schema: + - namespace: core + - source: ndx-extracellular-channels.extensions.yaml + version: 0.1.0 diff --git a/src/matnwb/README.md b/src/matnwb/README.md new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/README.md b/src/pynwb/README.md new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/ndx_extracellular_channels/__init__.py b/src/pynwb/ndx_extracellular_channels/__init__.py new file mode 100644 index 0000000..691c06f --- /dev/null +++ b/src/pynwb/ndx_extracellular_channels/__init__.py @@ -0,0 +1,142 @@ +import os +import warnings + +from hdmf.utils import docval, get_docval, get_data_shape +from pynwb import get_class, load_namespaces, register_class + +try: + from importlib.resources import files +except ImportError: + # TODO: Remove when python 3.9 becomes the new minimum + from importlib_resources import files + +# Get path to the namespace.yaml file with the expected location when installed not in editable mode +__location_of_this_file = files(__name__) +__spec_path = __location_of_this_file / "spec" / "ndx-extracellular-channels.namespace.yaml" + +# If that path does not exist, we are likely running in editable mode. 
Use the local path instead +if not os.path.exists(__spec_path): + __spec_path = __location_of_this_file.parent.parent.parent / "spec" / "ndx-extracellular-channels.namespace.yaml" + +# Load the namespace +load_namespaces(str(__spec_path)) + +ProbeInsertion = get_class("ProbeInsertion", "ndx-extracellular-channels") +ContactsTable = get_class("ContactsTable", "ndx-extracellular-channels") +AutoProbeModel = get_class("ProbeModel", "ndx-extracellular-channels") +Probe = get_class("Probe", "ndx-extracellular-channels") +AutoChannelsTable = get_class("ChannelsTable", "ndx-extracellular-channels") +AutoExtracellularSeries = get_class("ExtracellularSeries", "ndx-extracellular-channels") + +probe_model_init_dv = [dv for dv in get_docval(AutoProbeModel.__init__) if dv["name"] != "name"] +probe_model_init_dv.append( + { + "name": "name", + "type": str, + "doc": "name of this ProbeModel. If not provided, this will be set to the value of ``model``", + "default": None, + } +) + + +@register_class("ProbeModel", "ndx-extracellular-channels") +class ProbeModel(AutoProbeModel): + + @docval(*probe_model_init_dv) + def __init__(self, **kwargs): + # If the user does not provide a name, we set it to the value of "model" + if kwargs.get("name") is None: + kwargs["name"] = kwargs["model"] + super().__init__(**kwargs) + + +channels_table_init_dv = [dv for dv in get_docval(AutoChannelsTable.__init__) if dv["name"] != "target_tables"] + + +@register_class("ChannelsTable", "ndx-extracellular-channels") +class ChannelsTable(AutoChannelsTable): + + @docval(*channels_table_init_dv) + def __init__(self, **kwargs): + # DynamicTable has an optional constructor argument "target_tables" + # that sets the target tables for the foreign keys in the table after initializing + # each column. Since `probe`, `Probe.probe_model` and `ProbeModel.contacts_table` are all + # required constructor arguments, we can set the target tables here. 
+ kwargs["target_tables"] = { + "contact": kwargs["probe"].probe_model.contacts_table, + } + super().__init__(**kwargs) + + @docval(*get_docval(AutoChannelsTable.add_row), allow_extra=True) + def add_row(self, **kwargs): + # "reference_contact" is an optional column that is only added if the column is not already present. + # When it is added, we need to make sure that the target table is set correctly. + # So here, if the user supplies a "reference_contact" value and the column is not present, + # we set the target table for the column before we add the row + # (which would create the column without the target table). + # This may be handled automatically in the future by HDMF. + if "reference_contact" in kwargs and "reference_contact" not in self.columns: + self._set_dtr_targets( + { + "reference_contact": self.probe.probe_model.contacts_table, + } + ) + super().add_row(**kwargs) + + +extracellular_series_init_dv = [dv for dv in get_docval(AutoExtracellularSeries.__init__) if dv["name"] != "unit"] + + +@register_class("ExtracellularSeries", "ndx-extracellular-channels") +class ExtracellularSeries(AutoExtracellularSeries): + + @docval(*extracellular_series_init_dv) + def __init__(self, **kwargs): + data_shape = get_data_shape(kwargs["data"], strict_no_data_load=True) + if data_shape is not None: + # check that the second dimension of `data` matches the length of `channels` + channels_length = len(kwargs["channels"].data) + if data_shape[1] != channels_length: + if data_shape[0] == channels_length: + raise ValueError( + f"{self.__class__.__name__} '{kwargs['name']}': The length of the second dimension of `data` " + f"({data_shape[1]}) does not match the length of `channels` ({channels_length}), " + "but instead the length of the first dimension does. `data` is oriented incorrectly and " + "should be transposed." 
+ ) + else: + raise ValueError( + f"{self.__class__.__name__} '{kwargs['name']}': The length of the second dimension of `data` " + f"({data_shape[1]}) does not match the length of `channels` ({channels_length})." + ) + # check that the second dimension of `data` matches the length of `channel_conversion` + # NOTE: `channel_conversion` is optional (quantity '?'); guard against None before + # calling len(), and avoid ambiguous truthiness on array-like values + if kwargs["channel_conversion"] is not None: + channel_conversion_length = len(kwargs["channel_conversion"]) + if channel_conversion_length > 0 and data_shape[1] != channel_conversion_length: + raise ValueError( + f"{self.__class__.__name__} '{kwargs['name']}': The length of the second dimension of `data` " + f"({data_shape[1]}) does not match the length of `channel_conversion` " + f"({channel_conversion_length})." + ) + + # NOTE: "unit" is a required constructor argument in the auto-generated class + but its value is fixed to "microvolts" + kwargs["unit"] = "microvolts" + super().__init__(**kwargs) + + +from .io import from_probeinterface, to_probeinterface + +__all__ = ( + "ProbeInsertion", + "ContactsTable", + "ProbeModel", + "Probe", + "ChannelsTable", + "ExtracellularSeries", + "from_probeinterface", + "to_probeinterface", +) + +# Remove these functions from the package +del load_namespaces, get_class, extracellular_series_init_dv, AutoExtracellularSeries +del channels_table_init_dv, AutoChannelsTable diff --git a/src/pynwb/ndx_extracellular_channels/io.py b/src/pynwb/ndx_extracellular_channels/io.py new file mode 100644 index 0000000..7a24314 --- /dev/null +++ b/src/pynwb/ndx_extracellular_channels/io.py @@ -0,0 +1,238 @@ +from __future__ import annotations # postpone type hint evaluation + +import warnings +from typing import TYPE_CHECKING, List, Union + +import ndx_extracellular_channels +import numpy as np + +if TYPE_CHECKING: + import probeinterface + +# map from probeinterface units to ndx-extracellular-channels units +unit_map = { + "um": "micrometers", + "mm": "millimeters", + "m": "meters", +} +inverted_unit_map = {v: k for k, v in unit_map.items()} + + +def from_probeinterface( +
probe_or_probegroup: Union[probeinterface.Probe, probeinterface.ProbeGroup], + name: Union[str, list] = None, +) -> List[ndx_extracellular_channels.Probe]: + """ + Construct ndx_extracellular_channels.Probe objects from a probeinterface.Probe or probeinterface.ProbeGroup. + + Parameters + ---------- + probe_or_probegroup: Probe or ProbeGroup + Probe or ProbeGroup to convert to ndx_extracellular_channels.ProbeModel devices. + name: str or list, optional + Name of the Probe. If a ProbeGroup is passed, this can be a list of names. + If None, an error will be raised if the Probe(s) does not have a name. + + NOTE: The probeinterface.Probe.device_channel_indices are a property of the data acquisition and not set + in the ndx_extracellular_channels.Probe object. You can specify this in ChannelsTable.contacts. + + Returns + ------- + ndx_probes: list + The list of ndx_extracellular_channels.Probe objects. + """ + try: + import probeinterface + except ImportError: + raise ImportError( + "To use the probeinterface conversion functions, install probeinterface: pip install probeinterface" + ) + + assert isinstance( + probe_or_probegroup, (probeinterface.Probe, probeinterface.ProbeGroup) + ), f"The input must be a Probe or ProbeGroup, not {type(probe_or_probegroup)}." + if isinstance(probe_or_probegroup, probeinterface.Probe): + probes = [probe_or_probegroup] + else: + probes = probe_or_probegroup.probes + if name is not None: + if isinstance(name, str): + names = [name] + else: + names = name + assert len(probes) == len(names), "The number of names must match the number of probes." + else: + names = [None] * len(probes) + + ndx_probes = [] + for probe, name in zip(probes, names): + ndx_probes.append(_single_probe_to_ndx_probe(probe, name)) + return ndx_probes + + +def to_probeinterface(ndx_probe: ndx_extracellular_channels.Probe) -> probeinterface.Probe: + """ + Construct a probeinterface.Probe from a ndx_extracellular_channels.Probe. 
+ + ndx_extracellular_channels.Probe.name -> probeinterface.Probe.name + ndx_extracellular_channels.Probe.identifier -> probeinterface.Probe.serial_number + ndx_extracellular_channels.Probe.probe_model.name -> probeinterface.Probe.model_name + ndx_extracellular_channels.Probe.probe_model.manufacturer -> probeinterface.Probe.manufacturer + ndx_extracellular_channels.Probe.probe_model.ndim -> probeinterface.Probe.ndim + ndx_extracellular_channels.Probe.probe_model.planar_contour_in_um -> probeinterface.Probe.probe_planar_contour + ndx_extracellular_channels.Probe.probe_model.contacts_table["relative_position_in_um"] -> + probeinterface.Probe.contact_positions + ndx_extracellular_channels.Probe.probe_model.contacts_table["shape"] -> probeinterface.Probe.contact_shapes + ndx_extracellular_channels.Probe.probe_model.contacts_table["contact_id"] -> probeinterface.Probe.contact_ids + ndx_extracellular_channels.Probe.probe_model.contacts_table["device_channel"] -> + probeinterface.Probe.device_channel_indices + ndx_extracellular_channels.Probe.probe_model.contacts_table["shank_id"] -> probeinterface.Probe.shank_ids + ndx_extracellular_channels.Probe.probe_model.contacts_table["plane_axes"] -> probeinterface.Probe.contact_plane_axes + ndx_extracellular_channels.Probe.probe_model.contacts_table["radius_in_um"] -> + probeinterface.Probe.contact_shapes["radius"] + + NOTE: The probeinterface.Probe.device_channel_indices are a property of the data acquisition. To set them + from NWB data, use the mapping from channels in the ChannelsTable to contacts in the + ndx_extracellular_channels.Probe.probe_model.contacts_table (ChannelsTable.contacts). 
+ + Parameters + ---------- + ndx_probe: ndx_extracellular_channels.Probe + ndx_extracellular_channels.Probe to convert to probeinterface.Probe + + Returns + ------- + Probe: probeinterface.Probe + """ + try: + import probeinterface + except ImportError: + raise ImportError( + "To use the probeinterface conversion functions, install probeinterface: pip install probeinterface" + ) + + positions = [] + shapes = [] + + contact_ids = None + shank_ids = None + plane_axes = None + + possible_shape_keys = ["radius_in_um", "width_in_um", "height_in_um"] + contacts_table = ndx_probe.probe_model.contacts_table + + positions.append(contacts_table["relative_position_in_um"][:]) + shapes.append(contacts_table["shape"][:]) + if "contact_id" in contacts_table.colnames: + if contact_ids is None: + contact_ids = [] + contact_ids.append(contacts_table["contact_id"][:]) + if "plane_axes" in contacts_table.colnames: + if plane_axes is None: + plane_axes = [] + plane_axes.append(contacts_table["plane_axes"][:]) + if "shank_id" in contacts_table.colnames: + if shank_ids is None: + shank_ids = [] + shank_ids.append(contacts_table["shank_id"][:]) + + positions = [item for sublist in positions for item in sublist] + shapes = [item for sublist in shapes for item in sublist] + + if contact_ids is not None: + contact_ids = [item for sublist in contact_ids for item in sublist] + if plane_axes is not None: + plane_axes = [item for sublist in plane_axes for item in sublist] + if shank_ids is not None: + shank_ids = [item for sublist in shank_ids for item in sublist] + + # if there are multiple shape keys, e.g., radius, width, and height + # we need to create a list of dicts, one for each contact + shape_params = [dict() for _ in range(len(contacts_table))] + for i in range(len(contacts_table)): + for possible_shape_key in possible_shape_keys: + if possible_shape_key in contacts_table.colnames: + new_key = possible_shape_key.replace("_in_um", "") + shape_params[i][new_key] = 
contacts_table[possible_shape_key][i] + + probeinterface_probe = probeinterface.Probe( + ndim=ndx_probe.probe_model.ndim, + si_units="um", + name=ndx_probe.name, + serial_number=ndx_probe.identifier, + model_name=ndx_probe.probe_model.name, + manufacturer=ndx_probe.probe_model.manufacturer, + ) + probeinterface_probe.set_contacts( + positions=positions, shapes=shapes, shape_params=shape_params, plane_axes=plane_axes, shank_ids=shank_ids + ) + if contact_ids is not None: + probeinterface_probe.set_contact_ids(contact_ids=contact_ids) + probeinterface_probe.set_planar_contour(ndx_probe.probe_model.planar_contour_in_um) + + return probeinterface_probe + + +def _single_probe_to_ndx_probe( + probe: probeinterface.Probe, name: Union[str, None] = None +) -> ndx_extracellular_channels.Probe: + contacts_arr = probe.to_numpy() + + if probe.si_units == "um": + conversion_factor = 1 + elif probe.si_units == "mm": + conversion_factor = 1e3 + elif probe.si_units == "m": + conversion_factor = 1e6 + + shape_keys = [] + for shape_params in probe.contact_shape_params: + keys = list(shape_params.keys()) + for k in keys: + if k not in shape_keys: + shape_keys.append(k) + + contacts_table = ndx_extracellular_channels.ContactsTable( + description="Contacts Table, populated by ProbeInterface", + ) + + for index in np.arange(probe.get_contact_count()): + kwargs = dict( + relative_position_in_um=probe.contact_positions[index], + plane_axes=probe.contact_plane_axes[index], + shape=contacts_arr["contact_shapes"][index], + ) + for k in shape_keys: + kwargs[f"{k}_in_um"] = contacts_arr[k][index] * conversion_factor + if probe.contact_ids is not None: + kwargs["contact_id"] = probe.contact_ids[index] + if probe.shank_ids is not None: + kwargs["shank_id"] = probe.shank_ids[index] + contacts_table.add_row(kwargs) + + model_name = probe.model_name + if model_name is None: + warnings.warn("Probe model name not found in probe annotations, setting to 'unknown'", UserWarning) + model_name = "unknown" 
+ + probe_model = ndx_extracellular_channels.ProbeModel( + name=model_name, + manufacturer=probe.manufacturer, + model=model_name, + ndim=probe.ndim, + planar_contour_in_um=probe.probe_planar_contour * conversion_factor, + contacts_table=contacts_table, + ) + + if name is None: + name = probe.name + if name is None: + raise ValueError("Probe name not provided and not found in probe annotations. Please provide a name.") + + probe = ndx_extracellular_channels.Probe( + name=name, + probe_model=probe_model, + identifier=probe.serial_number, + ) + + return probe diff --git a/src/pynwb/tests/__init__.py b/src/pynwb/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/tests/test_classes.py b/src/pynwb/tests/test_classes.py new file mode 100644 index 0000000..92d72fa --- /dev/null +++ b/src/pynwb/tests/test_classes.py @@ -0,0 +1,812 @@ +"""Unit and integration tests for the ndx_extracellular_channels types.""" + +import numpy as np +from hdmf.common import DynamicTableRegion +from ndx_extracellular_channels import ( + ChannelsTable, + ContactsTable, + ExtracellularSeries, + Probe, + ProbeInsertion, + ProbeModel, +) +from pynwb.testing import NWBH5IOFlexMixin, TestCase + +from pynwb import NWBFile + + +class TestContactsTable(TestCase): + """Simple unit test for creating a ContactsTable.""" + + def test_constructor_minimal(self): + ct = ContactsTable( + description="Test contacts table", + ) + assert ct.name == "contacts_table" + assert len(ct) == 0 + + def test_constructor_add_row(self): + """Test that the constructor for ContactsTable sets values as expected.""" + ct = ContactsTable( + name="ContactsTable", # test custom name + description="Test contacts table", + ) + + # for testing, mix and match different shapes. 
np.nan means the radius/width/height does not apply + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + contact_id="C1", + shank_id="shank0", + plane_axes=[[1.0, 0.0], [0.0, 1.0]], + radius_in_um=10.0, + width_in_um=np.nan, + height_in_um=np.nan, + ) + + ct.add_row( + relative_position_in_um=[20.0, 10.0], + shape="square", + contact_id="C2", + shank_id="shank0", + plane_axes=[[1 / np.sqrt(2), 1 / np.sqrt(2)], [-1 / np.sqrt(2), 1 / np.sqrt(2)]], + radius_in_um=np.nan, + width_in_um=10.0, + height_in_um=10.0, + ) + + assert ct.name == "ContactsTable" + assert ct.description == "Test contacts table" + + assert ct["relative_position_in_um"].data == [[10.0, 10.0], [20.0, 10.0]] + assert ct["shape"].data == ["circle", "square"] + assert ct["contact_id"].data == ["C1", "C2"] + assert ct["shank_id"].data == ["shank0", "shank0"] + assert ct["plane_axes"].data == [ + [[1.0, 0.0], [0.0, 1.0]], + [[1 / np.sqrt(2), 1 / np.sqrt(2)], [-1 / np.sqrt(2), 1 / np.sqrt(2)]], + ] + assert ct["radius_in_um"].data == [10.0, np.nan] + assert ct["width_in_um"].data == [np.nan, 10.0] + + +class TestContactsTableRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ContactsTable.""" + + def getContainerType(self): + return "ContactsTable" + + def addContainer(self): + ct = ContactsTable( + name="ContactsTable", # test custom name + description="Test contacts table", + ) + + # for testing, mix and match different shapes. 
np.nan means the radius/width/height does not apply + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + contact_id="C1", + shank_id="shank0", + plane_axes=[[1.0, 0.0], [0.0, 1.0]], + radius_in_um=10.0, + width_in_um=np.nan, + height_in_um=np.nan, + ) + + ct.add_row( + relative_position_in_um=[20.0, 10.0], + shape="square", + contact_id="C2", + shank_id="shank0", + plane_axes=[[1 / np.sqrt(2), 1 / np.sqrt(2)], [-1 / np.sqrt(2), 1 / np.sqrt(2)]], + radius_in_um=np.nan, + width_in_um=10.0, + height_in_um=10.0, + ) + + # add the object into nwbfile.acquisition for testing + # TODO after integration, put this into /general/extracellular_ephys + self.nwbfile.add_acquisition(ct) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.acquisition["ContactsTable"] + + +class TestProbeModel(TestCase): + """Simple unit test for creating a ProbeModel.""" + + def test_constructor(self): + """Test that the constructor for ProbeModel sets values as expected.""" + # NOTE: ContactsTable must be named "contacts_table" when used in ProbeModel. this is the default. 
+ ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + name="Neuropixels 1.0 Probe Model", + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + assert pm.name == "Neuropixels 1.0 Probe Model" + assert pm.model == "Neuropixels 1.0" + assert pm.description == "A neuropixels probe" + assert pm.manufacturer == "IMEC" + assert pm.planar_contour_in_um == [[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]] + assert pm.contacts_table is ct + assert pm.ndim == 2 + + def test_constructor_no_name(self): + """Test that the constructor for ProbeModel sets values as expected.""" + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + assert pm.name == "Neuropixels 1.0" + + +class TestProbeModelRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ProbeModel.""" + + def getContainerType(self): + return "ProbeModel" + + def addContainer(self): + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + name="Neuropixels 1.0 Probe Model", + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + # TODO put this into /general/device_models + self.nwbfile.add_device(pm) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.devices["Neuropixels 1.0 Probe 
Model"] + + +class TestProbeInsertion(TestCase): + """Simple unit test for creating a ProbeInsertion.""" + + def test_constructor_minimal(self): + pi = ProbeInsertion() + assert pi.name == "probe_insertion" + assert pi.position_reference is None + assert pi.hemisphere is None + assert pi.depth_in_mm is None + assert pi.insertion_position_ap_in_mm is None + assert pi.insertion_position_ml_in_mm is None + assert pi.insertion_position_dv_in_mm is None + assert pi.insertion_angle_roll_in_deg is None + assert pi.insertion_angle_pitch_in_deg is None + assert pi.insertion_angle_yaw_in_deg is None + + def test_constructor_with_depth(self): + pi = ProbeInsertion( + name="ProbeInsertion", # test custom name + position_reference="Bregma at the cortical surface.", + hemisphere="left", + depth_in_mm=10.0, + insertion_position_ap_in_mm=2.0, + insertion_position_ml_in_mm=-4.0, + insertion_angle_roll_in_deg=-10.0, + insertion_angle_pitch_in_deg=0.0, + insertion_angle_yaw_in_deg=0.0, + ) + + assert pi.name == "ProbeInsertion" + assert pi.position_reference == "Bregma at the cortical surface." 
+ assert pi.hemisphere == "left" + assert pi.depth_in_mm == 10.0 + assert pi.insertion_position_ap_in_mm == 2.0 + assert pi.insertion_position_ml_in_mm == -4.0 + assert pi.insertion_position_dv_in_mm is None + assert pi.insertion_angle_roll_in_deg == -10.0 + assert pi.insertion_angle_pitch_in_deg == 0.0 + assert pi.insertion_angle_yaw_in_deg == 0.0 + + def test_constructor_with_dv(self): + """Test creating a ProbeInsertion with insertion_position_dv_in_mm instead of depth_in_mm""" + pi = ProbeInsertion( + name="ProbeInsertion", # test custom name + position_reference="Bregma at the cortical surface.", + hemisphere="left", + insertion_position_ap_in_mm=2.0, + insertion_position_ml_in_mm=-4.0, + insertion_position_dv_in_mm=-10.0, + insertion_angle_roll_in_deg=-10.0, + insertion_angle_pitch_in_deg=0.0, + insertion_angle_yaw_in_deg=0.0, + ) + + assert pi.name == "ProbeInsertion" + assert pi.position_reference == "Bregma at the cortical surface." + assert pi.hemisphere == "left" + assert pi.depth_in_mm is None + assert pi.insertion_position_ap_in_mm == 2.0 + assert pi.insertion_position_ml_in_mm == -4.0 + assert pi.insertion_position_dv_in_mm == -10.0 + assert pi.insertion_angle_roll_in_deg == -10.0 + assert pi.insertion_angle_pitch_in_deg == 0.0 + assert pi.insertion_angle_yaw_in_deg == 0.0 + + +class TestProbeInsertionDepthRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ProbeInsertion.""" + + def getContainerType(self): + return "ProbeInsertion" + + def addContainer(self): + pi = ProbeInsertion( + name="ProbeInsertion", # test custom name + position_reference="Bregma at the cortical surface.", + hemisphere="left", + depth_in_mm=10.0, + insertion_position_ap_in_mm=2.0, + insertion_position_ml_in_mm=-4.0, + insertion_angle_roll_in_deg=-10.0, + insertion_angle_pitch_in_deg=0.0, + insertion_angle_yaw_in_deg=0.0, + ) + + # put this in nwbfile.scratch for testing + self.nwbfile.add_scratch(pi) + + def getContainer(self, nwbfile: NWBFile): + return 
nwbfile.scratch["ProbeInsertion"] + + +class TestProbeInsertionDVRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ProbeInsertion with insertion_position_dv_in_mm instead of depth_in_mm.""" + + def getContainerType(self): + return "ProbeInsertion" + + def addContainer(self): + pi = ProbeInsertion( + name="ProbeInsertion", # test custom name + position_reference="Bregma at the cortical surface.", + hemisphere="left", + insertion_position_ap_in_mm=2.0, + insertion_position_ml_in_mm=-4.0, + insertion_position_dv_in_mm=-10.0, + insertion_angle_roll_in_deg=-10.0, + insertion_angle_pitch_in_deg=0.0, + insertion_angle_yaw_in_deg=0.0, + ) + + # put this in nwbfile.scratch for testing + self.nwbfile.add_scratch(pi) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.scratch["ProbeInsertion"] + + +class TestProbe(TestCase): + """Simple unit test for creating a Probe.""" + + def test_constructor_minimal(self): + """Test that the constructor for Probe sets values as expected.""" + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + description="A neuropixels probe", + model="Neuropixels 1.0", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + probe = Probe( + name="Neuropixels Probe 1", + probe_model=pm, + ) + + assert probe.name == "Neuropixels Probe 1" + assert probe.identifier is None + assert probe.probe_model is pm + assert probe.probe_insertion is None + + def test_constructor(self): + """Test that the constructor for Probe sets values as expected.""" + # NOTE: ProbeInsertion must be named "probe_insertion" when used in Probe. this is the default. 
+ pi = ProbeInsertion() + + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + probe = Probe( + name="Neuropixels Probe 1", + identifier="28948291", + probe_model=pm, + probe_insertion=pi, + ) + + assert probe.identifier == "28948291" + assert probe.probe_insertion is pi + assert probe.probe_model is pm + + +class TestProbeRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a Probe.""" + + def getContainerType(self): + return "Probe" + + def addContainer(self): + # NOTE: ProbeInsertion must be named "probe_insertion" when used in Probe. this is the default. + pi = ProbeInsertion() + + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + # TODO after integration in core, change this to add_device_model which puts it in + # /general/devices/models or /general/device_models. 
+ # Alternatively, ProbeModel is a child of Probe and if there are multiple Probe objects + # that use the same ProbeModel, then create a link + self.nwbfile.add_device(pm) + + probe = Probe( + name="Neuropixels Probe 1", + identifier="28948291", + probe_model=pm, + probe_insertion=pi, + ) + self.nwbfile.add_device(probe) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.devices["Neuropixels Probe 1"] + + +def _create_test_probe(): + ct = ContactsTable( + description="Test contacts table", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + ct.add_row( + relative_position_in_um=[10.0, 10.0], + shape="circle", + ) + + pm = ProbeModel( + name="Neuropixels 1.0", + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=ct, + ) + + probe = Probe( + name="Neuropixels Probe 1", + identifier="28948291", + probe_model=pm, # TODO rename as model? 
+ ) + return probe + + +class TestChannelsTable(TestCase): + """Simple unit test for creating a ChannelsTable.""" + + def test_constructor_minimal(self): + """Test that the constructor for ChannelsTable sets values as expected.""" + probe = _create_test_probe() + + ct = ChannelsTable( + description="Test channels table", + probe=probe, + ) + + assert ct.name == "ChannelsTable" + assert ct.description == "Test channels table" + assert ct.electrical_reference_description is None + assert ct.ground is None + assert ct.probe is probe + assert len(ct) == 0 + + def test_constructor_add_row_minimal(self): + """Test that the constructor for ChannelsTable sets values as expected.""" + probe = _create_test_probe() + + ct = ChannelsTable( + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + + assert len(ct) == 2 + assert ct.id.data == [0, 1] + assert ct["contact"].data == [0, 1] + assert ct["contact"].table is probe.probe_model.contacts_table + assert "reference_contact" not in ct.columns + + def test_constructor_add_row_with_reference(self): + """Test that the constructor for ChannelsTable sets values as expected.""" + probe = _create_test_probe() + + ct = ChannelsTable( + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0, reference_contact=1) + ct.add_row(contact=1, reference_contact=0) + + assert ct["reference_contact"].data == [1, 0] + assert ct["reference_contact"].table is probe.probe_model.contacts_table + + def test_constructor_add_row(self): + """Test that the constructor for ChannelsTable sets values as expected.""" + probe = _create_test_probe() + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", # test custom name + description="Test channels table", + electrical_reference_description="Probe tip.", # usually if reference_contact is provided, this is not + ground="Skull screw over cerebellum.", + position_reference="(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical 
surface.", + position_confirmation_method="Histology", + probe=probe, + ) + + ct.add_row( + contact=0, + reference_contact=1, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-5.0, + estimated_position_dv_in_mm=-9.5, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.9, + confirmed_position_dv_in_mm=-9.5, + confirmed_brain_area="CA3", + ) + + ct.add_row( + contact=1, + reference_contact=2, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-4.9, + estimated_position_dv_in_mm=-9.3, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.8, + confirmed_position_dv_in_mm=-9.3, + confirmed_brain_area="CA3", + ) + + assert ct.name == "Neuropixels1ChannelsTable" + assert ct.description == "Test channels table" + assert ct.electrical_reference_description == "Probe tip." + assert ct.ground == "Skull screw over cerebellum." + assert ct.position_reference == "(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface." 
+ assert ct.position_confirmation_method == "Histology" + assert ct.probe is probe + assert len(ct) == 2 + assert ct["contact"].data == [0, 1] + assert ct["contact"].table is probe.probe_model.contacts_table + assert ct["reference_contact"].data == [1, 2] + assert ct["reference_contact"].table is probe.probe_model.contacts_table + assert ct["filter"].data == ["High-pass at 300 Hz", "High-pass at 300 Hz"] + assert ct["estimated_position_ap_in_mm"].data == [2.0, 2.0] + assert ct["estimated_position_ml_in_mm"].data == [-5.0, -4.9] + assert ct["estimated_position_dv_in_mm"].data == [-9.5, -9.3] + assert ct["estimated_brain_area"].data == ["CA3", "CA3"] + assert ct["confirmed_position_ap_in_mm"].data == [2.0, 2.0] + assert ct["confirmed_position_ml_in_mm"].data == [-4.9, -4.8] + assert ct["confirmed_position_dv_in_mm"].data == [-9.5, -9.3] + assert ct["confirmed_brain_area"].data == ["CA3", "CA3"] + + +class TestChannelsTableRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ChannelsTable.""" + + def getContainerType(self): + return "ChannelsTable" + + def addContainer(self): + probe = _create_test_probe() + self.nwbfile.add_device(probe.probe_model) # TODO change to add_device_model after integration in core + self.nwbfile.add_device(probe) + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", # test custom name + description="Test channels table", + electrical_reference_description="Probe tip.", # usually if reference_contact is provided, this is not + ground="Skull screw over cerebellum.", + position_reference="(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface.", + position_confirmation_method="Histology", + probe=probe, + ) + + ct.add_row( + contact=0, + reference_contact=1, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-5.0, + estimated_position_dv_in_mm=-9.5, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.9, + 
confirmed_position_dv_in_mm=-9.5, + confirmed_brain_area="CA3", + ) + + ct.add_row( + contact=1, + reference_contact=2, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-4.9, + estimated_position_dv_in_mm=-9.3, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.8, + confirmed_position_dv_in_mm=-9.3, + confirmed_brain_area="CA3", + ) + + # put this in nwbfile.acquisition for testing + self.nwbfile.add_acquisition(ct) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.acquisition["Neuropixels1ChannelsTable"] + + +class TestExtracellularSeries(TestCase): + """Simple unit test for creating an ExtracellularSeries.""" + + def test_constructor(self): + probe = _create_test_probe() + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + ct.add_row(contact=2) + + channels = DynamicTableRegion( + name="channels", # NOTE: this must be named "channels" when used in ExtracellularSeries + data=[0, 1, 2], + description="All of the channels", + table=ct, + ) + + es = ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + channel_conversion=[1.0, 1.1, 1.2], + conversion=1e5, + offset=0.001, + ) + + assert es.name == "ExtracellularSeries" + assert es.data == [[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]] + assert es.timestamps == [0.0, 0.001, 0.002, 0.003] + assert es.channels is channels + assert es.channel_conversion == [1.0, 1.1, 1.2] + assert es.conversion == 1e5 + assert es.offset == 0.001 + # NOTE: the TimeSeries mapper maps spec "ExtracellularSeries/data/unit" to "ExtracellularSeries.unit" + assert es.unit == "microvolts" + assert es.timestamps_unit == "seconds" + + def 
test_constructor_channels_dim_transpose(self): + probe = _create_test_probe() + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + ct.add_row(contact=2) + + channels = DynamicTableRegion( + name="channels", # NOTE: this must be named "channels" when used in ExtracellularSeries + data=[0, 1, 2], + description="All of the channels", + table=ct, + ) + + msg = ( + "ExtracellularSeries 'ExtracellularSeries': The length of the second dimension of `data` " + "(4) does not match the length of `channels` (3), " + "but instead the length of the first dimension does. `data` is oriented incorrectly and " + "should be transposed." + ) + with self.assertRaisesWith(ValueError, msg): + ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0, 2.0, 3.0], [1.0, 2.0, 3.0, 4.0], [2.0, 3.0, 4.0, 5.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + ) + + def test_constructor_channels_dim_mismatch(self): + probe = _create_test_probe() + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + ct.add_row(contact=2) + + channels = DynamicTableRegion( + name="channels", # NOTE: this must be named "channels" when used in ExtracellularSeries + data=[0, 1, 2], + description="All of the channels", + table=ct, + ) + + msg = ( + "ExtracellularSeries 'ExtracellularSeries': The length of the second dimension of `data` " + "(2) does not match the length of `channels` (3)." 
+ ) + with self.assertRaisesWith(ValueError, msg): + ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0], [1.0, 2.0], [2.0, 3.0], [3.0, 4.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + ) + + def test_constructor_channel_conversion_dim_mismatch(self): + probe = _create_test_probe() + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + ct.add_row(contact=2) + + channels = DynamicTableRegion( + name="channels", # NOTE: this must be named "channels" when used in ExtracellularSeries + data=[0, 1, 2], + description="All of the channels", + table=ct, + ) + + msg = ( + "ExtracellularSeries 'ExtracellularSeries': The length of the second dimension of `data` " + "(3) does not match the length of `channel_conversion` (1)." + ) + with self.assertRaisesWith(ValueError, msg): + ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + channel_conversion=[0.1], + ) + + +class TestExtracellularSeriesRoundTrip(NWBH5IOFlexMixin, TestCase): + """Simple roundtrip test for a ExtracellularSeries.""" + + def getContainerType(self): + return "ExtracellularSeries" + + def addContainer(self): + probe = _create_test_probe() + self.nwbfile.add_device(probe.probe_model) # TODO change to add_device_model after integration in core + self.nwbfile.add_device(probe) + + ct = ChannelsTable( + name="Neuropixels1ChannelsTable", + description="Test channels table", + probe=probe, + ) + ct.add_row(contact=0) + ct.add_row(contact=1) + ct.add_row(contact=2) + + # put this in nwbfile.acquisition for testing + self.nwbfile.add_acquisition(ct) + + channels = DynamicTableRegion( + name="channels", # TODO I think this HAS to be named "channels" + data=[0, 1, 2], + description="All of the channels", + table=ct, + ) + + es = 
ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + channel_conversion=[1.0, 1.1, 1.2], + conversion=1e5, + offset=0.001, + ) + self.nwbfile.add_acquisition(es) + + def getContainer(self, nwbfile: NWBFile): + return nwbfile.acquisition["ExtracellularSeries"] diff --git a/src/pynwb/tests/test_example_usage_all.py b/src/pynwb/tests/test_example_usage_all.py new file mode 100644 index 0000000..e33eb32 --- /dev/null +++ b/src/pynwb/tests/test_example_usage_all.py @@ -0,0 +1,236 @@ +import datetime +import uuid + +import numpy as np +import numpy.testing as npt +from hdmf.common import DynamicTableRegion +from ndx_extracellular_channels import ( + ChannelsTable, + ContactsTable, + ExtracellularSeries, + Probe, + ProbeInsertion, + ProbeModel, +) + +from pynwb import NWBHDF5IO, NWBFile + + +def test_all_classes(): + + # initialize an NWBFile object + nwbfile = NWBFile( + session_description="A description of my session", + identifier=str(uuid.uuid4()), + session_start_time=datetime.datetime.now(datetime.timezone.utc), + ) + + contacts_table = ContactsTable( + description="Test contacts table", + ) + # for demonstration, mix and match different shapes. 
np.nan means the radius/width/height does not apply + contacts_table.add_row( + relative_position_in_um=[10.0, 10.0], + contact_id="C1", + shank_id="shank0", + plane_axes=[[1.0, 0.0], [0.0, 1.0]], + shape="circle", + radius_in_um=10.0, + width_in_um=np.nan, + height_in_um=np.nan, + ) + contacts_table.add_row( + relative_position_in_um=[20.0, 10.0], + contact_id="C2", + shank_id="shank0", + plane_axes=[[1 / np.sqrt(2), 1 / np.sqrt(2)], [-1 / np.sqrt(2), 1 / np.sqrt(2)]], + shape="square", + radius_in_um=np.nan, + width_in_um=10.0, + height_in_um=10.0, + ) + + # add the object into nwbfile.acquisition for testing + # TODO after integration, put this into /general/extracellular_ephys + nwbfile.add_acquisition(contacts_table) + + pm = ProbeModel( + model="Neuropixels 1.0", + description="A neuropixels probe", + manufacturer="IMEC", + planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + contacts_table=contacts_table, + ) + # TODO put this into /general/device_models + nwbfile.add_device(pm) + + pi = ProbeInsertion( + position_reference="(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface.", + hemisphere="left", + depth_in_mm=10.0, + insertion_position_ap_in_mm=2.0, + insertion_position_ml_in_mm=-4.0, + insertion_angle_roll_in_deg=-10.0, + insertion_angle_pitch_in_deg=0.0, + insertion_angle_yaw_in_deg=0.0, + ) + + probe = Probe( + name="Neuropixels Probe 1", + identifier="28948291", + probe_model=pm, + probe_insertion=pi, + ) + nwbfile.add_device(probe) + + channels_table = ChannelsTable( + name="Neuropixels1ChannelsTable", # test custom name + description="Test channels table", + # electrical_reference_description="Probe tip.", # usually if reference_contact is provided, this is not + ground="Skull screw over cerebellum.", + position_reference="(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface.", + position_confirmation_method="Histology", + probe=probe, + ) + + # all of the keyword arguments in 
add_row are optional + channels_table.add_row( + contact=0, + reference_contact=2, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-5.0, + estimated_position_dv_in_mm=-9.5, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.9, + confirmed_position_dv_in_mm=-9.5, + confirmed_brain_area="CA3", + ) + channels_table.add_row( + contact=1, + reference_contact=2, + filter="High-pass at 300 Hz", + estimated_position_ap_in_mm=2.0, + estimated_position_ml_in_mm=-4.9, + estimated_position_dv_in_mm=-9.3, + estimated_brain_area="CA3", + confirmed_position_ap_in_mm=2.0, + confirmed_position_ml_in_mm=-4.8, + confirmed_position_dv_in_mm=-9.3, + confirmed_brain_area="CA3", + ) + + # put this in nwbfile.acquisition for testing + nwbfile.add_acquisition(channels_table) + + channels = DynamicTableRegion( + name="channels", # NOTE: this must be named "channels" when used in ExtracellularSeries + data=[0, 1, 2], + description="All of the channels", + table=channels_table, + ) + + es = ExtracellularSeries( + name="ExtracellularSeries", + data=[[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]], + timestamps=[0.0, 0.001, 0.002, 0.003], + channels=channels, + channel_conversion=[1.0, 1.1, 1.2], + conversion=1e5, + offset=0.001, + ) + + nwbfile.add_acquisition(es) + + # write the NWBFile to disk + path = "test_extracellular_channels.nwb" + with NWBHDF5IO(path, mode="w") as io: + io.write(nwbfile) + + # read the NWBFile from disk + with NWBHDF5IO(path, mode="r") as io: + read_nwbfile = io.read() + + read_eseries = read_nwbfile.acquisition["ExtracellularSeries"] + read_channels_table = read_nwbfile.acquisition["Neuropixels1ChannelsTable"] + read_contacts_table = read_nwbfile.acquisition["contacts_table"] + + npt.assert_array_equal( + read_eseries.data[:], [[0.0, 1.0, 2.0], [1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 5.0]] + ) + npt.assert_array_equal(read_eseries.timestamps[:], 
[0.0, 0.001, 0.002, 0.003]) + npt.assert_array_equal(read_eseries.channels.data[:], [0, 1, 2]) + assert read_eseries.channels.description == "All of the channels" + assert read_eseries.channels.table is read_channels_table + npt.assert_array_equal(read_eseries.channel_conversion[:], [1.0, 1.1, 1.2]) + assert read_eseries.conversion == 1e5 + assert read_eseries.offset == 0.001 + assert read_eseries.unit == "microvolts" + + assert read_channels_table.name == "Neuropixels1ChannelsTable" + assert read_channels_table.description == "Test channels table" + assert read_channels_table.ground == "Skull screw over cerebellum." + assert ( + read_channels_table.position_reference + == "(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface." + ) + assert read_channels_table.position_confirmation_method == "Histology" + assert read_channels_table.probe is read_nwbfile.devices["Neuropixels Probe 1"] + assert len(read_channels_table) == 2 + assert read_channels_table["contact"].table is read_contacts_table + npt.assert_array_equal(read_channels_table["contact"].data[:], [0, 1]) + assert read_channels_table["reference_contact"].table is read_contacts_table + npt.assert_array_equal(read_channels_table["reference_contact"].data[:], [2, 2]) + npt.assert_array_equal(read_channels_table["filter"].data[:], ["High-pass at 300 Hz", "High-pass at 300 Hz"]) + npt.assert_array_equal(read_channels_table["estimated_position_ap_in_mm"].data[:], [2.0, 2.0]) + npt.assert_array_equal(read_channels_table["estimated_position_ml_in_mm"].data[:], [-5.0, -4.9]) + npt.assert_array_equal(read_channels_table["estimated_position_dv_in_mm"].data[:], [-9.5, -9.3]) + npt.assert_array_equal(read_channels_table["estimated_brain_area"].data[:], ["CA3", "CA3"]) + npt.assert_array_equal(read_channels_table["confirmed_position_ap_in_mm"].data[:], [2.0, 2.0]) + npt.assert_array_equal(read_channels_table["confirmed_position_ml_in_mm"].data[:], [-4.9, -4.8]) + 
npt.assert_array_equal(read_channels_table["confirmed_position_dv_in_mm"].data[:], [-9.5, -9.3]) + npt.assert_array_equal(read_channels_table["confirmed_brain_area"].data[:], ["CA3", "CA3"]) + + assert ( + read_channels_table.probe.probe_insertion.position_reference + == "(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface." + ) + assert read_channels_table.probe.probe_insertion.hemisphere == "left" + assert read_channels_table.probe.probe_insertion.depth_in_mm == 10.0 + assert read_channels_table.probe.probe_insertion.insertion_position_ap_in_mm == 2.0 + assert read_channels_table.probe.probe_insertion.insertion_position_ml_in_mm == -4.0 + assert read_channels_table.probe.probe_insertion.insertion_angle_roll_in_deg == -10.0 + assert read_channels_table.probe.probe_insertion.insertion_angle_pitch_in_deg == 0.0 + assert read_channels_table.probe.probe_insertion.insertion_angle_yaw_in_deg == 0.0 + + assert read_nwbfile.devices["Neuropixels Probe 1"].name == "Neuropixels Probe 1" + assert read_nwbfile.devices["Neuropixels Probe 1"].identifier == "28948291" + assert read_nwbfile.devices["Neuropixels Probe 1"].probe_model is read_nwbfile.devices["Neuropixels 1.0"] + + assert read_nwbfile.devices["Neuropixels 1.0"].name == "Neuropixels 1.0" + assert read_nwbfile.devices["Neuropixels 1.0"].description == "A neuropixels probe" + assert read_nwbfile.devices["Neuropixels 1.0"].model == "Neuropixels 1.0" + assert read_nwbfile.devices["Neuropixels 1.0"].manufacturer == "IMEC" + npt.assert_array_equal( + read_nwbfile.devices["Neuropixels 1.0"].planar_contour_in_um, + [[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]], + ) + assert read_nwbfile.devices["Neuropixels 1.0"].contacts_table is read_contacts_table + + assert read_contacts_table.name == "contacts_table" + assert read_contacts_table.description == "Test contacts table" + npt.assert_array_equal(read_contacts_table["relative_position_in_um"].data[:], [[10.0, 10.0], [20.0, 10.0]]) + 
npt.assert_array_equal(read_contacts_table["shape"].data[:], ["circle", "square"]) + npt.assert_array_equal(read_contacts_table["contact_id"].data[:], ["C1", "C2"]) + npt.assert_array_equal(read_contacts_table["shank_id"].data[:], ["shank0", "shank0"]) + npt.assert_array_equal( + read_contacts_table["plane_axes"].data[:], + [ + [[1.0, 0.0], [0.0, 1.0]], + [[1 / np.sqrt(2), 1 / np.sqrt(2)], [-1 / np.sqrt(2), 1 / np.sqrt(2)]], + ], + ) + npt.assert_array_equal(read_contacts_table["radius_in_um"].data[:], [10.0, np.nan]) + npt.assert_array_equal(read_contacts_table["width_in_um"].data[:], [np.nan, 10.0]) + npt.assert_array_equal(read_contacts_table["height_in_um"].data[:], [np.nan, 10.0]) diff --git a/src/pynwb/tests/test_example_usage_probeinterface.py b/src/pynwb/tests/test_example_usage_probeinterface.py new file mode 100644 index 0000000..2b19549 --- /dev/null +++ b/src/pynwb/tests/test_example_usage_probeinterface.py @@ -0,0 +1,325 @@ +import datetime +import uuid + +import ndx_extracellular_channels +import numpy as np +import numpy.testing as npt +import probeinterface + +import pynwb + + +def test_from_probeinterface(): + + # following the probeinterface tutorial, create a few probes + n = 24 + positions = np.zeros((n, 2)) + for i in range(n): + x = i // 8 + y = i % 8 + positions[i] = x, y + positions *= 20 + positions[8:16, 1] -= 10 + + probe0 = probeinterface.Probe( + ndim=2, + si_units="um", + name="probe0", + serial_number="0123", + model_name="a1x32-edge-5mm-20-177_H32", + manufacturer="Neuronexus", + ) + probe0.set_contacts(positions=positions, shapes="circle", shape_params={"radius": 5}) + + polygon = [(-20.0, -30.0), (20.0, -110.0), (60.0, -30.0), (60.0, 190.0), (-20.0, 190.0)] + probe0.set_planar_contour(polygon) + + probe1 = probeinterface.generate_dummy_probe(elec_shapes="circle") # no name set + probe1.serial_number = "1000" + probe1.model_name = "Dummy Neuropixels 1.0" + probe1.manufacturer = "IMEC" + probe1.move([250, -90]) + + probe2 = 
probeinterface.generate_dummy_probe(elec_shapes="square") + probe2.name = "probe2" + probe2.serial_number = "1001" + probe2.model_name = "Dummy Neuropixels 2.0" + probe2.manufacturer = "IMEC" + probe2.move([500, -90]) + + probe3 = probeinterface.generate_dummy_probe(elec_shapes="circle") + probe3.name = "probe3" + probe3.serial_number = "1002" + probe3.model_name = "Dummy Neuropixels 3.0" + probe3.manufacturer = "IMEC" + probe3.move([750, -90]) + + # create a probe group containing probe2 and probe3 + probegroup = probeinterface.ProbeGroup() + probegroup.add_probe(probe2) + probegroup.add_probe(probe3) + + # from_probeinterface always returns a list of ndx_extracellular_channels.Probe devices + ndx_probes = list() + model0 = ndx_extracellular_channels.from_probeinterface(probe0) + ndx_probes.extend(model0) + model1 = ndx_extracellular_channels.from_probeinterface(probe1, name="probe1") # override name of probe + ndx_probes.extend(model1) + # override name of probe3 + group_probes = ndx_extracellular_channels.from_probeinterface(probegroup, name=[None, "renamed_probe3"]) + ndx_probes.extend(group_probes) + + nwbfile = pynwb.NWBFile( + session_description="A description of my session", + identifier=str(uuid.uuid4()), + session_start_time=datetime.datetime.now(datetime.timezone.utc), + ) + + # add Probe as NWB Devices + for ndx_probe in ndx_probes: + nwbfile.add_device(ndx_probe.probe_model) + nwbfile.add_device(ndx_probe) + + with pynwb.NWBHDF5IO("test_probeinterface.nwb", "w") as io: + io.write(nwbfile) + + # read the file and check the content + with pynwb.NWBHDF5IO("test_probeinterface.nwb", "r") as io: + nwbfile = io.read() + assert set(nwbfile.devices.keys()) == { + "probe0", + "probe1", + "probe2", + "renamed_probe3", + "a1x32-edge-5mm-20-177_H32", + "Dummy Neuropixels 1.0", + "Dummy Neuropixels 2.0", + "Dummy Neuropixels 3.0", + } + for device in nwbfile.devices.values(): + assert isinstance(device, (ndx_extracellular_channels.ProbeModel, 
ndx_extracellular_channels.Probe)) + assert isinstance(nwbfile.devices["probe0"], ndx_extracellular_channels.Probe) + assert isinstance(nwbfile.devices["probe1"], ndx_extracellular_channels.Probe) + assert isinstance(nwbfile.devices["probe2"], ndx_extracellular_channels.Probe) + assert isinstance(nwbfile.devices["renamed_probe3"], ndx_extracellular_channels.Probe) + assert isinstance(nwbfile.devices["a1x32-edge-5mm-20-177_H32"], ndx_extracellular_channels.ProbeModel) + assert isinstance(nwbfile.devices["Dummy Neuropixels 1.0"], ndx_extracellular_channels.ProbeModel) + assert isinstance(nwbfile.devices["Dummy Neuropixels 2.0"], ndx_extracellular_channels.ProbeModel) + assert isinstance(nwbfile.devices["Dummy Neuropixels 3.0"], ndx_extracellular_channels.ProbeModel) + + assert nwbfile.devices["probe0"].name == "probe0" + assert nwbfile.devices["probe0"].identifier == "0123" + assert nwbfile.devices["probe0"].probe_model.name == "a1x32-edge-5mm-20-177_H32" + assert nwbfile.devices["probe0"].probe_model.manufacturer == "Neuronexus" + assert nwbfile.devices["probe0"].probe_model.ndim == 2 + npt.assert_array_equal(nwbfile.devices["probe0"].probe_model.planar_contour_in_um, polygon) + npt.assert_allclose(nwbfile.devices["probe0"].probe_model.contacts_table.relative_position_in_um, positions) + npt.assert_array_equal(nwbfile.devices["probe0"].probe_model.contacts_table["shape"].data[:], "circle") + npt.assert_array_equal(nwbfile.devices["probe0"].probe_model.contacts_table["radius_in_um"].data[:], 5.0) + + assert nwbfile.devices["probe1"].name == "probe1" + assert nwbfile.devices["probe1"].identifier == "1000" + assert nwbfile.devices["probe1"].probe_model.name == "Dummy Neuropixels 1.0" + assert nwbfile.devices["probe1"].probe_model.manufacturer == "IMEC" + assert nwbfile.devices["probe1"].probe_model.ndim == 2 + npt.assert_allclose(nwbfile.devices["probe1"].probe_model.planar_contour_in_um, probe1.probe_planar_contour) + npt.assert_allclose( + 
nwbfile.devices["probe1"].probe_model.contacts_table.relative_position_in_um, probe1.contact_positions + ) + npt.assert_array_equal(nwbfile.devices["probe1"].probe_model.contacts_table["shape"].data[:], "circle") + npt.assert_array_equal( + nwbfile.devices["probe1"].probe_model.contacts_table["radius_in_um"].data[:], probe1.to_numpy()["radius"] + ) + + assert nwbfile.devices["probe2"].name == "probe2" + assert nwbfile.devices["probe2"].identifier == "1001" + assert nwbfile.devices["probe2"].probe_model.name == "Dummy Neuropixels 2.0" + assert nwbfile.devices["probe2"].probe_model.manufacturer == "IMEC" + assert nwbfile.devices["probe2"].probe_model.ndim == 2 + npt.assert_allclose(nwbfile.devices["probe2"].probe_model.planar_contour_in_um, probe2.probe_planar_contour) + npt.assert_allclose( + nwbfile.devices["probe2"].probe_model.contacts_table.relative_position_in_um, probe2.contact_positions + ) + npt.assert_array_equal(nwbfile.devices["probe2"].probe_model.contacts_table["shape"].data[:], "square") + npt.assert_array_equal( + nwbfile.devices["probe2"].probe_model.contacts_table["width_in_um"].data[:], probe2.to_numpy()["width"] + ) + + assert nwbfile.devices["renamed_probe3"].name == "renamed_probe3" + assert nwbfile.devices["renamed_probe3"].identifier == "1002" + assert nwbfile.devices["renamed_probe3"].probe_model.name == "Dummy Neuropixels 3.0" + assert nwbfile.devices["renamed_probe3"].probe_model.manufacturer == "IMEC" + assert nwbfile.devices["renamed_probe3"].probe_model.ndim == 2 + npt.assert_allclose( + nwbfile.devices["renamed_probe3"].probe_model.planar_contour_in_um, probe3.probe_planar_contour + ) + npt.assert_allclose( + nwbfile.devices["renamed_probe3"].probe_model.contacts_table.relative_position_in_um, + probe3.contact_positions, + ) + npt.assert_array_equal(nwbfile.devices["renamed_probe3"].probe_model.contacts_table["shape"].data[:], "circle") + npt.assert_array_equal( + 
def test_to_probeinterface():
    """Convert ndx_extracellular_channels.Probe objects to probeinterface.Probe
    objects (directly and after a write/read round trip through an NWB file)
    and verify that all probe, model, and contact metadata carries over."""

    # NWB file that will hold the devices for the round-trip check at the end
    nwbfile = pynwb.NWBFile(
        session_description="A description of my session",
        identifier=str(uuid.uuid4()),
        session_start_time=datetime.datetime.now(datetime.timezone.utc),
    )

    # 3 columns x 8 rows of contacts on a 20 um pitch — same values, in the
    # same column-major order, as the original hard-coded coordinate list
    grid_positions = [(float(col), float(row)) for col in (0, 20, 40) for row in range(0, 160, 20)]

    contacts0 = ndx_extracellular_channels.ContactsTable(
        name="contacts_table",
        description="a table with electrode contacts",
        columns=[
            pynwb.core.VectorData(
                name="relative_position_in_um",
                description="the relative position of the contact in micrometers",
                data=grid_positions,
            ),
            pynwb.core.VectorData(
                name="shape",
                description="the shape of the contact",
                data=["circle"] * 24,
            ),
            pynwb.core.VectorData(
                name="radius_in_um",
                description="the radius of the contact in um",
                data=[5.0] * 24,
            ),
        ],
    )

    probe_model0 = ndx_extracellular_channels.ProbeModel(
        model="a1x32-edge-5mm-20-177_H32",
        manufacturer="Neuronexus",
        ndim=2,
        planar_contour_in_um=[(-20.0, -30.0), (20.0, -110.0), (60.0, -30.0), (60.0, 190.0), (-20.0, 190.0)],
        contacts_table=contacts0,
    )

    probe0 = ndx_extracellular_channels.Probe(
        name="probe0",
        identifier="0123",
        probe_model=probe_model0,
    )

    # direct conversion of probe0
    pi_probe0 = ndx_extracellular_channels.to_probeinterface(probe0)
    assert pi_probe0.ndim == 2
    assert pi_probe0.si_units == "um"
    assert pi_probe0.name == "probe0"
    assert pi_probe0.serial_number == "0123"
    assert pi_probe0.model_name == "a1x32-edge-5mm-20-177_H32"
    assert pi_probe0.manufacturer == "Neuronexus"
    npt.assert_array_equal(pi_probe0.contact_positions, probe_model0.contacts_table.relative_position_in_um)
    npt.assert_array_equal(pi_probe0.contact_shapes, "circle")
    npt.assert_array_equal(pi_probe0.to_numpy()["radius"], 5.0)

    # second contacts table mixes circle and square contacts; np.nan marks the
    # radius/width/height fields that do not apply to a given shape
    ct2 = ndx_extracellular_channels.ContactsTable(
        description="Test contacts table",
    )
    ct2.add_row(
        relative_position_in_um=[10.0, 10.0],
        shape="circle",
        contact_id="C1",
        shank_id="shank0",
        plane_axes=[[1.0, 0.0], [0.0, 1.0]],
        radius_in_um=10.0,
        width_in_um=np.nan,
        height_in_um=np.nan,
    )
    inv_sqrt2 = 1 / np.sqrt(2)  # 45-degree rotation components
    ct2.add_row(
        relative_position_in_um=[20.0, 10.0],
        shape="square",
        contact_id="C2",
        shank_id="shank0",
        plane_axes=[[inv_sqrt2, inv_sqrt2], [-inv_sqrt2, inv_sqrt2]],
        radius_in_um=np.nan,
        width_in_um=10.0,
        height_in_um=10.0,
    )
    probe_model1 = ndx_extracellular_channels.ProbeModel(
        model="Neuropixels 1.0",
        description="A neuropixels probe",
        manufacturer="IMEC",
        planar_contour_in_um=[[-10.0, -10.0], [10.0, -10.0], [10.0, 10.0], [-10.0, 10.0]],
        contacts_table=ct2,
    )

    probe1 = ndx_extracellular_channels.Probe(
        name="probe1",
        identifier="7890",
        probe_model=probe_model1,
    )

    # direct conversion of probe1 — scalar metadata checked via a table of
    # (attribute, expected) pairs
    pi_probe1 = ndx_extracellular_channels.to_probeinterface(probe1)
    for attr, expected in [
        ("ndim", 2),
        ("si_units", "um"),
        ("name", "probe1"),
        ("serial_number", "7890"),
        ("model_name", "Neuropixels 1.0"),
        ("manufacturer", "IMEC"),
    ]:
        assert getattr(pi_probe1, attr) == expected
    npt.assert_array_equal(pi_probe1.contact_positions, probe_model1.contacts_table.relative_position_in_um)
    npt.assert_array_equal(pi_probe1.contact_shapes, ["circle", "square"])
    npt.assert_array_equal(
        pi_probe1.contact_plane_axes,
        [[[1.0, 0.0], [0.0, 1.0]], [[inv_sqrt2, inv_sqrt2], [-inv_sqrt2, inv_sqrt2]]],
    )
    arr1 = pi_probe1.to_numpy()
    npt.assert_array_equal(arr1["radius"], [10.0, np.nan])
    npt.assert_array_equal(arr1["width"], [np.nan, 10.0])
    npt.assert_array_equal(arr1["height"], [np.nan, 10.0])

    # add Probe as NWB Devices
    nwbfile.add_device(probe_model0)
    nwbfile.add_device(probe0)

    with pynwb.NWBHDF5IO("test_probeinterface.nwb", "w") as io:
        io.write(nwbfile)

    # read the file and test whether the read probe can be converted back to
    # probeinterface correctly
    with pynwb.NWBHDF5IO("test_probeinterface.nwb", "r") as io:
        nwbfile = io.read()
        read_probe = nwbfile.devices["probe0"]
        pi_probe = ndx_extracellular_channels.to_probeinterface(read_probe)
        assert pi_probe.ndim == 2
        assert pi_probe.si_units == "um"
        assert pi_probe.name == "probe0"
        assert pi_probe.serial_number == "0123"
        assert pi_probe.model_name == "a1x32-edge-5mm-20-177_H32"
        assert pi_probe.manufacturer == "Neuronexus"
        npt.assert_array_equal(pi_probe.contact_positions, probe_model0.contacts_table.relative_position_in_um)
        npt.assert_array_equal(pi_probe.to_numpy()["radius"], 5.0)
        npt.assert_array_equal(pi_probe.contact_shapes, "circle")
def main():
    """Build the ndx-extracellular-channels extension namespace and its type
    specs (ContactsTable, ProbeModel, Probe, ProbeInsertion, ChannelsTable,
    ExtracellularSeries), then export them as YAML files into ../../spec/.

    Fixes relative to the previous version: the ProbeInsertion `hemisphere`
    doc no longer refers to an "optogenetic stimulus site" (copy-paste residue
    from another extension) and now references the correct attribute name
    `insertion_position_ml_in_mm`; the ChannelsTable `probe` link doc grammar
    is corrected; a missing space between adjacent string literals in the
    `channel_conversion.axis` doc ("conversionfactor") is restored.
    """
    ns_builder = NWBNamespaceBuilder(
        name="""ndx-extracellular-channels""",
        version="""0.1.0""",
        doc="""NWB extension for storing extracellular probe and channels metadata""",
        author=[
            "Alessio Buccino",
            "Kyu Hyun Lee",
            "Ramon Heberto Mayorquin",
            "Cody Baker",
            "Matt Avaylon",
            "Ryan Ly",
            "Ben Dichter",
            "Oliver Ruebel",
            "Geeling Chau",
        ],
        contact=[
            "alessio.buccino@alleninstitute.org",
            "kyuhyun.lee@ucsf.edu",
            "ramon.mayorquin@catalystneuro.com",
            "cody.baker@catalystneuro.com",
            "mavaylon@lbl.gov",
            "rly@lbl.gov",
            "ben.dichter@catalystneuro.com",
            "oruebel@lbl.gov",
            "gchau@caltech.edu",
        ],
    )
    ns_builder.include_namespace("core")

    contacts_table = NWBGroupSpec(
        neurodata_type_def="ContactsTable",
        neurodata_type_inc="DynamicTable",
        doc="Metadata about the contacts of a probe, compatible with the ProbeInterface specification.",
        default_name="contacts_table",
        datasets=[
            NWBDatasetSpec(
                name="relative_position_in_um",
                neurodata_type_inc="VectorData",
                doc="Relative position of the contact in micrometers, relative to `reference`.",
                dtype="float",
                dims=[["num_contacts", "x, y"], ["num_contacts", "x, y, z"]],
                shape=[[None, 2], [None, 3]],
            ),
            NWBDatasetSpec(
                name="contact_id",  # id is already used by DynamicTable
                neurodata_type_inc="VectorData",
                doc="Unique ID of the contact",
                dtype="text",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="shank_id",
                neurodata_type_inc="VectorData",
                doc="Shank ID of the contact",
                dtype="text",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="plane_axes",
                neurodata_type_inc="VectorData",
                doc=(
                    "The axes defining the contact plane for each contact. It can be used for contact-wise rotations. "
                    "For 2D probes, provide two points (v1x, v1y) and (v2x, v2y). Axis 1 is defined by the vector "
                    "from (0, 0) to (v1x, v1y). Axis 2 is defined by the vector from (0, 0) to (v2x, v2y). "
                    "So for one contact, a 45 degree rotation would be "
                    "[[1 / sqrt(2), 1 / sqrt(2)], [-1 / sqrt(2), 1 / sqrt(2)]]. "
                    "The default is [[1, 0], [0, 1]]. For 3D probes, provide two points (v1x, v1y, v1z), "
                    "(v2x, v2y, v2z). "
                    "See 'contact_plane_axes' in "
                    "https://probeinterface.readthedocs.io/en/main/format_spec.html for more details."
                ),
                dtype="float",
                dims=[["num_contacts", "v1, v2", "x, y"], ["num_contacts", "v1, v2", "x, y, z"]],
                shape=[[None, 2, 2], [None, 2, 3]],
                quantity="?",
            ),
            NWBDatasetSpec(
                name="shape",
                neurodata_type_inc="VectorData",
                doc="Shape of the contact; e.g. 'circle'",
                dtype="text",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="radius_in_um",
                neurodata_type_inc="VectorData",
                doc="Radius of a circular contact, in micrometers.",
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="width_in_um",
                neurodata_type_inc="VectorData",
                doc="Width of a rectangular or square contact, in micrometers.",
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="height_in_um",
                neurodata_type_inc="VectorData",
                doc="Height of a rectangular contact, in micrometers.",
                dtype="float",
                quantity="?",
            ),
        ],
    )

    probe = NWBGroupSpec(
        neurodata_type_def="Probe",
        neurodata_type_inc="Device",
        doc="Specific instance of a neural probe object.",
        groups=[
            NWBGroupSpec(
                name="probe_insertion",
                neurodata_type_inc="ProbeInsertion",
                doc="Information about the insertion of a probe into the brain.",
                quantity="?",
            ),
        ],
        links=[
            NWBLinkSpec(
                name="probe_model",
                doc="The model of the probe used to record the data.",
                target_type="ProbeModel",
            ),
        ],
        attributes=[
            NWBAttributeSpec(
                name="identifier",
                doc="Identifier of the probe, usually the serial number.",
                dtype="text",
                required=False,
            ),
        ],
    )

    probe_model = NWBGroupSpec(
        neurodata_type_def="ProbeModel",
        neurodata_type_inc="Device",
        doc=(
            "Neural probe object, compatible with the ProbeInterface specification. The name of the object should "
            'be the model name of the probe, e.g., "Neuropixels 1.0".'
        ),
        groups=[
            NWBGroupSpec(
                name="contacts_table",
                neurodata_type_inc="ContactsTable",
                doc="Neural probe contacts, compatible with the ProbeInterface specification",
            ),
        ],
        attributes=[
            # inherits name, description, manufacturer from Device
            NWBAttributeSpec(name="ndim", doc="dimension of the probe", dtype="int", default_value=2),
            NWBAttributeSpec(
                # although the ProbeModel also has a name attribute, the name must be unique across all
                # devices in the NWB file, and users may decide to use a more descriptive name than just
                # the model name
                name="model",
                doc='Name of the model of the probe, e.g., "Neuropixels 1.0".',
                dtype="text",
            ),
            NWBAttributeSpec(
                name="planar_contour_in_um",  # TODO should this just be "contour_in_um"?
                doc=(
                    "The coordinates of the nodes of the polygon that describe the shape (contour) of the probe, "
                    "in micrometers. The first and last points are connected to close the polygon. "
                    "e.g., [(-20., -30.), (20., -110.), (60., -30.), (60., 190.), (-20., 190.)]. Coordinates can be "
                    "in 2D or 3D. See 'probe_planar_contour' in "
                    "https://probeinterface.readthedocs.io/en/main/format_spec.html for more details."
                ),
                dtype="float",
                dims=[["num_points", "x, y"], ["num_points", "x, y, z"]],
                shape=[[None, 2], [None, 3]],
                required=False,
            ),
        ],
    )

    probe_insertion = NWBGroupSpec(
        neurodata_type_def="ProbeInsertion",
        neurodata_type_inc="NWBContainer",
        doc=(
            "Metadata about the insertion of a probe into the brain, which can be used to determine the location of "
            "the probe in the brain."
        ),
        default_name="probe_insertion",
        attributes=[
            NWBAttributeSpec(
                name="insertion_position_ap_in_mm",
                doc=(
                    "Anteroposterior (AP) stereotactic coordinate of where the probe was inserted, in millimeters. "
                    "+ is anterior. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="insertion_position_ml_in_mm",
                doc=(
                    "Mediolateral (ML) stereotactic coordinate of where the probe was inserted, in millimeters. "
                    "+ is right. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="insertion_position_dv_in_mm",
                doc=(
                    "Dorsoventral (DV) stereotactic coordinate of where the probe was inserted, in millimeters. "
                    "+ is up. Coordinate is relative to the zero-point described in `position_reference`. The "
                    "zero-point is typically the surface of the brain, so this value is typically 0."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="depth_in_mm",
                doc=(
                    "Depth that the probe was driven along `insertion_angle` starting from "
                    "`insertion_position_{X}_in_mm`, in millimeters."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="position_reference",
                doc=(
                    "Location of the origin (0, 0, 0) for `insertion_position_{X}_in_mm` coordinates, e.g., "
                    '"(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface".'
                ),
                dtype="text",
                required=False,
            ),
            NWBAttributeSpec(
                name="hemisphere",  # NOTE this is useful to cache but could be done at the API level
                doc=(
                    # fixed copy-paste error: this type describes a probe insertion,
                    # not an optogenetic stimulus site; also reference the actual
                    # attribute name `insertion_position_ml_in_mm`
                    'The hemisphere ("left" or "right") of the targeted location of the probe insertion. '
                    "Should be consistent with the `insertion_position_ml_in_mm` coordinate (left = ml < 0, "
                    "right = ml > 0)."
                ),
                dtype="text",
                required=False,
            ),
            # TODO confirm with surgical experts that these make sense for describing the angle of insertion
            # or should we use stereotactic arm angles (if they can be well described) instead.
            # NOTE that these terms and rotation order make sense for rotating the head, but not necessarily
            # for thinking about rotations of the probe in a way that makes sense for understanding or describing.
            NWBAttributeSpec(
                name="insertion_angle_yaw_in_deg",
                doc=(
                    "The yaw angle of the probe at the time of insertion, in degrees. "
                    "Yaw = rotation around dorsal-ventral axis, like shaking (+ is rotating the nose rightward). "
                    "Zero is defined as the probe being parallel to an sagittal slice of the brain. "
                    "The order of rotations is yaw, pitch, roll."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="insertion_angle_pitch_in_deg",
                doc=(
                    "The pitch angle of the probe at the time of insertion, in degrees. "
                    "Pitch = rotation around left-right axis, like nodding (+ is rotating the nose upward). "
                    "Zero is defined as the probe being parallel to an axial slice of the brain. "
                    "The order of rotations is yaw, pitch, roll."
                ),
                dtype="float",
                required=False,
            ),
            NWBAttributeSpec(
                name="insertion_angle_roll_in_deg",
                doc=(
                    "The roll angle of the probe at the time of insertion, in degrees. "
                    "Roll = rotation around anterior-posterior axis, like tilting (+ is rotating the right side "
                    "downward). Zero is defined as the probe being parallel to a coronal slice of the brain. "
                    "The order of rotations is yaw, pitch, roll."
                ),
                dtype="float",
                required=False,
            ),
        ],
    )

    channels_table = NWBGroupSpec(
        neurodata_type_def="ChannelsTable",
        neurodata_type_inc="DynamicTable",
        doc="Metadata about the channels used in an extracellular recording from a single probe.",
        default_name="ChannelsTable",
        datasets=[
            NWBDatasetSpec(
                name="contact",
                neurodata_type_inc="DynamicTableRegion",
                doc="The row in a ContactsTable that represents the contact used as a channel.",
            ),
            NWBDatasetSpec(
                name="reference_contact",
                neurodata_type_inc="DynamicTableRegion",
                doc=(
                    "The row in a ContactsTable that represents the contact used as a reference. This is useful for "
                    "differential or bipolar recordings. The data in the `ExtracellularSeries` corresponding to each "
                    "channel (row) of this table is equal to the voltage from `contact` minus the "
                    "voltage from `reference_contact`."
                ),
                quantity="?",
            ),
            NWBDatasetSpec(
                name="filter",
                neurodata_type_inc="VectorData",
                dtype="text",
                doc=(
                    "The filter used on the raw (wideband) voltage data from this contact, including the filter "
                    'name and frequency cutoffs, e.g., "High-pass filter at 300 Hz."'
                ),
                quantity="?",
            ),
            NWBDatasetSpec(
                name="estimated_position_ap_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Anteroposterior (AP) stereotactic coordinate of the estimated contact position, in millimeters. "
                    "+ is anterior. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="estimated_position_ml_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Mediolateral (ML) stereotactic coordinate of the estimated contact position, in millimeters. "
                    "+ is right. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="estimated_position_dv_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Dorsoventral (DV) stereotactic coordinate of the estimated contact position, in millimeters. "
                    "+ is up. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="estimated_brain_area",
                neurodata_type_inc="VectorData",
                dtype="text",
                doc=('The brain area of the estimated contact position, e.g., "CA1".'),
                quantity="?",
            ),
            NWBDatasetSpec(
                name="confirmed_position_ap_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Anteroposterior (AP) stereotactic coordinate of the confirmed contact position, in millimeters. "
                    "+ is anterior. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="confirmed_position_ml_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Mediolateral (ML) stereotactic coordinate of the confirmed contact position, in millimeters. "
                    "+ is right. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="confirmed_position_dv_in_mm",
                neurodata_type_inc="VectorData",
                doc=(
                    "Dorsoventral (DV) stereotactic coordinate of the confirmed contact position, in millimeters. "
                    "+ is up. Coordinate is relative to the zero-point described in `position_reference`."
                ),
                dtype="float",
                quantity="?",
            ),
            NWBDatasetSpec(
                name="confirmed_brain_area",
                neurodata_type_inc="VectorData",
                dtype="text",
                doc=('The brain area of the actual contact position, e.g., "CA1".'),
                quantity="?",
            ),
        ],
        links=[
            NWBLinkSpec(
                name="probe",
                # fixed grammar: "channels belongs" -> "channels belong"
                doc="The probe that the channels belong to.",
                target_type="Probe",
            ),
        ],
        attributes=[
            NWBAttributeSpec(
                name="position_reference",
                doc=(
                    "Location of the origin (0, 0, 0) for `{X}_position_{Y}_in_mm` coordinates, e.g., "
                    '"(AP, ML, DV) = (0, 0, 0) corresponds to bregma at the cortical surface".'
                ),
                dtype="text",
                required=False,
            ),
            NWBAttributeSpec(
                name="electrical_reference_description",
                doc=('The electrical reference used for the recording; e.g., "common average reference", "probe tip".'),
                dtype="text",
                required=False,
            ),
            NWBAttributeSpec(
                name="ground",
                doc=(
                    'The ground used for the recording; e.g., "external wire in CSF", "skull screw over '
                    'frontal cortex".'
                ),
                dtype="text",
                required=False,
            ),
            NWBAttributeSpec(
                name="position_confirmation_method",
                doc=(
                    "Description of the method used to confirm the position of the contacts or brain area, "
                    'e.g., "histology", "MRI".'
                ),
                dtype="text",
                required=False,
            ),
        ],
    )

    extracellular_series = NWBGroupSpec(
        neurodata_type_def="ExtracellularSeries",
        neurodata_type_inc="TimeSeries",
        doc=(
            "Extracellular recordings from a single probe. Create multiple instances of this class for different "
            "probes."
        ),
        datasets=[
            NWBDatasetSpec(
                name="data",
                doc="Recorded voltage data.",
                dtype="numeric",
                shape=[None, None],
                dims=["num_times", "num_channels"],
                attributes=[
                    NWBAttributeSpec(
                        name="unit",
                        doc=(
                            "Base unit of measurement for working with the data. This value is fixed to "
                            "'microvolts'. Actual stored values are not necessarily stored in these units. To "
                            "access the data in these units, multiply 'data' by 'conversion', followed by "
                            "'channel_conversion' (if present), and then add 'offset'."
                        ),
                        value="microvolts",
                        dtype="text",
                    )
                ],
            ),
            NWBDatasetSpec(
                name="channels",
                neurodata_type_inc="DynamicTableRegion",
                doc=(
                    "DynamicTableRegion pointer to rows in a ChannelsTable that represent the channels used to "
                    "collect the data in this recording."
                ),
            ),
            NWBDatasetSpec(
                name="channel_conversion",
                dtype="float",
                shape=[None],
                dims=["num_channels"],
                doc=(
                    "Channel-specific conversion factor. Multiply the data in the 'data' dataset by these "
                    "values along the channel axis (as indicated by axis attribute) AND by the global "
                    "conversion factor in the 'conversion' attribute of 'data' to get the data values in "
                    "microvolts, i.e, data in microvolts = data * data.conversion * channel_conversion. This "
                    "approach allows for both global and per-channel data conversion factors needed "
                    "to support the storage of electrical recordings as native values generated by data "
                    "acquisition systems. If this dataset is not present, then there is no channel-specific "
                    "conversion factor, i.e. it is 1 for all channels."
                ),
                quantity="?",
                attributes=[
                    NWBAttributeSpec(
                        name="axis",
                        dtype="int",
                        doc=(
                            # fixed missing space between the adjacent string literals
                            # ("...conversion" + "factor..." rendered as "conversionfactor")
                            "The zero-indexed axis of the 'data' dataset that the channel-specific conversion "
                            "factor applies to. This value is fixed to 1."
                        ),
                        value=1,
                    )
                ],
            ),
        ],
    )

    new_data_types = [contacts_table, probe_model, probe, probe_insertion, channels_table, extracellular_series]

    # export the spec to yaml files in the spec folder
    output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "spec"))
    export_spec(ns_builder, new_data_types, output_dir)


if __name__ == "__main__":
    # usage: python create_extension_spec.py
    main()