diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml
deleted file mode 100644
index 9ad02579..00000000
--- a/.github/workflows/development.yaml
+++ /dev/null
@@ -1,177 +0,0 @@
-name: Development
-on:
- push:
- branches:
- - '**' # every branch
- - '!gh-pages' # exclude gh-pages branch
- - '!stage*' # exclude branches beginning with stage
- tags:
- - '\d+\.\d+\.\d+' # only semver tags
- pull_request:
- branches:
- - '**' # every branch
- - '!gh-pages' # exclude gh-pages branch
- - '!stage*' # exclude branches beginning with stage
-jobs:
- test-changelog:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Get changelog entry
- id: changelog_reader
- uses: guzman-raphael/changelog-reader-action@v5
- with:
- path: ./CHANGELOG.md
- - name: Verify changelog parsing
- env:
- TAG_NAME: ${{steps.changelog_reader.outputs.version}}
- RELEASE_NAME: Release ${{steps.changelog_reader.outputs.version}}
- BODY: ${{steps.changelog_reader.outputs.changes}}
- PRERELEASE: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
- DRAFT: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
- run: |
- echo "TAG_NAME=${TAG_NAME}"
- echo "RELEASE_NAME=${RELEASE_NAME}"
- echo "BODY=${BODY}"
- echo "PRERELEASE=${PRERELEASE}"
- echo "DRAFT=${DRAFT}"
- build:
- needs: test-changelog
- runs-on: ubuntu-latest
- strategy:
- matrix:
- include:
- - py_ver: 3.8
- distro: alpine
- image: djbase
- env:
- PY_VER: ${{matrix.py_ver}}
- DISTRO: ${{matrix.distro}}
- IMAGE: ${{matrix.image}}
- DOCKER_CLIENT_TIMEOUT: "120"
- COMPOSE_HTTP_TIMEOUT: "120"
- steps:
- - uses: actions/checkout@v2
- - name: Compile image
- run: |
- export PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
- export PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
- export HOST_UID=$(id -u)
- docker-compose -f docker-compose-build.yaml up --exit-code-from element --build
- IMAGE=$(docker images --filter "reference=datajoint/${PKG_NAME}*" \
- --format "{{.Repository}}")
- TAG=$(docker images --filter "reference=datajoint/${PKG_NAME}*" --format "{{.Tag}}")
- docker save "${IMAGE}:${TAG}" | \
- gzip > "image-${PKG_NAME}-${PKG_VERSION}-py${PY_VER}-${DISTRO}.tar.gz"
- echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
- echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
- - name: Add image artifact
- uses: actions/upload-artifact@v2
- with:
- name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
- path:
- "image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-\
- ${{matrix.distro}}.tar.gz"
- retention-days: 1
- - if: matrix.py_ver == '3.8' && matrix.distro == 'alpine'
- name: Add pip artifacts
- uses: actions/upload-artifact@v2
- with:
- name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
- path: dist
- retention-days: 1
- publish-release:
- if: |
- github.event_name == 'push' &&
- startsWith(github.ref, 'refs/tags')
- needs: build
- runs-on: ubuntu-latest
- env:
- TWINE_USERNAME: ${{secrets.twine_username}}
- TWINE_PASSWORD: ${{secrets.twine_password}}
- outputs:
- release_upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- steps:
- - uses: actions/checkout@v2
- - name: Determine package version
- run: |
- PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
- SDIST_PKG_NAME=$(echo ${PKG_NAME} | sed 's|_|-|g')
- PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
- echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
- echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
- echo "SDIST_PKG_NAME=${SDIST_PKG_NAME}" >> $GITHUB_ENV
- - name: Get changelog entry
- id: changelog_reader
- uses: guzman-raphael/changelog-reader-action@v5
- with:
- path: ./CHANGELOG.md
- version: ${{env.PKG_VERSION}}
- - name: Create GH release
- id: create_gh_release
- uses: actions/create-release@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- tag_name: ${{steps.changelog_reader.outputs.version}}
- release_name: Release ${{steps.changelog_reader.outputs.version}}
- body: ${{steps.changelog_reader.outputs.changes}}
- prerelease: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
- draft: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
- - name: Fetch image artifact
- uses: actions/download-artifact@v2
- with:
- name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py3.8-alpine
- - name: Fetch pip artifacts
- uses: actions/download-artifact@v2
- with:
- name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
- path: dist
- - name: Publish pip release
- run: |
- export HOST_UID=$(id -u)
- docker load < "image-${{env.PKG_NAME}}-${PKG_VERSION}-py3.8-alpine.tar.gz"
- docker-compose -f docker-compose-build.yaml run \
- -e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} element \
- sh -lc "pip install twine && python -m twine upload dist/*"
- - name: Determine pip artifact paths
- run: |
- echo "PKG_WHEEL_PATH=$(ls dist/${PKG_NAME}-*.whl)" >> $GITHUB_ENV
- echo "PKG_SDIST_PATH=$(ls dist/${SDIST_PKG_NAME}-*.tar.gz)" >> $GITHUB_ENV
- - name: Upload pip wheel asset to release
- uses: actions/upload-release-asset@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- asset_path: ${{env.PKG_WHEEL_PATH}}
- asset_name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}.whl
- asset_content_type: application/zip
- - name: Upload pip sdist asset to release
- uses: actions/upload-release-asset@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- asset_path: ${{env.PKG_SDIST_PATH}}
- asset_name: pip-${{env.SDIST_PKG_NAME}}-${{env.PKG_VERSION}}.tar.gz
- asset_content_type: application/gzip
- publish-docs:
- if: |
- github.event_name == 'push' &&
- startsWith(github.ref, 'refs/tags')
- needs: build
- runs-on: ubuntu-latest
- env:
- DOCKER_CLIENT_TIMEOUT: "120"
- COMPOSE_HTTP_TIMEOUT: "120"
- steps:
- - uses: actions/checkout@v2
- - name: Deploy docs
- run: |
- export MODE=BUILD
- export PACKAGE=element_array_ephys
- export UPSTREAM_REPO=https://github.com/${GITHUB_REPOSITORY}.git
- export HOST_UID=$(id -u)
- docker compose -f docs/docker-compose.yaml up --exit-code-from docs --build
- git push origin gh-pages
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 00000000..9ae4ef02
--- /dev/null
+++ b/.github/workflows/release.yaml
@@ -0,0 +1,27 @@
+name: Release
+on:
+ workflow_dispatch:
+jobs:
+ make_github_release:
+ uses: datajoint/.github/.github/workflows/make_github_release.yaml@main
+ pypi_release:
+ needs: make_github_release
+ uses: datajoint/.github/.github/workflows/pypi_release.yaml@main
+ secrets:
+ TWINE_USERNAME: ${{secrets.TWINE_USERNAME}}
+ TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}}
+ with:
+ UPLOAD_URL: ${{needs.make_github_release.outputs.release_upload_url}}
+ mkdocs_release:
+ uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main
+ permissions:
+ contents: write
+ # devcontainer-build:
+ # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main
+ # devcontainer-publish:
+ # needs:
+ # - devcontainer-build
+ # uses: datajoint/.github/.github/workflows/devcontainer-publish.yaml@main
+ # secrets:
+ # DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}}
+ # DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}}
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 00000000..acaddca0
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,34 @@
+name: Test
+on:
+ push:
+ pull_request:
+ workflow_dispatch:
+jobs:
+ # devcontainer-build:
+ # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main
+ tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ py_ver: ["3.9", "3.10"]
+ mysql_ver: ["8.0", "5.7"]
+ include:
+ - py_ver: "3.8"
+ mysql_ver: "5.7"
+ - py_ver: "3.7"
+ mysql_ver: "5.7"
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python ${{matrix.py_ver}}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{matrix.py_ver}}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install flake8 "black[jupyter]"
+ - name: Run style tests
+ run: |
+ python_version=${{matrix.py_ver}}
+ black element_array_ephys --check --verbose --target-version py${python_version//.}
+
diff --git a/.github/workflows/u24_element_before_release.yml b/.github/workflows/u24_element_before_release.yml
deleted file mode 100644
index 692cf82e..00000000
--- a/.github/workflows/u24_element_before_release.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-name: u24_element_before_release
-on:
- pull_request:
- push:
- branches:
- - '**'
- tags-ignore:
- - '**'
- workflow_dispatch:
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- call_u24_elements_build_alpine:
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main
- with:
- py_ver: 3.9
- image: djbase
diff --git a/.github/workflows/u24_element_release_call.yml b/.github/workflows/u24_element_release_call.yml
deleted file mode 100644
index f1986e17..00000000
--- a/.github/workflows/u24_element_release_call.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: u24_element_release_call
-on:
- workflow_run:
- workflows: ["u24_element_tag_to_release"]
- types:
- - completed
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- test_call_u24_elements_release_alpine:
- if: >-
- github.event.workflow_run.conclusion == 'success' && ( contains(github.event.workflow_run.head_branch, 'test') || (github.event.workflow_run.event == 'pull_request'))
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main
- with:
- py_ver: 3.9
- twine_repo: testpypi
- secrets:
- TWINE_USERNAME: ${{secrets.TWINE_TEST_USERNAME}}
- TWINE_PASSWORD: ${{secrets.TWINE_TEST_PASSWORD}}
- GOOGLE_ANALYTICS_KEY: ${{secrets.GOOGLE_ANALYTICS_KEY}}
- call_u24_elements_release_alpine:
- if: >-
- github.event.workflow_run.conclusion == 'success' && github.repository_owner == 'datajoint' && !contains(github.event.workflow_run.head_branch, 'test')
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main
- with:
- py_ver: 3.9
- secrets:
- TWINE_USERNAME: ${{secrets.TWINE_USERNAME}}
- TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}}
- GOOGLE_ANALYTICS_KEY: ${{secrets.GOOGLE_ANALYTICS_KEY}}
-
diff --git a/.github/workflows/u24_element_tag_to_release.yml b/.github/workflows/u24_element_tag_to_release.yml
deleted file mode 100644
index 57334e9a..00000000
--- a/.github/workflows/u24_element_tag_to_release.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: u24_element_tag_to_release
-on:
- push:
- tags:
- - '*.*.*'
- - 'test*.*.*'
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- call_u24_elements_build_alpine:
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main
- with:
- py_ver: 3.9
- image: djbase
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a29e7b2c..35f7120e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,42 @@
Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.
+## [0.2.11] - 2023-06-29
+
++ Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing
++ Add - Null value for `package_version` to patch bug
++ Update - GitHub Actions workflows
++ Update - README instructions
+
+## [0.2.10] - 2023-05-26
+
++ Add - Kilosort, NWB, and DANDI citations
++ Fix - CSS to improve readability of tables in dark mode
++ Update - mkdocs.yaml
+
+## [0.2.9] - 2023-05-11
+
++ Fix - `.ipynb` dark mode output for all notebooks.
+
+## [0.2.8] - 2023-04-28
+
++ Fix - `.ipynb` output in tutorials is not visible in dark mode.
+
+## [0.2.7] - 2023-04-19
+
++ Bugfix - A name remapping dictionary was added to ensure consistency between the column names of the `metrics.csv` file and the attribute names of the `QualityMetrics` table
+
+## [0.2.6] - 2023-04-17
+
++ Fix - Update Pandas DataFrame column name to insert `pt_ratio` in `QualityMetrics.Waveform` table
+
+## [0.2.5] - 2023-04-12
+
++ Add - docstrings for quality metric tables
++ Fix - docstring errors
++ Update - `concepts.md`
++ Update - schema diagrams with quality metrics tables
+
## [0.2.4] - 2023-03-10
+ Update - Requirements with `ipywidgets` and `scikit-image` for plotting widget
@@ -90,6 +126,12 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
+ Add - Probe table supporting: Neuropixels probes 1.0 - 3A, 1.0 - 3B, 2.0 - SS,
2.0 - MS
+[0.2.10]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.10
+[0.2.9]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.9
+[0.2.8]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.8
+[0.2.7]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.7
+[0.2.6]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.6
+[0.2.5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.5
[0.2.4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.4
[0.2.3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.3
[0.2.2]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.2
diff --git a/README.md b/README.md
index eeeaa68b..46ce4d13 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,43 @@
-# DataJoint Element - Array Electrophysiology Element
+[![PyPI version](https://badge.fury.io/py/element-array-ephys.svg)](http://badge.fury.io/py/element-array-ephys)
-DataJoint Element for extracellular array electrophysiology. DataJoint Elements
-collectively standardize and automate data collection and analysis for neuroscience
-experiments. Each Element is a modular pipeline for data storage and processing with
-corresponding database tables that can be combined with other Elements to assemble a
-fully functional pipeline.
+# DataJoint Element for Extracellular Electrophysiology
-![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg)
+DataJoint Element for extracellular array electrophysiology that processes data
+acquired with a polytrode probe
+(e.g. [Neuropixels](https://www.neuropixels.org), Neuralynx) using the
+[SpikeGLX](https://github.com/billkarsh/SpikeGLX) or
+[OpenEphys](https://open-ephys.org/gui) acquisition software and
+[MATLAB-based Kilosort](https://github.com/MouseLand/Kilosort) or [python-based
+Kilosort](https://github.com/MouseLand/pykilosort) spike sorting software. DataJoint
+Elements collectively standardize and automate data collection and analysis for
+neuroscience experiments. Each Element is a modular pipeline for data storage and
+processing with corresponding database tables that can be combined with other Elements
+to assemble a fully functional pipeline.
-Installation and usage instructions can be found at the
-[Element documentation](https://datajoint.com/docs/elements/element-array-ephys).
+## Experiment flowchart
+
+![flowchart](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg)
+
+## Data Pipeline Diagram
+
+![datajoint](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_acute.svg)
+
+
+## Getting Started
+
++ Install from PyPI
+
+ ```bash
+ pip install element-array-ephys
+ ```
+
++ [Interactive tutorial on GitHub Codespaces](https://github.com/datajoint/workflow-array-ephys#interactive-tutorial)
+
++ [Documentation](https://datajoint.com/docs/elements/element-array-ephys)
+
+## Support
+
++ If you need help getting started or run into any errors, please contact our team by email at support@datajoint.com.
## The "modular clustering" branch
@@ -39,4 +67,3 @@ Thus, upon activation and incorporation of this `ecephys` pipeline to a workflow
ephys.Clustering.key_source - ephys_sorter.KilosortPreProcessing.key_source
).proj()
```
-
diff --git a/docs/.docker/pip_requirements.txt b/docs/.docker/pip_requirements.txt
index 5b7b2f4c..ae44fb59 100644
--- a/docs/.docker/pip_requirements.txt
+++ b/docs/.docker/pip_requirements.txt
@@ -8,4 +8,5 @@ mkdocs-gen-files
mkdocs-literate-nav
mkdocs-exclude-search
mkdocs-markdownextradata-plugin
-mkdocs-jupyter
\ No newline at end of file
+mkdocs-jupyter
+mkdocs-section-index
diff --git a/docs/docker-compose.yaml b/docs/docker-compose.yaml
index 12149475..5ba221df 100644
--- a/docs/docker-compose.yaml
+++ b/docs/docker-compose.yaml
@@ -14,7 +14,6 @@ services:
- PACKAGE
- UPSTREAM_REPO
- MODE
- - GOOGLE_ANALYTICS_KEY
- PATCH_VERSION
volumes:
- ../docs:/main/docs
diff --git a/docs/mkdocs.yaml b/docs/mkdocs.yaml
index 3607eee5..5fdbffd2 100644
--- a/docs/mkdocs.yaml
+++ b/docs/mkdocs.yaml
@@ -1,4 +1,4 @@
-# ---------------------- PROJECT SPECIFIC ---------------------------
+--- # ---------------------- PROJECT SPECIFIC ---------------------------
site_name: DataJoint Documentation
site_url: http://localhost/docs/elements/element-array-ephys
@@ -7,19 +7,20 @@ repo_name: datajoint/element-array-ephys
nav:
- Element Array Ephys: index.md
- Concepts: concepts.md
- - Tutorials:
- - Overview: tutorials/index.md
- - Data Download: tutorials/00-data-download-optional.ipynb
- - Configure: tutorials/01-configure.ipynb
- - Workflow Structure: tutorials/02-workflow-structure-optional.ipynb
- - Process: tutorials/03-process.ipynb
- - Automate: tutorials/04-automate-optional.ipynb
- - Explore: tutorials/05-explore.ipynb
- - Drop: tutorials/06-drop-optional.ipynb
- - Downstream Analysis: tutorials/07-downstream-analysis.ipynb
- - Visualizations: tutorials/10-data_visualization.ipynb
- - Electrode Localization: tutorials/08-electrode-localization.ipynb
- - NWB Export: tutorials/09-NWB-export.ipynb
+ - Tutorials:
+ - Overview: tutorials/index.md
+ - Data Download: tutorials/00-data-download-optional.ipynb
+ - Configure: tutorials/01-configure.ipynb
+ - Workflow Structure: tutorials/02-workflow-structure-optional.ipynb
+ - Process: tutorials/03-process.ipynb
+ - Automate: tutorials/04-automate-optional.ipynb
+ - Explore: tutorials/05-explore.ipynb
+ - Drop: tutorials/06-drop-optional.ipynb
+ - Downstream Analysis: tutorials/07-downstream-analysis.ipynb
+ - Visualizations: tutorials/10-data_visualization.ipynb
+ - Electrode Localization: tutorials/08-electrode-localization.ipynb
+ - NWB Export: tutorials/09-NWB-export.ipynb
+ - Quality Metrics: tutorials/quality_metrics.ipynb
- Citation: citation.md
- API: api/ # defer to gen-files + literate-nav
- Changelog: changelog.md
@@ -35,7 +36,7 @@ nav:
# 02. Instead of designating codeblocks with bash, use console. For example..
# ```console
# cd ../my_dir
-# ```
+# ```
# 03. Links across docs should ...
# A. Not involve line breaks.
# B. Use relative paths to docs in the same repo
@@ -54,19 +55,16 @@ nav:
# UPSTREAM_REPO=https://github.com/datajoint/element-{ELEMENT}.git \
# HOST_UID=$(id -u) docker compose -f docs/docker-compose.yaml up --build
# ```
-# 02. Site analytics depend on a local environment variable GOOGLE_ANALYTICS_KEY
-# You can find this in LastPass or declare with any string to suppress errors
-# 03. The API section will pull docstrings.
+# 02. The API section will pull docstrings.
# A. Follow google style guide e.g.,
# https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
# With typing suggestions: https://docs.python.org/3/library/typing.html
# B. To pull a specific workflow fork, change ./docs/src/api/make_pages.py#L19
-# 04. To see your fork of the workflow-{element} in this render, change the
+# 03. To see your fork of the workflow-{element} in this render, change the
# URL in ./docs/src/api/make_pages.py#L19 to your fork.
-# 05. For redirecting options For redirect options, see 'redirects' below.
-# 06. To deploy this site on your fork,
+# 04. To deploy this site on your fork,
# A. declare a branch called gh-pages
-# B. go to the your fork > settings > pages
+# B. go to your fork > settings > pages
# C. direct pages to render from the gh-pages branch at root
# D. push a tag to your fork with the format test*.*.*
#
@@ -99,9 +97,6 @@ theme:
plugins:
- markdownextradata: {}
- search
- # - redirects: # OPTIONAL REDIRECTS
- # redirect_maps:
- # "index.md": "getting_started.md"
- mkdocstrings:
default_handler: python
handlers:
@@ -112,7 +107,7 @@ plugins:
line_length: 88
- gen-files:
scripts:
- - ./src/api/make_pages.py
+ - ./src/api/make_pages.py
- literate-nav:
nav_file: navigation.md
- exclude-search:
@@ -120,6 +115,7 @@ plugins:
- "*/navigation.md"
- mkdocs-jupyter:
ignore_h1_titles: True
+ - section-index
markdown_extensions:
- attr_list
- toc:
@@ -141,13 +137,12 @@ markdown_extensions:
- pymdownx.inlinehilite
- pymdownx.snippets
- footnotes
-
+ - pymdownx.magiclink # Displays bare URLs as links
+ - pymdownx.tasklist: # Renders check boxes in tasks lists
+ custom_checkbox: true
extra:
PATCH_VERSION: !ENV PATCH_VERSION
generator: false # Disable watermark
- analytics:
- provider: google
- property: !ENV GOOGLE_ANALYTICS_KEY
version:
provider: mike
social:
@@ -182,4 +177,4 @@ extra_css:
- assets/stylesheets/extra.css
extra_javascript:
- - https://js-na1.hs-scripts.com/23133402.js # HubSpot chatbot
+ - https://js-na1.hs-scripts.com/23133402.js # HubSpot chatbot
diff --git a/docs/src/.overrides/assets/stylesheets/extra.css b/docs/src/.overrides/assets/stylesheets/extra.css
index 46b6aa59..4742958d 100644
--- a/docs/src/.overrides/assets/stylesheets/extra.css
+++ b/docs/src/.overrides/assets/stylesheets/extra.css
@@ -91,3 +91,22 @@ html a[title="YouTube"].md-social__link svg {
/* previous/next text */
/* --md-footer-fg-color: var(--dj-white); */
}
+
+table {
+ border-collapse: collapse;
+}
+
+tr {
+ border-left: 1px solid var(--dj-black);
+ border-right: 1px solid var(--dj-black);
+}
+
+td, th {
+ border-top: 1px solid var(--dj-black);
+ border-bottom: 1px solid var(--dj-black);
+}
+
+[data-md-color-scheme="slate"] td, [data-md-color-scheme="slate"] th {
+ background-color: var(--dj-white);
+ color: var(--dj-black);
+}
diff --git a/docs/src/citation.md b/docs/src/citation.md
index 34ea0ab0..02baf664 100644
--- a/docs/src/citation.md
+++ b/docs/src/citation.md
@@ -1,11 +1,19 @@
# Citation
-If your work uses this Element, please cite the following manuscript and Research
-Resource Identifier (RRID):
+If your work uses the following resources, please cite the respective manuscript and/or Research Resource Identifier (RRID):
-+ Yatsenko D, Nguyen T, Shen S, Gunalan K, Turner CA, Guzman R, Sasaki M, Sitonic D,
- Reimer J, Walker EY, Tolias AS. DataJoint Elements: Data Workflows for
- Neurophysiology. bioRxiv. 2021 Jan 1. doi: https://doi.org/10.1101/2021.03.30.437358
++ DataJoint Element Array Electrophysiology - Version {{ PATCH_VERSION }}
+ + Yatsenko D, Nguyen T, Shen S, Gunalan K, Turner CA, Guzman R, Sasaki M, Sitonic D,
+ Reimer J, Walker EY, Tolias AS. DataJoint Elements: Data Workflows for
+ Neurophysiology. bioRxiv. 2021 Jan 1. doi: https://doi.org/10.1101/2021.03.30.437358
-+ DataJoint Elements ([RRID:SCR_021894](https://scicrunch.org/resolver/SCR_021894)) -
- Element Array Electrophysiology (version {{ PATCH_VERSION }})
+ + [RRID:SCR_021894](https://scicrunch.org/resolver/SCR_021894)
+
++ Kilosort
+ + [Manuscripts](https://github.com/MouseLand/Kilosort#citation-requirement)
+
++ NWB
+ + [Manuscript](https://www.nwb.org/publications/)
+
++ DANDI
+ + [Citation options](https://www.dandiarchive.org/handbook/10_using_dandi/#citing-dandi)
diff --git a/docs/src/concepts.md b/docs/src/concepts.md
index 06c57944..f864b306 100644
--- a/docs/src/concepts.md
+++ b/docs/src/concepts.md
@@ -107,6 +107,10 @@ is a table within the Element or a table connected to the Element.
![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_precluster.svg)
+### `ephys_no_curation` module
+
+![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_no_curation.svg)
+
### `subject` schema ([API docs](https://datajoint.com/docs/elements/element-animal/api/element_animal/subject))
Although not required, most choose to connect the `Session` table to a `Subject` table.
@@ -156,12 +160,15 @@ Tables for storing probe or unit-level visualization results.
| --- | --- |
| ProbeLevelReport | A table to store drift map figures generated from each recording probe. |
| UnitLevelReport | A table to store figures (waveforms, autocorrelogram, peak waveform + neighbors) generated for each unit. |
+| QualityMetricCutoffs | A table to store cut-off values for cluster quality metrics. |
+| QualityMetricSet | A manual table to match a set of cluster quality metric values with desired cut-offs. |
+| QualityMetricReport | A table to store quality metric figures. |
## Element Development
Through our interviews and direct collaboration on the precursor projects, we identified
the common motifs to create the
-[Array ElectrophysiologyElement](https://github.com/datajoint/element-array-ephys).
+[Array Electrophysiology Element](https://github.com/datajoint/element-array-ephys).
Major features of the Array Electrophysiology Element include:
diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py
index b9f75845..9b7b5c8d 100644
--- a/element_array_ephys/ephys_acute.py
+++ b/element_array_ephys/ephys_acute.py
@@ -910,7 +910,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
@@ -1353,6 +1353,11 @@ def make(self, key):
kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
metric_fp = kilosort_dir / "metrics.csv"
+ rename_dict = {
+ "isi_viol": "isi_violation",
+ "num_viol": "number_violation",
+ "contam_rate": "contamination_rate",
+ }
if not metric_fp.exists():
raise FileNotFoundError(f"QC metrics file not found: {metric_fp}")
@@ -1360,7 +1365,8 @@ def make(self, key):
metrics_df = pd.read_csv(metric_fp)
metrics_df.set_index("cluster_id", inplace=True)
metrics_df.replace([np.inf, -np.inf], np.nan, inplace=True)
-
+ metrics_df.columns = metrics_df.columns.str.lower()
+ metrics_df.rename(columns=rename_dict, inplace=True)
metrics_list = [
dict(metrics_df.loc[unit_key["unit"]], **unit_key)
for unit_key in (CuratedClustering.Unit & key).fetch("KEY")
diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py
index a15d56eb..61c325a9 100644
--- a/element_array_ephys/ephys_chronic.py
+++ b/element_array_ephys/ephys_chronic.py
@@ -839,7 +839,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
@@ -1282,6 +1282,11 @@ def make(self, key):
kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
metric_fp = kilosort_dir / "metrics.csv"
+ rename_dict = {
+ "isi_viol": "isi_violation",
+ "num_viol": "number_violation",
+ "contam_rate": "contamination_rate",
+ }
if not metric_fp.exists():
raise FileNotFoundError(f"QC metrics file not found: {metric_fp}")
@@ -1289,7 +1294,8 @@ def make(self, key):
metrics_df = pd.read_csv(metric_fp)
metrics_df.set_index("cluster_id", inplace=True)
metrics_df.replace([np.inf, -np.inf], np.nan, inplace=True)
-
+ metrics_df.columns = metrics_df.columns.str.lower()
+ metrics_df.rename(columns=rename_dict, inplace=True)
metrics_list = [
dict(metrics_df.loc[unit_key["unit"]], **unit_key)
for unit_key in (CuratedClustering.Unit & key).fetch("KEY")
diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py
index 5a7a4042..42f16a01 100644
--- a/element_array_ephys/ephys_no_curation.py
+++ b/element_array_ephys/ephys_no_curation.py
@@ -914,7 +914,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
@@ -935,9 +935,9 @@ class Unit(dj.Part):
Attributes:
CuratedClustering (foreign key): CuratedClustering primary key.
- unit (foreign key, int): Unique integer identifying a single unit.
- probe.ElectrodeConfig.Electrode (dict): probe.ElectrodeConfig.Electrode primary key.
- ClusteringQualityLabel (dict): CLusteringQualityLabel primary key.
+ unit (int): Unique integer identifying a single unit.
+ probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key.
+ ClusteringQualityLabel (foreign key): CLusteringQualityLabel primary key.
spike_count (int): Number of spikes in this recording for this unit.
spike_times (longblob): Spike times of this unit, relative to start time of EphysRecording.
spike_sites (longblob): Array of electrode associated with each spike.
@@ -1291,6 +1291,11 @@ def make(self, key):
kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
metric_fp = kilosort_dir / "metrics.csv"
+ rename_dict = {
+ "isi_viol": "isi_violation",
+ "num_viol": "number_violation",
+ "contam_rate": "contamination_rate",
+ }
if not metric_fp.exists():
raise FileNotFoundError(f"QC metrics file not found: {metric_fp}")
@@ -1298,7 +1303,8 @@ def make(self, key):
metrics_df = pd.read_csv(metric_fp)
metrics_df.set_index("cluster_id", inplace=True)
metrics_df.replace([np.inf, -np.inf], np.nan, inplace=True)
-
+ metrics_df.columns = metrics_df.columns.str.lower()
+ metrics_df.rename(columns=rename_dict, inplace=True)
metrics_list = [
dict(metrics_df.loc[unit_key["unit"]], **unit_key)
for unit_key in (CuratedClustering.Unit & key).fetch("KEY")
diff --git a/element_array_ephys/ephys_precluster.py b/element_array_ephys/ephys_precluster.py
index a17f3cc8..8c573a4c 100644
--- a/element_array_ephys/ephys_precluster.py
+++ b/element_array_ephys/ephys_precluster.py
@@ -518,7 +518,7 @@ def make(self, key):
else:
raise ValueError(f"Unknown task mode: {task_mode}")
- self.insert1({**key, "precluster_time": creation_time})
+ self.insert1({**key, "precluster_time": creation_time, "package_version": ""})
@schema
@@ -616,7 +616,6 @@ def make(self, key):
][recorded_site]
electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)])
elif acq_software == "Open Ephys":
-
session_dir = find_full_path(
get_ephys_root_data_dir(), get_session_directory(key)
)
@@ -833,7 +832,7 @@ def make(self, key):
else:
raise ValueError(f"Unknown task mode: {task_mode}")
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
@@ -1270,13 +1269,20 @@ def make(self, key):
kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
metric_fp = kilosort_dir / "metrics.csv"
+ rename_dict = {
+ "isi_viol": "isi_violation",
+ "num_viol": "number_violation",
+ "contam_rate": "contamination_rate",
+ }
if not metric_fp.exists():
raise FileNotFoundError(f"QC metrics file not found: {metric_fp}")
metrics_df = pd.read_csv(metric_fp)
metrics_df.set_index("cluster_id", inplace=True)
-
+ metrics_df.replace([np.inf, -np.inf], np.nan, inplace=True)
+ metrics_df.columns = metrics_df.columns.str.lower()
+ metrics_df.rename(columns=rename_dict, inplace=True)
metrics_list = [
dict(metrics_df.loc[unit_key["unit"]], **unit_key)
for unit_key in (CuratedClustering.Unit & key).fetch("KEY")
diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py
index 5b5aee2d..ce8f6cad 100644
--- a/element_array_ephys/ephys_report.py
+++ b/element_array_ephys/ephys_report.py
@@ -53,7 +53,6 @@ class ProbeLevelReport(dj.Computed):
"""
def make(self, key):
-
from .plotting.probe_level import plot_driftmap
save_dir = _make_save_dir()
@@ -63,7 +62,6 @@ def make(self, key):
shanks = set((probe.ProbeType.Electrode & units).fetch("shank"))
for shank_no in shanks:
-
table = units * ephys.ProbeInsertion * probe.ProbeType.Electrode & {
"shank": shank_no
}
@@ -120,7 +118,6 @@ class UnitLevelReport(dj.Computed):
"""
def make(self, key):
-
from .plotting.unit_level import (
plot_auto_correlogram,
plot_depth_waveforms,
@@ -159,6 +156,16 @@ def make(self, key):
@schema
class QualityMetricCutoffs(dj.Lookup):
+ """Cut-off values for unit quality metrics.
+
+ Attributes:
+ cutoffs_id (smallint): Unique ID for the cut-off values.
+ amplitude_cutoff_maximum (float): Optional. Amplitude cut-off.
+ presence_ratio_minimum (float): Optional. Presence ratio cut-off.
+ isi_violations_maximum (float): Optional. ISI violation ratio cut-off.
+ cutoffs_hash (uuid): uuid for the cut-off values.
+ """
+
definition = """
cutoffs_id : smallint
---
@@ -218,6 +225,13 @@ def insert_new_cutoffs(
@schema
class QualityMetricSet(dj.Manual):
+ """Set of quality metric values for clusters and its cut-offs.
+
+ Attributes:
+ ephys.QualityMetrics (foreign key): ephys.QualityMetrics primary key.
+ QualityMetricCutoffs (foreign key): QualityMetricCutoffs primary key.
+ """
+
definition = """
-> ephys.QualityMetrics
-> QualityMetricCutoffs
@@ -226,6 +240,13 @@ class QualityMetricSet(dj.Manual):
@schema
class QualityMetricReport(dj.Computed):
+ """Table for storing quality metric figures.
+
+ Attributes:
+ QualityMetricSet (foreign key): QualityMetricSet primary key.
+ plot_grid (longblob): Plotly figure object.
+ """
+
definition = """
-> QualityMetricSet
---
diff --git a/element_array_ephys/export/nwb/nwb.py b/element_array_ephys/export/nwb/nwb.py
index d498d468..a45eb754 100644
--- a/element_array_ephys/export/nwb/nwb.py
+++ b/element_array_ephys/export/nwb/nwb.py
@@ -219,7 +219,6 @@ def create_units_table(
(ephys.CuratedClustering.Unit & clustering_query.proj()).fetch(as_dict=True),
desc=f"creating units table for paramset {paramset_record['paramset_idx']}",
):
-
probe_id, shank_num = (
ephys.ProbeInsertion
* ephys.CuratedClustering.Unit
diff --git a/element_array_ephys/plotting/qc.py b/element_array_ephys/plotting/qc.py
index eb5d7709..16e88d3a 100644
--- a/element_array_ephys/plotting/qc.py
+++ b/element_array_ephys/plotting/qc.py
@@ -28,14 +28,10 @@ def __init__(
key (dict, optional): key from ephys.QualityMetric table. Defaults to None.
scale (float, optional): Scale at which to render figure. Defaults to 1.4.
fig_width (int, optional): Figure width in pixels. Defaults to 800.
- amplitude_cutoff_maximum (float, optional): Cutoff for unit amplitude in
- visualizations. Defaults to None.
- presence_ratio_minimum (float, optional): Cutoff for presence ratio in
- visualizations. Defaults to None.
- isi_violations_maximum (float, optional): Cutoff for isi violations in
- visualizations. Defaults to None.
- dark_mode (bool, optional): Set background to black, foreground white.
- Default False, black on white.
+ amplitude_cutoff_maximum (float, optional): Cutoff for unit amplitude in visualizations. Defaults to None.
+ presence_ratio_minimum (float, optional): Cutoff for presence ratio in visualizations. Defaults to None.
+ isi_violations_maximum (float, optional): Cutoff for isi violations in visualizations. Defaults to None.
+ dark_mode (bool, optional): Set background to black, foreground white. Default False, black on white.
"""
self._ephys = ephys
self._key = key
@@ -134,7 +130,7 @@ def _format_fig(
Figure to apply formatting. Defaults to empty.
scale (float, optional): Scale to render figure. Defaults to scale from
class init, 1.
- ratio (float, optional): Figure aspect ratio width/height . Defaults to 1.
+ ratio (float, optional): Figure aspect ratio width/height. Defaults to 1.
Returns:
go.Figure: Formatted figure
@@ -202,7 +198,7 @@ class initialization.
return fig.add_trace(
go.Scatter(
x=histogram_bins[:-1],
- y=gaussian_filter1d(histogram, 1), # TODO: remove smoothing
+ y=gaussian_filter1d(histogram, 1),
mode="lines",
line=dict(color="rgb(0, 160, 223)", width=2 * scale), # DataJoint Blue
hovertemplate="%{x:.2f}
%{y:.2f}",
@@ -215,8 +211,7 @@ def get_single_fig(self, fig_name: str, scale: float = None) -> go.Figure:
Args:
fig_name (str): Name of figure to be rendered
- scale (float, optional): Scale to render fig. Defaults to scale at class
- init, 1.
+ scale (float, optional): Scale to render fig. Defaults to scale at class init, 1.
Returns:
go.Figure: Histogram plot
@@ -253,8 +248,7 @@ def get_grid(self, n_columns: int = 4, scale: float = 1.0) -> go.Figure:
Args:
n_columns (int, optional): Number of column in grid. Defaults to 4.
- scale (float, optional): Scale to render fig. Defaults to scale at class
- init, 1.
+ scale (float, optional): Scale to render fig. Defaults to scale at class init, 1.
Returns:
go.Figure: grid of available plots
diff --git a/element_array_ephys/plotting/unit_level.py b/element_array_ephys/plotting/unit_level.py
index a19b0fbe..54130916 100644
--- a/element_array_ephys/plotting/unit_level.py
+++ b/element_array_ephys/plotting/unit_level.py
@@ -183,7 +183,6 @@ def plot_depth_waveforms(
# Plot figure
fig = go.Figure()
for electrode, wf, coord in zip(electrodes_to_plot, waveforms, coords):
-
wf_scaled = wf * y_scale_factor
wf_scaled -= wf_scaled.mean()
color = "red" if electrode == peak_electrode else "rgb(51, 76.5, 204)"
diff --git a/element_array_ephys/plotting/widget.py b/element_array_ephys/plotting/widget.py
index a26fc843..29338d59 100644
--- a/element_array_ephys/plotting/widget.py
+++ b/element_array_ephys/plotting/widget.py
@@ -11,7 +11,6 @@
def main(ephys: types.ModuleType) -> widgets:
-
# Build dropdown widgets
probe_dropdown_wg = widgets.Dropdown(
options=ephys.CuratedClustering & ephys_report.ProbeLevelReport,
@@ -66,7 +65,6 @@ def probe_dropdown_evt(change):
)
def plot_probe_widget(probe_key, shank):
-
fig_name = (
ephys_report.ProbeLevelReport & probe_key & f"shank={shank}"
).fetch1("drift_map_plot")
@@ -92,7 +90,6 @@ def plot_probe_widget(probe_key, shank):
display(go.FigureWidget(probe_fig))
def plot_unit_widget(unit):
-
waveform_fig, autocorrelogram_fig, depth_waveform_fig = (
ephys_report.UnitLevelReport & probe_dropdown_wg.value & f"unit={unit}"
).fetch1("waveform_plotly", "autocorrelogram_plotly", "depth_waveform_plotly")
diff --git a/element_array_ephys/probe.py b/element_array_ephys/probe.py
index 417fa3bc..497f1792 100644
--- a/element_array_ephys/probe.py
+++ b/element_array_ephys/probe.py
@@ -218,7 +218,6 @@ def build_electrode_layouts(
shank_spacing: float = None,
y_origin="bottom",
) -> list[dict]:
-
"""Builds electrode layouts.
Args:
diff --git a/element_array_ephys/readers/kilosort.py b/element_array_ephys/readers/kilosort.py
index abddee74..80ae5510 100644
--- a/element_array_ephys/readers/kilosort.py
+++ b/element_array_ephys/readers/kilosort.py
@@ -13,7 +13,6 @@
class Kilosort:
-
_kilosort_core_files = [
"params.py",
"amplitudes.npy",
diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py
index d1f93231..db6097d8 100644
--- a/element_array_ephys/readers/openephys.py
+++ b/element_array_ephys/readers/openephys.py
@@ -135,7 +135,6 @@ def load_probe_data(self): # noqa: C901
probes[probe.probe_SN] = probe
for probe_index, probe_SN in enumerate(probes):
-
probe = probes[probe_SN]
for rec in self.experiment.recordings:
diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py
index df499cc9..122aedf1 100644
--- a/element_array_ephys/version.py
+++ b/element_array_ephys/version.py
@@ -1,2 +1,2 @@
"""Package metadata."""
-__version__ = "0.2.4"
+__version__ = "0.2.11"
diff --git a/images/attached_array_ephys_element_acute.svg b/images/attached_array_ephys_element_acute.svg
index 1d0764cd..5b2bc265 100644
--- a/images/attached_array_ephys_element_acute.svg
+++ b/images/attached_array_ephys_element_acute.svg
@@ -1,3 +1,451 @@
-
-
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/images/attached_array_ephys_element_chronic.svg b/images/attached_array_ephys_element_chronic.svg
index ae8c81bc..808a2f17 100644
--- a/images/attached_array_ephys_element_chronic.svg
+++ b/images/attached_array_ephys_element_chronic.svg
@@ -1,404 +1,456 @@
-