Depthai Python CI/CD #761
name: Depthai Python CI/CD

# Controls when the workflow will run: manual dispatch, pull requests into
# v3_develop, and pushes to v3_develop* branches or v3* tags.
on:
  workflow_dispatch:
  pull_request:
    branches:
      - v3_develop
  push:
    branches:
      - v3_develop*
    tags:
      - 'v3*'
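# The workflow can also be started by hand via workflow_dispatch. A minimal sketch,
# assuming the GitHub CLI (gh) is installed and authenticated for this repository:
#
#   gh workflow run "Depthai Python CI/CD" --ref v3_develop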
###################################
###################################
env:
  CMAKE_WINDOWS_SDK_VERSION: '10.0.18362.0'

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # Job which builds docstrings for the rest of the wheel builds
  build-docstrings:
    runs-on: ubuntu-latest
    env:
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-ubuntu-latest-v3-develop
      - name: List .hunter cache directory
        run: ls -a -l ~/.hunter/_Base/ || true
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8
      - name: Install dependencies
        run: |
          sudo apt update
          python -m pip install --upgrade pip
          sudo apt install libusb-1.0-0-dev
          python -m pip install -r bindings/python/docs/requirements_mkdoc.txt
      - name: Configure project
        run: cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -DDEPTHAI_PYTHON_FORCE_DOCSTRINGS=ON -DDEPTHAI_BASALT_SUPPORT=ON -DDEPTHAI_PCL_SUPPORT=ON -DDEPTHAI_RTABMAP_SUPPORT=ON -DDEPTHAI_PYTHON_DOCSTRINGS_OUTPUT="$PWD/bindings/python/docstrings/depthai_python_docstring.hpp"
      - name: Build target 'pybind11_mkdoc'
        run: cmake --build build --target pybind11_mkdoc --parallel 4
      - name: Upload docstring artifacts
        uses: actions/upload-artifact@v3
        with:
          name: docstrings
          path: bindings/python/docstrings/
          retention-days: 1
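  # The docstring header can also be generated locally. A rough sketch, assuming an
  # Ubuntu-like host with the same dependencies as the steps above and the repository
  # checked out with submodules (the BASALT/PCL/RTABMAP support flags from the
  # 'Configure project' step are omitted here):
  #
  #   python -m pip install -r bindings/python/docs/requirements_mkdoc.txt
  #   cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -DDEPTHAI_PYTHON_FORCE_DOCSTRINGS=ON \
  #     -DDEPTHAI_PYTHON_DOCSTRINGS_OUTPUT="$PWD/bindings/python/docstrings/depthai_python_docstring.hpp"
  #   cmake --build build --target pybind11_mkdoc --parallel 4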
  # Build and test bindings
  pytest:
    needs: build-docstrings
    env:
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
    strategy:
      matrix:
        # os: [ubuntu-latest, windows-latest, macos-latest]
        os: [ubuntu-latest] # TODO(Morato) - re-enable windows & macos
    runs-on: ${{ matrix.os }}
    steps:
      - name: Print home directory
        run: echo Home directory inside container $HOME
      - name: Cache .hunter folder
        if: matrix.os != 'windows-latest'
        uses: actions/cache@v3
        with:
          path: ~/.hunter/
          key: hunter-pytest-${{ matrix.os }}-v3-develop
      - name: Cache .hunter folder
        if: matrix.os == 'windows-latest'
        uses: actions/cache@v3
        with:
          path: C:/.hunter/
          key: hunter-pytest-${{ matrix.os }}-v3-develop
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: bindings/python/docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies (Ubuntu)
        if: matrix.os == 'ubuntu-latest'
        run: |
          python -m pip install --upgrade pip
          sudo apt install libusb-1.0-0-dev libopencv-dev
      - name: Install dependencies (MacOS)
        if: matrix.os == 'macos-latest'
        run: |
          python -m pip install --upgrade pip
          brew install libusb
      - name: Setup cmake
        if: matrix.os == 'macos-latest'
        uses: jwlawson/[email protected]
      - name: Install pytest
        run: |
          python -m pip install pytest numpy opencv-python
      - name: Compile
        run: |
          cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -D CMAKE_BUILD_TYPE=Release -D DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp -D DEPTHAI_PYTHON_ENABLE_TESTS=ON
          cmake --build build --parallel 4
      - name: Test
        run: |
          cmake --build build --target pytest --config Release
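  # A rough local equivalent on Ubuntu (assumes libusb-1.0-0-dev and libopencv-dev are
  # installed and the docstring header from build-docstrings is already present):
  #
  #   python -m pip install pytest numpy opencv-python
  #   cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -D CMAKE_BUILD_TYPE=Release \
  #     -D DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp \
  #     -D DEPTHAI_PYTHON_ENABLE_TESTS=ON
  #   cmake --build build --parallel 4
  #   cmake --build build --target pytest --config Release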
  # # This job builds wheels for armhf arch (RPi)
  # build-linux-armhf:
  #   needs: build-docstrings
  #   strategy:
  #     matrix:
  #       rpi-os: [rpi-buster, rpi-bullseye, rpi-bookworm]
  #   runs-on: ${{ matrix.rpi-os }}
  #   steps:
  #     - name: Print home directory
  #       run: echo Home directory inside container $HOME
  #     - uses: actions/checkout@v3
  #       with:
  #         submodules: 'recursive'
  #     - uses: actions/download-artifact@v3
  #       with:
  #         name: 'docstrings'
  #         path: bindings/python/docstrings
  #     - name: Specify docstring to use while building the wheel
  #       run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
  #     - name: Append build hash if not a tagged commit
  #       if: startsWith(github.ref, 'refs/tags/v') != true
  #       run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
  #     - name: Building wheel
  #       run: cd bindings/python && python3 -m pip wheel . -w ./wheelhouse/ --verbose
  #     - name: Auditing wheels and adding armv6l tag (Running on RPi, binaries compiled as armv6l)
  #       run: |
  #         cd bindings/python
  #         python3 -m pip install -U wheel auditwheel
  #         for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat linux_armv7l -w wheelhouse/preaudited/; done
  #         for whl in wheelhouse/preaudited/*.whl; do python3 -m wheel tags --platform-tag +linux_armv6l "$whl"; done
  #         mkdir -p wheelhouse/audited/
  #         for whl in wheelhouse/preaudited/*linux_armv6l*.whl; do cp "$whl" wheelhouse/audited/$(basename $whl); done
  #     - name: Archive wheel artifacts
  #       uses: actions/upload-artifact@v3
  #       with:
  #         name: audited-wheels
  #         path: bindings/python/wheelhouse/audited/
  #     - name: Deploy wheels to artifactory (if not a release)
  #       if: startsWith(github.ref, 'refs/tags/v') != true
  #       run: cd bindings/python && bash ./ci/upload-artifactory.sh
  #       env:
  #         ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
  #         ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
  #         ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  # This job builds wheels for Windows x86_64 arch
  build-windows-x86_64:
    needs: build-docstrings
    runs-on: windows-latest
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9, '3.10', '3.11', '3.12']
        # python-architecture: [x64, x86]
        python-architecture: [x64] # TODO(Morato) - re-enable x86 - the build complains that OpenCV is not compatible, even though it is 32-bit
      fail-fast: false
    env:
      DEPTHAI_BUILD_BASALT: OFF
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: C:/.hunter
          key: hunter-msvc-v3-develop
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: bindings/python/docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Select Windows SDK
        run: echo "CMAKE_ARGS=-DCMAKE_SYSTEM_VERSION=${{ env.CMAKE_WINDOWS_SDK_VERSION }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          architecture: ${{ matrix.python-architecture }}
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
      - name: Building wheels
        run: cd bindings/python && python -m pip wheel . -w ./wheelhouse/audited/ --verbose
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: bindings/python/wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: cd bindings/python && bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
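  # A rough local equivalent on a Windows host (PowerShell), assuming Python and the
  # Windows SDK version above are installed and the docstring header from
  # build-docstrings is already in place:
  #
  #   $env:DEPTHAI_PYTHON_DOCSTRINGS_INPUT = "$PWD/bindings/python/docstrings/depthai_python_docstring.hpp"
  #   $env:CMAKE_ARGS = "-DCMAKE_SYSTEM_VERSION=10.0.18362.0"
  #   cd bindings/python
  #   python -m pip wheel . -w ./wheelhouse/audited/ --verbose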
  # This job builds wheels for macOS arch
  build-macos:
    needs: build-docstrings
    env:
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
    strategy:
      matrix:
        python-version: [3.8, 3.9, '3.10', '3.11', '3.12']
        os: [macos-13, macos-14] # macos-13 is x64, macos-14 is arm64
      fail-fast: false
    runs-on: ${{ matrix.os }}
    # env:
    #   TODO(Morato) - re-enable basalt, pcl, rtabmap after everything is ported
    #   DEPTHAI_BUILD_BASALT: ON
    #   DEPTHAI_BUILD_PCL: ON
    #   DEPTHAI_BUILD_RTABMAP: ON
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-macos-latest-v3-develop
      - name: List .hunter cache directory
        run: |
          ls -a -l ~/.hunter/_Base/ || true
          echo "PATH=$PATH"
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: bindings/python/docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          brew install libusb
          python -m pip install delocate
      - name: Building wheels
        run: cd bindings/python && python -m pip wheel . -w ./wheelhouse/ --verbose
      - name: Auditing wheels
        run: cd bindings/python && ci/repair-whl-macos.sh `pwd`/wheelhouse/* `pwd`/wheelhouse/audited
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: bindings/python/wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: cd bindings/python && bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
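  # A rough local equivalent on macOS, assuming Homebrew and a matching Python are
  # installed; this sketch calls delocate-wheel directly instead of
  # ci/repair-whl-macos.sh (delocate is installed above and used the same way in the
  # commented-out arm64 job below):
  #
  #   brew install libusb
  #   python -m pip install delocate
  #   cd bindings/python
  #   python -m pip wheel . -w ./wheelhouse/ --verbose
  #   delocate-wheel -v -w wheelhouse/audited wheelhouse/*.whl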
  # # This job builds wheels for macOS arm64 arch
  # build-macos-arm64:
  #   needs: build-docstrings
  #   runs-on: [self-hosted, macOS, ARM64]
  #   steps:
  #     # Cached locally on runner
  #     # - name: Cache .hunter folder
  #     #   uses: actions/cache@v3
  #     #   with:
  #     #     path: ~/.hunter
  #     #     key: hunter-macos-latest
  #     - name: List .hunter cache directory
  #       run: |
  #         ls -a -l ~/.hunter/_Base/ || true
  #         echo "PATH=$PATH"
  #     - uses: actions/checkout@v3
  #       with:
  #         submodules: 'recursive'
  #     - name: Install dependencies
  #       run: |
  #         brew install opencv
  #     - uses: actions/download-artifact@v3
  #       with:
  #         name: 'docstrings'
  #         path: bindings/python/docstrings
  #     - name: Specify docstring to use while building the wheel
  #       run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
  #     - name: Append build hash if not a tagged commit
  #       if: startsWith(github.ref, 'refs/tags/v') != true
  #       run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
  #     # - name: Build and install depthai-core
  #     #   run: |
  #     #     echo "MACOSX_DEPLOYMENT_TARGET=11.0" >> $GITHUB_ENV
  #     #     cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
  #     #     cmake --build build_core --target install --parallel 4
  #     #     echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
  #     - name: Build wheels
  #       run: cd bindings/python && for PYBIN in {9..12}; do "python3.${PYBIN}" -m pip wheel . -w wheelhouse/ --verbose; done
  #     - name: Auditing wheels
  #       run: |
  #         cd bindings/python
  #         delocate-wheel -v -w wheelhouse/audited wheelhouse/*.whl
  #     - name: Archive wheel artifacts
  #       uses: actions/upload-artifact@v3
  #       with:
  #         name: audited-wheels
  #         path: bindings/python/wheelhouse/audited/
  #     - name: Deploy wheels to artifactory (if not a release)
  #       if: startsWith(github.ref, 'refs/tags/v') != true
  #       run: cd bindings/python && bash ./ci/upload-artifactory.sh
  #       env:
  #         ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
  #         ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
  #         ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  # This job builds wheels for x86_64 arch
  build-linux-x86_64:
    needs: build-docstrings
    runs-on: ubuntu-latest
    container:
      image: quay.io/pypa/manylinux_2_28_x86_64 # TODO(mmorato) temporary location, push to luxonis namespace
      env:
        PLAT: manylinux_2_28_x86_64
    strategy:
      matrix:
        python-set: ["7..9", "10..12"]
    env:
      # workaround required for cache@v3, https://github.com/actions/cache/issues/1428
      # to be removed when upgrading the manylinux image
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
      DEPTHAI_BUILD_BASALT: ON
      DEPTHAI_BUILD_PCL: ON
      DEPTHAI_BUILD_RTABMAP: ON
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-x86_64-v3-develop
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Installing libusb1-devel dependency
        run: yum install -y libusb1-devel perl-core curl zip unzip tar ninja-build
      - name: Installing cmake dependency
        run: |
          cd bindings/python
          /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake
          ln -s /opt/python/cp38-cp38/bin/cmake /bin/
      - name: Create folder structure
        run: cd bindings/python && mkdir -p wheelhouse/audited/
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: bindings/python/docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Build and install depthai-core
        run: |
          cmake -S . -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
          cmake --build build_core --target install --parallel 4
          echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Building source distribution
        run: |
          cd bindings/python
          /opt/python/cp38-cp38/bin/python3.8 setup.py sdist --formats=gztar
          mv dist/* wheelhouse/audited/
      - name: Build wheels
        run: cd bindings/python && for PYBIN in /opt/python/cp3{${{ matrix.python-set }}}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done
      - name: Audit wheels
        run: cd bindings/python && for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: bindings/python/wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: cd bindings/python && bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
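  # A rough local equivalent, assuming Docker is available. It runs the same manylinux
  # image and mirrors the cmake install plus wheel/auditwheel loop above for a single
  # interpreter (cp310 chosen as an example); the separate depthai-core pre-build and
  # the sdist step are skipped for brevity:
  #
  #   docker run --rm -v "$PWD":/work -w /work quay.io/pypa/manylinux_2_28_x86_64 bash -c '
  #     yum install -y libusb1-devel perl-core curl zip unzip tar ninja-build &&
  #     /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake &&
  #     ln -s /opt/python/cp38-cp38/bin/cmake /bin/ &&
  #     cd bindings/python &&
  #     /opt/python/cp310-cp310/bin/pip wheel . -w ./wheelhouse/ --verbose &&
  #     for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat manylinux_2_28_x86_64 -w wheelhouse/audited/; done'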
  # This job builds wheels for ARM64 arch
  build-linux-arm64:
    needs: build-docstrings
    runs-on: [self-hosted, linux, ARM64]
    timeout-minutes: 1440 # Set timeout to 24 hours
    container:
      image: quay.io/pypa/manylinux_2_28_aarch64
      env:
        PLAT: manylinux_2_28_aarch64
      # Mount the local hunter cache directory instead of transferring it to GitHub and back
      volumes:
        - /.hunter:/github/home/.hunter
    strategy:
      matrix:
        python-set: ["7..9", "10..12"]
    env:
      # workaround required for cache@v3, https://github.com/actions/cache/issues/1428
      # to be removed when upgrading the manylinux image
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
      DEPTHAI_VCPKG_CFLAGS: "-std=c99" # Needed so vcpkg can bootstrap itself with the old GCC on the manylinux image
      VCPKG_FORCE_SYSTEM_BINARIES: "1" # Needed so vcpkg can bootstrap itself
      VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"
      # DEPTHAI_BUILD_BASALT: ON
      # DEPTHAI_BUILD_PCL: ON
      # DEPTHAI_BUILD_RTABMAP: ON
    steps:
      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Installing libusb1-devel dependency
        run: yum install -y libusb1-devel perl-core curl zip unzip tar ninja-build
      - name: Installing cmake dependency
        run: |
          cd bindings/python
          /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake
          ln -s /opt/python/cp38-cp38/bin/cmake /bin/
      - name: Create folder structure
        run: cd bindings/python && mkdir -p wheelhouse/audited/
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: bindings/python/docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Build and install depthai-core
        run: |
          cmake -S . -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
          cmake --build build_core --target install --parallel 4
          echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Building wheels
        run: |
          cd bindings/python && for PYBIN in /opt/python/cp3{${{ matrix.python-set }}}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done
      - name: Auditing wheels
        run: cd bindings/python && for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: bindings/python/wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: cd bindings/python && bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  release:
    if: startsWith(github.ref, 'refs/tags/v')
    # needs: [pytest, build-linux-armhf, build-windows-x86_64, build-macos-x86_64, build-macos-arm64, build-linux-x86_64, build-linux-arm64]
    # needs: [pytest, build-windows-x86_64, build-macos, build-linux-x86_64, build-linux-arm64]
    needs: [build-windows-x86_64, build-macos, build-linux-x86_64, build-linux-arm64]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/setup-python@v4
        with:
          python-version: '3.8'
      - name: Check if version matches
        run: cd bindings/python && python3.8 -c 'import find_version as v; exit(0) if "${{ github.ref_name }}" == f"v{v.get_package_version()}" else exit(1)'
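  # For a tag such as v3.0.0 the check above passes only when
  # find_version.get_package_version() returns "3.0.0"; otherwise this job fails and
  # the deploy job below never runs. A rough local equivalent to inspect the value:
  #
  #   cd bindings/python && python3 -c 'import find_version as v; print(v.get_package_version())'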
  # Deploy to PyPi and Artifactory. Only when a commit is tagged
  deploy:
    if: startsWith(github.ref, 'refs/tags/v')
    needs: [release]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: audited-wheels
          path: bindings/python/wheelhouse/audited/
      - name: List files
        run: ls -lah
      - name: Run deploy to PyPi
        run: cd bindings/python && bash ./ci/upload-pypi.sh
        if: ${{!contains(github.ref, '-') && !contains(github.ref, 'alpha') && !contains(github.ref, 'beta') && !contains(github.ref, 'rc')}}
        env:
          PYPI_SERVER: ${{ secrets.PYPI_SERVER }}
          PYPI_USER: ${{ secrets.PYPI_USER }}
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
      - name: Run deploy to Artifactory
        run: cd bindings/python && bash ./ci/upload-artifactory-release.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  # notify_hil_workflow_linux_x86_64:
  #   needs: [build-linux-x86_64]
  #   runs-on: ubuntu-latest
  #   steps:
  #     - name: Dispatch an action and get the run ID
  #       uses: codex-/return-dispatch@v1
  #       id: return_dispatch
  #       with:
  #         token: ${{ secrets.HIL_CORE_DISPATCH_TOKEN }} # Note this is NOT GITHUB_TOKEN but a PAT
  #         ref: main # or refs/heads/target_branch
  #         repo: depthai-core-hil-tests
  #         owner: luxonis
  #         workflow: regression_test.yml
  #         workflow_inputs: '{"commit": "${{ github.ref }}", "parent_url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}'
  #         workflow_timeout_seconds: 300 # was 120 Default: 300
  #     - name: Release
  #       run: echo "https://github.com/luxonis/depthai-core-hil-tests/actions/runs/${{steps.return_dispatch.outputs.run_id}}" >> $GITHUB_STEP_SUMMARY