diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/ci_pr.yml similarity index 91% rename from .github/workflows/build_and_test.yml rename to .github/workflows/ci_pr.yml index cc509ca951..db0e990aef 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/ci_pr.yml @@ -1,8 +1,6 @@ -name: Docker Image CI +name: Build and Test CI for Pull Requests on: - push: - branches: [ 'master', 'develop' ] pull_request: branches: [ 'master', 'develop' ] @@ -23,6 +21,7 @@ jobs: --build-arg UBUNTU_VERSION=16.04 --build-arg ROS_VERSION=kinetic --build-arg PYTHON='' + --build-arg REMOTE=ghcr.io/nasa -t astrobee/astrobee:latest-ubuntu16.04 - name: Test code @@ -45,6 +44,7 @@ jobs: --build-arg UBUNTU_VERSION=18.04 --build-arg ROS_VERSION=melodic --build-arg PYTHON=3 + --build-arg REMOTE=ghcr.io/nasa -t astrobee/astrobee:latest-ubuntu18.04 - name: Test code @@ -67,6 +67,7 @@ jobs: --build-arg UBUNTU_VERSION=20.04 --build-arg ROS_VERSION=noetic --build-arg PYTHON=3 + --build-arg REMOTE=ghcr.io/nasa -t astrobee/astrobee:latest-ubuntu20.04 - name: Test code diff --git a/.github/workflows/ci_push.yml b/.github/workflows/ci_push.yml new file mode 100644 index 0000000000..0589c215c5 --- /dev/null +++ b/.github/workflows/ci_push.yml @@ -0,0 +1,100 @@ +name: Build, test and push packages CI + +on: + push: + branches: [ 'develop' ] + +jobs: + + build-xenial: + + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build code for Ubuntu 16 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=16.04 + --build-arg ROS_VERSION=kinetic + --build-arg PYTHON='' + --build-arg REMOTE=ghcr.io/nasa + -t ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu16.04 + + - name: Test code + run: docker build . -f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=16.04 + --build-arg REMOTE=ghcr.io/${{ github.repository_owner }} + -t astrobee:test-ubuntu16.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + if [ "${{ github.repository_owner }}" = "nasa" ]; then docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu16.04; fi; + + build-bionic: + + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build code for Ubuntu 18 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=18.04 + --build-arg ROS_VERSION=melodic + --build-arg PYTHON=3 + --build-arg REMOTE=ghcr.io/nasa + -t ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu18.04 + + - name: Test code + run: docker build . 
-f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=18.04 + --build-arg REMOTE=ghcr.io/${{ github.repository_owner }} + -t astrobee:test-ubuntu18.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + if [ "${{ github.repository_owner }}" = "nasa" ]; then docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu18.04; fi; + + build-focal: + + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build code for Ubuntu 20 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=20.04 + --build-arg ROS_VERSION=noetic + --build-arg PYTHON=3 + --build-arg REMOTE=ghcr.io/nasa + -t ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu20.04 + + - name: Test code + run: docker build . -f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=20.04 + --build-arg REMOTE=ghcr.io/${{ github.repository_owner }} + -t astrobee:test-ubuntu20.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + if [ "${{ github.repository_owner }}" = "nasa" ]; then docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-ubuntu20.04; fi; diff --git a/.github/workflows/ci_release.yml b/.github/workflows/ci_release.yml new file mode 100644 index 0000000000..449de2001a --- /dev/null +++ b/.github/workflows/ci_release.yml @@ -0,0 +1,127 @@ +name: Build, test and push packages CI + +on: + push: + branches: [ 'release' ] + +jobs: + + build-xenial: + + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build base for Ubuntu 16 + run: docker build . -f ./scripts/docker/astrobee_base.Dockerfile + --build-arg UBUNTU_VERSION=16.04 + --build-arg ROS_VERSION=kinetic + --build-arg PYTHON='' + -t astrobee/astrobee:latest-base-ubuntu16.04 + + - name: Build code for Ubuntu 16 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=16.04 + --build-arg ROS_VERSION=kinetic + --build-arg PYTHON='' + -t astrobee/astrobee:latest-ubuntu16.04 + + - name: Test code + run: docker build . -f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=16.04 + -t astrobee:test-ubuntu16.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + export VERSION=`grep -w -m 1 "Release" RELEASE.md | awk '{print $3}'` + docker tag astrobee/astrobee:latest-base-ubuntu16.04 ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu16.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu16.04 + docker tag astrobee/astrobee:latest-ubuntu16.04 ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu16.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu16.04 + + build-bionic: + + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build base for Ubuntu 18 + run: docker build . 
-f ./scripts/docker/astrobee_base.Dockerfile + --build-arg UBUNTU_VERSION=18.04 + --build-arg ROS_VERSION=melodic + --build-arg PYTHON=3 + -t astrobee/astrobee:latest-base-ubuntu18.04 + + - name: Build code for Ubuntu 18 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=18.04 + --build-arg ROS_VERSION=melodic + --build-arg PYTHON=3 + -t astrobee/astrobee:latest-ubuntu18.04 + + - name: Test code + run: docker build . -f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=18.04 + -t astrobee:test-ubuntu18.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + export VERSION=`grep -w -m 1 "Release" RELEASE.md | awk '{print $3}'` + docker tag astrobee/astrobee:latest-base-ubuntu18.04 ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu18.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu18.04 + docker tag astrobee/astrobee:latest-ubuntu18.04 ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu18.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu18.04 + + build-focal: + + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v2 + + - name: Checkout submodule + run: git submodule update --init --depth 1 description/media + + - name: Build base for Ubuntu 20 + run: docker build . -f ./scripts/docker/astrobee_base.Dockerfile + --build-arg UBUNTU_VERSION=20.04 + --build-arg ROS_VERSION=noetic + --build-arg PYTHON=3 + -t astrobee/astrobee:latest-base-ubuntu20.04 + + - name: Build code for Ubuntu 20 + run: docker build . -f ./scripts/docker/astrobee.Dockerfile + --build-arg UBUNTU_VERSION=20.04 + --build-arg ROS_VERSION=noetic + --build-arg PYTHON=3 + -t astrobee/astrobee:latest-ubuntu20.04 + + - name: Test code + run: docker build . -f ./scripts/docker/test_astrobee.Dockerfile + --build-arg UBUNTU_VERSION=20.04 + -t astrobee:test-ubuntu20.04 + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: Push Docker image + run: | + export VERSION=`grep -w -m 1 "Release" RELEASE.md | awk '{print $3}'` + docker tag astrobee/astrobee:latest-base-ubuntu20.04 ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu20.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:latest-base-ubuntu20.04 + docker tag astrobee/astrobee:latest-ubuntu20.04 ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu20.04 + docker push ghcr.io/${{ github.repository_owner }}/astrobee:v${VERSION}-ubuntu20.04 diff --git a/.gitignore b/.gitignore index c874f89b61..82f212e91b 100644 --- a/.gitignore +++ b/.gitignore @@ -70,7 +70,6 @@ arm_build_release arm_build_debug armhf build_native -data doc/html doc/latex diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index 7a4ce49830..0000000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,411 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License.
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -cmake_minimum_required(VERSION 3.0) -project(Astrobee) - -set(ASTROBEE_VERSION 0.15.2) - -# Define our options -option(USE_CCACHE - "Use ccache to speed up compiling, at the cost of optimization" - ON) -option(USE_CTC - "Use cross compile toolchain for making ARM binaries" - OFF) -option(USE_ROS - "Build the ROS-dependent functionality." - ON) -option(USE_DDS - "Build the DDS-dependent functionality." - ON) -option(USE_STATIC_LIBS - "Build using static libraries. Will use lots of drive space." - OFF) -option(TEST_COVERAGE - "Build the code with code coverage options. Not compatible with USE_CTC." - OFF) -option(USE_DRIVERS - "Build the tools in the drivers directory." - ON) -option(IS_BAMBOO_BUILD - "The code is being built under the bamboo CI system" - OFF) -option(ENABLE_GPROF - "Enable compling with support for profiling wih gprof (the GNU Profiler)." - OFF) -option(ENABLE_GOOGLE_PROF - "Enable support for profiling wih pprof (the Google Profiler)." - OFF) -option(ENABLE_QP - "Enable support for the QP planner." - ON) -option(ENABLE_PICOFLEXX - "Enable support for building the PicoFlexx driver" - ON) -option(ENABLE_GAZEBO - "Enable support for building the Gazebo simulator" - ON) -option(ENABLE_VIVE - "Enable support for building the Vive drivers" - ON) -option(ENABLE_VIVE_SOLVER - "Enable support for building the Vive offline solver" - ON) -option(ENABLE_INTEGRATION_TESTING - "Build the integration tests if tests are active." - ON) -option(BUILD_LOC_RVIZ_PLUGINS - "Build the localization rviz plugins." - ON) - - -if ( "${CMAKE_VERSION}" VERSION_GREATER 3.0.0 ) - cmake_policy(SET CMP0045 OLD) - cmake_policy(SET CMP0046 OLD) -endif() - -set(ROS_FOUND FALSE) -if(DEFINED ENV{ROS_DISTRO}) - set(ROS_DISTRO $ENV{ROS_DISTRO}) - set(ROS_FOUND TRUE) -else() - message("ROS distro variable not set. Trying to figure it out...") - set(AVAILABLE_ROS_VERSIONS "melodic;kinetic;noetic") - set(ROS_FOUND FALSE) - foreach(version ${AVAILABLE_ROS_VERSIONS}) - if(NOT ROS_FOUND) - find_path(ROS_H ros.h PATHS /opt/ros/${version}/include/ros) - if(ROS_H) - set(ROS_DISTRO ${version}) - message("Found ros version ${ROS_DISTRO}") - set(ROS_FOUND TRUE) - endif() - endif() - endforeach() -endif() - -# Verify the user has the pre-commit hook -execute_process( - COMMAND cp scripts/git/pre-commit .git/hooks - WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} - ) - -# Let cmake know of our additional scripts -list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") -list(APPEND CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/cmake") - -if (USE_CTC) - list(APPEND CMAKE_PREFIX_PATH "${ARM_CHROOT_DIR}/opt/ros/${ROS_DISTRO}") -else (USE_CTC) - list(APPEND CMAKE_PREFIX_PATH "/opt/ros/${ROS_DISTRO}") -endif (USE_CTC) - -# Enable RPATHs -set(CMAKE_SKIP_BUILD_RPATH FALSE) -set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) - -# The following works for then the binaries are not installed, but are -# ran from the build directory. 
-list(APPEND CMAKE_INSTALL_RPATH "$ORIGIN/../lib") -list(APPEND CMAKE_INSTALL_RPATH "/opt/ros/${ROS_DISTRO}/lib") - -# The following RPATHs work for when the binaries are installed -if (USE_CTC) - list(APPEND CMAKE_INSTALL_RPATH "/opt/rti/ndds/lib/$ENV{NDDSARCH}") - set(CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE) -else (USE_CTC) - set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) - list(APPEND CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib") -endif (USE_CTC) - -# If the user selected static libs .. set BUILD_SHARED_LIBS accordingly -if(USE_STATIC_LIBS) - set(BUILD_SHARED_LIBS OFF) -else(USE_STATIC_LIBS) - set(BUILD_SHARED_LIBS ON) -endif(USE_STATIC_LIBS) - -# Check that the compiler supports C++14 -include(CheckCXXCompilerFlag) -include(CheckCXXSourceCompiles) -CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) -CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) -if(COMPILER_SUPPORTS_CXX14) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14") -elseif(COMPILER_SUPPORTS_CXX0X) - SET(LAMBDA_SRC "int main(int, char*[]) {int r = [] (int x) {return x;}(0); return r;}") - CHECK_CXX_SOURCE_COMPILES("${LAMBDA_SRC}" COMPILER_SUPPORTS_LAMBDAS) - if (NOT COMPILER_SUPPORTS_LAMBDAS) - message(SEND_ERROR "C++0x compiler doesn't support lambda functions.") - else() - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") - endif() -else() - message(SEND_ERROR "Couldn't find compiler which supports C++14 or C++0X.") -endif() - -if (TEST_COVERAGE) - find_program(GCOV_PATH gcov) - if (NOT GCOV_PATH) - message(SEND_ERROR "Couldn't find gcov, but test coverage was opted for.") - endif() -endif() - -if (ENABLE_GPROF) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") -endif() - -# if we're compiling for native use ccache to speed things up -if (NOT USE_CTC) - if (USE_CCACHE) - find_program(CCACHE_EXECUTABLE ccache) - mark_as_advanced(CCACHE_EXECUTABLE) - if(CCACHE_EXECUTABLE) - foreach(LANG C CXX) - message(STATUS "Enabling ccache for ${LANG}") - set(CMAKE_${LANG}_COMPILER_LAUNCHER ${CCACHE_EXECUTABLE} CACHE STRING "") - endforeach() - endif() - endif (USE_CCACHE) -endif (NOT USE_CTC) - -# Enable OpenMP if we have it. (Ceres will use it .. maybe more of our -# products will use it later). 
-find_package(OpenMP) -if (OPENMP_FOUND) - set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") - set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") -endif(OPENMP_FOUND) - -find_package(Glog REQUIRED) -find_package(Gflags REQUIRED) -ADD_DEFINITIONS(-DFREEFLYER_GFLAGS_NAMESPACE=${GFLAGS_NAMESPACE}) -enable_testing() - -if (USE_CTC) - find_program(LSB_RELEASE_EXEC lsb_release) - execute_process(COMMAND "${LSB_RELEASE_EXEC}" --short --codename OUTPUT_VARIABLE LSB_RELEASE_VERSION_SHORT OUTPUT_STRIP_TRAILING_WHITESPACE) - if("${LSB_RELEASE_VERSION_SHORT}" STREQUAL "bionic") - set(Protobuf_PROTOC_EXECUTABLE /usr/local/bin/protoc) - endif() -endif (USE_CTC) - -find_package(Protobuf REQUIRED) -if (NOT PROTOBUF_PROTOC_EXECUTABLE) - message(FATAL_ERROR "Could not find system's protoc executable") -endif (NOT PROTOBUF_PROTOC_EXECUTABLE) - -find_package(Ceres REQUIRED) - -find_package(Boost 1.54.0 QUIET REQUIRED COMPONENTS filesystem system iostreams thread program_options timer) -find_package(Eigen3 REQUIRED) -find_package(ZLIB) -find_package(Luajit20 REQUIRED) - -# Find OpenCV and fix a 3.3.1 bug -find_package(OpenCV 3 REQUIRED) -if (${OpenCV_VERSION} MATCHES "3.3.1") - foreach(__cvcomponent ${OpenCV_LIB_COMPONENTS}) - set (__original_cvcomponent ${__cvcomponent}) - if(NOT __cvcomponent MATCHES "^opencv_") - set(__cvcomponent opencv_${__cvcomponent}) - endif() - if (TARGET ${__cvcomponent}) - set_target_properties(${__cvcomponent} PROPERTIES - MAP_IMPORTED_CONFIG_DEBUG "" - MAP_IMPORTED_CONFIG_RELEASE "" - MAP_IMPORTED_CONFIG_RELWITHDEBINFO "" - MAP_IMPORTED_CONFIG_MINSIZEREL "" - ) - endif() - endforeach(__cvcomponent) -endif() -if (USE_CTC) - foreach(__cvcomponent ${OpenCV_LIB_COMPONENTS}) - set(OpenCV_LIBS ${OpenCV_LIBS} "${OpenCV_INSTALL_PATH}/lib/arm-linux-gnueabihf/lib${__cvcomponent}3.so") - endforeach(__cvcomponent) -endif (USE_CTC) -set(OpenCV_LIBRARIES ${OpenCV_LIBS}) - -find_package(dbow2 REQUIRED) -find_package(Alvar REQUIRED) -find_package(GTSAM REQUIRED) -find_package(OpenMVG QUIET REQUIRED) -find_package(JsonCpp REQUIRED) -find_package(FFMPEG QUIET) # Optional -if (ENABLE_PICOFLEXX) - find_package(royale REQUIRED) -endif (ENABLE_PICOFLEXX) -# Find pkg-config supported libraries -if (ENABLE_VIVE) - find_package(USB REQUIRED) - find_package(JSONC REQUIRED) - find_package(ARGTABLE2 REQUIRED) - find_package(ZLIB REQUIRED) -endif (ENABLE_VIVE) - - -if (NOT USE_CTC) # pkg-config needs extra work to be used for cross compile - pkg_check_modules(YAMLCPP yaml-cpp) - if (YAMLCPP_FOUND) - set (YAMLCPP_INCLUDE_DIRS ${YAMLCPP_INCLUDEDIR}) - endif (YAMLCPP_FOUND) -endif (NOT USE_CTC) - -if(ENABLE_GOOGLE_PROF) - SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-as-needed -lprofiler -Wl,--as-needed") - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DDEBUG") -endif() - -if(USE_CTC) - set(BUILD_LOC_RVIZ_PLUGINS OFF) -endif() - -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wno-unused-local-typedefs -Wno-packed-bitfield-compat -std=c++14") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-unused-local-typedefs -Wno-packed-bitfield-compat -std=c++14") -set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-undefined") -set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-undefined") - -# Set Architecture Specific Instructions -if (USE_CTC) - # Cross Compiling, armv7-a == Cortex A5, A7, A8 (Beagle Bone), A9 - # (Odroid), A12, A15 (Snapdragon-ish), A17 - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv7-a -mtune=cortex-a9 -mfpu=neon 
-I${ARM_CHROOT_DIR}/usr/include/arm-linux-gnueabihf") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv7-a -mtune=cortex-a9 -mfpu=neon -I${ARM_CHROOT_DIR}/usr/include/arm-linux-gnueabihf") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-declarations -Wno-unused-variable") -else (USE_CTC) - if (NOT IS_BAMBOO_BUILD) - if ((NOT USE_CCACHE) OR (NOT CCACHE_EXECUTABLE)) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=native -mtune=native") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native -mtune=native") - endif () - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mno-avx") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mno-avx") - endif (NOT IS_BAMBOO_BUILD) -endif (USE_CTC) - -# Bring in catkin -if (USE_CTC) - # Catkin brings in GTest as a dependency. We don't want it because - # we have our own ... on top of that it doesn't build correctly for - # ARM. Our own unit tests however will continue to - # work. (non-catkin) - set(CATKIN_SKIP_TESTING ON) -endif (USE_CTC) -if (USE_ROS) - set(ENV{ROS_LANG_DISABLE} "genlisp:gennodejs:geneus:$ENV{ROS_LANG_DISABLE}") - - if (USE_CTC) - find_package(catkin2 COMPONENTS roscpp message_generation std_msgs geometry_msgs sensor_msgs cv_bridge image_transport tf tf2 tf2_ros rosbag nodelet) - else () - find_package(catkin2 COMPONENTS roscpp message_generation std_msgs geometry_msgs sensor_msgs cv_bridge image_transport tf tf2 tf2_ros rosbag nodelet rviz) - endif (USE_CTC) - - find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Core Widgets) - - if (IS_BAMBOO_BUILD) - include(BambooFix) - endif() - - find_package(PCL REQUIRED COMPONENTS common) - find_package(Octomap MODULE REQUIRED) - #pkg_check_modules(OROCOS_KDL REQUIRED orocos_kdl) - if (USE_CTC) - set(OROCOS_KDL_LIBRARIES ${ARM_CHROOT_DIR}/opt/ros/${ROS_DISTRO}/lib/liborocos-kdl.so) - endif (USE_CTC) - set(catkin_FOUND ${catkin2_FOUND}) -endif(USE_ROS) - -if (USE_DDS) - list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/soracore") - - if (USE_CTC) - set(SORACORE_ROOT_DIR ${ARM_CHROOT_DIR}/usr) - else (USE_CTC) - set(SORACORE_ROOT_DIR /usr) - endif (USE_CTC) - - set(MIRO_ROOT_DIR ${SORACORE_ROOT_DIR}) - - find_package(QtXml REQUIRED) - find_package(Miro REQUIRED) - find_package(RtiDds REQUIRED) - find_package(Soracore REQUIRED) -endif(USE_DDS) - -# Include Our CMake Functions -include(CreateLibrary) -include(CreateMsgTargets) -include(CreateRosTestTargets) -include(CreateTestTargets) -include(CreateToolTargets) -include(ExternalProjects) -include(FetchContent) -include(InstallLaunchFiles) - -add_subdirectory(external) - -if (TEST_COVERAGE) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g --coverage") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g --coverage") - set(CMAKE_LIBS_COVERAGE "gcov") -endif() - -set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) -set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) -set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) - -# install the version.txt file when install target is run -add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/version.txt - COMMAND ${CMAKE_COMMAND} - -D OUTPUT_FILE=${CMAKE_CURRENT_BINARY_DIR}/version.txt - -D ASTROBEE_VERSION=${ASTROBEE_VERSION} - -D GIT_MODULE_FILE="${CMAKE_SOURCE_DIR}/cmake/GetGitRevisionDescription.cmake" - -P ${CMAKE_SOURCE_DIR}/cmake/SetVersion.cmake) -add_custom_target(version DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/version.txt) -install(CODE "execute_process(COMMAND \"${CMAKE_COMMAND}\" --build \"${CMAKE_CURRENT_BINARY_DIR}\" --target version)") -install(FILES ${CMAKE_CURRENT_BINARY_DIR}/version.txt DESTINATION 
${CMAKE_INSTALL_PREFIX} ) - -# install the env_wrapper.sh for running on astrobee -install(PROGRAMS scripts/deploy/env_wrapper.sh DESTINATION ${CMAKE_INSTALL_PREFIX} ) - -if (USE_ROS) - add_subdirectory(tools) - add_subdirectory(astrobee) - add_subdirectory(communications) - add_subdirectory(gnc) - add_subdirectory(mobility) - add_subdirectory(behaviors) - add_subdirectory(management) - add_subdirectory(description) - if (NOT USE_CTC AND ENABLE_GAZEBO) - add_subdirectory(simulation) - endif (NOT USE_CTC AND ENABLE_GAZEBO) -endif (USE_ROS) - -if (USE_DRIVERS) - add_subdirectory(hardware) -endif (USE_DRIVERS) - -add_subdirectory(shared) -add_subdirectory(localization) - -if (USE_DDS) - add_subdirectory(wdock) -endif (USE_DDS) - diff --git a/INSTALL.md b/INSTALL.md index 220b5234f9..1d7607cab1 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -1,17 +1,20 @@ # Install -## Quick Start using the Astrobee docker image +## Quick start using the Astrobee Docker image -If you just want to try out the astrobee simulator, you can use one of the docker images in the [Docker Hub](https://hub.docker.com/r/astrobee/astrobee). +*The following has been tested on native (non-VM) Ubuntu systems using X11 (the default). Please see [these ROS pages](http://wiki.ros.org/docker/Tutorials#Tooling_with_Docker) for more resources.* -Given that Docker (see note) is installed: +If you just want to try out the Astrobee simulator, you can use one of the Docker images in the [GitHub Container Registry](https://github.com/nasa/astrobee/pkgs/container/astrobee). - git clone https://github.com/nasa/astrobee.git - cd astrobee - ./scripts/docker/run.sh +Make sure you have Docker installed on your Ubuntu system following the [installation instructions](https://docs.docker.com/engine/install/ubuntu/) and [post-installation steps for Linux](https://docs.docker.com/engine/install/linux-postinstall/). -*Note: Be aware that this only works on a native ubuntu install.* -*Note: Make sure you have docker installed in your ubuntu system following the [installation instructions](https://docs.docker.com/engine/install/ubuntu/) and [post-installation steps for Linux](https://docs.docker.com/engine/install/linux-postinstall/).* +For some systems (with discrete graphics cards), you may need to install [additional software](http://wiki.ros.org/docker/Tutorials/Hardware%20Acceleration). + +``` bash +git clone https://github.com/nasa/astrobee.git +cd astrobee +./scripts/docker/run.sh --remote +``` ## Building the code natively @@ -32,7 +35,7 @@ If you are a NASA user and want to install the cross-compiler for robot testing ## Using Docker -Docker build is also available for Ubuntu 16 and Ubuntu 18, +Docker builds are available for Ubuntu 16.04, 18.04 and 20.04. Instructions on how to build the images natively and run the containers are in: diff --git a/README.md b/README.md index 5f55dc4d68..747c431e9b 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ signaling, and sound. The flight software is hosted on each Astrobee's three int uses the open-source [Robot Operating System (ROS)](https://www.ros.org/) framework as message-passing middleware. It provides a high-level [Astrobee Command API](https://nasa.github.io/astrobee/html/command_dictionary.html) for controlling the robot and has multiple operating modes. It can execute a plan (command sequence), individual operator commands (teleoperation), or commands from guest science code running onboard Astrobee.
-The Astrobee Robot Software simulator, built using [ROS](https://www.ros.org/) and [Gazebo](http://gazebosim.org/), enables the flight software to be evaluated without the need for robot hardware. The supporting tools include a tool that processes ISS imagery to [build maps for Astrobee localization](https://github.com/nasa/astrobee/tree/master/localization/sparse_mapping), along with many others. +The Astrobee Robot Software simulator, built using [ROS](https://www.ros.org/) and [Gazebo](http://gazebosim.org/), enables the flight software to be evaluated without the need for robot hardware. The supporting tools include a tool that processes ISS imagery to [build maps for Astrobee localization](https://nasa.github.io/astrobee/html/sparsemapping.html), along with many others. Released separately, the Astrobee ground data system (GDS) includes Astrobee control station software that communicates with Astrobee flight software via the Data Distribution Service (DDS) network protocol over the ISS Ku-IP space-to-ground link. The control station is written primarily in Java using the Eclipse RCP framework. Source code is in the [`astrobee_gds`](https://github.com/nasa/astrobee_gds) repository, or you can download the [binary release](https://software.nasa.gov/software/ARC-17994-1B). @@ -31,7 +31,7 @@ The Astrobee Robot Software remains a work in progress. Please consult the ### Contributing The Astrobee Robot Software is open source, and we welcome contributions -from the public. Please submit pull requests to the [`develop`](https://github.com/nasa/astrobee/tree/develop) branch. +from the public. Please submit pull requests to the [`develop`](https://github.com/nasa/astrobee/tree/develop) branch. The code must follow the [Astrobee code style](https://nasa.github.io/astrobee/html/astrobee-code-style.html). For us to merge any pull requests, we must request that contributors sign and submit a [Contributor License Agreement](https://www.nasa.gov/sites/default/files/atoms/files/astrobee_individual_contributor_license_agreement.pdf) due to NASA legal requirements. Thank you for your understanding. diff --git a/RELEASE.md b/RELEASE.md index ceec6c1aaa..6df6b794e7 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,5 +1,13 @@ # Releases +## Release 0.16.0 + + * project compiles with catkin + * mapper performance improvement + * imu_augmentor performance improvement + * calibration improvements + * multiple other fixes + ## Release 0.15.2 * added python linters black and isort in the CI pipeline diff --git a/astrobee.doxyfile b/astrobee.doxyfile index 4d830446a7..0876902677 100644 --- a/astrobee.doxyfile +++ b/astrobee.doxyfile @@ -39,7 +39,7 @@ PROJECT_NAME = "NASA Astrobee Robot Software" # control system is used. -PROJECT_NUMBER = 0.15.2 +PROJECT_NUMBER = 0.16.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/astrobee/CMakeLists.txt b/astrobee/CMakeLists.txt index 2d072feb1e..bb6271bb59 100644 --- a/astrobee/CMakeLists.txt +++ b/astrobee/CMakeLists.txt @@ -15,20 +15,57 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(astrobee) +set(ASTROBEE_VERSION 0.16.0) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +# Verify the user has the pre-commit hook +execute_process( + COMMAND cp ../scripts/git/pre-commit ../.git/hooks + WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} + ) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp +) + catkin_package( CATKIN_DEPENDS roscpp ) +############# +## Install ## +############# + install(DIRECTORY config/ DESTINATION config) # resources will be installed separately # install(DIRECTORY resources/ DESTINATION resources) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) install(PROGRAMS scripts/apk_print_version.sh scripts/check_env.sh scripts/print_version.sh scripts/cpu_print_version.sh DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) + +# install the version.txt file when install target is run +add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/version.txt + COMMAND ${CMAKE_COMMAND} + -D OUTPUT_FILE=${CMAKE_CURRENT_BINARY_DIR}/version.txt + -D ASTROBEE_VERSION=${ASTROBEE_VERSION} + -D GIT_MODULE_FILE="${CMAKE_SOURCE_DIR}/../cmake/GetGitRevisionDescription.cmake" + -P ${CMAKE_SOURCE_DIR}/../cmake/SetVersion.cmake) +add_custom_target(version DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/version.txt) +install(CODE "execute_process(COMMAND \"${CMAKE_COMMAND}\" --build \"${CMAKE_CURRENT_BINARY_DIR}\" --target version)") +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/version.txt DESTINATION ${CMAKE_INSTALL_PREFIX} ) + +install(PROGRAMS scripts/env_wrapper.sh DESTINATION ${CMAKE_INSTALL_PREFIX} ) \ No newline at end of file diff --git a/astrobee/config/mobility/mapper.config b/astrobee/config/mobility/mapper.config index 02e194d72a..cbe6cd85ba 100644 --- a/astrobee/config/mobility/mapper.config +++ b/astrobee/config/mobility/mapper.config @@ -183,12 +183,12 @@ parameters = { unit = "hertz", description = "Frequency at which the collision checker runs." },{ - id = "tf_update_rate", + id = "octomap_update_rate", reconfigurable = true, type = "double", - default = 10, - min = 1, - max = 100, + default = 1, + min = 0.1, + max = 5, unit = "hertz", description = "Frequency at which the tf listeners update tf data." 
},{ diff --git a/astrobee/config/robots/bumble.config b/astrobee/config/robots/bumble.config index 9e31033f94..c1f73031e8 100644 --- a/astrobee/config/robots/bumble.config +++ b/astrobee/config/robots/bumble.config @@ -25,14 +25,14 @@ robot_i2c_bus = "/dev/i2c-1" robot_imu_drdy_pin = 4 robot_geometry = { - hazcam_to_navcam_transform = transform(vec3(0.070656217, -0.0053120391, -0.0086391367), quat4(-0.0033938631, 0.011939978, 0.99990009, -0.0067619677)), - scicam_to_hazcam_transform = transform(vec3(0.043949838, -0.026925687, -0.14459291), quat4(-0.009399956, 0.015955961, 0.99982772, -0.0012581665)), + hazcam_to_navcam_transform = transform(vec3(0.07142937, 0.00058221217, -0.001373669), quat4(-0.0030431141, 0.0092646368, 0.99993195, -0.0064039206)), + scicam_to_hazcam_transform = transform(vec3(-0.0052887445, 0.010298013, -0.043606689), quat4(0.0018545621, 0.012796392, 0.99991616, -0.00069204825)), navcam_to_hazcam_timestamp_offset = -0.02, scicam_to_hazcam_timestamp_offset = 0.5, hazcam_depth_to_image_transform = { - 0.91122714, -0.00020224138, -0.00066071236, 0.00091477566, - 0.00020680677, 0.91120557, 0.0063030044, -0.0072143461, - 0.00065929762, -0.0063031525, 0.91120536, -0.0035329091, + 0.91602851, -0.00031586647, -0.0028485861, 0.0029767338, + 0.00032189197, 0.91603089, 0.0019373744, -0.0020741879, + 0.0028479115, -0.0019383659, 0.91602652, -0.0042296964, 0, 0, 0, 1}, -- Engineering positions with idealized orientations @@ -87,12 +87,11 @@ robot_camera_calibrations = { gain=50, exposure=150 }, --- Placeholder for sci_cam, not accurate! sci_cam = { - distortion_coeff = {0.10733913, -0.10935864, 0.0010663099, 0.0010407278}, + distortion_coeff = {0.029948958, -0.056854916, -0.00077885482, -0.0039909885}, intrinsic_matrix = { - 1138.4943, 0.0, 680.36447, - 0.0, 1138.4943, 534.00133, + 1048.499, 0.0, 705.22914, + 0.0, 1048.499, 511.03079, 0.0, 0.0, 1.0 }, gain=50, diff --git a/astrobee/config/robots/queen.config b/astrobee/config/robots/queen.config index 3ffe24c21b..fe5953c911 100644 --- a/astrobee/config/robots/queen.config +++ b/astrobee/config/robots/queen.config @@ -48,8 +48,8 @@ robot_camera_calibrations = { 0.0, 602.67924, 509.73835, 0.0, 0.0, 1.0 }, - gain=100, - exposure=150 + gain=50, + exposure=175 }, dock_cam = { distortion_coeff = 1.01007, diff --git a/astrobee/config/tools/camera_target_based_intrinsics_calibrator.config b/astrobee/config/tools/camera_target_based_intrinsics_calibrator.config new file mode 100644 index 0000000000..0bed02b753 --- /dev/null +++ b/astrobee/config/tools/camera_target_based_intrinsics_calibrator.config @@ -0,0 +1,70 @@ +-- Copyright (c) 2017, United States Government, as represented by the +-- Administrator of the National Aeronautics and Space Administration. +-- +-- All rights reserved. +-- +-- The Astrobee platform is licensed under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with the +-- License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +-- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +-- License for the specific language governing permissions and limitations +-- under the License. 
+ +require "context" + +camera = "nav_cam" + +calibrate_focal_lengths = true +calibrate_principal_points = true +calibrate_distortion = true +calibrate_target_poses = false + +-- fov, rad, or radtan +distortion_type = "fov" + +-- Calibration optimizer params +calibrator_max_num_iterations = 100 +calibrator_function_tolerance = 1e-8 +calibrator_parameter_tolerance = 1e-8 +calibrator_verbose_optimization = true +calibrator_huber_loss = 1.345 +-- dense_qr, dense_schur, sparse_normal_cholesky, sparse_schur, iterative_schur +-- Favor dense_qr or dense_schur for smaller/medium sized problems +-- Favor iterative schur for large problems +calibrator_linear_solver = "dense_qr" +calibrator_use_explicit_schur_complement = false +scale_loss_radially = false +radial_scale_power = 1 +-- ReprojectionPoseEstimate optimizer params +reprojection_optimize_estimate = true +reprojection_max_num_iterations = 100 +reprojection_function_tolerance = 1e-8 +reprojection_parameter_tolerance = 1e-8 +reprojection_verbose_optimization = true +reprojection_huber_loss = 1.345 +reprojection_linear_solver = "dense_qr" +reprojection_use_explicit_schur_complement = false +reprojection_max_inlier_threshold = 3.0 +-- RansacPnP params +ransac_max_inlier_threshold = 7.0 +ransac_num_iterations = 100 +ransac_min_num_inliers = 4 +-- p3p, epnp, ap3p, or it +ransac_pnp_method = "p3p" +-- Data Loading Params +max_num_match_sets = 10000 +min_num_target_inliers = 4 +only_use_inliers = false + +-- Other +image_width = 1280 +image_height = 960 +max_visualization_error_norm = 50 +save_individual_initial_reprojection_images = false +save_final_reprojection_image = true +individual_max_visualization_error_norm = 50 diff --git a/astrobee/config/worlds/granite.config b/astrobee/config/worlds/granite.config index c5f43bf03a..83e9ad118c 100755 --- a/astrobee/config/worlds/granite.config +++ b/astrobee/config/worlds/granite.config @@ -110,7 +110,7 @@ world_flight_modes = { speed = 0; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) @@ -152,7 +152,7 @@ world_flight_modes = { speed = 2; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.2; -- Position (20 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.3491; -- Attitude (20 degrees) @@ -194,7 +194,7 @@ world_flight_modes = { speed = 3; -- Tolerances - tolerance_pos_endpoint = 0.1; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.2; -- Position (20 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.3490; -- Attitude (20 degrees) @@ -236,7 +236,7 @@ world_flight_modes = { speed = 1; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) @@ -277,7 +277,7 @@ world_flight_modes = { speed = 3; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) diff --git 
a/astrobee/config/worlds/iss.config b/astrobee/config/worlds/iss.config index 38a9dbf6d9..7f472e08d4 100755 --- a/astrobee/config/worlds/iss.config +++ b/astrobee/config/worlds/iss.config @@ -120,7 +120,7 @@ world_flight_modes = { speed = 0; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) @@ -161,7 +161,7 @@ world_flight_modes = { speed = 2; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.25; -- Position (25 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.3491; -- Attitude (20 degrees) @@ -202,7 +202,7 @@ world_flight_modes = { speed = 3; -- Tolerances - tolerance_pos_endpoint = 0.1; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.2; -- Position (20 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.3490; -- Attitude (20 degrees) @@ -243,7 +243,7 @@ world_flight_modes = { speed = 1; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) @@ -284,7 +284,7 @@ world_flight_modes = { speed = 3; -- Tolerances - tolerance_pos_endpoint = 0.05; -- Endpoint position (5 cm) + tolerance_pos_endpoint = 0.1; -- Endpoint position (10 cm) tolerance_pos = 0.1; -- Position (10 cm) tolerance_vel = 0; -- Velocity (disabled) tolerance_att = 0.1745; -- Attitude (10 degrees) diff --git a/astrobee/package.xml b/astrobee/package.xml index d4f7e90b5c..5549996fc5 100644 --- a/astrobee/package.xml +++ b/astrobee/package.xml @@ -14,9 +14,7 @@ Astrobee Flight Software catkin - ff_msgs roscpp - ff_msgs roscpp diff --git a/scripts/deploy/env_wrapper.sh b/astrobee/scripts/env_wrapper.sh similarity index 100% rename from scripts/deploy/env_wrapper.sh rename to astrobee/scripts/env_wrapper.sh diff --git a/behaviors/CMakeLists.txt b/behaviors/CMakeLists.txt deleted file mode 100644 index 41b13472bc..0000000000 --- a/behaviors/CMakeLists.txt +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -add_subdirectory(arm) -add_subdirectory(dock) -add_subdirectory(states) -add_subdirectory(light_flow) -add_subdirectory(perch) - - diff --git a/behaviors/arm/CMakeLists.txt b/behaviors/arm/CMakeLists.txt index ea201b11fc..179a5bb305 100644 --- a/behaviors/arm/CMakeLists.txt +++ b/behaviors/arm/CMakeLists.txt @@ -15,8 +15,22 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(arm) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + std_msgs + nodelet + ff_msgs + ff_hw_msgs + ff_util +) + catkin_package( LIBRARIES arm @@ -27,33 +41,59 @@ catkin_package( actionlib sensor_msgs ff_msgs + ff_hw_msgs ff_util ) -create_library( - TARGET - arm - LIBS - ${catkin_LIBRARIES} - ff_nodelet - config_server - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR - tools - LIBS - ${catkin_LIBRARIES} - ff_common - ff_nodelet - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs +# Declare C++ libraries +add_library(arm + src/arm_nodelet.cc ) +add_dependencies(arm ${catkin_EXPORTED_TARGETS}) +target_link_libraries(arm ${catkin_LIBRARIES}) + +## Declare a C++ executable: arm_tool +add_executable(arm_tool tools/arm_tool.cc) +add_dependencies(arm_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(arm_tool + arm gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS arm_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/arm_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/behaviors/arm/package.xml b/behaviors/arm/package.xml index cdb1f13dc2..bbd2b0aade 100644 --- a/behaviors/arm/package.xml +++ b/behaviors/arm/package.xml @@ -20,6 +20,7 @@ pluginlib sensor_msgs ff_msgs + ff_hw_msgs ff_util roscpp nodelet @@ -27,6 +28,7 @@ actionlib sensor_msgs ff_msgs + ff_hw_msgs ff_util diff --git a/behaviors/dock/CMakeLists.txt b/behaviors/dock/CMakeLists.txt index 41afe25811..dbb2aa8c96 100644 --- a/behaviors/dock/CMakeLists.txt +++ b/behaviors/dock/CMakeLists.txt @@ -15,8 +15,24 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(dock) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + geometry_msgs + tf2 + tf2_ros + tf2_geometry_msgs + ff_hw_msgs + ff_msgs + ff_util +) + catkin_package( LIBRARIES dock @@ -31,34 +47,55 @@ catkin_package( ff_util ) -create_library( - TARGET - dock - LIBS - ${catkin_LIBRARIES} - ff_nodelet - config_reader - msg_conversions - config_server - config_client - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs - ff_hw_msgs +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR - tools - LIBS - ${catkin_LIBRARIES} - ff_common - ff_nodelet - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs +# Declare C++ libraries +add_library(dock + src/dock_nodelet.cc ) +add_dependencies(dock ${catkin_EXPORTED_TARGETS}) +target_link_libraries(dock ${catkin_LIBRARIES}) + +## Declare a C++ executable: dock_tool +add_executable(dock_tool tools/dock_tool.cc) +add_dependencies(dock_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(dock_tool + dock gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS dock_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/dock_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/behaviors/light_flow/CMakeLists.txt b/behaviors/light_flow/CMakeLists.txt index 7f4a6b69f5..d5f12052ac 100644 --- a/behaviors/light_flow/CMakeLists.txt +++ b/behaviors/light_flow/CMakeLists.txt @@ -15,8 +15,21 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(light_flow) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_hw_msgs + ff_msgs + nodelet + ff_util +) + catkin_package( LIBRARIES light_flow CATKIN_DEPENDS roscpp ff_msgs ff_hw_msgs pluginlib ) -create_library( - DIR src/light_flow - TARGET light_flow - DEPS - ff_msgs - ff_hw_msgs - LIBS - ${catkin_LIBRARIES} - ff_nodelet - ${JSONCPP_LIBRARIES} - INC - ${catkin_INCLUDE_DIRS} - ${JSONCPP_INCLUDE_DIRS} +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR - tools - LIBS - ${catkin_LIBRARIES} - ff_nodelet - ${JSONCPP_LIBRARIES} - light_flow - INC - ${catkin_INCLUDE_DIRS} - ${JSONCPP_INCLUDE_DIRS} - DEPS - ff_msgs - ff_hw_msgs - light_flow +# Declare C++ libraries +add_library(light_flow + src/light_flow/light_flow.cc ) +add_dependencies(light_flow ${catkin_EXPORTED_TARGETS}) +target_link_libraries(light_flow ${catkin_LIBRARIES}) -create_library( - DIR src/light_flow_nodelet - TARGET light_flow_nodelet - LIBS - ${catkin_LIBRARIES} - ${JSONCPP_LIBRARIES} - config_reader - ff_nodelet - light_flow - INC - ${catkin_INCLUDE_DIRS} - ${JSONCPP_INCLUDE_DIRS} - DEPS - ff_msgs - ff_hw_msgs - light_flow +# Declare C++ libraries +add_library(light_flow_nodelet + src/light_flow_nodelet/light_flow_nodelet.cc ) +add_dependencies(light_flow_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(light_flow_nodelet light_flow jsoncpp ${catkin_LIBRARIES}) + +## Declare a C++ executable: light_flow_tool +add_executable(light_flow_tool tools/light_flow_tool.cc) +add_dependencies(light_flow_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(light_flow_tool + light_flow gflags jsoncpp ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS light_flow_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/light_flow_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/behaviors/perch/CMakeLists.txt b/behaviors/perch/CMakeLists.txt index 5361a8810a..cb98d02e9a 100644 --- a/behaviors/perch/CMakeLists.txt +++ b/behaviors/perch/CMakeLists.txt @@ -15,8 +15,25 @@ # License for the specific language governing permissions and limitations # under the License.
+cmake_minimum_required(VERSION 3.0) project(perch) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + geometry_msgs + tf2 + tf2_ros + tf2_geometry_msgs + ff_hw_msgs + ff_msgs + ff_util + localization_manager +) + catkin_package( LIBRARIES perch CATKIN_DEPENDS roscpp ff_msgs ff_hw_msgs ff_util localization_manager ) -create_library( - TARGET - perch - LIBS - ${catkin_LIBRARIES} - ff_nodelet - config_reader - msg_conversions - config_server - config_client - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs - ff_hw_msgs +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR - tools - LIBS - ${catkin_LIBRARIES} - ff_common - ff_nodelet - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs +# Declare C++ libraries +add_library(perch + src/perch_nodelet.cc ) +add_dependencies(perch ${catkin_EXPORTED_TARGETS}) +target_link_libraries(perch ${catkin_LIBRARIES}) + +## Declare a C++ executable: perch_tool +add_executable(perch_tool tools/perch_tool.cc) +add_dependencies(perch_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(perch_tool + perch gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS perch_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/perch_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/behaviors/perch/src/perch_nodelet.cc b/behaviors/perch/src/perch_nodelet.cc index 104d042935..5b895b3113 100644 --- a/behaviors/perch/src/perch_nodelet.cc +++ b/behaviors/perch/src/perch_nodelet.cc @@ -1031,6 +1031,11 @@ class PerchNodelet : public ff_util::FreeFlyerNodelet { int32_t err_; std::string err_msg_; std::string platform_name_; + + public: + // This fixes the Eigen alignment issue + // http://eigen.tuxfamily.org/dox-devel/group__TopicUnalignedArrayAssert.html + EIGEN_MAKE_ALIGNED_OPERATOR_NEW }; PLUGINLIB_EXPORT_CLASS(perch::PerchNodelet, nodelet::Nodelet); diff --git a/behaviors/states/CMakeLists.txt b/behaviors/states/CMakeLists.txt index 528bce0e0b..5ae0152483 100644 --- a/behaviors/states/CMakeLists.txt +++ b/behaviors/states/CMakeLists.txt @@ -15,30 +15,66 @@ # License for the specific language governing permissions and limitations # under the License.
+cmake_minimum_required(VERSION 3.0) project(states) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_hw_msgs + ff_msgs + ff_util +) + catkin_package( LIBRARIES states CATKIN_DEPENDS roscpp + nodelet ff_hw_msgs ff_msgs ff_util ) -create_library( - DIR src/states_nodelet - TARGET states_nodelet - LIBS - ${catkin_LIBRARIES} - config_reader - ff_nodelet - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs - ff_hw_msgs +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(states + src/states_nodelet/states_nodelet.cc +) +add_dependencies(states ${catkin_EXPORTED_TARGETS}) +target_link_libraries(states ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() \ No newline at end of file +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/behaviors/states/nodelet_plugins.xml b/behaviors/states/nodelet_plugins.xml index e323994948..b1a0737d23 100644 --- a/behaviors/states/nodelet_plugins.xml +++ b/behaviors/states/nodelet_plugins.xml @@ -1,4 +1,4 @@ - + Nodelet for the automatically switching between Astrobee signal states diff --git a/cmake/CATKIN_LICENSE b/cmake/CATKIN_LICENSE deleted file mode 100644 index 3f4820a5c2..0000000000 --- a/cmake/CATKIN_LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -CATKIN: - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - * Neither the name of copyright holder nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
diff --git a/cmake/CreateLibrary.cmake b/cmake/CreateLibrary.cmake deleted file mode 100644 index a7cf5983dd..0000000000 --- a/cmake/CreateLibrary.cmake +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -function(create_library) - cmake_parse_arguments( - library # Prefix of output variables - "" # List of Boolean Variables - "TARGET;DIR" # List of mono valued arguments - "LIBS;INC;DEPS;ADD_SRCS;EXCLUDE;DEFINES;OPTIONS" # List of Multi Value Arguments - ${ARGN} - ) - - # If no directory was given, assume all files in the src dir go in our library - if(NOT library_DIR) - SET(library_DIR src) - endif(NOT library_DIR) - - # Search for all the executables in tool dir - file(GLOB SRC_FILES_C "${library_DIR}/*.c") - file(GLOB SRC_FILES_CC "${library_DIR}/*.cc") - set(SRC_FILES ${SRC_FILES_C} ${SRC_FILES_CC}) - - # Sift through SRC_FILES and remove EXCLUDE - foreach(SRC ${SRC_FILES}) - foreach (TEST_SRC ${library_EXCLUDE}) - string(FIND ${SRC} ${TEST_SRC} POSITION) - if (${POSITION} GREATER -1) - list(REMOVE_ITEM SRC_FILES ${SRC}) - endif() - endforeach() - endforeach() - - # Some of our dependencies might be header only, their headers paths are not - # being added automatically like the targets listed in LIB. Here we'll do it - # manually. - if (library_DEPS) - foreach(DEP ${library_DEPS}) - get_target_property(INC ${DEP} INCLUDE_DIRECTORIES) - if (INC) - list(APPEND library_INC ${INC}) - endif (INC) - endforeach() - list(REMOVE_DUPLICATES library_INC) - endif (library_DEPS) - - add_library(${library_TARGET} - ${SRC_FILES} ${library_ADD_SRCS}) - target_include_directories(${library_TARGET} - PUBLIC ${library_INC} ${CMAKE_CURRENT_SOURCE_DIR}/include) - target_link_libraries(${library_TARGET} - ${library_LIBS} ${CMAKE_LIBS_COVERAGE}) - if (library_DEFINES) - target_compile_definitions(${library_TARGET} - PUBLIC ${library_DEFINES}) - endif (library_DEFINES) - if (library_OPTIONS) - target_compile_options(${library_TARGET} - PUBLIC ${library_OPTIONS}) - endif (library_OPTIONS) - if (NOT "${library_DEPS}" STREQUAL "") - add_dependencies(${library_TARGET} - ${library_DEPS}) - endif() - - install(TARGETS ${library_TARGET} DESTINATION lib) - install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ - DESTINATION include - FILES_MATCHING PATTERN "*.h" PATTERN "*.hpp") -endfunction() diff --git a/cmake/CreateMsgTargets.cmake b/cmake/CreateMsgTargets.cmake deleted file mode 100644 index 4f910d88dc..0000000000 --- a/cmake/CreateMsgTargets.cmake +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. 
-# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -function(create_msg_targets) - cmake_parse_arguments( - msg # Prefix of output variables - "" # List of boolean variables - "DIR;SDIR;ADIR" # List of mono valued arguments - "DEPS" # List of multi value arguments - ${ARGN}) - - if((NOT msg_DIR) AND (NOT msg_SDIR)) - message(WARNING "no msgs or srvs given to create_msg_targets") - return() - endif((NOT msg_DIR) AND (NOT msg_SDIR)) - - # Determine our module name - get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) - - # Search for all the messages to install - if(msg_DIR) - file(GLOB MSG_FILES ${msg_DIR}/*.msg) - - # The msgs to just be their name - foreach(SRC ${MSG_FILES}) - get_filename_component(NAME ${SRC} NAME) - list(APPEND MSG_FILE_NAMES ${NAME}) - endforeach() - - add_message_files(FILES ${MSG_FILE_NAMES}) - endif(msg_DIR) - - # Search for all the services to install - if(msg_SDIR) - file(GLOB SRV_FILES ${msg_SDIR}/*.srv) - - foreach(SRC ${SRV_FILES}) - get_filename_component(NAME ${SRC} NAME) - list(APPEND SRV_FILE_NAMES ${NAME}) - endforeach() - - add_service_files(FILES ${SRV_FILE_NAMES}) - endif(msg_SDIR) - - # Search for all the actions to install - if(msg_ADIR) - file(GLOB ACTION_FILES ${msg_ADIR}/*.action) - - foreach(SRC ${ACTION_FILES}) - get_filename_component(NAME ${SRC} NAME) - list(APPEND ACTION_FILE_NAMES ${NAME}) - endforeach() - - add_action_files(FILES ${ACTION_FILE_NAMES}) - endif(msg_ADIR) - - # Generate them - generate_messages(DEPENDENCIES ${msg_DEPS}) - - # Create a target that people can DEP to and find the headers - add_custom_target(${MODULE_NAME} - DEPENDS ${MODULE_NAME}_gencpp) - get_target_property(MSG_INC ${MODULE_NAME}_gencpp INCLUDE_DIRECTORIES) - set_target_properties(${MODULE_NAME} - PROPERTIES INCLUDE_DIRECTORIES ${MSG_INC}) - - # Make sure we install the package.xml so that it can be found later in the install directory - install(FILES package.xml DESTINATION share/${MODULE_NAME}) -endfunction(create_msg_targets) diff --git a/cmake/CreateToolTargets.cmake b/cmake/CreateToolTargets.cmake deleted file mode 100644 index 6f0eb3b8b7..0000000000 --- a/cmake/CreateToolTargets.cmake +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -function(create_tool_targets) - cmake_parse_arguments( - tool # Prefix of output variables - "" # List of boolean variables - "DIR" # List of mono valued arguments - "LIBS;INC;DEPS;EXCLUDE" # List of multi-value arguments - ${ARGN} - ) - - # Search for all the executables in tool dir - file(GLOB TOOL_SRC_FILES_C "${tool_DIR}/*.c") - file(GLOB TOOL_SRC_FILES_CC "${tool_DIR}/*.cc") - set(TOOL_SRC_FILES ${TOOL_SRC_FILES_C} ${TOOL_SRC_FILES_CC}) - - # Sift through SRC_FILES and remove EXCLUDE - foreach(SRC ${TOOL_SRC_FILES}) - foreach(TEST_SRC ${tool_EXCLUDE}) - string(FIND ${SRC} ${TEST_SRC} POSITION) - if (${POSITION} GREATER -1) - list(REMOVE_ITEM TOOL_SRC_FILES ${SRC}) - endif() - endforeach() - endforeach() - - # Determine our module name - get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) - - # Some of our dependencies might be header only, their headers paths are not - # being added automatically like the targets listed in LIB. Here we'll do it - # manually. - if (tool_DEPS) - foreach(DEP ${tool_DEPS}) - get_target_property(INC ${DEP} INCLUDE_DIRECTORIES) - if (INC) - list(APPEND tool_INC ${INC}) - endif (INC) - endforeach() - list(REMOVE_DUPLICATES tool_INC) - endif (tool_DEPS) - - # Link the executables! - foreach(filename ${TOOL_SRC_FILES}) - string(REGEX REPLACE ".(cc|c)$" "" execname ${filename}) - string(REGEX REPLACE "^[^ ]*/" "" execname ${execname}) - add_executable(${execname} ${filename}) - target_link_libraries(${execname} - LINK_PUBLIC ${tool_LIBS}) - target_include_directories(${execname} PUBLIC ${tool_INC}) - if (tool_DEPS) - add_dependencies(${execname} ${tool_DEPS}) - endif (tool_DEPS) - install(TARGETS ${execname} DESTINATION share/${PROJECT_NAME}) - install(TARGETS ${execname} DESTINATION bin) - endforeach() -endfunction() diff --git a/cmake/soracore/FindMiro.cmake b/cmake/FindMiro.cmake similarity index 100% rename from cmake/soracore/FindMiro.cmake rename to cmake/FindMiro.cmake diff --git a/cmake/FindOpenCV331.cmake b/cmake/FindOpenCV331.cmake new file mode 100644 index 0000000000..02b4a6fcbd --- /dev/null +++ b/cmake/FindOpenCV331.cmake @@ -0,0 +1,41 @@ +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Find OpenCV and fix a 3.3.1 bug +find_package(OpenCV 3.3.1 REQUIRED) +if (USE_CTC) + if (${OpenCV_VERSION} MATCHES "3.3.1") + foreach(__cvcomponent ${OpenCV_LIB_COMPONENTS}) + set (__original_cvcomponent ${__cvcomponent}) + if(NOT __cvcomponent MATCHES "^opencv_") + set(__cvcomponent opencv_${__cvcomponent}) + endif() + if (TARGET ${__cvcomponent}) + set_target_properties(${__cvcomponent} PROPERTIES + MAP_IMPORTED_CONFIG_DEBUG "" + MAP_IMPORTED_CONFIG_RELEASE "" + MAP_IMPORTED_CONFIG_RELWITHDEBINFO "" + MAP_IMPORTED_CONFIG_MINSIZEREL "" + ) + endif() + endforeach(__cvcomponent) + endif() + foreach(__cvcomponent ${OpenCV_LIB_COMPONENTS}) + set(OpenCV_LIBS ${OpenCV_LIBS} "${OpenCV_INSTALL_PATH}/lib/arm-linux-gnueabihf/lib${__cvcomponent}3.so") + endforeach(__cvcomponent) +endif (USE_CTC) +set(OpenCV_LIBRARIES ${OpenCV_LIBS}) \ No newline at end of file diff --git a/cmake/soracore/FindRtiDds.cmake b/cmake/FindRtiDds.cmake similarity index 100% rename from cmake/soracore/FindRtiDds.cmake rename to cmake/FindRtiDds.cmake diff --git a/cmake/soracore/FindSoracore.cmake b/cmake/FindSoracore.cmake similarity index 100% rename from cmake/soracore/FindSoracore.cmake rename to cmake/FindSoracore.cmake diff --git a/cmake/soracore/GenerateMiroMakeParams.cmake b/cmake/GenerateMiroMakeParams.cmake similarity index 100% rename from cmake/soracore/GenerateMiroMakeParams.cmake rename to cmake/GenerateMiroMakeParams.cmake diff --git a/cmake/soracore/GenerateRtiDdsIdl.cmake b/cmake/GenerateRtiDdsIdl.cmake similarity index 100% rename from cmake/soracore/GenerateRtiDdsIdl.cmake rename to cmake/GenerateRtiDdsIdl.cmake diff --git a/cmake/soracore/GetLibraryList.cmake b/cmake/GetLibraryList.cmake similarity index 100% rename from cmake/soracore/GetLibraryList.cmake rename to cmake/GetLibraryList.cmake diff --git a/cmake/soracore/GetPackageLibSearchPath.cmake b/cmake/GetPackageLibSearchPath.cmake similarity index 100% rename from cmake/soracore/GetPackageLibSearchPath.cmake rename to cmake/GetPackageLibSearchPath.cmake diff --git a/cmake/InstallLaunchFiles.cmake b/cmake/InstallLaunchFiles.cmake deleted file mode 100644 index a01b35ed95..0000000000 --- a/cmake/InstallLaunchFiles.cmake +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -function(install_launch_files) - file (GLOB LAUNCH_FILES launch/*.launch launch/*.yaml launch/*.xml) - get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) - install(DIRECTORY launch/ DESTINATION share/${PROJECT_NAME}/launch) -endfunction() diff --git a/cmake/catkin/all.cmake b/cmake/catkin/all.cmake deleted file mode 100644 index ac31ea1d2e..0000000000 --- a/cmake/catkin/all.cmake +++ /dev/null @@ -1,206 +0,0 @@ -# prevent multiple inclusion -if(DEFINED _CATKIN_ALL_INCLUDED_) - message(FATAL_ERROR "catkin/cmake/all.cmake included multiple times") -endif() -set(_CATKIN_ALL_INCLUDED_ TRUE) - -if(NOT DEFINED catkin_EXTRAS_DIR) - message(FATAL_ERROR "catkin_EXTRAS_DIR is not set") -endif() - -# define devel space -if(CATKIN_DEVEL_PREFIX) - set(CATKIN_DEVEL_PREFIX ${CATKIN_DEVEL_PREFIX} CACHE PATH "catkin devel space") -else() - set(CATKIN_DEVEL_PREFIX "${CMAKE_BINARY_DIR}/devel") -endif() -#message(STATUS "Using CATKIN_DEVEL_PREFIX: ${CATKIN_DEVEL_PREFIX}") - -# update develspace marker file with a reference to this sourcespace -set(_catkin_marker_file "${CATKIN_DEVEL_PREFIX}/.catkin") - -# check if the develspace marker file exists yet -if(EXISTS ${_catkin_marker_file}) - file(READ ${_catkin_marker_file} _existing_sourcespaces) - if(_existing_sourcespaces STREQUAL "") - # write this sourcespace to the marker file - file(WRITE ${_catkin_marker_file} "${CMAKE_SOURCE_DIR}") - else() - # append to existing list of sourcespaces if it's not in the list - list(FIND _existing_sourcespaces "${CMAKE_SOURCE_DIR}" _existing_sourcespace_index) - if(_existing_sourcespace_index EQUAL -1) - file(APPEND ${_catkin_marker_file} ";${CMAKE_SOURCE_DIR}") - endif() - endif() -else() - # create a new develspace marker file - # NOTE: extra care must be taken when running multiple catkin jobs in parallel - # so that this does not overwrite the result of a similar call in another package - file(WRITE ${_catkin_marker_file} "${CMAKE_SOURCE_DIR}") -endif() - -# use either CMAKE_PREFIX_PATH explicitly passed to CMake as a command line argument -# or CMAKE_PREFIX_PATH from the environment -if(NOT DEFINED CMAKE_PREFIX_PATH) - if(NOT "$ENV{CMAKE_PREFIX_PATH}" STREQUAL "") - string(REPLACE ":" ";" CMAKE_PREFIX_PATH $ENV{CMAKE_PREFIX_PATH}) - endif() -endif() -#message(STATUS "Using CMAKE_PREFIX_PATH: ${CMAKE_PREFIX_PATH}") -# store original CMAKE_PREFIX_PATH -set(CMAKE_PREFIX_PATH_AS_IS ${CMAKE_PREFIX_PATH}) - -# list of unique catkin workspaces based on CMAKE_PREFIX_PATH -set(CATKIN_WORKSPACES "") -foreach(path ${CMAKE_PREFIX_PATH}) - if(EXISTS "${path}/.catkin") - list(FIND CATKIN_WORKSPACES ${path} _index) - if(_index EQUAL -1) - list(APPEND CATKIN_WORKSPACES ${path}) - endif() - endif() -endforeach() -if(CATKIN_WORKSPACES) - #message(STATUS "This workspace overlays: ${CATKIN_WORKSPACES}") -endif() - -# prepend devel space to CMAKE_PREFIX_PATH -list(FIND CMAKE_PREFIX_PATH ${CATKIN_DEVEL_PREFIX} _index) -if(_index EQUAL -1) - list(INSERT CMAKE_PREFIX_PATH 0 ${CATKIN_DEVEL_PREFIX}) -endif() - - -# enable all new policies (if available) -macro(_set_cmake_policy_to_new_if_available policy) - if(POLICY ${policy}) - cmake_policy(SET ${policy} NEW) - endif() -endmacro() -_set_cmake_policy_to_new_if_available(CMP0000) -_set_cmake_policy_to_new_if_available(CMP0001) -_set_cmake_policy_to_new_if_available(CMP0002) -_set_cmake_policy_to_new_if_available(CMP0003) -_set_cmake_policy_to_new_if_available(CMP0004) -_set_cmake_policy_to_new_if_available(CMP0005) -_set_cmake_policy_to_new_if_available(CMP0006) 
-_set_cmake_policy_to_new_if_available(CMP0007) -_set_cmake_policy_to_new_if_available(CMP0008) -_set_cmake_policy_to_new_if_available(CMP0009) -_set_cmake_policy_to_new_if_available(CMP0010) -_set_cmake_policy_to_new_if_available(CMP0011) -_set_cmake_policy_to_new_if_available(CMP0012) -_set_cmake_policy_to_new_if_available(CMP0013) -_set_cmake_policy_to_new_if_available(CMP0014) -_set_cmake_policy_to_new_if_available(CMP0015) -_set_cmake_policy_to_new_if_available(CMP0016) -_set_cmake_policy_to_new_if_available(CMP0017) - -# the following operations must be performed inside a project context -if(NOT PROJECT_NAME) - project(catkin_internal) -endif() - -# include CMake functions -include(CMakeParseArguments) - -# functions/macros: list_append_unique, safe_execute_process -# python-integration: catkin_python_setup.cmake, interrogate_setup_dot_py.py, templates/__init__.py.in, templates/script.py.in, templates/python_distutils_install.bat.in, templates/python_distutils_install.sh.in, templates/safe_execute_install.cmake.in -foreach(filename - assert - atomic_configure_file - catkin_add_env_hooks - catkin_destinations - catkin_generate_environment - catkin_install_python - catkin_libraries - catkin_metapackage - catkin_package - catkin_package_xml - catkin_workspace - debug_message - em_expand - python # defines PYTHON_EXECUTABLE, required by empy - empy - find_program_required - legacy - list_append_deduplicate - list_append_unique - list_insert_in_workspace_order - safe_execute_process - stamp - string_starts_with - platform/lsb - platform/ubuntu - platform/windows - test/tests # defines CATKIN_ENABLE_TESTING, required by other test functions - test/catkin_download_test_data - test/gtest - test/nosetests - tools/doxygen - tools/libraries - tools/rt - -# tools/threads - ) - include(${catkin_EXTRAS_DIR}/${filename}.cmake) -endforeach() - -# output catkin version for debugging -_catkin_package_xml(${CMAKE_BINARY_DIR}/catkin/catkin_generated/version DIRECTORY ${catkin_EXTRAS_DIR}/..) 
-#message(STATUS "catkin ${catkin_VERSION}") -# ensure that no current package name is set -unset(_CATKIN_CURRENT_PACKAGE) - -# set global install destinations -set(CATKIN_GLOBAL_BIN_DESTINATION bin) -set(CATKIN_GLOBAL_ETC_DESTINATION etc) -set(CATKIN_GLOBAL_INCLUDE_DESTINATION include) -set(CATKIN_GLOBAL_LIB_DESTINATION lib) -set(CATKIN_GLOBAL_LIBEXEC_DESTINATION lib) -set(CATKIN_GLOBAL_PYTHON_DESTINATION ${PYTHON_INSTALL_DIR}) -set(CATKIN_GLOBAL_SHARE_DESTINATION share) - -# undefine CATKIN_ENV since it might be set in the cache from a previous build -set(CATKIN_ENV "" CACHE INTERNAL "catkin environment" FORCE) - -# generate environment files like env.* and setup.* -# uses em_expand without CATKIN_ENV being set yet -catkin_generate_environment() - -# file extension of env script -if(CMAKE_HOST_UNIX) # true for linux, apple, mingw-cross and cygwin - set(script_ext sh) -else() - set(script_ext bat) -endif() -# take snapshot of the modifications the setup script causes -# to reproduce the same changes with a static script in a fraction of the time -set(SETUP_DIR ${CMAKE_BINARY_DIR}/catkin_generated) -set(SETUP_FILENAME "setup_cached") -configure_file(${catkin_EXTRAS_DIR}/templates/generate_cached_setup.py.in - ${CMAKE_BINARY_DIR}/catkin_generated/generate_cached_setup.py) -set(GENERATE_ENVIRONMENT_CACHE_COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/catkin_generated/generate_cached_setup.py) -# the script is generated once here and refreshed by every call to catkin_add_env_hooks() -safe_execute_process(COMMAND ${GENERATE_ENVIRONMENT_CACHE_COMMAND}) -# generate env_cached which just relays to the setup_cached -configure_file(${catkin_EXTRAS_DIR}/templates/env.${script_ext}.in - ${SETUP_DIR}/env_cached.${script_ext} - @ONLY) -# environment to call external processes -set(CATKIN_ENV ${SETUP_DIR}/env_cached.${script_ext} CACHE INTERNAL "catkin environment") - -# add additional environment hooks -if(CATKIN_BUILD_BINARY_PACKAGE) - set(catkin_skip_install_env_hooks "SKIP_INSTALL") -endif() -if(CMAKE_HOST_UNIX) - catkin_add_env_hooks(05.catkin_make SHELLS bash DIRECTORY ${catkin_EXTRAS_DIR}/env-hooks ${catkin_skip_install_env_hooks}) - catkin_add_env_hooks(05.catkin_make_isolated SHELLS bash DIRECTORY ${catkin_EXTRAS_DIR}/env-hooks ${catkin_skip_install_env_hooks}) - catkin_add_env_hooks(05.catkin-test-results SHELLS sh DIRECTORY ${catkin_EXTRAS_DIR}/env-hooks ${catkin_skip_install_env_hooks}) -else() - catkin_add_env_hooks(05.catkin-test-results SHELLS bat DIRECTORY ${catkin_EXTRAS_DIR}/env-hooks ${catkin_skip_install_env_hooks}) -endif() - -# requires stamp and environment files -include(${catkin_EXTRAS_DIR}/catkin_python_setup.cmake) diff --git a/cmake/catkin/assert.cmake b/cmake/catkin/assert.cmake deleted file mode 100644 index 1a3f2c194b..0000000000 --- a/cmake/catkin/assert.cmake +++ /dev/null @@ -1,22 +0,0 @@ -function(assert VAR) - if(NOT ${VAR}) - message(FATAL_ERROR "\nAssertion failed: ${VAR} (value is '${${VAR}}')\n") - endif() - debug_message(3 "assert(${VAR}) passed (${VAR} = ${${VAR}})") -endfunction() - -function(assert_unset VAR) - if(${VAR}) - message(FATAL_ERROR "\nAssertion failed: '${VAR}' is set but should not be (value is '${${VAR}}')\n") - endif() - debug_message(3 "assert_unset(${VAR}) passed") -endfunction() - -function(assert_file_exists FILENAME MESSAGE) - if(NOT FILENAME) - message(FATAL_ERROR "\nAssertion failed: check for file existence, but filename (${FILENAME}) unset. 
Message: ${MESSAGE}\n") - endif() - if(NOT EXISTS ${FILENAME}) - message(FATAL_ERROR "\nAssertion failed: file '${FILENAME}' does not exist. Message: ${MESSAGE}\n") - endif() -endfunction() diff --git a/cmake/catkin/atomic_configure_file.cmake b/cmake/catkin/atomic_configure_file.cmake deleted file mode 100644 index b5a20a37bb..0000000000 --- a/cmake/catkin/atomic_configure_file.cmake +++ /dev/null @@ -1,9 +0,0 @@ -function(atomic_configure_file input output) - set(atomic_file "${CMAKE_BINARY_DIR}/atomic_configure_file") - configure_file("${input}" "${atomic_file}" ${ARGN}) - get_filename_component(output_path ${output} PATH) - if(NOT EXISTS ${output_path}) - file(MAKE_DIRECTORY ${output_path}) - endif() - file(RENAME "${atomic_file}" "${output}") -endfunction() diff --git a/cmake/catkin/catkinConfig.cmake.in b/cmake/catkin/catkinConfig.cmake.in deleted file mode 100644 index 2183ce8c6a..0000000000 --- a/cmake/catkin/catkinConfig.cmake.in +++ /dev/null @@ -1,117 +0,0 @@ -# generated from catkin/cmake/catkinConfig.cmake.in -# which overlays the default template catkin/cmake/template/pkgConfig.cmake.in -# -# :outvar catkin_INCLUDE_DIRS: contains the include dirs of all searched components. -# For use with CMake ``include_directories(${catkin_INCLUDE_DIRS})``. -# :outvar catkin_LIBRARY_DIRS: contains the library dirs of all searched components. -# For use with CMake ``link_directories(${catkin_INCLUDE_DIRS})``. -# :outvar catkin_LIBRARIES: contains the include dirs of all searched components. -# For use with CMake ``include_directories(${catkin_INCLUDE_DIRS})``. -# :outvar _INCLUDE_DIRS/_LIBRARY_DIRS/_LIBRARY: -# contains the include dirs / library dirs / libraries of the searched component . - -if(CATKIN_TOPLEVEL_FIND_PACKAGE OR NOT CATKIN_TOPLEVEL) - set(catkin_EXTRAS_DIR "@PKG_CMAKE_DIR@") - - # prevent multiple inclusion from repeated find_package() calls in non-workspace context - # as long as this variable is in the scope the variables from all.cmake are also, so no need to be evaluated again - if(NOT DEFINED _CATKIN_CONFIG_ALL_INCLUDED_) - set(_CATKIN_CONFIG_ALL_INCLUDED_ TRUE) - include(${catkin_EXTRAS_DIR}/all.cmake NO_POLICY_SCOPE) - endif() -endif() - -# skip setting find_package() variables when discovered from toplevel.cmake -if(CATKIN_TOPLEVEL_FIND_PACKAGE) - return() -endif() - - -# prevent usage with wrong case -if(CATKIN_FIND_COMPONENTS OR CATKIN_FIND_REQUIRED OR CATKIN_FIND_QUIETLY) - message(FATAL_ERROR "find_package() only supports lower-case package name 'catkin'") -endif() - -# mark as found -if(NOT catkin_FOUND) - set(catkin_FOUND) - set(CATKIN_PACKAGE_PREFIX "" CACHE STRING "Prefix to apply to package generated via gendebian") -endif() - -# flag project as catkin-based to distinguish if a find_package()-ed project is a catkin project -set(catkin_FOUND_CATKIN_PROJECT TRUE) - -# XXXX don't overwrite catkin_* variables when being called recursively -if(NOT _CATKIN_FIND_ OR _CATKIN_FIND_ EQUAL 0) - set(_CATKIN_FIND_ 0) - if(catkin_FIND_COMPONENTS) - set(catkin_INCLUDE_DIRS "") - set(catkin_LIBRARIES "") - set(catkin_LIBRARY_DIRS "") - set(catkin_EXPORTED_TARGETS "") - endif() -endif() - -# increment recursion counter -math(EXPR _CATKIN_FIND_ "${_CATKIN_FIND_} + 1") - -# find all components -if(catkin_FIND_COMPONENTS) - foreach(component ${catkin_FIND_COMPONENTS}) - string(TOLOWER "${component}" component_lower) - # skip catkin since it does not make sense as a component - if(NOT ${component_lower} STREQUAL "catkin") - - # get search paths from CMAKE_PREFIX_PATH (which 
includes devel space) - set(paths "") - foreach(path ${CMAKE_PREFIX_PATH}) - if(IS_DIRECTORY ${path}/share/${component}/cmake) - list(APPEND paths ${path}/share/${component}/cmake) - endif() - endforeach() - - # find package component - if(catkin_FIND_REQUIRED) - find_package(${component} REQUIRED NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - elseif(catkin_FIND_QUIETLY) - find_package(${component} QUIET NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - else() - find_package(${component} NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - endif() - - # append component-specific variables to catkin_* variables - list_append_unique(catkin_INCLUDE_DIRS ${${component}_INCLUDE_DIRS}) - - # merge build configuration keywords with library names to correctly deduplicate - catkin_pack_libraries_with_build_configuration(catkin_LIBRARIES ${catkin_LIBRARIES}) - catkin_pack_libraries_with_build_configuration(_libraries ${${component}_LIBRARIES}) - list_append_deduplicate(catkin_LIBRARIES ${_libraries}) - # undo build configuration keyword merging after deduplication - catkin_unpack_libraries_with_build_configuration(catkin_LIBRARIES ${catkin_LIBRARIES}) - - list_append_unique(catkin_LIBRARY_DIRS ${${component}_LIBRARY_DIRS}) - list(APPEND catkin_EXPORTED_TARGETS ${${component}_EXPORTED_TARGETS}) - endif() - endforeach() - list_insert_in_workspace_order(catkin_INCLUDE_DIRS ${catkin_INCLUDE_DIRS}) - list_insert_in_workspace_order(catkin_LIBRARY_DIRS ${catkin_LIBRARY_DIRS}) -endif() - -# add dummy target to catkin_EXPORTED_TARGETS if empty -if(NOT catkin_EXPORTED_TARGETS) - if(NOT TARGET _catkin_empty_exported_target) - add_custom_target(_catkin_empty_exported_target) - endif() - list(APPEND catkin_EXPORTED_TARGETS _catkin_empty_exported_target) -endif() - -# decrement recursion counter -math(EXPR _CATKIN_FIND_ "${_CATKIN_FIND_} - 1") - -if(_CATKIN_FIND_ EQUAL 0) - # store found components (from the fist level only) for validation in catkin_package() that they are build dependencies - list(APPEND catkin_ALL_FOUND_COMPONENTS ${catkin_FIND_COMPONENTS}) -endif() diff --git a/cmake/catkin/catkin_add_env_hooks.cmake b/cmake/catkin/catkin_add_env_hooks.cmake deleted file mode 100644 index dc063bd803..0000000000 --- a/cmake/catkin/catkin_add_env_hooks.cmake +++ /dev/null @@ -1,139 +0,0 @@ -# -# Register environment hooks which are executed by the setup script. -# -# For each shell in ``SHELLS``, the macro searches for one of the -# following files in the directory ``DIRECTORY``: -# ``.``, -# ``...em``, -# ``..em``, -# ``...in`` or -# ``..in``. -# -# Plain shells, will be copied to, templates are expanded to -# ``etc/catkin/profile.d/``, where it will be read by global generated -# ``setup.``. -# -# The templates can also distinguish between devel- and installspace -# using the boolean variables ``DEVELSPACE`` and ``INSTALLSPACE`` -# which are either ``true`` or ``false``. -# E.g. @[if DEVELSPACE]@ ... @[end if]@ for .em -# -# .. note:: Note that the extra extensions must appear in the filename -# but must not appear in the argument. -# -# .. note:: These files will share a single directory with other -# packages that choose to install env hooks. Be careful to give -# the file a unique name. Typically ``NN.name.`` is used, -# where NN can define when something should be run (the files are -# read in alphanumeric order) and the name serves to disambiguate -# in the event of collisions. 
-# -# Example:: -# -# catkin_add_env_hooks(my_prefix SHELLS bash tcsh zsh DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/env-hooks) -# -# looks for files env-hooks/my_prefix.[bash|tcsh|zsh]((.(devel|install)space)?.[em|in])? -# -# :param file_prefix: the filename prefix -# :type file_prefix: string -# :param SHELLS: the shell extensions (e.g.: sh bat bash zsh tcsh) -# :type SHELLS: list of strings -# :param DIRECTORY: the directory (default: ${CMAKE_CURRENT_SOURCE_DIR}) -# :type DIRECTORY: string -# :param SKIP_INSTALL: if specified the env hooks are only generated -# in the devel space but not installed -# :type SKIP_INSTALL: option -# -# @public -# -function(catkin_add_env_hooks file_prefix) - cmake_parse_arguments(ARG "SKIP_INSTALL" "DIRECTORY" "SHELLS" ${ARGN}) - - # create directory if necessary - if(NOT IS_DIRECTORY ${CATKIN_DEVEL_PREFIX}/etc/catkin/profile.d) - file(MAKE_DIRECTORY ${CATKIN_DEVEL_PREFIX}/etc/catkin/profile.d) - endif() - - if(NOT ARG_DIRECTORY) - set(ARG_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) - endif() - - foreach(shell ${ARG_SHELLS}) - set(ENV_HOOK ${file_prefix}.${shell}) - set(base ${ARG_DIRECTORY}/${ENV_HOOK}) - - # generate environment hook for devel space - set(DEVELSPACE True) - set(INSTALLSPACE False) - if(EXISTS ${base}.em OR EXISTS ${base}.develspace.em) - # evaluate em template - if(EXISTS ${base}.develspace.em) - set(em_template ${base}.develspace.em) - else() - set(em_template ${base}.em) - endif() - em_expand(${catkin_EXTRAS_DIR}/templates/env-hook.context.py.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/${ENV_HOOK}.develspace.context.py - ${em_template} - ${CATKIN_DEVEL_PREFIX}/etc/catkin/profile.d/${ENV_HOOK}) - elseif(EXISTS ${base}.in OR EXISTS ${base}.develspace.in) - # evaluate in template - if(EXISTS ${base}.develspace.in) - set(in_template ${base}.develspace.in) - else() - set(in_template ${base}.in) - endif() - atomic_configure_file(${in_template} - ${CATKIN_DEVEL_PREFIX}/etc/catkin/profile.d/${ENV_HOOK} - @ONLY) - elseif (EXISTS ${base}) - # copy plain file - file(COPY ${base} DESTINATION ${CATKIN_DEVEL_PREFIX}/etc/catkin/profile.d) - else() - message(FATAL_ERROR "catkin_add_env_hooks() could not find environment hook. 
Either '${base}', '${base}.em', '${base}.develspace.em' or '${base}.in' must exist.") - endif() - - # generate and install environment hook for installspace - set(DEVELSPACE False) - set(INSTALLSPACE True) - if(EXISTS ${base}.em OR EXISTS ${base}.installspace.em) - # evaluate em template and install - if(EXISTS ${base}.installspace.em) - set(em_template ${base}.installspace.em) - else() - set(em_template ${base}.em) - endif() - em_expand(${catkin_EXTRAS_DIR}/templates/env-hook.context.py.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/${ENV_HOOK}.installspace.context.py - ${em_template} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${ENV_HOOK}) - if(NOT ${ARG_SKIP_INSTALL}) - install(FILES ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${ENV_HOOK} - DESTINATION etc/catkin/profile.d) - endif() - elseif(EXISTS ${base}.in OR EXISTS ${base}.installspace.in) - # evaluate in template and install - if(EXISTS ${base}.installspace.in) - set(in_template ${base}.installspace.in) - else() - set(in_template ${base}.in) - endif() - configure_file(${in_template} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${ENV_HOOK} - @ONLY) - if(NOT ${ARG_SKIP_INSTALL}) - install(FILES ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${ENV_HOOK} - DESTINATION etc/catkin/profile.d) - endif() - elseif (EXISTS ${base}) - # install plain file - if(NOT ${ARG_SKIP_INSTALL}) - install(FILES ${base} - DESTINATION etc/catkin/profile.d) - endif() - endif() - endforeach() - - # refresh environment cache - safe_execute_process(COMMAND ${GENERATE_ENVIRONMENT_CACHE_COMMAND}) -endfunction() diff --git a/cmake/catkin/catkin_destinations.cmake b/cmake/catkin/catkin_destinations.cmake deleted file mode 100644 index c4327e1223..0000000000 --- a/cmake/catkin/catkin_destinations.cmake +++ /dev/null @@ -1,59 +0,0 @@ -# -# Set several path suffixes for install destinations. -# -# :outvar CATKIN_PACKAGE_BIN_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_BIN_DESTINATION`. -# :outvar CATKIN_PACKAGE_ETC_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_ETC_DESTINATION`. -# :outvar CATKIN_PACKAGE_INCLUDE_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_INCLUDE_DESTINATION`. -# :outvar CATKIN_PACKAGE_LIB_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_LIB_DESTINATION`. -# :outvar CATKIN_PACKAGE_PYTHON_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_PYTHON_DESTINATION`. -# :outvar CATKIN_PACKAGE_SHARE_DESTINATION: -# See :cmake:data:`CATKIN_PACKAGE_SHARE_DESTINATION`. -# -# :outvar CATKIN_GLOBAL_BIN_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_BIN_DESTINATION`. -# :outvar CATKIN_GLOBAL_ETC_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_ETC_DESTINATION`. -# :outvar CATKIN_GLOBAL_INCLUDE_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_INCLUDE_DESTINATION`. -# :outvar CATKIN_GLOBAL_LIB_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_LIB_DESTINATION`. -# :outvar CATKIN_GLOBAL_LIBEXEC_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_LIBEXEC_DESTINATION`. -# :outvar CATKIN_GLOBAL_PYTHON_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_PYTHON_DESTINATION`. -# :outvar CATKIN_GLOBAL_SHARE_DESTINATION: -# See :cmake:data:`CATKIN_GLOBAL_SHARE_DESTINATION`. -# -macro(catkin_destinations) - # verify that project() has been called before - if(NOT PROJECT_NAME) - message(FATAL_ERROR "catkin_destinations() PROJECT_NAME is not set. 
You must call project() before you can call catkin_destinations().") - endif() - - # execute catkin_destinations() only once, skip repeated invocations - if(NOT DEFINED _${PROJECT_NAME}_CATKIN_DESTINATIONS) - debug_message(10 "catkin_destinations()") - - # mark that catkin_destinations() was called - set(_${PROJECT_NAME}_CATKIN_DESTINATIONS TRUE) - - # set project specific install destinations - set(CATKIN_PACKAGE_BIN_DESTINATION ${CATKIN_GLOBAL_LIBEXEC_DESTINATION}/${PROJECT_NAME}) - set(CATKIN_PACKAGE_ETC_DESTINATION ${CATKIN_GLOBAL_ETC_DESTINATION}/${PROJECT_NAME}) - set(CATKIN_PACKAGE_INCLUDE_DESTINATION ${CATKIN_GLOBAL_INCLUDE_DESTINATION}/${PROJECT_NAME}) - set(CATKIN_PACKAGE_LIB_DESTINATION ${CATKIN_GLOBAL_LIB_DESTINATION}) - set(CATKIN_PACKAGE_PYTHON_DESTINATION ${CATKIN_GLOBAL_PYTHON_DESTINATION}/${PROJECT_NAME}) - set(CATKIN_PACKAGE_SHARE_DESTINATION ${CATKIN_GLOBAL_SHARE_DESTINATION}/${PROJECT_NAME}) - - # set project specific output directory for libraries - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION}) - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION}) - # set project specific output directory for binaries - set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_BIN_DESTINATION}) - endif() -endmacro() diff --git a/cmake/catkin/catkin_generate_environment.cmake b/cmake/catkin/catkin_generate_environment.cmake deleted file mode 100644 index 912078b6e6..0000000000 --- a/cmake/catkin/catkin_generate_environment.cmake +++ /dev/null @@ -1,146 +0,0 @@ -function(catkin_generate_environment) - set(SETUP_FILENAME "setup") - - # devel space - set(SETUP_DIR ${CATKIN_DEVEL_PREFIX}) - - # generate empty file to prevent searching for packages in binary dir - # except if source space and build space are identical (which is the case for dry eclipse projects) - if(NOT "${CMAKE_BINARY_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") - file(WRITE "${CMAKE_BINARY_DIR}/CATKIN_IGNORE" "") - endif() - - # get multiarch name - set(CATKIN_LIB_ENVIRONMENT_PATHS "'${CATKIN_GLOBAL_LIB_DESTINATION}'") - set(CATKIN_PKGCONFIG_ENVIRONMENT_PATHS "os.path.join('${CATKIN_GLOBAL_LIB_DESTINATION}', 'pkgconfig')") - if (UNIX AND NOT APPLE) - # Two step looking for multiarch support: check for gcc -print-multiarch - # and, if failed, try to run dpkg-architecture - execute_process(COMMAND gcc -print-multiarch - OUTPUT_VARIABLE CATKIN_MULTIARCH - OUTPUT_STRIP_TRAILING_WHITESPACE - ERROR_QUIET - ) - if ("${CATKIN_MULTIARCH}" STREQUAL "") - execute_process(COMMAND dpkg-architecture -qDEB_HOST_MULTIARCH - OUTPUT_VARIABLE CATKIN_MULTIARCH - OUTPUT_STRIP_TRAILING_WHITESPACE - ERROR_QUIET - ) - endif() - if (NOT "${CATKIN_MULTIARCH}" STREQUAL "") - set(CATKIN_LIB_ENVIRONMENT_PATHS - "[${CATKIN_LIB_ENVIRONMENT_PATHS}, os.path.join('${CATKIN_GLOBAL_LIB_DESTINATION}', '${CATKIN_MULTIARCH}')]") - set(CATKIN_PKGCONFIG_ENVIRONMENT_PATHS - "[${CATKIN_PKGCONFIG_ENVIRONMENT_PATHS}, os.path.join('${CATKIN_GLOBAL_LIB_DESTINATION}', '${CATKIN_MULTIARCH}', 'pkgconfig')]") - endif() - endif() - - # generate Python setup util - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/_setup_util.py.in - ${CATKIN_DEVEL_PREFIX}/_setup_util.py - @ONLY) - - if(NOT WIN32) - # non-windows - # generate env - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/env.sh.in - ${CATKIN_DEVEL_PREFIX}/env.sh - @ONLY) - # generate setup for various shells - foreach(shell bash sh zsh) - 
atomic_configure_file(${catkin_EXTRAS_DIR}/templates/setup.${shell}.in - ${CATKIN_DEVEL_PREFIX}/setup.${shell} - @ONLY) - endforeach() - - else() - # windows - # generate env - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/env.bat.in - ${CATKIN_DEVEL_PREFIX}/env.bat - @ONLY) - # generate setup - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/setup.bat.in - ${CATKIN_DEVEL_PREFIX}/setup.bat - @ONLY) - endif() - - # generate rosinstall file referencing setup.sh - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/rosinstall.in - ${CATKIN_DEVEL_PREFIX}/.rosinstall - @ONLY) - - # installspace - set(SETUP_DIR ${CMAKE_INSTALL_PREFIX}) - - if(NOT CATKIN_BUILD_BINARY_PACKAGE) - # install empty workspace marker if it doesn't already exist - install(CODE " - if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}\") - file(MAKE_DIRECTORY \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}\") - endif() - if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/.catkin\") - file(WRITE \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/.catkin\" \"\") - endif()") - - # generate and install Python setup util - configure_file(${catkin_EXTRAS_DIR}/templates/_setup_util.py.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/_setup_util.py - @ONLY) - catkin_install_python(PROGRAMS - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/_setup_util.py - DESTINATION ${CMAKE_INSTALL_PREFIX}) - endif() - - if(NOT WIN32) - # non-windows - # generate and install env - configure_file(${catkin_EXTRAS_DIR}/templates/env.sh.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/env.sh - @ONLY) - if(NOT CATKIN_BUILD_BINARY_PACKAGE) - install(PROGRAMS - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/env.sh - DESTINATION ${CMAKE_INSTALL_PREFIX}) - endif() - # generate and install setup for various shells - foreach(shell bash sh zsh) - configure_file(${catkin_EXTRAS_DIR}/templates/setup.${shell}.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/setup.${shell} - @ONLY) - if(NOT CATKIN_BUILD_BINARY_PACKAGE) - install(FILES - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/setup.${shell} - DESTINATION ${CMAKE_INSTALL_PREFIX}) - endif() - endforeach() - - else() - # windows - # generate and install env - configure_file(${catkin_EXTRAS_DIR}/templates/env.bat.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/env.bat - @ONLY) - install(PROGRAMS - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/env.bat - DESTINATION ${CMAKE_INSTALL_PREFIX}) - # generate and install setup - configure_file(${catkin_EXTRAS_DIR}/templates/setup.bat.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/setup.bat - @ONLY) - install(FILES - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/setup.bat - DESTINATION ${CMAKE_INSTALL_PREFIX}) - endif() - - # generate rosinstall file referencing setup.sh - configure_file(${catkin_EXTRAS_DIR}/templates/rosinstall.in - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/.rosinstall - @ONLY) - if(NOT CATKIN_BUILD_BINARY_PACKAGE) - install(FILES - ${CMAKE_BINARY_DIR}/catkin_generated/installspace/.rosinstall - DESTINATION ${CMAKE_INSTALL_PREFIX}) - endif() -endfunction() diff --git a/cmake/catkin/catkin_install_python.cmake b/cmake/catkin/catkin_install_python.cmake deleted file mode 100644 index 45f7a3c745..0000000000 --- a/cmake/catkin/catkin_install_python.cmake +++ /dev/null @@ -1,55 +0,0 @@ -# -# Install Python files and update their shebang lines -# to use a different Python executable. -# -# The signature: -# -# catkin_install_python(PROGRAMS files... 
DESTINATION -# [OPTIONAL] -# ) -# -# See the documentation for CMake install() function for more information. -# -# @public -# -function(catkin_install_python signature) - string(TOUPPER "${signature}" signature) - if(NOT "${signature}" STREQUAL "PROGRAMS") - message(FATAL_ERROR "catkin_install_python() only supports the PROGRAMS signature (not '${signature}').") - endif() - cmake_parse_arguments(ARG "OPTIONAL" "DESTINATION" "" ${ARGN}) - if(NOT ARG_DESTINATION) - message(FATAL_ERROR "catkin_install_python() called without required DESTINATION argument.") - endif() - foreach(file ${ARG_UNPARSED_ARGUMENTS}) - if(NOT IS_ABSOLUTE ${file}) - set(file "${CMAKE_CURRENT_SOURCE_DIR}/${file}") - endif() - if(EXISTS ${file}) - # read file and check shebang line - file(READ ${file} data) - set(regex "^#!/([^\r\n]+)/env python([\r\n])") - string(REGEX MATCH "${regex}" shebang_line "${data}") - string(LENGTH "${shebang_line}" length) - string(SUBSTRING "${data}" 0 ${length} prefix) - if("${shebang_line}" STREQUAL "${prefix}") - # write modified file with modified shebang line - get_filename_component(python_name ${PYTHON_EXECUTABLE} NAME) - string(REGEX REPLACE "${regex}" "#!/\\1/env ${python_name}\\2" data "${data}") - get_filename_component(filename ${file} NAME) - set(file "${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace") - file(MAKE_DIRECTORY ${file}) - set(file "${file}/${filename}") - file(WRITE ${file} "${data}") - endif() - # install (modified) file to destination - set(optional_flag "") - if(ARG_OPTIONAL) - set(optional_flag "OPTIONAL") - endif() - install(PROGRAMS "${file}" DESTINATION "${ARG_DESTINATION}" ${optional_flag}) - elseif(NOT ARG_OPTIONAL) - message(FATAL_ERROR "catkin_install_python() called with non-existing file '${file}'.") - endif() - endforeach() -endfunction() diff --git a/cmake/catkin/catkin_libraries.cmake b/cmake/catkin/catkin_libraries.cmake deleted file mode 100644 index 723d40abd1..0000000000 --- a/cmake/catkin/catkin_libraries.cmake +++ /dev/null @@ -1,159 +0,0 @@ -set(CATKIN_BUILD_CONFIGURATION_KEYWORD_SEPARATOR ":") - -# -# Filter libraries based on optional build configuration keywords. 
-# -# :param VAR: the output variable name -# :type VAR: string -# :param ARGN: a list of libraries -# :type ARGN: list of strings -# :param BUILD_TYPE: a keyword for the build type (default: -# ``CMAKE_BUILD_TYPE``) -# :type BUILD_TYPE: list of strings -# -# @public -# -macro(catkin_filter_libraries_for_build_configuration VAR) - cmake_parse_arguments(ARG "" "BUILD_TYPE" "" ${ARGN}) - if(NOT ARG_BUILD_TYPE) - set(ARG_BUILD_TYPE ${CMAKE_BUILD_TYPE}) - endif() - if(NOT ARG_BUILD_TYPE) - set(ARG_BUILD_TYPE "Debug") - endif() - set(${VAR} "") - list(LENGTH ARG_UNPARSED_ARGUMENTS _count) - set(_index 0) - while(${_index} LESS ${_count}) - list(GET ARG_UNPARSED_ARGUMENTS ${_index} lib) - - if("${lib}" STREQUAL "debug") - if(NOT "${ARG_BUILD_TYPE}" STREQUAL "Debug") - # skip keyword and debug library for non-debug builds - math(EXPR _index "${_index} + 1") - if(${_index} EQUAL ${_count}) - message(FATAL_ERROR "catkin_filter_libraries_for_build_configuration() the list of libraries '${ARG_UNPARSED_ARGUMENTS}' ends with '${lib}' which is a build configuration keyword and must be followed by a library") - endif() - endif() - elseif("${lib}" STREQUAL "optimized") - if("${ARG_BUILD_TYPE}" STREQUAL "Debug") - # skip keyword and non-debug library for debug builds - math(EXPR _index "${_index} + 1") - if(${_index} EQUAL ${_count}) - message(FATAL_ERROR "catkin_filter_libraries_for_build_configuration() the list of libraries '${ARG_UNPARSED_ARGUMENTS}' ends with '${lib}' which is a build configuration keyword and must be followed by a library") - endif() - endif() - elseif("${lib}" STREQUAL "general") - # just consume the keyword - if(${_index} EQUAL ${_count}) - message(FATAL_ERROR "catkin_package() the list of libraries '${ARG_UNPARSED_ARGUMENTS}' ends with '${lib}' which is a build configuration keyword and must be followed by a library") - endif() - else() - list(APPEND ${VAR} "${lib}") - endif() - math(EXPR _index "${_index} + 1") - endwhile() - debug_message(10 "catkin_filter_libraries_for_build_configuration(${VAR} ${ARG_UNPARSED_ARGUMENTS} BUILD_TYPE ${ARG_BUILD_TYPE}) ${${VAR}}") -endmacro() - -# -# Pack a list of libraries with optional build configuration keywords. -# Each keyword is joined with its library using a separator. -# A packed library list can be deduplicated correctly. -# -# :param VAR: the output variable name -# :type VAR: string -# :param ARGN: a list of libraries -# :type ARGN: list of strings -# -# @public -# -macro(catkin_pack_libraries_with_build_configuration VAR) - set(${VAR} "") - set(_argn ${ARGN}) - list(LENGTH _argn _count) - set(_index 0) - while(${_index} LESS ${_count}) - list(GET _argn ${_index} lib) - if("${lib}" MATCHES "^debug|optimized|general$") - math(EXPR _index "${_index} + 1") - if(${_index} EQUAL ${_count}) - message(FATAL_ERROR "catkin_pack_libraries_with_build_configuration() the list of libraries '${_argn}' ends with '${lib}' which is a build configuration keyword and must be followed by a library") - endif() - list(GET _argn ${_index} library) - list(APPEND ${VAR} "${lib}${CATKIN_BUILD_CONFIGURATION_KEYWORD_SEPARATOR}${library}") - else() - list(APPEND ${VAR} "${lib}") - endif() - math(EXPR _index "${_index} + 1") - endwhile() - #debug_message(10 "catkin_pack_libraries_with_build_configuration(${VAR} ${_argn}) ${${VAR}}") -endmacro() - -# -# Unpack a list of libraries with optional build configuration keyword prefixes. -# Libraries prefixed with a keyword are split into the keyword and the library. 
-# -# :param VAR: the output variable name -# :type VAR: string -# :param ARGN: a list of libraries -# :type ARGN: list of strings -# -# @public -# -macro(catkin_unpack_libraries_with_build_configuration VAR) - set(${VAR} "") - foreach(lib ${ARGN}) - string(REGEX REPLACE "^(debug|optimized|general)${CATKIN_BUILD_CONFIGURATION_KEYWORD_SEPARATOR}(.+)$" "\\1;\\2" lib "${lib}") - list(APPEND ${VAR} "${lib}") - endforeach() - #set(_argn ${ARGN}) - #debug_message(10 "catkin_unpack_libraries_with_build_configuration(${VAR} ${_argn}) ${${VAR}}") -endmacro() - -# -# Replace imported library target names with the library name. -# -# :param VAR: the output variable name -# :type VAR: string -# :param ARGN: a list of libraries -# :type ARGN: list of strings -# -# @public -# -macro(catkin_replace_imported_library_targets VAR) - set(${VAR} "") - foreach(lib ${ARGN}) - if((NOT "${lib}" MATCHES "^debug|optimized|general$") AND TARGET ${lib}) - # sometimes cmake dependencies define imported targets, in which - # case the imported library information is not the target name, but - # the information embedded in cmake properties inside the imported library - get_target_property(${lib}_imported ${lib} IMPORTED) - if(${${lib}_imported}) - set(imported_libraries) # empty list - get_target_property(${lib}_imported_location ${lib} IMPORTED_LOCATION) - if(${lib}_imported_location) - list(APPEND imported_libraries ${${lib}_imported_location}) - else() - get_target_property(${lib}_imported_configurations ${lib} IMPORTED_CONFIGURATIONS) - foreach(cfg ${${lib}_imported_configurations}) - get_target_property(${lib}_imported_location_${cfg} ${lib} IMPORTED_LOCATION_${cfg}) - if(${lib}_imported_location_${cfg}) - list(APPEND imported_libraries ${${lib}_imported_location_${cfg}}) - endif() - endforeach() - endif() - foreach(imp_lib ${imported_libraries}) - list(APPEND ${VAR} "${imp_lib}") - endforeach() - else() - # not an imported library target - list(APPEND ${VAR} "${lib}") - endif() - else() - list(APPEND ${VAR} "${lib}") - endif() - endforeach() - #set(_argn ${ARGN}) - #debug_message(10 "catkin_replace_imported_library_targets(${VAR} ${_argn}) ${${VAR}}") -endmacro() diff --git a/cmake/catkin/catkin_metapackage.cmake b/cmake/catkin/catkin_metapackage.cmake deleted file mode 100644 index e111338a90..0000000000 --- a/cmake/catkin/catkin_metapackage.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# -# It installs the package.xml file of a metapackage. -# -# .. note:: It must be called once for each metapackage. Best -# practice is to call this macro early in your root CMakeLists.txt, -# immediately after calling ``project()`` and -# ``find_package(catkin REQUIRED)``. -# -# :param DIRECTORY: the path to the package.xml file if not in the same -# location as the CMakeLists.txt file -# :type DIRECTORY: string -# -# @public -# -function(catkin_metapackage) - cmake_parse_arguments(ARG "" "DIRECTORY" "" ${ARGN}) - if(ARG_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "catkin_metapackage() called with unused arguments: ${ARG_UNPARSED_ARGUMENTS}") - endif() - - # verify that project() has been called before - if(NOT PROJECT_NAME) - message(FATAL_ERROR "catkin_metapackage() PROJECT_NAME is not set. You must call project() before calling catkin_metapackage().") - endif() - if(PROJECT_NAME STREQUAL "Project") - message(FATAL_ERROR "catkin_metapackage() PROJECT_NAME is set to 'Project', which is not a valid project name. 
You must call project() before calling catkin_metapackage().") - endif() - - debug_message(10 "catkin_metapackage() called in file ${CMAKE_CURRENT_LIST_FILE}") - - if(NOT ARG_DIRECTORY) - if(${CMAKE_CURRENT_LIST_FILE} STREQUAL ${CMAKE_BINARY_DIR}/catkin_generated/metapackages/${PROJECT_NAME}/CMakeLists.txt) - set(ARG_DIRECTORY ${CMAKE_SOURCE_DIR}/${path}) - else() - set(ARG_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) - endif() - endif() - - catkin_package_xml(DIRECTORY ${ARG_DIRECTORY}) - - # install package.xml - install(FILES ${ARG_DIRECTORY}/package.xml - DESTINATION share/${PROJECT_NAME} - ) -endfunction() diff --git a/cmake/catkin/catkin_package.cmake b/cmake/catkin/catkin_package.cmake deleted file mode 100644 index 5a6aee1952..0000000000 --- a/cmake/catkin/catkin_package.cmake +++ /dev/null @@ -1,528 +0,0 @@ -# -# It installs the package.xml file, and it generates code for -# ``find_package`` and ``pkg-config`` so that other packages can get -# information about this package. For this purpose the information -# about include directories, libraries, further dependencies and -# CMake variables are used. -# -# .. note:: It must be called once for each package. It is indirectly -# calling``catkin_destinations()`` which will provide additional -# output variables. Please make sure to call ``catkin_package()`` -# before using those variables. -# -# :param INCLUDE_DIRS: ``CMAKE_CURRENT_SOURCE_DIR``-relative paths to -# C/C++ includes -# :type INCLUDE_DIRS: list of strings -# :param LIBRARIES: names of library targets that will appear in the -# ``catkin_LIBRARIES`` and ``${PROJECT_NAME}_LIBRARIES`` of other -# projects that search for you via ``find_package``. Currently -# this will break if the logical target names are not the same as -# the installed names. -# :type LIBRARIES: list of strings -# :param CATKIN_DEPENDS: a list of catkin projects which this project -# depends on. It is used when client code finds this project via -# ``find_package()`` or ``pkg-config``. Each project listed will in -# turn be ``find_package``\ -ed or is states as ``Requires`` in the -# .pc file. Therefore their ``INCLUDE_DIRS`` and ``LIBRARIES`` will -# be appended to ours. Only catkin projects should be used where it -# be guarantee that they are *find_packagable* and have pkg-config -# files. -# :type CATKIN_DEPENDS: list of strings -# :param DEPENDS: a list of CMake projects which this project depends -# on. Since they might not be *find_packagable* or lack a pkg-config -# file their ``INCLUDE_DIRS`` and ``LIBRARIES`` are passed directly. -# This requires that it has been ``find_package``\ -ed before. -# :type DEPENDS: list of strings -# :param CFG_EXTRAS: a CMake file containing extra stuff that should -# be accessible to users of this package after -# ``find_package``\ -ing it. This file must live in the -# subdirectory ``cmake`` or be an absolute path. Various additional -# file extension are possible: -# for a plain cmake file just ``.cmake``, for files expanded using -# CMake's ``configure_file()`` use ``.cmake.in`` or for files expanded -# by empy use ``.cmake.em``. The templates can distinguish between -# devel- and installspace using the boolean variables ``DEVELSPACE`` -# and ``INSTALLSPACE``. For templated files it is also possible to -# use the extensions ``.cmake.develspace.(in|em)`` or -# ``.cmake.installspace.(em|in)`` to generate the files only for a -# specific case. -# If the global variable ${PROJECT_NAME}_CFG_EXTRAS is set it will be -# prepended to the explicitly passed argument. 
-# :type CFG_EXTRAS: string -# :param EXPORTED_TARGETS: a list of target names which usually generate -# code. Downstream packages can depend on these targets to ensure that -# code is generated before it is being used. The generated CMake config -# file will ensure that the targets exists. -# If the global variable ${PROJECT_NAME}_EXPORTED_TARGETS is -# set it will be prepended to the explicitly passed argument. -# :type EXPORTED_TARGETS: list of strings -# :param SKIP_CMAKE_CONFIG_GENERATION: the option to skip the generation -# of the CMake config files for the package -# :type SKIP_CMAKE_CONFIG_GENERATION: bool -# :param SKIP_PKG_CONFIG_GENERATION: the option to skip the generation of -# the pkg-config file for the package -# :type SKIP_PKG_CONFIG_GENERATION: bool -# -# Example: -# :: -# -# catkin_package( -# INCLUDE_DIRS include -# LIBRARIES projlib1 projlib2 -# CATKIN_DEPENDS roscpp -# DEPENDS Eigen -# CFG_EXTRAS proj-extras[.cmake|.cmake.in|.cmake(.develspace|.installspace)?.em] -# ) -# -# @public -# -macro(catkin_package) - debug_message(10 "catkin_package() called in file ${CMAKE_CURRENT_LIST_FILE}") - - # verify that project() has been called before - if(NOT PROJECT_NAME) - message(FATAL_ERROR "catkin_package() PROJECT_NAME is not set. You must call project() before calling catkin_package().") - endif() - if(PROJECT_NAME STREQUAL "Project") - message(FATAL_ERROR "catkin_package() PROJECT_NAME is set to 'Project', which is not a valid project name. You must call project() before calling catkin_package().") - endif() - - # mark that catkin_package() was called in order to detect wrong order of calling with generate_messages() - set(${PROJECT_NAME}_CATKIN_PACKAGE TRUE) - - # call catkin_package_xml() if it has not been called before - if(NOT _CATKIN_CURRENT_PACKAGE) - catkin_package_xml() - endif() - - _catkin_package(${ARGN}) -endmacro() - -function(_catkin_package) - cmake_parse_arguments(PROJECT "SKIP_CMAKE_CONFIG_GENERATION;SKIP_PKG_CONFIG_GENERATION" "" "INCLUDE_DIRS;LIBRARIES;CATKIN_DEPENDS;DEPENDS;CFG_EXTRAS;EXPORTED_TARGETS" ${ARGN}) - if(PROJECT_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "catkin_package() called with unused arguments: ${PROJECT_UNPARSED_ARGUMENTS}") - endif() - - if(NOT ${PROJECT_NAME} STREQUAL "catkin") - list(FIND ${PROJECT_NAME}_BUILDTOOL_DEPENDS "catkin" _index) - if(_index EQUAL -1) - list(FIND ${PROJECT_NAME}_BUILD_DEPENDS "catkin" _index) - if(_index EQUAL -1) - message(FATAL_ERROR "catkin_package() 'catkin' must be listed as a buildtool dependency in the package.xml") - endif() - message("WARNING: 'catkin' should be listed as a buildtool dependency in the package.xml (instead of build dependency)") - endif() - endif() - - # prepend INCLUDE_DIRS and LIBRARIES passed using a variable - if(${PROJECT_NAME}_INCLUDE_DIRS) - list(INSERT PROJECT_INCLUDE_DIRS 0 ${${PROJECT_NAME}_INCLUDE_DIRS}) - endif() - if(${PROJECT_NAME}_LIBRARIES) - list(INSERT PROJECT_LIBRARIES 0 ${${PROJECT_NAME}_LIBRARIES}) - endif() - - # unset previously found directory of this package, so that this package overlays the other cleanly - if(${PROJECT_NAME}_DIR) - set(${PROJECT_NAME}_DIR "" CACHE PATH "" FORCE) - endif() - - set(_PROJECT_CATKIN_DEPENDS ${PROJECT_CATKIN_DEPENDS}) - - set(PROJECT_DEPENDENCIES_INCLUDE_DIRS "") - set(PROJECT_DEPENDENCIES_LIBRARIES "") - foreach(depend ${PROJECT_DEPENDS}) - string(REPLACE " " ";" depend_list ${depend}) - # check if the second argument is the COMPONENTS keyword - list(LENGTH depend_list count) - set(second_item "") - if(${count} GREATER 1) - 
list(GET depend_list 1 second_item) - endif() - if("${second_item}" STREQUAL "COMPONENTS") - list(GET depend_list 0 depend_name) - if(NOT ${${depend_name}_FOUND}) - message(FATAL_ERROR "catkin_package() DEPENDS on '${depend}' which must be find_package()-ed before") - endif() - message(WARNING "catkin_package() DEPENDS on '${depend}' which is deprecated. find_package() it before and only DEPENDS on '${depend_name}' instead") - list(APPEND PROJECT_DEPENDENCIES_INCLUDE_DIRS ${${depend_name}_INCLUDE_DIRS}) - list(APPEND PROJECT_DEPENDENCIES_LIBRARIES ${${depend_name}_LIBRARIES}) - else() - # split multiple names (without COMPONENTS) into separate dependencies - foreach(depend_name ${depend_list}) - if(${depend_name}_FOUND_CATKIN_PROJECT) - #message(WARNING "catkin_package() DEPENDS on catkin package '${depend_name}' which is deprecated. Use CATKIN_DEPENDS for catkin packages instead.") - list(APPEND _PROJECT_CATKIN_DEPENDS ${depend_name}) - else() - if(NOT ${${depend_name}_FOUND}) - message(FATAL_ERROR "catkin_package() DEPENDS on '${depend_name}' which must be find_package()-ed before. If it is a catkin package it can be declared as CATKIN_DEPENDS instead without find_package()-ing it.") - endif() - list(APPEND PROJECT_DEPENDENCIES_INCLUDE_DIRS ${${depend_name}_INCLUDE_DIRS}) - list(APPEND PROJECT_DEPENDENCIES_LIBRARIES ${${depend_name}_LIBRARIES}) - endif() - endforeach() - endif() - endforeach() - - # for catkin packages it can be guaranteed that they are find_package()-able and have pkg-config files - set(PROJECT_DEPENDENCIES "") - foreach(depend_name ${_PROJECT_CATKIN_DEPENDS}) - # verify that all catkin packages which have been find_package()-ed are listed as build dependencies - if(${depend_name}_FOUND) - # verify that these packages are really catkin packages - if(NOT ${depend_name}_FOUND_CATKIN_PROJECT) - if(DEFINED ${depend_name}_CONFIG) - message(FATAL_ERROR "catkin_package() CATKIN_DEPENDS on '${depend_name}', which has been found in '${${depend_name}_CONFIG}', but it is not a catkin package") - else() - message(FATAL_ERROR "catkin_package() CATKIN_DEPENDS on '${depend_name}', but it is not a catkin package") - endif() - endif() - if(catkin_ALL_FOUND_COMPONENTS) - list(FIND catkin_ALL_FOUND_COMPONENTS ${depend_name} _index) - else() - set(_index -1) - endif() - if(NOT _index EQUAL -1) - list(FIND ${PROJECT_NAME}_BUILD_DEPENDS ${depend_name} _index) - if(_index EQUAL -1) - message(FATAL_ERROR "catkin_package() the catkin package '${depend_name}' has been find_package()-ed but is not listed as a build dependency in the package.xml") - endif() - # verify versioned dependency constraints - if(DEFINED ${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LT AND - NOT "${${depend_name}_VERSION}" VERSION_LESS "${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LT}") - message(WARNING "catkin_package() version mismatch: the package.xml of '${PROJECT_NAME}' build_depends on '${depend_name} < ${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LT}', but '${depend_name} ${${depend_name}_VERSION}' found") - endif() - if(DEFINED ${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LTE AND - "${${depend_name}_VERSION}" VERSION_GREATER "${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LTE}") - message(WARNING "catkin_package() version mismatch: the package.xml of '${PROJECT_NAME}' build_depends on '${depend_name} <= ${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_LTE}', but '${depend_name} ${${depend_name}_VERSION}' found") - endif() - if(DEFINED 
${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_EQ AND - NOT "${${depend_name}_VERSION}" VERSION_EQUAL "${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_EQ}") - message(WARNING "catkin_package() version mismatch: the package.xml of '${PROJECT_NAME}' build_depends on '${depend_name} = ${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_EQ}', but '${depend_name} ${${depend_name}_VERSION}' found") - endif() - if(DEFINED ${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GTE AND - "${${depend_name}_VERSION}" VERSION_LESS "${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GTE}") - message(WARNING "catkin_package() version mismatch: the package.xml of '${PROJECT_NAME}' build_depends on '${depend_name} >= ${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GTE}', but '${depend_name} ${${depend_name}_VERSION}' found") - endif() - if(DEFINED ${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GT AND - NOT "${${depend_name}_VERSION}" VERSION_GREATER "${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GT}") - message(WARNING "catkin_package() version mismatch: the package.xml of '${PROJECT_NAME}' build_depends on '${depend_name} > ${${PROJECT_NAME}_BUILD_DEPENDS_${depend_name}_VERSION_GT}', but '${depend_name} ${${depend_name}_VERSION}' found") - endif() - endif() - endif() - # verify that all catkin packages are listed as run dependencies - list(FIND ${PROJECT_NAME}_RUN_DEPENDS ${depend_name} _index) - if(_index EQUAL -1) - message(FATAL_ERROR "catkin_package() DEPENDS on the catkin package '${depend_name}' which must therefore be listed as a run dependency in the package.xml") - endif() - list(APPEND PROJECT_DEPENDENCIES ${depend_name}) - endforeach() - - # package version provided by package.cmake/xml - set(PROJECT_VERSION ${${PROJECT_NAME}_VERSION}) - - # flag if package is deprecated provided by package.cmake/xml - set(PROJECT_DEPRECATED ${${PROJECT_NAME}_DEPRECATED}) - - # package maintainer provided by package.cmake/xml - set(PROJECT_MAINTAINER ${${PROJECT_NAME}_MAINTAINER}) - - # get library paths from all workspaces - set(lib_paths "") - foreach(workspace ${CATKIN_WORKSPACES}) - list_append_unique(lib_paths ${workspace}/lib) - endforeach() - - # merge explicitly listed libraries and libraries from non-catkin but find_package()-ed packages - set(_PKG_CONFIG_LIBRARIES "") - if(PROJECT_LIBRARIES) - list(APPEND _PKG_CONFIG_LIBRARIES ${PROJECT_LIBRARIES}) - endif() - if(PROJECT_DEPENDENCIES_LIBRARIES) - list(APPEND _PKG_CONFIG_LIBRARIES ${PROJECT_DEPENDENCIES_LIBRARIES}) - endif() - - # resolve imported library targets - catkin_replace_imported_library_targets(_PKG_CONFIG_LIBRARIES ${_PKG_CONFIG_LIBRARIES}) - - # deduplicate libraries while maintaining build configuration keywords - catkin_pack_libraries_with_build_configuration(_PKG_CONFIG_LIBRARIES ${_PKG_CONFIG_LIBRARIES}) - set(PKG_CONFIG_LIBRARIES "") - foreach(library ${_PKG_CONFIG_LIBRARIES}) - list_append_deduplicate(PKG_CONFIG_LIBRARIES ${library}) - endforeach() - catkin_unpack_libraries_with_build_configuration(PKG_CONFIG_LIBRARIES ${PKG_CONFIG_LIBRARIES}) - - # .pc files can not handle build configuration keywords therefore filter them out based on the current build type - set(PKG_CONFIG_LIBRARIES_WITH_PREFIX "") - catkin_filter_libraries_for_build_configuration(libraries ${PKG_CONFIG_LIBRARIES}) - foreach(library ${libraries}) - if(IS_ABSOLUTE ${library}) - get_filename_component(suffix ${library} EXT) - if(NOT "${suffix}" STREQUAL "${CMAKE_STATIC_LIBRARY_SUFFIX}") - set(library "-l:${library}") - endif() 
- else() - set(library "-l${library}") - endif() - list_append_deduplicate(PKG_CONFIG_LIBRARIES_WITH_PREFIX ${library}) - endforeach() - - # - # DEVEL SPACE - # - - # used in the cmake extra files - set(DEVELSPACE TRUE) - set(INSTALLSPACE FALSE) - - set(PROJECT_SPACE_DIR ${CATKIN_DEVEL_PREFIX}) - set(PKG_INCLUDE_PREFIX ${CMAKE_CURRENT_SOURCE_DIR}) - - # absolute path to include dirs and validate that they are existing either absolute or relative to packages source - set(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS "") - set(PROJECT_PKG_CONFIG_INCLUDE_DIRS "") - foreach(idir ${PROJECT_INCLUDE_DIRS}) - if(IS_ABSOLUTE ${idir}) - if(IS_DIRECTORY ${idir}) - set(include ${idir}) - else() - message(FATAL_ERROR "catkin_package() absolute include dir '${idir}' does not exist") - endif() - elseif(IS_DIRECTORY ${PKG_INCLUDE_PREFIX}/${idir}) - set(include ${PKG_INCLUDE_PREFIX}/${idir}) - else() - message(FATAL_ERROR "catkin_package() include dir '${idir}' does not exist relative to '${PKG_INCLUDE_PREFIX}'") - endif() - list_append_unique(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS ${include}) - list_append_unique(PROJECT_PKG_CONFIG_INCLUDE_DIRS ${include}) - endforeach() - if(PROJECT_DEPENDENCIES_INCLUDE_DIRS) - list_append_unique(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS ${PROJECT_DEPENDENCIES_INCLUDE_DIRS}) - list_append_unique(PROJECT_PKG_CONFIG_INCLUDE_DIRS ${PROJECT_DEPENDENCIES_INCLUDE_DIRS}) - endif() - - # prepend library path of this workspace - set(PKG_CONFIG_LIB_PATHS ${lib_paths}) - list(INSERT PKG_CONFIG_LIB_PATHS 0 ${PROJECT_SPACE_DIR}/lib) - set(PKG_CMAKE_DIR ${PROJECT_SPACE_DIR}/share/${PROJECT_NAME}/cmake) - if("${PROJECT_NAME}" STREQUAL "catkin") - set(PKG_CMAKE_DIR "${catkin_EXTRAS_DIR}") - endif() - - if(NOT PROJECT_SKIP_PKG_CONFIG_GENERATION) - # ensure that output folder exists - file(MAKE_DIRECTORY ${CATKIN_DEVEL_PREFIX}/lib/pkgconfig) - # generate devel space pc for project - em_expand(${catkin_EXTRAS_DIR}/templates/pkg.context.pc.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/pkg.develspace.context.pc.py - ${catkin_EXTRAS_DIR}/em/pkg.pc.em - ${CATKIN_DEVEL_PREFIX}/lib/pkgconfig/${PROJECT_NAME}.pc) - endif() - - # generate devel space cfg-extras for project - set(PKG_CFG_EXTRAS "") - foreach(extra ${${PROJECT_NAME}_CFG_EXTRAS} ${PROJECT_CFG_EXTRAS}) - if(IS_ABSOLUTE ${extra}) - set(base ${extra}) - get_filename_component(extra ${extra} NAME) - else() - set(base ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${extra}) - endif() - if(EXISTS ${base}.em OR EXISTS ${base}.develspace.em) - if(EXISTS ${base}.develspace.em) - set(em_template ${base}.develspace.em) - else() - set(em_template ${base}.em) - endif() - em_expand(${catkin_EXTRAS_DIR}/templates/cfg-extras.context.py.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/${extra}.develspace.context.cmake.py - ${em_template} - ${CATKIN_DEVEL_PREFIX}/share/${PROJECT_NAME}/cmake/${extra}) - list(APPEND PKG_CFG_EXTRAS ${extra}) - elseif(EXISTS ${base}.in OR EXISTS ${base}.develspace.in) - if(EXISTS ${base}.develspace.in) - set(in_template ${base}.develspace.in) - else() - set(in_template ${base}.in) - endif() - configure_file(${in_template} - ${CATKIN_DEVEL_PREFIX}/share/${PROJECT_NAME}/cmake/${extra} - @ONLY - ) - list(APPEND PKG_CFG_EXTRAS ${extra}) - elseif(EXISTS ${base}) - list(APPEND PKG_CFG_EXTRAS ${base}) - elseif(NOT EXISTS ${base}.installspace.em AND NOT EXISTS ${base}.installspace.in) - message(FATAL_ERROR "catkin_package() could not find CFG_EXTRAS file. 
Either 'cmake/${extra}.develspace.em', 'cmake/${extra}.em', 'cmake/${extra}.develspace.in', 'cmake/${extra}.in', 'cmake/${extra}' or a variant specific to the installspace must exist.") - endif() - endforeach() - - if(NOT PROJECT_SKIP_CMAKE_CONFIG_GENERATION) - set(PKG_EXPORTED_TARGETS ${${PROJECT_NAME}_EXPORTED_TARGETS} ${PROJECT_EXPORTED_TARGETS}) - foreach(t ${PKG_EXPORTED_TARGETS}) - if(NOT TARGET ${t}) - message(FATAL_ERROR "catkin_package() could not find target '${t}' for code generation.") - endif() - endforeach() - - # generate devel space config for project - set(infile ${${PROJECT_NAME}_EXTRAS_DIR}/${PROJECT_NAME}Config.cmake.in) - if(NOT EXISTS ${infile}) - set(infile ${catkin_EXTRAS_DIR}/templates/pkgConfig.cmake.in) - endif() - configure_file(${infile} - ${CATKIN_DEVEL_PREFIX}/share/${PROJECT_NAME}/cmake/${PROJECT_NAME}Config.cmake - @ONLY - ) - # generate devel space config-version for project - configure_file(${catkin_EXTRAS_DIR}/templates/pkgConfig-version.cmake.in - ${CATKIN_DEVEL_PREFIX}/share/${PROJECT_NAME}/cmake/${PROJECT_NAME}Config-version.cmake - @ONLY - ) - endif() - - # - # INSTALLSPACE - # - - # used in the cmake extra files - set(DEVELSPACE FALSE) - set(INSTALLSPACE TRUE) - - set(PROJECT_SPACE_DIR ${CMAKE_INSTALL_PREFIX}) - set(PKG_INCLUDE_PREFIX ${PROJECT_SPACE_DIR}) - - # absolute path to include dir under install prefix if any include dir is set - set(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS "") - set(PROJECT_PKG_CONFIG_INCLUDE_DIRS "") - foreach(idir ${PROJECT_INCLUDE_DIRS}) - # include dirs in source / build / devel space are handled like relative ones - # since these files are supposed to be installed to the include folder in install space - string_starts_with("${idir}/" "${CMAKE_CURRENT_SOURCE_DIR}/" _is_source_prefix) - string_starts_with("${idir}/" "${CMAKE_CURRENT_BINARY_DIR}/" _is_build_prefix) - string_starts_with("${idir}/" "${CATKIN_DEVEL_PREFIX}/" _is_devel_prefix) - if(_is_source_prefix OR _is_build_prefix OR _is_devel_prefix) - # generated header files should be places in the devel space rather then in the build space - if(_is_build_prefix) - message(WARNING "catkin_package() include dir '${idir}' should be placed in the devel space instead of the build space") - endif() - # the value doesn't matter as long as it doesn't match IS_ABSOLUTE - set(idir "${CATKIN_GLOBAL_INCLUDE_DESTINATION}") - endif() - if(IS_ABSOLUTE ${idir}) - list_append_unique(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS "${idir}") - list_append_unique(PROJECT_PKG_CONFIG_INCLUDE_DIRS "${idir}") - else() - list_append_unique(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS "${CATKIN_GLOBAL_INCLUDE_DESTINATION}") - list_append_unique(PROJECT_PKG_CONFIG_INCLUDE_DIRS "${PKG_INCLUDE_PREFIX}/${CATKIN_GLOBAL_INCLUDE_DESTINATION}") - endif() - endforeach() - if(PROJECT_DEPENDENCIES_INCLUDE_DIRS) - list_append_unique(PROJECT_CMAKE_CONFIG_INCLUDE_DIRS ${PROJECT_DEPENDENCIES_INCLUDE_DIRS}) - list_append_unique(PROJECT_PKG_CONFIG_INCLUDE_DIRS ${PROJECT_DEPENDENCIES_INCLUDE_DIRS}) - endif() - - # prepend library path of this workspace - set(PKG_CONFIG_LIB_PATHS ${lib_paths}) - list(INSERT PKG_CONFIG_LIB_PATHS 0 ${PROJECT_SPACE_DIR}/lib) - # package cmake dir is the folder where the generated pkgConfig.cmake is located - set(PKG_CMAKE_DIR "\${${PROJECT_NAME}_DIR}") - - if(NOT PROJECT_SKIP_PKG_CONFIG_GENERATION) - # ensure that output folder exists - file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace) - # generate and install pc for project - 
em_expand(${catkin_EXTRAS_DIR}/templates/pkg.context.pc.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/pkg.installspace.context.pc.py - ${catkin_EXTRAS_DIR}/em/pkg.pc.em - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}.pc) - install(FILES ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}.pc - DESTINATION lib/pkgconfig - ) - endif() - - # generate and install cfg-extras for project - set(PKG_CFG_EXTRAS "") - set(installable_cfg_extras "") - foreach(extra ${${PROJECT_NAME}_CFG_EXTRAS} ${PROJECT_CFG_EXTRAS}) - if(IS_ABSOLUTE ${extra}) - set(base ${extra}) - get_filename_component(extra ${extra} NAME) - else() - set(base ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${extra}) - endif() - if(EXISTS ${base}.em OR EXISTS ${base}.installspace.em) - if(EXISTS ${base}.installspace.em) - set(em_template ${base}.installspace.em) - else() - set(em_template ${base}.em) - endif() - em_expand(${catkin_EXTRAS_DIR}/templates/cfg-extras.context.py.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/${extra}.installspace.context.cmake.py - ${em_template} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${extra}) - list(APPEND installable_cfg_extras ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${extra}) - list(APPEND PKG_CFG_EXTRAS ${extra}) - elseif(EXISTS ${base}.in OR EXISTS ${base}.installspace.in) - if(EXISTS ${base}.installspace.in) - set(in_template ${base}.installspace.in) - else() - set(in_template ${base}.in) - endif() - configure_file(${in_template} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${extra} - @ONLY - ) - list(APPEND installable_cfg_extras ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${extra}) - list(APPEND PKG_CFG_EXTRAS ${extra}) - elseif(EXISTS ${base}) - list(APPEND installable_cfg_extras ${base}) - list(APPEND PKG_CFG_EXTRAS ${extra}) - elseif(NOT EXISTS ${base}.develspace.em AND NOT EXISTS ${base}.develspace.in) - message(FATAL_ERROR "catkin_package() could not find CFG_EXTRAS file. 
Either 'cmake/${extra}.installspace.em', 'cmake/${extra}.em', 'cmake/${extra}.installspace.in', 'cmake/${extra}.in', 'cmake/${extra}'or a variant specific to the develspace must exist.") - endif() - endforeach() - install(FILES - ${installable_cfg_extras} - DESTINATION share/${PROJECT_NAME}/cmake - ) - - if(NOT PROJECT_SKIP_CMAKE_CONFIG_GENERATION) - # generate config for project - set(infile ${${PROJECT_NAME}_EXTRAS_DIR}/${PROJECT_NAME}Config.cmake.in) - if(NOT EXISTS ${infile}) - set(infile ${catkin_EXTRAS_DIR}/templates/pkgConfig.cmake.in) - endif() - configure_file(${infile} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}Config.cmake - @ONLY - ) - # generate config-version for project - set(infile ${${PROJECT_NAME}_EXTRAS_DIR}/${PROJECT_NAME}Config-version.cmake.in) - if(NOT EXISTS ${infile}) - set(infile ${catkin_EXTRAS_DIR}/templates/pkgConfig-version.cmake.in) - endif() - configure_file(${infile} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}Config-version.cmake - @ONLY - ) - # install config, config-version and cfg-extras for project - install(FILES - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}Config.cmake - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/installspace/${PROJECT_NAME}Config-version.cmake - DESTINATION share/${PROJECT_NAME}/cmake - ) - endif() - - # install package.xml - install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/package.xml - DESTINATION share/${PROJECT_NAME} - ) - - if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/nodelet_plugins.xml) - install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/nodelet_plugins.xml - DESTINATION share/${PROJECT_NAME} ) - endif () -endfunction() diff --git a/cmake/catkin/catkin_package_xml.cmake b/cmake/catkin/catkin_package_xml.cmake deleted file mode 100644 index 99dc212add..0000000000 --- a/cmake/catkin/catkin_package_xml.cmake +++ /dev/null @@ -1,69 +0,0 @@ -# -# Parse package.xml from ``CMAKE_CURRENT_SOURCE_DIR`` and -# make several information available to CMake. -# -# .. note:: It is called automatically by ``catkin_package()`` if not -# called manually before. It must be called once in each package, -# after calling ``project()`` where the project name must match the -# package name. The macro should only be called manually if the -# variables are use to parameterize ``catkin_package()``. -# -# :param DIRECTORY: the directory of the package.xml (default -# ``${CMAKE_CURRENT_SOURCE_DIR}``). -# :type DIRECTORY: string -# -# :outvar _VERSION: the version number -# :outvar _MAINTAINER: the name and email of the -# maintainer(s) -# :outvar _CATKIN_CURRENT_PACKAGE: the name of the package from the -# manifest -# -# .. note:: It is calling ``catkin_destinations()`` which will provide -# additional output variables. -# -# @public -# -macro(catkin_package_xml) - debug_message(10 "catkin_package_xml()") - - # verify that project() has been called before - if(NOT PROJECT_NAME) - message(FATAL_ERROR "catkin_package_xml() PROJECT_NAME is not set. You must call project() before you can call catkin_package_xml().") - endif() - - # ensure that function is not called multiple times per package - if(DEFINED _CATKIN_CURRENT_PACKAGE) - message(FATAL_ERROR "catkin_package_xml(): in '${CMAKE_CURRENT_LIST_FILE}', _CATKIN_CURRENT_PACKAGE is already set (to: ${_CATKIN_CURRENT_PACKAGE}). 
Did you call catkin_package_xml() multiple times?") - endif() - - _catkin_package_xml(${CMAKE_CURRENT_BINARY_DIR}/catkin_generated ${ARGN}) - - # verify that the package name from package.xml equals the project() name - if(NOT _CATKIN_CURRENT_PACKAGE STREQUAL PROJECT_NAME) - message(FATAL_ERROR "catkin_package_xml() package name '${_CATKIN_CURRENT_PACKAGE}' in '${_PACKAGE_XML_DIRECTORY}/package.xml' does not match current PROJECT_NAME '${PROJECT_NAME}'. You must call project() with the same package name before.") - endif() - - catkin_destinations() -endmacro() - -macro(_catkin_package_xml dest_dir) - cmake_parse_arguments(_PACKAGE_XML "" "DIRECTORY" "" ${ARGN}) - if(_PACKAGE_XML_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "catkin_package_xml() called with unused arguments: ${_PACKAGE_XML_UNPARSED_ARGUMENTS}") - endif() - - # set default directory - if(NOT _PACKAGE_XML_DIRECTORY) - set(_PACKAGE_XML_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) - endif() - - # stamp and parse package.xml - stamp(${_PACKAGE_XML_DIRECTORY}/package.xml) - file(MAKE_DIRECTORY ${dest_dir}) - safe_execute_process(COMMAND ${PYTHON_EXECUTABLE} - ${catkin_EXTRAS_DIR}/parse_package_xml.py - ${_PACKAGE_XML_DIRECTORY}/package.xml - ${dest_dir}/package.cmake) - # load extracted variables into cmake - include(${dest_dir}/package.cmake) -endmacro() diff --git a/cmake/catkin/catkin_python_setup.cmake b/cmake/catkin/catkin_python_setup.cmake deleted file mode 100644 index 2a8a0f7558..0000000000 --- a/cmake/catkin/catkin_python_setup.cmake +++ /dev/null @@ -1,150 +0,0 @@ -# This macro will interrogate the Python setup.py file in -# ``${${PROJECT_NAME}_SOURCE_DIR}``, and then create forwarding -# Python :term:`pkgutil` infrastructure in devel space -# accordingly for the scripts and packages declared in setup.py. -# -# Doing so enables mixing :term:`generated code` in -# devel space with :term:`static code` from source space within a -# single Python package. -# -# In addition, it adds the install command of -# distutils/setuptools to the install target. -# -# .. note:: If the project also uses genmsg message generation via -# ``generate_messages()``, this function must be called beforehand.
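For orientation, a minimal sketch (the package name, message file, and dependencies are made up for illustration, not taken from this repository) of how a package's CMakeLists.txt would typically order these calls, assuming a setup.py sits next to it:

    cmake_minimum_required(VERSION 3.0)
    project(example_pkg)                      # must match the name in package.xml
    find_package(catkin REQUIRED COMPONENTS rospy std_msgs message_generation)
    catkin_python_setup()                     # interrogate setup.py first
    add_message_files(FILES Example.msg)      # hypothetical message definition
    generate_messages(DEPENDENCIES std_msgs)  # only after catkin_python_setup()
    catkin_package(CATKIN_DEPENDS rospy std_msgs message_runtime)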
-# -# @public -# -function(catkin_python_setup) - if(ARGN) - message(FATAL_ERROR "catkin_python_setup() called with unused arguments: ${ARGN}") - endif() - - if(${PROJECT_NAME}_GENERATE_MESSAGES) - message(FATAL_ERROR "generate_messages() must be called after catkin_python_setup() in project '${PROJECT_NAME}'") - endif() - if(${PROJECT_NAME}_GENERATE_DYNAMIC_RECONFIGURE) - message(FATAL_ERROR "generate_dynamic_reconfigure_options() must be called after catkin_python_setup() in project '${PROJECT_NAME}'") - endif() - - if(NOT EXISTS ${${PROJECT_NAME}_SOURCE_DIR}/setup.py) - message(FATAL_ERROR "catkin_python_setup() called without 'setup.py' in project folder ' ${${PROJECT_NAME}_SOURCE_DIR}'") - endif() - - assert(PYTHON_INSTALL_DIR) - set(INSTALL_CMD_WORKING_DIRECTORY ${${PROJECT_NAME}_SOURCE_DIR}) - if(NOT WIN32) - set(INSTALL_SCRIPT - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/python_distutils_install.sh) - configure_file(${catkin_EXTRAS_DIR}/templates/python_distutils_install.sh.in - ${INSTALL_SCRIPT} - @ONLY) - else() - # need to convert install prefix to native path for python setuptools --prefix (its fussy about \'s) - file(TO_NATIVE_PATH ${CMAKE_INSTALL_PREFIX} PYTHON_INSTALL_PREFIX) - set(INSTALL_SCRIPT - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/python_distutils_install.bat) - configure_file(${catkin_EXTRAS_DIR}/templates/python_distutils_install.bat.in - ${INSTALL_SCRIPT} - @ONLY) - endif() - - # generate python script which gets executed at install time - configure_file(${catkin_EXTRAS_DIR}/templates/safe_execute_install.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/safe_execute_install.cmake) - install(SCRIPT ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/safe_execute_install.cmake) - - # interrogate setup.py - stamp(${${PROJECT_NAME}_SOURCE_DIR}/setup.py) - assert(CATKIN_ENV) - assert(PYTHON_EXECUTABLE) - set(cmd - ${CATKIN_ENV} ${PYTHON_EXECUTABLE} - ${catkin_EXTRAS_DIR}/interrogate_setup_dot_py.py - ${PROJECT_NAME} - ${${PROJECT_NAME}_SOURCE_DIR}/setup.py - ${${PROJECT_NAME}_BINARY_DIR}/catkin_generated/setup_py_interrogation.cmake - ) - debug_message(10 "catkin_python_setup() in project '{PROJECT_NAME}' executes: ${cmd}") - safe_execute_process(COMMAND ${cmd}) - include(${${PROJECT_NAME}_BINARY_DIR}/catkin_generated/setup_py_interrogation.cmake) - - # call catkin_package_xml() if it has not been called before - if(NOT _CATKIN_CURRENT_PACKAGE) - catkin_package_xml() - endif() - assert(${PROJECT_NAME}_VERSION) - # verify that version from setup.py is equal to version from package.xml - if(NOT "${${PROJECT_NAME}_SETUP_PY_VERSION}" STREQUAL "${${PROJECT_NAME}_VERSION}") - message(FATAL_ERROR "catkin_python_setup() version in setup.py (${${PROJECT_NAME}_SETUP_PY_VERSION}) differs from version in package.xml (${${PROJECT_NAME}_VERSION})") - endif() - - # generate relaying __init__.py for each python package - if(${PROJECT_NAME}_SETUP_PY_PACKAGES) - list(LENGTH ${PROJECT_NAME}_SETUP_PY_PACKAGES pkgs_count) - math(EXPR pkgs_range "${pkgs_count} - 1") - foreach(index RANGE ${pkgs_range}) - list(GET ${PROJECT_NAME}_SETUP_PY_PACKAGES ${index} pkg) - if("${pkg}" STREQUAL "${PROJECT_NAME}") - # mark that catkin_python_setup() was called and the setup.py file contains a package with the same name as the current project - # in order to disable installation of generated __init__.py files in generate_messages() and generate_dynamic_reconfigure_options() - set(${PROJECT_NAME}_CATKIN_PYTHON_SETUP_HAS_PACKAGE_INIT TRUE PARENT_SCOPE) - endif() - list(GET 
${PROJECT_NAME}_SETUP_PY_PACKAGE_DIRS ${index} pkg_dir) - get_filename_component(name ${pkg_dir} NAME) - if(NOT ("${pkg}" STREQUAL "${name}")) - message(FATAL_ERROR "The package name '${pkg}' differs from the basename of the path '${pkg_dir}' in project '${PROJECT_NAME}'") - endif() - get_filename_component(path ${pkg_dir} PATH) - set(PACKAGE_PYTHONPATH ${CMAKE_CURRENT_SOURCE_DIR}/${path}) - configure_file(${catkin_EXTRAS_DIR}/templates/__init__.py.in - ${CATKIN_DEVEL_PREFIX}/${PYTHON_INSTALL_DIR}/${pkg}/__init__.py - @ONLY) - endforeach() - endif() - - # generate relay-script for each python module (and __init__.py files) if available - if(${PROJECT_NAME}_SETUP_PY_MODULES) - list(LENGTH ${PROJECT_NAME}_SETUP_PY_MODULES modules_count) - math(EXPR modules_range "${modules_count} - 1") - foreach(index RANGE ${modules_range}) - list(GET ${PROJECT_NAME}_SETUP_PY_MODULES ${index} module) - list(GET ${PROJECT_NAME}_SETUP_PY_MODULE_DIRS ${index} module_dir) - set(PYTHON_SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/${module_dir}/${module}) - if(EXISTS ${PYTHON_SCRIPT}) - get_filename_component(path ${module} PATH) - file(MAKE_DIRECTORY "${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_PYTHON_DESTINATION}/${path}") - configure_file(${catkin_EXTRAS_DIR}/templates/relay.py.in - ${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_PYTHON_DESTINATION}/${module} - @ONLY) - # relay parent __init__.py files if they exist - while(NOT "${path}" STREQUAL "") - set(PYTHON_SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/${module_dir}/${path}/__init__.py) - if(EXISTS ${PYTHON_SCRIPT}) - file(MAKE_DIRECTORY "${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_PYTHON_DESTINATION}/${path}") - configure_file(${catkin_EXTRAS_DIR}/templates/relay.py.in - ${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_PYTHON_DESTINATION}/${path}/__init__.py - @ONLY) - else() - message(WARNING "The module '${module_dir}/${module}' lacks an '__init__.py' file in the parent folder '${module_dir}/${path}' in project '${PROJECT_NAME}'") - endif() - get_filename_component(path ${path} PATH) - endwhile() - endif() - endforeach() - endif() - - # generate relay-script for each python script - foreach(script ${${PROJECT_NAME}_SETUP_PY_SCRIPTS}) - get_filename_component(name ${script} NAME) - if(NOT EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${script}) - message(FATAL_ERROR "The script '${name}' as listed in 'setup.py' of '${PROJECT_NAME}' doesn't exist") - endif() - set(PYTHON_SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/${script}) - atomic_configure_file(${catkin_EXTRAS_DIR}/templates/script.py.in - ${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_BIN_DESTINATION}/${name} - @ONLY) - endforeach() -endfunction() - -stamp(${catkin_EXTRAS_DIR}/interrogate_setup_dot_py.py) diff --git a/cmake/catkin/catkin_workspace.cmake b/cmake/catkin/catkin_workspace.cmake deleted file mode 100644 index d346ff0fde..0000000000 --- a/cmake/catkin/catkin_workspace.cmake +++ /dev/null @@ -1,122 +0,0 @@ -# -# Search all subfolders in the workspace for ``package.xml`` files. -# Based on the dependencies specified in the ``build_depends``, -# ``buildtool_depends`` and (as of package format version 2) -# ``test_depends`` tags it performs a topological sort and calls -# ``add_subdirectory()`` for each directory. -# -# The functions is only called in catkin's ``toplevel.cmake``, which -# is usually symlinked to the workspace root directory (which -# contains multiple packages). 
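As a rough, non-literal sketch of how this function is reached: the CMakeLists.txt symlinked into the workspace source root (catkin's toplevel.cmake) essentially boils down to locating catkin and handing control to catkin_workspace():

    cmake_minimum_required(VERSION 3.0)   # version requirement shown for illustration only
    find_package(catkin REQUIRED)         # loads catkin's CMake extras
    catkin_workspace()                    # topologically orders the packages and add_subdirectory()s each one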
-# -function(catkin_workspace) - debug_message(10 "catkin_workspace() called in file '${CMAKE_CURRENT_LIST_FILE}'") - - # set global output directories for artifacts and create them if necessary - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/lib) - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/lib) - if(NOT IS_DIRECTORY ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}) - file(MAKE_DIRECTORY ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}) - endif() - - # tools/libraries.cmake - configure_shared_library_build_settings() - - set(CATKIN_WHITELIST_PACKAGES "" CACHE STRING "List of ';' separated packages to build") - set(CATKIN_BLACKLIST_PACKAGES "" CACHE STRING "List of ';' separated packages to exclude") - if(NOT "${CATKIN_WHITELIST_PACKAGES}" STREQUAL "") - message(STATUS "Using CATKIN_WHITELIST_PACKAGES: ${CATKIN_WHITELIST_PACKAGES}") - endif() - if(NOT "${CATKIN_BLACKLIST_PACKAGES}" STREQUAL "") - message(STATUS "Using CATKIN_BLACKLIST_PACKAGES: ${CATKIN_BLACKLIST_PACKAGES}") - endif() - - assert(catkin_EXTRAS_DIR) - em_expand( - ${catkin_EXTRAS_DIR}/templates/order_packages.context.py.in - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/order_packages.py - ${catkin_EXTRAS_DIR}/em/order_packages.cmake.em - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/order_packages.cmake - ) - debug_message(10 "catkin_workspace() including order_packages.cmake") - include(${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/order_packages.cmake) - - if(CATKIN_ORDERED_PACKAGES) - set(CATKIN_NONCONFORMANT_METAPACKAGE FALSE) - set(CATKIN_NONHOMOGENEOUS_WORKSPACE FALSE) - list(LENGTH CATKIN_ORDERED_PACKAGES count) - message(STATUS "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - message(STATUS "~~ traversing ${count} packages in topological order:") - math(EXPR range "${count} - 1") - foreach(index RANGE ${range}) - list(GET CATKIN_ORDERED_PACKAGES ${index} name) - list(GET CATKIN_ORDERED_PACKAGE_PATHS ${index} path) - list(GET CATKIN_ORDERED_PACKAGES_IS_META ${index} is_meta) - list(GET CATKIN_ORDERED_PACKAGES_BUILD_TYPE ${index} build_type) - if(${is_meta}) - message(STATUS "~~ - ${name} (metapackage)") - # verify that CMakeLists.txt of metapackage conforms to standard - set(metapackage_arguments "") - assert(CATKIN_METAPACKAGE_CMAKE_TEMPLATE) - configure_file(${CATKIN_METAPACKAGE_CMAKE_TEMPLATE} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/metapackages/${name}/CMakeLists.txt - @ONLY) - if(EXISTS ${CMAKE_SOURCE_DIR}/${path}/CMakeLists.txt) - # compare CMakeLists.txt with standard content - file(STRINGS ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/metapackages/${name}/CMakeLists.txt generated_cmakelists) - file(STRINGS ${path}/CMakeLists.txt existing_cmakelists) - if(NOT "${generated_cmakelists}" STREQUAL "${existing_cmakelists}") - set(CATKIN_NONHOMOGENEOUS_WORKSPACE TRUE) - message("WARNING: The CMakeLists.txt of the metapackage '${name}' contains non standard content. Use the content of the following file instead: ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/metapackages/${name}/CMakeLists.txt") - endif() - else() - message("WARNING: The metapackage '${name}' has no CMakeLists.txt. Please add one to the package source. 
You can use the following file: ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/metapackages/${name}/CMakeLists.txt") - endif() - else() - if(${build_type} MATCHES catkin) - message(STATUS "~~ - ${name}") - else() - set(CATKIN_NONHOMOGENEOUS_WORKSPACE TRUE) - if(${build_type} MATCHES cmake) - message(STATUS "~~ - ${name} (plain cmake)") - else() - message(STATUS "~~ - ${name} (unknown)") - message(WARNING "Unknown build type '${build_type}' for package '${name}'") - endif() - endif() - endif() - endforeach() - message(STATUS "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - - if(${CATKIN_NONCONFORMANT_METAPACKAGE}) - message(FATAL_ERROR "This workspace contains metapackages with a non-standard CMakeLists.txt.") - endif() - if(${CATKIN_NONHOMOGENEOUS_WORKSPACE}) - message(FATAL_ERROR "This workspace contains non-catkin packages in it, and catkin cannot build a non-homogeneous workspace without isolation. Try the 'catkin_make_isolated' command instead.") - endif() - - foreach(index RANGE ${range}) - list(GET CATKIN_ORDERED_PACKAGES ${index} name) - list(GET CATKIN_ORDERED_PACKAGE_PATHS ${index} path) - list(GET CATKIN_ORDERED_PACKAGES_IS_META ${index} is_meta) - list(GET CATKIN_ORDERED_PACKAGES_BUILD_TYPE ${index} build_type) - if(${is_meta}) - message(STATUS "+++ processing catkin metapackage: '${name}'") - if(EXISTS ${CMAKE_SOURCE_DIR}/${path}/CMakeLists.txt) - message(STATUS "==> add_subdirectory(${path})") - add_subdirectory(${path}) - else() - message(STATUS "==> add_subdirectory(${path}) (using generated file from /catkin_generated/metapackages/${name})") - message("WARNING: Add a CMakeLists.txt file to the metapackage '${name}'") - add_subdirectory(${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/metapackages/${name} ${CMAKE_BINARY_DIR}/${path}) - endif() - elseif(${build_type} MATCHES catkin) - message(STATUS "+++ processing catkin package: '${name}'") - message(STATUS "==> add_subdirectory(${path})") - add_subdirectory(${path}) - else() - message(FATAL_ERROR "Non-catkin package found, non-homogeneous workspaces are not supported.") - endif() - endforeach() - endif() -endfunction() diff --git a/cmake/catkin/debug_message.cmake b/cmake/catkin/debug_message.cmake deleted file mode 100644 index bdb94e5a11..0000000000 --- a/cmake/catkin/debug_message.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Log levels -# 0 Normal use -# 1 Catkin developer use (Stuff being developed) -# 2 Catkin developer use (Stuff working) -# 3 Also Print True Assert Statements - -macro(debug_message level) - set(loglevel ${CATKIN_LOG}) - if(NOT loglevel) - set(loglevel 0) - endif() - - if(NOT ${level} GREATER ${loglevel}) - message(STATUS " ${ARGN}") - endif() -endmacro() diff --git a/cmake/catkin/em/order_packages.cmake.em b/cmake/catkin/em/order_packages.cmake.em deleted file mode 100644 index 087d4d802e..0000000000 --- a/cmake/catkin/em/order_packages.cmake.em +++ /dev/null @@ -1,56 +0,0 @@ -# generated from catkin/cmake/em/order_packages.cmake.em -@{ -import os -try: - from catkin_pkg.cmake import get_metapackage_cmake_template_path -except ImportError as e: - raise RuntimeError('ImportError: "from catkin_pkg.cmake import get_metapackage_cmake_template_path" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' 
% e) -try: - from catkin_pkg.topological_order import topological_order -except ImportError as e: - raise RuntimeError('ImportError: "from catkin_pkg.topological_order import topological_order" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' % e) -try: - from catkin_pkg.package import InvalidPackage -except ImportError as e: - raise RuntimeError('ImportError: "from catkin_pkg.package import InvalidPackage" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' % e) -# vars defined in order_packages.context.py.in -try: - ordered_packages = topological_order(os.path.normpath(source_root_dir), whitelisted=whitelisted_packages, blacklisted=blacklisted_packages, underlay_workspaces=underlay_workspaces) -except InvalidPackage as e: - print('message(FATAL_ERROR "%s")' % ('%s' % e).replace('"', '\\"')) - ordered_packages = [] -fatal_error = False -}@ - -set(CATKIN_ORDERED_PACKAGES "") -set(CATKIN_ORDERED_PACKAGE_PATHS "") -set(CATKIN_ORDERED_PACKAGES_IS_META "") -set(CATKIN_ORDERED_PACKAGES_BUILD_TYPE "") -@[for path, package in ordered_packages]@ -@[if path is None]@ -message(FATAL_ERROR "Circular dependency in subset of packages:\n@package") -@{ -fatal_error = True -}@ -@[elif package.name != 'catkin']@ -list(APPEND CATKIN_ORDERED_PACKAGES "@(package.name)") -list(APPEND CATKIN_ORDERED_PACKAGE_PATHS "@(path.replace('\\','/'))") -list(APPEND CATKIN_ORDERED_PACKAGES_IS_META "@(str('metapackage' in [e.tagname for e in package.exports]))") -list(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE "@(str([e.content for e in package.exports if e.tagname == 'build_type'][0]) if 'build_type' in [e.tagname for e in package.exports] else 'catkin')") -@{ -deprecated = [e for e in package.exports if e.tagname == 'deprecated'] -}@ -@[if deprecated]@ -message("WARNING: Package '@(package.name)' is deprecated@(' (%s)' % deprecated[0].content if deprecated[0].content else '')") -@[end if]@ -@[end if]@ -@[end for]@ - -@[if not fatal_error]@ -@{ -message_generators = [package.name for (_, package) in ordered_packages if 'message_generator' in [e.tagname for e in package.exports]] -}@ -set(CATKIN_MESSAGE_GENERATORS @(' '.join(message_generators))) -@[end if]@ - -set(CATKIN_METAPACKAGE_CMAKE_TEMPLATE "@(get_metapackage_cmake_template_path().replace('\\','/'))") diff --git a/cmake/catkin/em/pkg.pc.em b/cmake/catkin/em/pkg.pc.em deleted file mode 100644 index a4f72576cf..0000000000 --- a/cmake/catkin/em/pkg.pc.em +++ /dev/null @@ -1,8 +0,0 @@ -prefix=@PROJECT_SPACE_DIR - -Name: @(CATKIN_PACKAGE_PREFIX + PROJECT_NAME) -Description: Description of @PROJECT_NAME -Version: @PROJECT_VERSION -Cflags: @(' '.join(['-I%s' % include for include in PROJECT_PKG_CONFIG_INCLUDE_DIRS])) -Libs: -L@PROJECT_SPACE_DIR/lib @(' '.join(PKG_CONFIG_LIBRARIES_WITH_PREFIX)) -Requires: @(PROJECT_CATKIN_DEPENDS) diff --git a/cmake/catkin/em_expand.cmake b/cmake/catkin/em_expand.cmake deleted file mode 100644 index 0cca5f4667..0000000000 --- a/cmake/catkin/em_expand.cmake +++ /dev/null @@ -1,31 +0,0 @@ -macro(em_expand context_in context_out em_file_in file_out) - assert_file_exists("${context_in}" "input file for context missing") - assert_file_exists("${em_file_in}" "template file missing") - debug_message(2 "configure_file(${context_in}, ${context_out})") - configure_file(${context_in} ${context_out} @ONLY) - assert_file_exists("${context_out}" "context file was not generated correctly") - - stamp(${em_file_in}) - - # create directory if necessary - 
get_filename_component(_folder_out ${file_out} PATH) - if(NOT IS_DIRECTORY ${_folder_out}) - file(MAKE_DIRECTORY ${_folder_out}) - endif() - - debug_message(2 "Evaluate template '${em_file_in}' to '${file_out}' (with context from '${context_out}')") - assert(EMPY_SCRIPT) - # since empy contains a specific python version in its shebang line - # override the used python version by invoking it explicitly - set(command "${PYTHON_EXECUTABLE};${EMPY_SCRIPT}") - # prepend environment if set - if(CATKIN_ENV) - set(command ${CATKIN_ENV} ${command}) - endif() - safe_execute_process(COMMAND - ${command} - --raw-errors - -F ${context_out} - -o ${file_out} - ${em_file_in}) -endmacro() diff --git a/cmake/catkin/empy.cmake b/cmake/catkin/empy.cmake deleted file mode 100644 index 3328ce513d..0000000000 --- a/cmake/catkin/empy.cmake +++ /dev/null @@ -1,39 +0,0 @@ -function(find_python_module module) - # cribbed from http://www.cmake.org/pipermail/cmake/2011-January/041666.html - string(TOUPPER ${module} module_upper) - if(NOT PY_${module_upper}) - if(ARGC GREATER 1 AND ARGV1 STREQUAL "REQUIRED") - set(${module}_FIND_REQUIRED TRUE) - endif() - # A module's location is usually a directory, but for - # binary modules - # it's a .so file. - execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c" "import re, ${module}; print(re.compile('/__init__.py.*').sub('',${module}.__file__))" - RESULT_VARIABLE _${module}_status - OUTPUT_VARIABLE _${module}_location - ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) - if(NOT _${module}_status) - set(PY_${module_upper} ${_${module}_location} CACHE STRING "Location of Python module ${module}") - endif(NOT _${module}_status) - endif(NOT PY_${module_upper}) - include(FindPackageHandleStandardArgs) - find_package_handle_standard_args(PY_${module} DEFAULT_MSG PY_${module_upper}) -endfunction(find_python_module) - -if(NOT EMPY_SCRIPT) - find_program(EMPY_EXECUTABLE empy) - if(NOT EMPY_EXECUTABLE) - # On OSX, there's an em.py, but not an executable empy script - find_python_module(em) - if(NOT PY_EM) - message(FATAL_ERROR "Unable to find either executable 'empy' or Python module 'em'... 
try installing the package 'python-empy'") - endif() - # ensure to use cmake-style path separators on Windows - file(TO_CMAKE_PATH "${PY_EM}" EMPY_SCRIPT) - else() - # ensure to use cmake-style path separators on Windows - file(TO_CMAKE_PATH "${EMPY_EXECUTABLE}" EMPY_SCRIPT) - endif() - set(EMPY_SCRIPT "${EMPY_SCRIPT}" CACHE STRING "Empy script" FORCE) -endif() -#message(STATUS "Using empy: ${EMPY_SCRIPT}") diff --git a/cmake/catkin/env-hooks/05.catkin-test-results.bat.develspace.in b/cmake/catkin/env-hooks/05.catkin-test-results.bat.develspace.in deleted file mode 100644 index e2eb02379b..0000000000 --- a/cmake/catkin/env-hooks/05.catkin-test-results.bat.develspace.in +++ /dev/null @@ -1,4 +0,0 @@ -REM generated from catkin/cmake/env-hooks/05.catkin-test-results.bat.develspace.in - -set CATKIN_TEST_RESULTS_DIR="@CATKIN_TEST_RESULTS_DIR@" -set ROS_TEST_RESULTS_DIR=%CATKIN_TEST_RESULTS_DIR% diff --git a/cmake/catkin/env-hooks/05.catkin-test-results.sh.develspace.in b/cmake/catkin/env-hooks/05.catkin-test-results.sh.develspace.in deleted file mode 100644 index dcbf018650..0000000000 --- a/cmake/catkin/env-hooks/05.catkin-test-results.sh.develspace.in +++ /dev/null @@ -1,4 +0,0 @@ -# generated from catkin/cmake/env-hooks/05.catkin-test-results.sh.develspace.in - -export CATKIN_TEST_RESULTS_DIR="@CATKIN_TEST_RESULTS_DIR@" -export ROS_TEST_RESULTS_DIR="$CATKIN_TEST_RESULTS_DIR" diff --git a/cmake/catkin/env-hooks/05.catkin_make.bash b/cmake/catkin/env-hooks/05.catkin_make.bash deleted file mode 100644 index 621d668a46..0000000000 --- a/cmake/catkin/env-hooks/05.catkin_make.bash +++ /dev/null @@ -1,67 +0,0 @@ -function _catkin_make() -{ - local cur prev - cur=${COMP_WORDS[COMP_CWORD]} - prev=${COMP_WORDS[COMP_CWORD-1]} - - # autocomplete path arguments for -C, --directory, --source, --build - case $prev in - -C|--directory|--source|--build) - _filedir -d - return 0 - ;; - esac - - if [[ "$cur" == -DCMAKE_BUILD_TYPE=* ]]; then - # autocomplete CMake argument CMAKE_BUILD_TYPE with its options - COMPREPLY=( $( compgen -P "-DCMAKE_BUILD_TYPE=" -W "None Debug Release RelWithDebInfo MinSizeRel" -- "${cur:19}" ) ) - elif [[ "$cur" == -DCATKIN_ENABLE_TESTING=* ]]; then - # autocomplete catkin argument CATKIN_ENABLE_TESTING with its options - COMPREPLY=( $( compgen -P "-DCATKIN_ENABLE_TESTING=" -W "0 1" -- "${cur:24}" ) ) - elif [[ "$cur" == -DCATKIN_DEVEL_PREFIX=* || "$cur" == -DCMAKE_INSTALL_PREFIX=* ]]; then - COMPREPLY=() - elif [[ "$cur" == -* ]]; then - local opts="$( _parse_help "$1" )" - [[ $opts ]] || opts="$( _parse_usage "$1" )" - if [[ "$cur" == -* ]]; then - # suggest some common CMake arguments - opts="$opts -DCATKIN_DEVEL_PREFIX= -DCATKIN_ENABLE_TESTING= -DCMAKE_INSTALL_PREFIX= -DCMAKE_BUILD_TYPE=" - fi - COMPREPLY=( $( compgen -W "$opts" -- "$cur" ) ) - [[ $COMPREPLY == *= ]] && compopt -o nospace - else - # check if custom workspace root has been specified on the command line - local workspace_dir="." 
- for (( i=0; i < ${#COMP_WORDS[@]}; i++ )); do - if [[ ${COMP_WORDS[i]} == -C || ${COMP_WORDS[i]} == --directory ]]; then - # eval to expand tilde - eval workspace_dir=${COMP_WORDS[i+1]} - fi - done - # check if custom build folder has been specified on the command line - local build_dir="build" - for (( i=0; i < ${#COMP_WORDS[@]}; i++ )); do - if [[ ${COMP_WORDS[i]} == --build ]]; then - # eval to expand tilde - eval build_dir=${COMP_WORDS[i+1]} - fi - done - - # determine location of Makefile - local makefile_dir - if [[ "$build_dir" = /* ]]; then - makefile_dir="$build_dir" - else - makefile_dir="$workspace_dir/$build_dir" - fi - COMPREPLY=() - if [ -f "$makefile_dir/Makefile" ]; then - cur=${COMP_WORDS[COMP_CWORD]} - COMPREPLY=( $( compgen -W "`make -C $makefile_dir -qp 2>/dev/null | awk -F':' '/^[a-zA-Z0-9][a-zA-Z0-9_\.]*:/ { print $1 }'`" -- $cur )) - elif [ -f "$makefile_dir/build.ninja" ]; then - cur=${COMP_WORDS[COMP_CWORD]} - COMPREPLY=( $( compgen -W "`ninja -C $makefile_dir -t targets 2>/dev/null | awk -F':' '/^[a-zA-Z0-9][a-zA-Z0-9_\.]*:/ { print $1 }'`" -- $cur )) - fi - fi -} && -complete -F _catkin_make catkin_make diff --git a/cmake/catkin/env-hooks/05.catkin_make_isolated.bash b/cmake/catkin/env-hooks/05.catkin_make_isolated.bash deleted file mode 100644 index 99e5d30164..0000000000 --- a/cmake/catkin/env-hooks/05.catkin_make_isolated.bash +++ /dev/null @@ -1,65 +0,0 @@ -function _catkin_make_isolated() -{ - local cur prev - cur=${COMP_WORDS[COMP_CWORD]} - prev=${COMP_WORDS[COMP_CWORD-1]} - - # autocomplete path arguments for -C, --directory, --source, --build, --devel, --install - case $prev in - -C|--directory|--source|--build|--devel|--install) - _filedir -d - return 0 - ;; - esac - - if [[ "$cur" == -DCMAKE_BUILD_TYPE=* ]]; then - # autocomplete CMake argument CMAKE_BUILD_TYPE with its options - COMPREPLY=( $( compgen -P "-DCMAKE_BUILD_TYPE=" -W "None Debug Release RelWithDebInfo MinSizeRel" -- "${cur:19}" ) ) - elif [[ "$cur" == -DCATKIN_ENABLE_TESTING=* ]]; then - # autocomplete catkin argument CATKIN_ENABLE_TESTING with its options - COMPREPLY=( $( compgen -P "-DCATKIN_ENABLE_TESTING=" -W "0 1" -- "${cur:24}" ) ) - elif [[ "$cur" == -* ]]; then - local opts="$( _parse_help "$1" )" - [[ $opts ]] || opts="$( _parse_usage "$1" )" - if [[ "$cur" == -* ]]; then - # suggest some common CMake arguments - opts="$opts -DCATKIN_ENABLE_TESTING= -DCMAKE_BUILD_TYPE=" - fi - COMPREPLY=( $( compgen -W "$opts" -- "$cur" ) ) - [[ $COMPREPLY == *= ]] && compopt -o nospace - else - # check if custom workspace root has been specified on the command line - local workspace_dir="." 
- for (( i=0; i < ${#COMP_WORDS[@]}; i++ )); do - if [[ ${COMP_WORDS[i]} == -C || ${COMP_WORDS[i]} == --directory ]]; then - # eval to expand tilde - eval workspace_dir=${COMP_WORDS[i+1]} - fi - done - # check if custom build folder has been specified on the command line - local build_dir="build_isolated" - for (( i=0; i < ${#COMP_WORDS[@]}; i++ )); do - if [[ ${COMP_WORDS[i]} == --build ]]; then - # eval to expand tilde - eval build_dir=${COMP_WORDS[i+1]} - fi - done - - # determine location of Makefile - local makefile_dir - if [[ "$build_dir" = /* ]]; then - makefile_dir="$build_dir" - else - makefile_dir="$workspace_dir/$build_dir" - fi - COMPREPLY=() - if [ -f "$makefile_dir/Makefile" ]; then - cur=${COMP_WORDS[COMP_CWORD]} - COMPREPLY=( $( compgen -W "`make -C $makefile_dir -qp 2>/dev/null | awk -F':' '/^[a-zA-Z0-9][a-zA-Z0-9_\.]*:/ { print $1 }'`" -- $cur )) - elif [ -f "$makefile_dir/build.ninja" ]; then - cur=${COMP_WORDS[COMP_CWORD]} - COMPREPLY=( $( compgen -W "`ninja -C $makefile_dir -t targets 2>/dev/null | awk -F':' '/^[a-zA-Z0-9][a-zA-Z0-9_\.]*:/ { print $1 }'`" -- $cur )) - fi - fi -} && -complete -F _catkin_make_isolated catkin_make_isolated diff --git a/cmake/catkin/find_program_required.cmake b/cmake/catkin/find_program_required.cmake deleted file mode 100644 index 4cd3ab0c79..0000000000 --- a/cmake/catkin/find_program_required.cmake +++ /dev/null @@ -1,9 +0,0 @@ -function(find_program_required ARG_VAR ARG_PROGRAM_NAME) - cmake_parse_arguments(ARG "NO_DEFAULT_PATH;NO_CMAKE_FIND_ROOT_PATH" "" "PATHS" ${ARGN}) - find_program(${ARG_VAR} ${ARG_PROGRAM_NAME} PATHS ${ARG_PATHS} ${ARG_NO_DEFAULT_PATH} ${ARG_NO_CMAKE_FIND_ROOT_PATH}) - if(NOT ${ARG_VAR}) - message(FATAL_ERROR "${ARG_PROGRAM_NAME} not found") - else() - debug_message(1 "${ARG_PROGRAM_NAME} was found at ${${ARG_VAR}}") - endif() -endfunction() diff --git a/cmake/catkin/interrogate_setup_dot_py.py b/cmake/catkin/interrogate_setup_dot_py.py deleted file mode 100644 index 26882f02aa..0000000000 --- a/cmake/catkin/interrogate_setup_dot_py.py +++ /dev/null @@ -1,250 +0,0 @@ -#!/usr/bin/env python - -# Software License Agreement (BSD License) -# -# Copyright (c) 2012, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -from __future__ import print_function -import os -import sys - -import distutils.core -try: - import setuptools -except ImportError: - pass - -from argparse import ArgumentParser - - -def _get_locations(pkgs, package_dir): - """ - based on setuptools logic and the package_dir dict, builds a dict - of location roots for each pkg in pkgs. - See http://docs.python.org/distutils/setupscript.html - - :returns: a dict {pkgname: root} for each pkgname in pkgs (and each of their parents) - """ - # package_dir contains a dict {package_name: relativepath} - # Example {'': 'src', 'foo': 'lib', 'bar': 'lib2'} - # - # '' means where to look for any package unless a parent package - # is listed so package bar.pot is expected at lib2/bar/pot, - # whereas package sup.dee is expected at src/sup/dee - # - # if package_dir does not state anything about a package, - # setuptool expects the package folder to be in the root of the - # project - locations = {} - allprefix = package_dir.get('', '') - for pkg in pkgs: - parent_location = None - splits = pkg.split('.') - # we iterate over compound name from parent to child - # so once we found parent, children just append to their parent - for key_len in range(len(splits)): - key = '.'.join(splits[:key_len + 1]) - if key not in locations: - if key in package_dir: - locations[key] = package_dir[key] - elif parent_location is not None: - locations[key] = parent_location - else: - locations[key] = allprefix - parent_location = locations[key] - return locations - - -def generate_cmake_file(package_name, version, scripts, package_dir, pkgs, modules): - """ - Generates lines to add to a cmake file which will set variables - - :param version: str, format 'int.int.int' - :param scripts: [list of str]: relative paths to scripts - :param package_dir: {modulename: path} - :pkgs: [list of str] python_packages declared in catkin package - :modules: [list of str] python modules - """ - prefix = '%s_SETUP_PY' % package_name - result = [] - result.append(r'set(%s_VERSION "%s")' % (prefix, version)) - result.append(r'set(%s_SCRIPTS "%s")' % (prefix, ';'.join(scripts))) - - # Remove packages with '.' separators. - # - # setuptools allows specifying submodules in other folders than - # their parent - # - # The symlink approach of catkin does not work with such submodules. - # In the common case, this does not matter as the submodule is - # within the containing module. We verify this assumption, and if - # it passes, we remove submodule packages. - locations = _get_locations(pkgs, package_dir) - for pkgname, location in locations.items(): - if not '.' 
in pkgname: - continue - splits = pkgname.split('.') - # hack: ignore write-combining setup.py files for msg and srv files - if splits[1] in ['msg', 'srv']: - continue - # check every child has the same root folder as its parent - parent_name = '.'.join(splits[:1]) - if location != locations[parent_name]: - raise RuntimeError( - "catkin_export_python does not support setup.py files that combine across multiple directories: %s in %s, %s in %s" % (pkgname, location, parent_name, locations[parent_name])) - - # If checks pass, remove all submodules - pkgs = [p for p in pkgs if '.' not in p] - - resolved_pkgs = [] - for pkg in pkgs: - resolved_pkgs += [os.path.join(locations[pkg], pkg)] - - result.append(r'set(%s_PACKAGES "%s")' % (prefix, ';'.join(pkgs))) - result.append(r'set(%s_PACKAGE_DIRS "%s")' % (prefix, ';'.join(resolved_pkgs).replace("\\", "/"))) - - # skip modules which collide with package names - filtered_modules = [] - for modname in modules: - splits = modname.split('.') - # check all parents too - equals_package = [('.'.join(splits[:-i]) in locations) for i in range(len(splits))] - if any(equals_package): - continue - filtered_modules.append(modname) - module_locations = _get_locations(filtered_modules, package_dir) - - result.append(r'set(%s_MODULES "%s")' % (prefix, ';'.join(['%s.py' % m.replace('.', '/') for m in filtered_modules]))) - result.append(r'set(%s_MODULE_DIRS "%s")' % (prefix, ';'.join([module_locations[m] for m in filtered_modules]).replace("\\", "/"))) - - return result - - -def _create_mock_setup_function(package_name, outfile): - """ - Creates a function to call instead of distutils.core.setup or - setuptools.setup, which just captures some args and writes them - into a file that can be used from cmake - - :param package_name: name of the package - :param outfile: filename that cmake will use afterwards - :returns: a function to replace disutils.core.setup and setuptools.setup - """ - - def setup(*args, **kwargs): - ''' - Checks kwargs and writes a scriptfile - ''' - if 'version' not in kwargs: - sys.stderr.write("\n*** Unable to find 'version' in setup.py of %s\n" % package_name) - raise RuntimeError("version not found in setup.py") - version = kwargs['version'] - package_dir = kwargs.get('package_dir', {}) - - pkgs = kwargs.get('packages', []) - scripts = kwargs.get('scripts', []) - modules = kwargs.get('py_modules', []) - - unsupported_args = [ - 'entry_points', - 'exclude_package_data', - 'ext_modules ', - 'ext_package', - 'include_package_data', - 'namespace_packages', - 'setup_requires', - 'use_2to3', - 'zip_safe'] - used_unsupported_args = [arg for arg in unsupported_args if arg in kwargs] - if used_unsupported_args: - sys.stderr.write("*** Arguments %s to setup() not supported in catkin devel space in setup.py of %s\n" % (used_unsupported_args, package_name)) - - result = generate_cmake_file(package_name=package_name, - version=version, - scripts=scripts, - package_dir=package_dir, - pkgs=pkgs, - modules=modules) - with open(outfile, 'w') as out: - out.write('\n'.join(result)) - - return setup - - -def main(): - """ - Script main, parses arguments and invokes Dummy.setup indirectly. - """ - parser = ArgumentParser(description='Utility to read setup.py values from cmake macros. 
Creates a file with CMake set commands setting variables.') - parser.add_argument('package_name', help='Name of catkin package') - parser.add_argument('setupfile_path', help='Full path to setup.py') - parser.add_argument('outfile', help='Where to write result to') - - args = parser.parse_args() - - # print("%s" % sys.argv) - # PACKAGE_NAME = sys.argv[1] - # OUTFILE = sys.argv[3] - # print("Interrogating setup.py for package %s into %s " % (PACKAGE_NAME, OUTFILE), - # file=sys.stderr) - - # print("executing %s" % args.setupfile_path) - - # be sure you're in the directory containing - # setup.py so the sys.path manipulation works, - # so the import of __version__ works - os.chdir(os.path.dirname(os.path.abspath(args.setupfile_path))) - - # patch setup() function of distutils and setuptools for the - # context of evaluating setup.py - try: - fake_setup = _create_mock_setup_function(package_name=args.package_name, - outfile=args.outfile) - - distutils_backup = distutils.core.setup - distutils.core.setup = fake_setup - try: - setuptools_backup = setuptools.setup - setuptools.setup = fake_setup - except NameError: - pass - - with open(args.setupfile_path, 'r') as fh: - exec(fh.read()) - finally: - distutils.core.setup = distutils_backup - try: - setuptools.setup = setuptools_backup - except NameError: - pass - -if __name__ == '__main__': - main() diff --git a/cmake/catkin/legacy.cmake b/cmake/catkin/legacy.cmake deleted file mode 100644 index a4c65c2146..0000000000 --- a/cmake/catkin/legacy.cmake +++ /dev/null @@ -1,3 +0,0 @@ -function(catkin_stack) - message(FATAL_ERROR "catkin_stack() is not supported by catkin as of groovy. Please update the package to the latest catkin version.") -endfunction() diff --git a/cmake/catkin/list_append_deduplicate.cmake b/cmake/catkin/list_append_deduplicate.cmake deleted file mode 100644 index e59a61bb14..0000000000 --- a/cmake/catkin/list_append_deduplicate.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# -# Append elements to a list and remove existing duplicates from the list. -# -# .. note:: Using CMake's ``list(APPEND ..)`` and -# ``list(REMOVE_DUPLICATES ..)`` is not sufficient since its -# implementation uses a set internally which makes the operation -# unstable. -# -macro(list_append_deduplicate listname) - if(NOT "${ARGN}" STREQUAL "") - if(${listname}) - list(REMOVE_ITEM ${listname} ${ARGN}) - endif() - list(APPEND ${listname} ${ARGN}) - endif() -endmacro() diff --git a/cmake/catkin/list_append_unique.cmake b/cmake/catkin/list_append_unique.cmake deleted file mode 100644 index 4c9bb1df86..0000000000 --- a/cmake/catkin/list_append_unique.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# -# Append elements to a list if they are not already in the list. -# -# .. note:: Using CMake's ``list(APPEND ..)`` and -# ``list(REMOVE_DUPLICATES ..)`` is not sufficient since its -# implementation uses a set internally which makes the operation -# unstable. -# -macro(list_append_unique listname) - foreach(_item ${ARGN}) - list(FIND ${listname} ${_item} _index) - if(_index EQUAL -1) - list(APPEND ${listname} ${_item}) - endif() - endforeach() -endmacro() diff --git a/cmake/catkin/list_insert_in_workspace_order.cmake b/cmake/catkin/list_insert_in_workspace_order.cmake deleted file mode 100644 index 57ade4a493..0000000000 --- a/cmake/catkin/list_insert_in_workspace_order.cmake +++ /dev/null @@ -1,35 +0,0 @@ -# -# Insert elements to a list in the same order as the chained catkin workspaces. 
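A small illustrative example (values made up) of how the two list helpers above differ from a plain list(APPEND): list_append_unique() skips items that are already present, while list_append_deduplicate() removes any existing copies first and appends the new values at the end:

    set(libs "foo;bar")
    list_append_unique(libs bar baz)        # libs is now foo;bar;baz     (bar skipped)
    list_append_deduplicate(libs foo opt)   # libs is now bar;baz;foo;opt (existing foo removed, then re-appended)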
-# -set(CATKIN_ORDERED_SPACES "") -foreach(_space ${CATKIN_DEVEL_PREFIX} ${CATKIN_WORKSPACES}) - list(APPEND CATKIN_ORDERED_SPACES ${_space}) - if(NOT EXISTS "${_space}/.catkin") - message(FATAL_ERROR "The path '${_space}' is in CATKIN_WORKSPACES but does not have a .catkin file") - endif() - # prepend to existing list of sourcespaces - file(READ "${_space}/.catkin" _sourcespaces) - list(APPEND CATKIN_ORDERED_SPACES ${_sourcespaces}) -endforeach() - -debug_message(10 "CATKIN_ORDERED_SPACES ${CATKIN_ORDERED_SPACES}") - -macro(list_insert_in_workspace_order listname) - if(NOT "${ARGN}" STREQUAL "") - assert(CATKIN_ENV) - assert(PYTHON_EXECUTABLE) - set(cmd - ${CATKIN_ENV} ${PYTHON_EXECUTABLE} - ${catkin_EXTRAS_DIR}/order_paths.py - ${${PROJECT_NAME}_BINARY_DIR}/catkin_generated/ordered_paths.cmake - --paths-to-order ${ARGN} - --prefixes ${CATKIN_ORDERED_SPACES} - ) - debug_message(10 "list_insert_in_workspace_order() in project '{PROJECT_NAME}' executes: ${cmd}") - safe_execute_process(COMMAND ${cmd}) - include(${${PROJECT_NAME}_BINARY_DIR}/catkin_generated/ordered_paths.cmake) - set(${listname} ${ORDERED_PATHS}) - else() - set(${listname} "") - endif() -endmacro() diff --git a/cmake/catkin/order_paths.py b/cmake/catkin/order_paths.py deleted file mode 100644 index d2a3eca105..0000000000 --- a/cmake/catkin/order_paths.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -from argparse import ArgumentParser -import os -import sys - -try: - from catkin_pkg.workspaces import order_paths -except ImportError as e: - sys.exit('ImportError: "from catkin_pkg.package import parse_package" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' % e) - - -def main(): - """ - Order a list of paths according to a list of prefixes which define the order. - """ - parser = ArgumentParser(description='Utility to order a list of paths according to a list of prefixes. Creates a file with CMake set command setting a variable.') - parser.add_argument('outfile', help='The filename of the generated CMake file') - parser.add_argument('--paths-to-order', nargs='*', help='The semicolon-separated paths to order') - parser.add_argument('--prefixes', nargs='*', help='The semicolon-separated prefixes defining the order') - args = parser.parse_args() - - ordered_paths = order_paths(args.paths_to_order, args.prefixes) - - # create directory if necessary - outdir = os.path.dirname(args.outfile) - if not os.path.exists(outdir): - os.makedirs(outdir) - - with open(args.outfile, 'w') as fh: - fh.write('set(ORDERED_PATHS "%s")' % ';'.join(ordered_paths)) - - -if __name__ == '__main__': - main() diff --git a/cmake/catkin/parse_package_xml.py b/cmake/catkin/parse_package_xml.py deleted file mode 100755 index 71e3e91877..0000000000 --- a/cmake/catkin/parse_package_xml.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -# Software License Agreement (BSD License) -# -# Copyright (c) 2012, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. 
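The order_paths.py removed above delegates the actual sorting to catkin_pkg.workspaces.order_paths; the rough idea is to rank every path by the first workspace prefix it falls under and keep the input order within each prefix. The stand-alone sketch below re-implements that idea for illustration only (it is not the catkin_pkg code, and the example paths are invented):

    def order_paths(paths, prefixes):
        """Sort paths by the index of the first prefix that contains them."""
        def rank(path):
            for i, prefix in enumerate(prefixes):
                if path == prefix or path.startswith(prefix.rstrip('/') + '/'):
                    return i
            return len(prefixes)  # paths under no known prefix go last
        # sorted() is stable, so ties keep their original relative order
        return sorted(paths, key=rank)

    paths = ['/opt/ros/kinetic/include', '/ws/devel/include', '/usr/include']
    prefixes = ['/ws/devel', '/opt/ros/kinetic']
    print(';'.join(order_paths(paths, prefixes)))
    # -> /ws/devel/include;/opt/ros/kinetic/include;/usr/include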
-# * Neither the name of Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -from __future__ import print_function -import sys -import argparse - -try: - from catkin_pkg.package import parse_package -except ImportError as e: - sys.exit('ImportError: "from catkin_pkg.package import parse_package" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' % e) - - -def _get_output(package): - """ - returns a list of strings with cmake commands to execute to set cmake variables - - :param package: Package object - :returns: list of str, lines to output - """ - values = {} - values['VERSION'] = '"%s"' % package.version - - values['MAINTAINER'] = '"%s"' % (', '.join([str(m) for m in package.maintainers])) - - values.update(_get_dependency_values('BUILD_DEPENDS', package.build_depends)) - values.update(_get_dependency_values('BUILDTOOL_DEPENDS', package.buildtool_depends)) - values.update(_get_dependency_values('RUN_DEPENDS', package.run_depends)) - - deprecated = [e.content for e in package.exports if e.tagname == 'deprecated'] - values['DEPRECATED'] = '"%s"' % ((deprecated[0] if deprecated[0] else 'TRUE') if deprecated else '') - - output = [] - output.append(r'set(_CATKIN_CURRENT_PACKAGE "%s")' % package.name) - for k, v in values.items(): - output.append('set(%s_%s %s)' % (package.name, k, v)) - return output - -def _get_dependency_values(key, depends): - values = {} - values[key] = ' '.join(['"%s"' % str(d) for d in depends]) - for d in depends: - comparisons = ['version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt'] - for comp in comparisons: - value = getattr(d, comp, None) - if value is not None: - values['%s_%s_%s' % (key, str(d), comp.upper())] = '"%s"' % value - return values - - -def main(argv=sys.argv[1:]): - """ - Reads given package_xml and writes extracted variables to outfile. 
- """ - parser = argparse.ArgumentParser(description="Read package.xml and write extracted variables to stdout") - parser.add_argument('package_xml') - parser.add_argument('outfile') - args = parser.parse_args(argv) - package = parse_package(args.package_xml) - - lines = _get_output(package) - with open(args.outfile, 'w') as ofile: - ofile.write('\n'.join(lines)) - - -if __name__ == '__main__': - main() diff --git a/cmake/catkin/platform/lsb.cmake b/cmake/catkin/platform/lsb.cmake deleted file mode 100644 index 7aca48b5f5..0000000000 --- a/cmake/catkin/platform/lsb.cmake +++ /dev/null @@ -1,28 +0,0 @@ -find_program(LSB_RELEASE_EXECUTABLE lsb_release) - -if(LSB_RELEASE_EXECUTABLE) - set(LSB_FOUND TRUE CACHE BOOL "lsb_release executable was found") - execute_process(COMMAND ${LSB_RELEASE_EXECUTABLE} -si - OUTPUT_VARIABLE LSB_DISTRIB_ID - OUTPUT_STRIP_TRAILING_WHITESPACE) - string(TOUPPER ${LSB_DISTRIB_ID} v) - set(${v} TRUE CACHE BOOL "LSB Distrib tag") - - execute_process(COMMAND ${LSB_RELEASE_EXECUTABLE} -sd - OUTPUT_VARIABLE LSB_DESCRIPTION - OUTPUT_STRIP_TRAILING_WHITESPACE) - execute_process(COMMAND ${LSB_RELEASE_EXECUTABLE} -sr - OUTPUT_VARIABLE LSB_RELEASE - OUTPUT_STRIP_TRAILING_WHITESPACE) - execute_process(COMMAND ${LSB_RELEASE_EXECUTABLE} -sc - OUTPUT_VARIABLE LSB_CODENAME - OUTPUT_STRIP_TRAILING_WHITESPACE) - - string(TOUPPER ${LSB_DISTRIB_ID} v) - set(${v} TRUE CACHE BOOL "LSB Distribution") - #message(STATUS "${v} is on") - - string(TOUPPER ${LSB_DISTRIB_ID}_${LSB_CODENAME} v) - set(${v} TRUE CACHE BOOL "LSB Distrib - codename tag") - #message(STATUS "${v} is on") -endif() diff --git a/cmake/catkin/platform/ubuntu.cmake b/cmake/catkin/platform/ubuntu.cmake deleted file mode 100644 index 3850736270..0000000000 --- a/cmake/catkin/platform/ubuntu.cmake +++ /dev/null @@ -1,4 +0,0 @@ -# requires lsb to be included first -if(UBUNTU) - # ubuntu-specifics go here -endif() diff --git a/cmake/catkin/platform/windows.cmake b/cmake/catkin/platform/windows.cmake deleted file mode 100644 index 35e61be15c..0000000000 --- a/cmake/catkin/platform/windows.cmake +++ /dev/null @@ -1,56 +0,0 @@ -# BUILD_SHARED_LIBS is a global cmake variable (usually defaults to on) -# that determines the build type of libraries: -# http://www.cmake.org/cmake/help/cmake-2-8-docs.html#variable:BUILD_SHARED_LIBS -# It defaults to shared. - -# Make sure this is already defined as a cached variable (@sa tools/libraries.cmake) -if(NOT DEFINED BUILD_SHARED_LIBS) - option(BUILD_SHARED_LIBS "Build dynamically-linked binaries" ON) -endif() - -# Windows/cmake make things difficult if building dll's. -# By default: -# .dll -> CMAKE_RUNTIME_OUTPUT_DIRECTORY -# .exe -> CMAKE_RUNTIME_OUTPUT_DIRECTORY -# .lib -> CMAKE_LIBRARY_OUTPUT_DIRECTORY -# -# Subsequently, .dll's and .exe's use the same variable and by -# default must be installed to the same place. Which is not -# what we want for catkin. We wish: -# -# .dll -> CATKIN_GLOBAL_BIN_DESTINATION -# .exe -> CATKIN_PACKAGE_BIN_DESTINATION -# .lib -> CATKIN_PACKAGE_LIB_DESTINATION -# -# Since we can't put CMAKE_RUNTIME_OUTPUT_DIRECTORY to -# two values at once, we have this ugly workaround here. -# -# Note - we want to move away from redefining -# add_library style calls, but necessary until a better solution -# is available for windows. Alternatives are to set_target_properties -# on every lib (painful) or to make exe's public (name conflicts -# bound to arise). -# -# Another feature that would be desirable, is to install .pdb's for -# debugging along with the library. 
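To make the output of the parse_package_xml.py removed above concrete: for every dependency it emits the quoted names plus one extra variable per version constraint, all as plain CMake set() commands. The sketch below mirrors the _get_dependency_values logic with a stand-in Dep class; the class, the package name example_pkg and the dependency values are made up for illustration and are not catkin_pkg types.

    class Dep(object):
        """Stand-in for a catkin_pkg dependency object (illustration only)."""
        def __init__(self, name, version_gte=None):
            self.name = name
            self.version_gte = version_gte
        def __str__(self):
            return self.name

    def dependency_values(key, depends):
        # Quoted, space-separated dependency names...
        values = {key: ' '.join('"%s"' % d for d in depends)}
        # ...plus one variable per version constraint that is actually set.
        for d in depends:
            for comp in ('version_lt', 'version_lte', 'version_eq',
                         'version_gte', 'version_gt'):
                value = getattr(d, comp, None)
                if value is not None:
                    values['%s_%s_%s' % (key, d, comp.upper())] = '"%s"' % value
        return values

    deps = [Dep('roscpp'), Dep('catkin', version_gte='0.7.0')]
    for name, value in sorted(dependency_values('BUILD_DEPENDS', deps).items()):
        print('set(example_pkg_%s %s)' % (name, value))
    # set(example_pkg_BUILD_DEPENDS "roscpp" "catkin")
    # set(example_pkg_BUILD_DEPENDS_catkin_VERSION_GTE "0.7.0")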
Can't do that here as we do not -# know for sure if the library target is intended for installation -# or not. Might a good idea to have a script that searches for all -# pdb's under CATKIN_DEVEL_PREFIX and copies them over at the end -# of the cmake build. -if(BUILD_SHARED_LIBS) - if(WIN32) - function(add_library library) - # Check if its an external, imported library (e.g. boost libs via cmake module definition) - list(FIND ARGN "IMPORTED" FIND_POS) - _add_library(${ARGV0} ${ARGN}) - if(${FIND_POS} EQUAL -1) - set_target_properties(${ARGV0} - PROPERTIES - RUNTIME_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_GLOBAL_BIN_DESTINATION} - LIBRARY_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION} - ARCHIVE_OUTPUT_DIRECTORY ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION} - ) - endif() - endfunction() - endif() -endif() diff --git a/cmake/catkin/python.cmake b/cmake/catkin/python.cmake deleted file mode 100644 index c82f31b479..0000000000 --- a/cmake/catkin/python.cmake +++ /dev/null @@ -1,41 +0,0 @@ -# the CMake variable PYTHON_INSTALL_DIR has the same value as the Python function catkin.builder.get_python_install_dir() - -set(PYTHON_VERSION "" CACHE STRING "Specify specific Python version to use ('major.minor' or 'major')") -if(PYTHON_VERSION) - set(PythonInterp_FIND_VERSION "${PYTHON_VERSION}") -endif() - -find_package(PythonInterp REQUIRED) -#message(STATUS "Using PYTHON_EXECUTABLE: ${PYTHON_EXECUTABLE}") - -set(_PYTHON_PATH_VERSION_SUFFIX "${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}") - -set(enable_setuptools_deb_layout OFF) -if(EXISTS "/etc/debian_version") - set(enable_setuptools_deb_layout ON) -endif() -option(SETUPTOOLS_DEB_LAYOUT "Enable debian style python package layout" ${enable_setuptools_deb_layout}) - -if(SETUPTOOLS_DEB_LAYOUT) - #message(STATUS "Using Debian Python package layout") - set(PYTHON_PACKAGES_DIR dist-packages) - set(SETUPTOOLS_ARG_EXTRA "--install-layout=deb") - # use major version only when installing 3.x with debian layout - if("${PYTHON_VERSION_MAJOR}" STREQUAL "3") - set(_PYTHON_PATH_VERSION_SUFFIX "${PYTHON_VERSION_MAJOR}") - endif() -else() - #message(STATUS "Using default Python package layout") - set(PYTHON_PACKAGES_DIR site-packages) - # setuptools is fussy about windows paths, make sure the install prefix is in native format - file(TO_NATIVE_PATH "${CMAKE_INSTALL_PREFIX}" SETUPTOOLS_INSTALL_PREFIX) -endif() - -if(NOT WIN32) - set(PYTHON_INSTALL_DIR lib/python${_PYTHON_PATH_VERSION_SUFFIX}/${PYTHON_PACKAGES_DIR} - CACHE INTERNAL "This needs to be in PYTHONPATH when 'setup.py install' is called. And it needs to match. But setuptools won't tell us where it will install things.") -else() - # Windows setuptools installs to lib/site-packages not lib/python2.7/site-packages - set(PYTHON_INSTALL_DIR lib/${PYTHON_PACKAGES_DIR} - CACHE INTERNAL "This needs to be in PYTHONPATH when 'setup.py install' is called. And it needs to match. 
But setuptools won't tell us where it will install things.") -endif() diff --git a/cmake/catkin/safe_execute_process.cmake b/cmake/catkin/safe_execute_process.cmake deleted file mode 100644 index 3f6ea9aad4..0000000000 --- a/cmake/catkin/safe_execute_process.cmake +++ /dev/null @@ -1,13 +0,0 @@ -macro(safe_execute_process cmd_keyword arg1) - set(_cmd ${arg1}) - foreach(_arg ${ARGN}) - set(_cmd "${_cmd} \"${_arg}\"") - endforeach() - - debug_message(2 "execute_process(${_cmd})") - execute_process(${ARGV} RESULT_VARIABLE _res) - - if(NOT _res EQUAL 0) - message(FATAL_ERROR "execute_process(${_cmd}) returned error code ${_res}") - endif() -endmacro() diff --git a/cmake/catkin/shell.cmake b/cmake/catkin/shell.cmake deleted file mode 100644 index 4b150d419e..0000000000 --- a/cmake/catkin/shell.cmake +++ /dev/null @@ -1,17 +0,0 @@ -function(shell arg1) - set(cmd ${arg1}) - foreach(arg ${ARGN}) - set(cmd "${cmd} ${arg}") - endforeach() - - execute_process(COMMAND ${arg1} ${ARGN} - RESULT_VARIABLE res - OUTPUT_VARIABLE out - ERROR_VARIABLE out) - - if(res EQUAL 0) - debug_message(2 "execute_process(${cmd}) succeeded returning: ${out}") - else() - message(FATAL_ERROR "execute_process(${cmd})\n***FAILED with ERROR:***\n${out}") - endif() -endfunction() diff --git a/cmake/catkin/stamp.cmake b/cmake/catkin/stamp.cmake deleted file mode 100644 index 91de185de5..0000000000 --- a/cmake/catkin/stamp.cmake +++ /dev/null @@ -1,13 +0,0 @@ -# -# :param path: file name -# -# Uses ``configure_file`` to generate a file ``filepath.stamp`` hidden -# somewhere in the build tree. This will cause cmake to rebuild its -# cache when ``filepath`` is modified. -# -function(stamp path) - get_filename_component(filename "${path}" NAME) - configure_file(${path} - ${CMAKE_CURRENT_BINARY_DIR}/catkin_generated/stamps/${PROJECT_NAME}/${filename}.stamp - COPYONLY) -endfunction() diff --git a/cmake/catkin/string_starts_with.cmake b/cmake/catkin/string_starts_with.cmake deleted file mode 100644 index 85483ec9e7..0000000000 --- a/cmake/catkin/string_starts_with.cmake +++ /dev/null @@ -1,22 +0,0 @@ -# -# Check if a string starts with a prefix. -# -# :param str: the string -# :type str: string -# :param prefix: the prefix -# :type prefix: string -# :param var: the output variable name -# :type var: bool -# -function(string_starts_with str prefix var) - string(LENGTH "${str}" str_length) - string(LENGTH "${prefix}" prefix_length) - set(value FALSE) - if(NOT ${str_length} LESS ${prefix_length}) - string(SUBSTRING "${str}" 0 ${prefix_length} str_prefix) - if("${str_prefix}" STREQUAL "${prefix}") - set(value TRUE) - endif() - endif() - set(${var} ${value} PARENT_SCOPE) -endfunction() diff --git a/cmake/catkin/templates/Doxyfile.in b/cmake/catkin/templates/Doxyfile.in deleted file mode 100644 index f64c9bdff0..0000000000 --- a/cmake/catkin/templates/Doxyfile.in +++ /dev/null @@ -1,1630 +0,0 @@ -# Doxyfile 1.7.1 -# generated from catkin/cmake/template/Doxyfile.in -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
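The PYTHON_INSTALL_DIR logic in the python.cmake removed above boils down to a small decision: Debian-style layouts use dist-packages (and drop the minor version for Python 3), everything else uses site-packages under a major.minor directory. A hedged Python restatement of the non-Windows branch, with example version numbers, follows:

    def python_install_dir(major, minor, deb_layout):
        """Mirror of the non-Windows branch of the removed python.cmake."""
        packages_dir = 'dist-packages' if deb_layout else 'site-packages'
        # Debian layout for Python 3 keeps only the major version in the path.
        suffix = str(major) if deb_layout and major == 3 else '%d.%d' % (major, minor)
        return 'lib/python%s/%s' % (suffix, packages_dir)

    print(python_install_dir(3, 8, deb_layout=True))    # lib/python3/dist-packages
    print(python_install_dir(2, 7, deb_layout=False))   # lib/python2.7/site-packages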
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = @PROJECT_NAME@ - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = @PACKAGE_VERSION@ - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = api - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = NO - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. 
If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 4 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this -# tag. The format is ext=language, where ext is a file extension, and language -# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, -# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions -# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. 
Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = YES - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. 
-# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen -# will list include files with double quotes in the documentation -# rather than with sharp brackets. - -FORCE_LOCAL_INCLUDES = NO - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. 
If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen -# will sort the (brief and detailed) documentation of class members so that -# constructors and destructors are listed first. If set to NO (the default) -# the constructors will appear in the respective orders defined by -# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. -# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO -# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. - -SORT_MEMBERS_CTORS_1ST = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST = YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. 
- -SHOW_DIRECTORIES = NO - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed -# by doxygen. The layout file controls the global structure of the generated -# output files in an output format independent way. The create the layout file -# that represents doxygen's defaults, run doxygen with the -l option. -# You can optionally specify a file name after the option, if omitted -# DoxygenLayout.xml will be used as the name of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. 
- -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = @doc_sources@ - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.hpp - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. 
- -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -# INPUT_FILTER = *.hpp - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -# FILTER_PATTERNS = *.py=/usr/bin/doxypy - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = YES - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = NO - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = NO - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = NO - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. 
- -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = YES - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. -# Doxygen will adjust the colors in the stylesheet and background images -# according to this color. Hue is specified as an angle on a colorwheel, -# see http://en.wikipedia.org/wiki/Hue for more information. -# For instance the value 0 represents red, 60 is yellow, 120 is green, -# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. -# The allowed range is 0 to 359. - -HTML_COLORSTYLE_HUE = 220 - -# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of -# the colors in the HTML output. For a value of 0 the output will use -# grayscales only. A value of 255 will produce the most vivid colors. - -HTML_COLORSTYLE_SAT = 100 - -# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to -# the luminance component of the colors in the HTML output. 
Values below -# 100 gradually make the output lighter, whereas values above 100 make -# the output darker. The value divided by 100 is the actual gamma applied, -# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, -# and 100 does not change the gamma. - -HTML_COLORSTYLE_GAMMA = 80 - -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting -# this to NO can help when comparing the output of multiple runs. - -HTML_TIMESTAMP = YES - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = NO - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely identify -# the documentation publisher. This should be a reverse domain-name style -# string, e.g. com.mycompany.MyDocSet.documentation. - -DOCSET_PUBLISHER_ID = org.doxygen.Publisher - -# The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher. - -DOCSET_PUBLISHER_NAME = Publisher - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). 
If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and -# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated -# that can be used as input for Qt's qhelpgenerator to generate a -# Qt Compressed Help (.qch) of the generated HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = org.doxygen.Project - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to -# add. For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the -# custom filter to add. For more information please see -# -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this -# project's -# filter section matches. -# -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files -# will be generated, which together with the HTML files, form an Eclipse help -# plugin. To install this plugin and make it available under the help contents -# menu in Eclipse, the contents of the directory containing the HTML and XML -# files needs to be copied into the plugins directory of eclipse. The name of -# the directory within the plugins directory should be the same as -# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before -# the help appears. - -GENERATE_ECLIPSEHELP = NO - -# A unique identifier for the eclipse help plugin. When installing the plugin -# the directory name containing the HTML and XML files should also have -# this name. 
- -ECLIPSE_DOC_ID = org.doxygen.Project - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to YES, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). -# Windows users are probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list. - -USE_INLINE_TREES = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open -# links to external symbols imported via tag files in a separate window. - -EXT_LINKS_IN_WINDOW = NO - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are -# not supported properly for IE 6.0, but are supported on all modern browsers. -# Note that when changing this option you need to delete any form_*.png files -# in the HTML output before the changes have effect. - -FORMULA_TRANSPARENT = YES - -# When the SEARCHENGINE tag is enabled doxygen will generate a search box -# for the HTML output. The underlying search engine uses javascript -# and DHTML and should work on any modern browser. Note that when using -# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets -# (GENERATE_DOCSET) there is already a search function so this one should -# typically be disabled. For large projects the javascript based search engine -# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution. - -SEARCHENGINE = YES - -# When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a PHP enabled web server instead of at the web client -# using Javascript. Doxygen will generate the search PHP script and index -# file to put on the web server. The advantage of the server -# based approach is that it scales better to large projects and allows -# full text search. The disadvances is that it is more difficult to setup -# and does not have live searching capabilities. 
- -SERVER_BASED_SEARCH = NO - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = YES - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. -# Note that when enabling USE_PDFLATEX this option is only used for -# generating bitmaps for formulas in the HTML output, but not in the -# Makefile that is written to the output directory. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = YES - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = YES - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include -# source code with syntax highlighting in the LaTeX output. -# Note that which sources are shown also depends on other settings -# such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = YES - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = NO - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -# INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -# INCLUDE_FILE_PATTERNS = *.hpp - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = YES - -# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is -# allowed to run in parallel. When set to 0 (the default) doxygen will -# base this on the number of processors available in the system. You can set it -# explicitly to a value larger than 0 to get control over the balance -# between CPU load and processing speed. - -DOT_NUM_THREADS = 0 - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans.ttf - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. 
- -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = NO - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = NO - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = NO - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. 
- -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES diff --git a/cmake/catkin/templates/__init__.py.in b/cmake/catkin/templates/__init__.py.in deleted file mode 100644 index 68f2f652ce..0000000000 --- a/cmake/catkin/templates/__init__.py.in +++ /dev/null @@ -1,37 +0,0 @@ -# generated from catkin/cmake/template/__init__.py.in -# keep symbol table as clean as possible by deleting all unnecessary symbols - -from os import path as os_path -from sys import path as sys_path - -from pkgutil import extend_path - -__extended_path = "@PACKAGE_PYTHONPATH@".split(";") -for p in reversed(__extended_path): - sys_path.insert(0, p) - del p -del sys_path - -__path__ = extend_path(__path__, __name__) -del extend_path - -__execfiles = [] -for p in __extended_path: - src_init_file = os_path.join(p, __name__ + '.py') - if os_path.isfile(src_init_file): - __execfiles.append(src_init_file) - else: - src_init_file = os_path.join(p, __name__, '__init__.py') - if os_path.isfile(src_init_file): - __execfiles.append(src_init_file) - del src_init_file - del p -del os_path -del __extended_path - -for __execfile in __execfiles: - with open(__execfile, 'r') as __fh: - exec(__fh.read()) - del __fh - del __execfile -del __execfiles diff --git a/cmake/catkin/templates/_setup_util.py.in b/cmake/catkin/templates/_setup_util.py.in deleted file mode 100755 index 46c8628052..0000000000 --- a/cmake/catkin/templates/_setup_util.py.in +++ /dev/null @@ -1,287 +0,0 @@ -#!@PYTHON_EXECUTABLE@ - -# Software License Agreement (BSD License) -# -# Copyright (c) 2012, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -'''This file generates shell code for the setup.SHELL scripts to set environment variables''' - -from __future__ import print_function -import argparse -import copy -import errno -import os -import platform -import sys - -CATKIN_MARKER_FILE = '.catkin' - -system = platform.system() -IS_DARWIN = (system == 'Darwin') -IS_WINDOWS = (system == 'Windows') - -# subfolder of workspace prepended to CMAKE_PREFIX_PATH -ENV_VAR_SUBFOLDERS = { - 'CMAKE_PREFIX_PATH': '', - 'CPATH': 'include', - 'LD_LIBRARY_PATH' if not IS_DARWIN else 'DYLD_LIBRARY_PATH': @CATKIN_LIB_ENVIRONMENT_PATHS@, - 'PATH': '@CATKIN_GLOBAL_BIN_DESTINATION@', - 'PKG_CONFIG_PATH': @CATKIN_PKGCONFIG_ENVIRONMENT_PATHS@, - 'PYTHONPATH': '@PYTHON_INSTALL_DIR@', -} - - -def rollback_env_variables(environ, env_var_subfolders): - ''' - Generate shell code to reset environment variables - by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH. - This does not cover modifications performed by environment hooks. - ''' - lines = [] - unmodified_environ = copy.copy(environ) - for key in sorted(env_var_subfolders.keys()): - subfolders = env_var_subfolders[key] - if not isinstance(subfolders, list): - subfolders = [subfolders] - for subfolder in subfolders: - value = _rollback_env_variable(unmodified_environ, key, subfolder) - if value is not None: - environ[key] = value - lines.append(assignment(key, value)) - if lines: - lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH')) - return lines - - -def _rollback_env_variable(environ, name, subfolder): - ''' - For each catkin workspace in CMAKE_PREFIX_PATH remove the first entry from env[NAME] matching workspace + subfolder. - - :param subfolder: str '' or subfoldername that may start with '/' - :returns: the updated value of the environment variable. 
- ''' - value = environ[name] if name in environ else '' - env_paths = [path for path in value.split(os.pathsep) if path] - value_modified = False - if subfolder: - if subfolder.startswith(os.path.sep) or (os.path.altsep and subfolder.startswith(os.path.altsep)): - subfolder = subfolder[1:] - if subfolder.endswith(os.path.sep) or (os.path.altsep and subfolder.endswith(os.path.altsep)): - subfolder = subfolder[:-1] - for ws_path in _get_workspaces(environ, include_fuerte=True, include_non_existing=True): - path_to_find = os.path.join(ws_path, subfolder) if subfolder else ws_path - path_to_remove = None - for env_path in env_paths: - env_path_clean = env_path[:-1] if env_path and env_path[-1] in [os.path.sep, os.path.altsep] else env_path - if env_path_clean == path_to_find: - path_to_remove = env_path - break - if path_to_remove: - env_paths.remove(path_to_remove) - value_modified = True - new_value = os.pathsep.join(env_paths) - return new_value if value_modified else None - - -def _get_workspaces(environ, include_fuerte=False, include_non_existing=False): - ''' - Based on CMAKE_PREFIX_PATH return all catkin workspaces. - - :param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool`` - ''' - # get all cmake prefix paths - env_name = 'CMAKE_PREFIX_PATH' - value = environ[env_name] if env_name in environ else '' - paths = [path for path in value.split(os.pathsep) if path] - # remove non-workspace paths - workspaces = [path for path in paths if os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE)) or (include_fuerte and path.startswith('/opt/ros/fuerte')) or (include_non_existing and not os.path.exists(path))] - return workspaces - - -def prepend_env_variables(environ, env_var_subfolders, workspaces): - ''' - Generate shell code to prepend environment variables - for the all workspaces. - ''' - lines = [] - lines.append(comment('prepend folders of workspaces to environment variables')) - - paths = [path for path in workspaces.split(os.pathsep) if path] - - prefix = _prefix_env_variable(environ, 'CMAKE_PREFIX_PATH', paths, '') - lines.append(prepend(environ, 'CMAKE_PREFIX_PATH', prefix)) - - for key in sorted([key for key in env_var_subfolders.keys() if key != 'CMAKE_PREFIX_PATH']): - subfolder = env_var_subfolders[key] - prefix = _prefix_env_variable(environ, key, paths, subfolder) - lines.append(prepend(environ, key, prefix)) - return lines - - -def _prefix_env_variable(environ, name, paths, subfolders): - ''' - Return the prefix to prepend to the environment variable NAME, adding any path in NEW_PATHS_STR without creating duplicate or empty items. 
- ''' - value = environ[name] if name in environ else '' - environ_paths = [path for path in value.split(os.pathsep) if path] - checked_paths = [] - for path in paths: - if not isinstance(subfolders, list): - subfolders = [subfolders] - for subfolder in subfolders: - path_tmp = path - if subfolder: - path_tmp = os.path.join(path_tmp, subfolder) - # exclude any path already in env and any path we already added - if path_tmp not in environ_paths and path_tmp not in checked_paths: - checked_paths.append(path_tmp) - prefix_str = os.pathsep.join(checked_paths) - if prefix_str != '' and environ_paths: - prefix_str += os.pathsep - return prefix_str - - -def assignment(key, value): - if not IS_WINDOWS: - return 'export %s="%s"' % (key, value) - else: - return 'set %s=%s' % (key, value) - - -def comment(msg): - if not IS_WINDOWS: - return '# %s' % msg - else: - return 'REM %s' % msg - - -def prepend(environ, key, prefix): - if key not in environ or not environ[key]: - return assignment(key, prefix) - if not IS_WINDOWS: - return 'export %s="%s$%s"' % (key, prefix, key) - else: - return 'set %s=%s%%%s%%' % (key, prefix, key) - - -def find_env_hooks(environ, cmake_prefix_path): - ''' - Generate shell code with found environment hooks - for the all workspaces. - ''' - lines = [] - lines.append(comment('found environment hooks in workspaces')) - - generic_env_hooks = [] - generic_env_hooks_workspace = [] - specific_env_hooks = [] - specific_env_hooks_workspace = [] - generic_env_hooks_by_filename = {} - specific_env_hooks_by_filename = {} - generic_env_hook_ext = 'bat' if IS_WINDOWS else 'sh' - specific_env_hook_ext = environ['CATKIN_SHELL'] if not IS_WINDOWS and 'CATKIN_SHELL' in environ and environ['CATKIN_SHELL'] else None - # remove non-workspace paths - workspaces = [path for path in cmake_prefix_path.split(os.pathsep) if path and os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))] - for workspace in reversed(workspaces): - env_hook_dir = os.path.join(workspace, 'etc', 'catkin', 'profile.d') - if os.path.isdir(env_hook_dir): - for filename in sorted(os.listdir(env_hook_dir)): - if filename.endswith('.%s' % generic_env_hook_ext): - # remove previous env hook with same name if present - if filename in generic_env_hooks_by_filename: - i = generic_env_hooks.index(generic_env_hooks_by_filename[filename]) - generic_env_hooks.pop(i) - generic_env_hooks_workspace.pop(i) - # append env hook - generic_env_hooks.append(os.path.join(env_hook_dir, filename)) - generic_env_hooks_workspace.append(workspace) - generic_env_hooks_by_filename[filename] = generic_env_hooks[-1] - elif specific_env_hook_ext is not None and filename.endswith('.%s' % specific_env_hook_ext): - # remove previous env hook with same name if present - if filename in specific_env_hooks_by_filename: - i = specific_env_hooks.index(specific_env_hooks_by_filename[filename]) - specific_env_hooks.pop(i) - specific_env_hooks_workspace.pop(i) - # append env hook - specific_env_hooks.append(os.path.join(env_hook_dir, filename)) - specific_env_hooks_workspace.append(workspace) - specific_env_hooks_by_filename[filename] = specific_env_hooks[-1] - env_hooks = generic_env_hooks + specific_env_hooks - env_hooks_workspace = generic_env_hooks_workspace + specific_env_hooks_workspace - count = len(env_hooks) - lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_COUNT', count)) - for i in range(count): - lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d' % i, env_hooks[i])) - lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d_WORKSPACE' % i, 
env_hooks_workspace[i])) - return lines - - -def _parse_arguments(args=None): - parser = argparse.ArgumentParser(description='Generates code blocks for the setup.SHELL script.') - parser.add_argument('--extend', action='store_true', help='Skip unsetting previous environment variables to extend context') - return parser.parse_known_args(args=args)[0] - - -if __name__ == '__main__': - try: - try: - args = _parse_arguments() - except Exception as e: - print(e, file=sys.stderr) - sys.exit(1) - - # environment at generation time - CMAKE_PREFIX_PATH = '@CMAKE_PREFIX_PATH_AS_IS@'.split(';') - # prepend current workspace if not already part of CPP - base_path = os.path.dirname(__file__) - if base_path not in CMAKE_PREFIX_PATH: - CMAKE_PREFIX_PATH.insert(0, base_path) - CMAKE_PREFIX_PATH = os.pathsep.join(CMAKE_PREFIX_PATH) - - environ = dict(os.environ) - lines = [] - if not args.extend: - lines += rollback_env_variables(environ, ENV_VAR_SUBFOLDERS) - lines += prepend_env_variables(environ, ENV_VAR_SUBFOLDERS, CMAKE_PREFIX_PATH) - lines += find_env_hooks(environ, CMAKE_PREFIX_PATH) - print('\n'.join(lines)) - - # need to explicitly flush the output - sys.stdout.flush() - except IOError as e: - # and catch potantial "broken pipe" if stdout is not writable - # which can happen when piping the output to a file but the disk is full - if e.errno == errno.EPIPE: - print(e, file=sys.stderr) - sys.exit(2) - raise - - sys.exit(0) diff --git a/cmake/catkin/templates/cfg-extras.context.py.in b/cmake/catkin/templates/cfg-extras.context.py.in deleted file mode 100644 index 256bb69bd6..0000000000 --- a/cmake/catkin/templates/cfg-extras.context.py.in +++ /dev/null @@ -1,33 +0,0 @@ -# generated from catkin/cmake/template/cfg-extras.context.py.in -DEVELSPACE = '@DEVELSPACE@' == 'TRUE' -INSTALLSPACE = '@INSTALLSPACE@' == 'TRUE' - -CATKIN_DEVEL_PREFIX = '@CATKIN_DEVEL_PREFIX@' - -CATKIN_GLOBAL_BIN_DESTINATION = '@CATKIN_GLOBAL_BIN_DESTINATION@' -CATKIN_GLOBAL_ETC_DESTINATION = '@CATKIN_GLOBAL_ETC_DESTINATION@' -CATKIN_GLOBAL_INCLUDE_DESTINATION = '@CATKIN_GLOBAL_INCLUDE_DESTINATION@' -CATKIN_GLOBAL_LIB_DESTINATION = '@CATKIN_GLOBAL_LIB_DESTINATION@' -CATKIN_GLOBAL_LIBEXEC_DESTINATION = '@CATKIN_GLOBAL_LIBEXEC_DESTINATION@' -CATKIN_GLOBAL_PYTHON_DESTINATION = '@CATKIN_GLOBAL_PYTHON_DESTINATION@' -CATKIN_GLOBAL_SHARE_DESTINATION = '@CATKIN_GLOBAL_SHARE_DESTINATION@' - -CATKIN_PACKAGE_BIN_DESTINATION = '@CATKIN_PACKAGE_BIN_DESTINATION@' -CATKIN_PACKAGE_ETC_DESTINATION = '@CATKIN_PACKAGE_ETC_DESTINATION@' -CATKIN_PACKAGE_INCLUDE_DESTINATION = '@CATKIN_PACKAGE_INCLUDE_DESTINATION@' -CATKIN_PACKAGE_LIB_DESTINATION = '@CATKIN_PACKAGE_LIB_DESTINATION@' -CATKIN_PACKAGE_LIBEXEC_DESTINATION = '@CATKIN_PACKAGE_LIBEXEC_DESTINATION@' -CATKIN_PACKAGE_PYTHON_DESTINATION = '@CATKIN_PACKAGE_PYTHON_DESTINATION@' -CATKIN_PACKAGE_SHARE_DESTINATION = '@CATKIN_PACKAGE_SHARE_DESTINATION@' - -CMAKE_BINARY_DIR = '@CMAKE_BINARY_DIR@' -CMAKE_CURRENT_BINARY_DIR = '@CMAKE_CURRENT_BINARY_DIR@' -CMAKE_CURRENT_SOURCE_DIR = '@CMAKE_CURRENT_SOURCE_DIR@' -CMAKE_INSTALL_PREFIX = '@CMAKE_INSTALL_PREFIX@' -CMAKE_SOURCE_DIR = '@CMAKE_SOURCE_DIR@' - -PKG_CMAKE_DIR = '@PKG_CMAKE_DIR@' - -PROJECT_NAME = '@PROJECT_NAME@' -PROJECT_BINARY_DIR = '@PROJECT_BINARY_DIR@' -PROJECT_SOURCE_DIR = '@PROJECT_SOURCE_DIR@' diff --git a/cmake/catkin/templates/env-hook.context.py.in b/cmake/catkin/templates/env-hook.context.py.in deleted file mode 100644 index 57cb22de6f..0000000000 --- a/cmake/catkin/templates/env-hook.context.py.in +++ /dev/null @@ -1,33 +0,0 @@ -# 
generated from catkin/cmake/template/env-hook.context.py.in -DEVELSPACE = @DEVELSPACE@ -INSTALLSPACE = @INSTALLSPACE@ - -CATKIN_DEVEL_PREFIX = '@CATKIN_DEVEL_PREFIX@' - -CATKIN_GLOBAL_BIN_DESTINATION = '@CATKIN_GLOBAL_BIN_DESTINATION@' -CATKIN_GLOBAL_ETC_DESTINATION = '@CATKIN_GLOBAL_ETC_DESTINATION@' -CATKIN_GLOBAL_INCLUDE_DESTINATION = '@CATKIN_GLOBAL_INCLUDE_DESTINATION@' -CATKIN_GLOBAL_LIB_DESTINATION = '@CATKIN_GLOBAL_LIB_DESTINATION@' -CATKIN_GLOBAL_LIBEXEC_DESTINATION = '@CATKIN_GLOBAL_LIBEXEC_DESTINATION@' -CATKIN_GLOBAL_PYTHON_DESTINATION = '@CATKIN_GLOBAL_PYTHON_DESTINATION@' -CATKIN_GLOBAL_SHARE_DESTINATION = '@CATKIN_GLOBAL_SHARE_DESTINATION@' - -CATKIN_PACKAGE_BIN_DESTINATION = '@CATKIN_PACKAGE_BIN_DESTINATION@' -CATKIN_PACKAGE_ETC_DESTINATION = '@CATKIN_PACKAGE_ETC_DESTINATION@' -CATKIN_PACKAGE_INCLUDE_DESTINATION = '@CATKIN_PACKAGE_INCLUDE_DESTINATION@' -CATKIN_PACKAGE_LIB_DESTINATION = '@CATKIN_PACKAGE_LIB_DESTINATION@' -CATKIN_PACKAGE_LIBEXEC_DESTINATION = '@CATKIN_PACKAGE_LIBEXEC_DESTINATION@' -CATKIN_PACKAGE_PYTHON_DESTINATION = '@CATKIN_PACKAGE_PYTHON_DESTINATION@' -CATKIN_PACKAGE_SHARE_DESTINATION = '@CATKIN_PACKAGE_SHARE_DESTINATION@' - -CMAKE_BINARY_DIR = '@CMAKE_BINARY_DIR@' -CMAKE_CURRENT_BINARY_DIR = '@CMAKE_CURRENT_BINARY_DIR@' -CMAKE_CURRENT_SOURCE_DIR = '@CMAKE_CURRENT_SOURCE_DIR@' -CMAKE_INSTALL_PREFIX = '@CMAKE_INSTALL_PREFIX@' -CMAKE_SOURCE_DIR = '@CMAKE_SOURCE_DIR@' - -PROJECT_NAME = '@PROJECT_NAME@' -PROJECT_BINARY_DIR = '@PROJECT_BINARY_DIR@' -PROJECT_SOURCE_DIR = '@PROJECT_SOURCE_DIR@' - -PYTHON_EXECUTABLE = '@PYTHON_EXECUTABLE@' diff --git a/cmake/catkin/templates/env.bat.in b/cmake/catkin/templates/env.bat.in deleted file mode 100644 index 55d882ec14..0000000000 --- a/cmake/catkin/templates/env.bat.in +++ /dev/null @@ -1,11 +0,0 @@ -@echo off -REM generated from catkin/cmake/templates/env.bat.in - -if "%1"=="" ( - echo "Usage: env.bat COMMANDS" - echo "Calling env.bat without arguments is not supported anymore. Instead spawn a subshell and source a setup file manually." - exit 1 -) else ( - call "@SETUP_DIR@/@SETUP_FILENAME@.bat" - %* -) diff --git a/cmake/catkin/templates/env.sh.in b/cmake/catkin/templates/env.sh.in deleted file mode 100755 index 4ffb7dd4b3..0000000000 --- a/cmake/catkin/templates/env.sh.in +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env sh -# generated from catkin/cmake/templates/env.sh.in - -if [ $# -eq 0 ] ; then - /bin/echo "Usage: env.sh COMMANDS" - /bin/echo "Calling env.sh without arguments is not supported anymore. Instead spawn a subshell and source a setup file manually." - exit 1 -fi - -# ensure to not use different shell type which was set before -CATKIN_SHELL=sh - -# source @SETUP_FILENAME@.sh from same directory as this file -_CATKIN_SETUP_DIR=$(cd "`dirname "$0"`" > /dev/null && pwd) -. 
"$_CATKIN_SETUP_DIR/@SETUP_FILENAME@.sh" -exec "$@" diff --git a/cmake/catkin/templates/generate_cached_setup.py.in b/cmake/catkin/templates/generate_cached_setup.py.in deleted file mode 100644 index c2848e04c5..0000000000 --- a/cmake/catkin/templates/generate_cached_setup.py.in +++ /dev/null @@ -1,29 +0,0 @@ -from __future__ import print_function -import argparse -import os -import stat -import sys - -# find the import for catkin's python package - either from source space or from an installed underlay -if os.path.exists(os.path.join('@catkin_EXTRAS_DIR@', 'catkinConfig.cmake.in')): - sys.path.insert(0, os.path.join('@catkin_EXTRAS_DIR@', '..', 'python')) -try: - from catkin.environment_cache import generate_environment_script -except ImportError: - # search for catkin package in all workspaces and prepend to path - for workspace in "@CATKIN_WORKSPACES@".split(';'): - python_path = os.path.join(workspace, '@CATKIN_GLOBAL_PYTHON_DESTINATION@') - if os.path.isdir(os.path.join(python_path, 'catkin')): - sys.path.insert(0, python_path) - break - from catkin.environment_cache import generate_environment_script - -code = generate_environment_script('@CATKIN_DEVEL_PREFIX@/env.@script_ext@') - -output_filename = '@SETUP_DIR@/@SETUP_FILENAME@.@script_ext@' -with open(output_filename, 'w') as f: - #print('Generate script for cached setup "%s"' % output_filename) - f.write('\n'.join(code)) - -mode = os.stat(output_filename).st_mode -os.chmod(output_filename, mode | stat.S_IXUSR) diff --git a/cmake/catkin/templates/order_packages.context.py.in b/cmake/catkin/templates/order_packages.context.py.in deleted file mode 100644 index 3609c0b300..0000000000 --- a/cmake/catkin/templates/order_packages.context.py.in +++ /dev/null @@ -1,5 +0,0 @@ -# generated from catkin/cmake/template/order_packages.context.py.in -source_root_dir = "@CMAKE_CURRENT_SOURCE_DIR@" -whitelisted_packages = "@CATKIN_WHITELIST_PACKAGES@".split(';') if "@CATKIN_WHITELIST_PACKAGES@" != "" else [] -blacklisted_packages = "@CATKIN_BLACKLIST_PACKAGES@".split(';') if "@CATKIN_BLACKLIST_PACKAGES@" != "" else [] -underlay_workspaces = "@CATKIN_WORKSPACES@".split(';') if "@CATKIN_WORKSPACES@" != "" else [] diff --git a/cmake/catkin/templates/pkg.context.pc.in b/cmake/catkin/templates/pkg.context.pc.in deleted file mode 100644 index 8e9cd6bc12..0000000000 --- a/cmake/catkin/templates/pkg.context.pc.in +++ /dev/null @@ -1,8 +0,0 @@ -# generated from catkin/cmake/template/pkg.context.pc.in -CATKIN_PACKAGE_PREFIX = "@CATKIN_PACKAGE_PREFIX@" -PROJECT_PKG_CONFIG_INCLUDE_DIRS = "@PROJECT_PKG_CONFIG_INCLUDE_DIRS@".split(';') if "@PROJECT_PKG_CONFIG_INCLUDE_DIRS@" != "" else [] -PROJECT_CATKIN_DEPENDS = "@_PROJECT_CATKIN_DEPENDS@".replace(';', ' ') -PKG_CONFIG_LIBRARIES_WITH_PREFIX = "@PKG_CONFIG_LIBRARIES_WITH_PREFIX@".split(';') if "@PKG_CONFIG_LIBRARIES_WITH_PREFIX@" != "" else [] -PROJECT_NAME = "@PROJECT_NAME@" -PROJECT_SPACE_DIR = "@PROJECT_SPACE_DIR@" -PROJECT_VERSION = "@PROJECT_VERSION@" diff --git a/cmake/catkin/templates/pkgConfig-version.cmake.in b/cmake/catkin/templates/pkgConfig-version.cmake.in deleted file mode 100644 index f29cb36d47..0000000000 --- a/cmake/catkin/templates/pkgConfig-version.cmake.in +++ /dev/null @@ -1,14 +0,0 @@ -# generated from catkin/cmake/template/pkgConfig-version.cmake.in -set(PACKAGE_VERSION "@PROJECT_VERSION@") - -set(PACKAGE_VERSION_EXACT False) -set(PACKAGE_VERSION_COMPATIBLE False) - -if("${PACKAGE_FIND_VERSION}" VERSION_EQUAL "${PACKAGE_VERSION}") - set(PACKAGE_VERSION_EXACT True) - 
set(PACKAGE_VERSION_COMPATIBLE True) -endif() - -if("${PACKAGE_FIND_VERSION}" VERSION_LESS "${PACKAGE_VERSION}") - set(PACKAGE_VERSION_COMPATIBLE True) -endif() diff --git a/cmake/catkin/templates/pkgConfig.cmake.in b/cmake/catkin/templates/pkgConfig.cmake.in deleted file mode 100644 index 91f6d65354..0000000000 --- a/cmake/catkin/templates/pkgConfig.cmake.in +++ /dev/null @@ -1,191 +0,0 @@ -# generated from catkin/cmake/template/pkgConfig.cmake.in - -# append elements to a list and remove existing duplicates from the list -# copied from catkin/cmake/list_append_deduplicate.cmake to keep pkgConfig -# self contained -macro(_list_append_deduplicate listname) - if(NOT "${ARGN}" STREQUAL "") - if(${listname}) - list(REMOVE_ITEM ${listname} ${ARGN}) - endif() - list(APPEND ${listname} ${ARGN}) - endif() -endmacro() - -# append elements to a list if they are not already in the list -# copied from catkin/cmake/list_append_unique.cmake to keep pkgConfig -# self contained -macro(_list_append_unique listname) - foreach(_item ${ARGN}) - list(FIND ${listname} ${_item} _index) - if(_index EQUAL -1) - list(APPEND ${listname} ${_item}) - endif() - endforeach() -endmacro() - -# pack a list of libraries with optional build configuration keywords -# copied from catkin/cmake/catkin_libraries.cmake to keep pkgConfig -# self contained -macro(_pack_libraries_with_build_configuration VAR) - set(${VAR} "") - set(_argn ${ARGN}) - list(LENGTH _argn _count) - set(_index 0) - while(${_index} LESS ${_count}) - list(GET _argn ${_index} lib) - if("${lib}" MATCHES "^debug|optimized|general$") - math(EXPR _index "${_index} + 1") - if(${_index} EQUAL ${_count}) - message(FATAL_ERROR "_pack_libraries_with_build_configuration() the list of libraries '${ARGN}' ends with '${lib}' which is a build configuration keyword and must be followed by a library") - endif() - list(GET _argn ${_index} library) - list(APPEND ${VAR} "${lib}${CATKIN_BUILD_CONFIGURATION_KEYWORD_SEPARATOR}${library}") - else() - list(APPEND ${VAR} "${lib}") - endif() - math(EXPR _index "${_index} + 1") - endwhile() -endmacro() - -# unpack a list of libraries with optional build configuration keyword prefixes -# copied from catkin/cmake/catkin_libraries.cmake to keep pkgConfig -# self contained -macro(_unpack_libraries_with_build_configuration VAR) - set(${VAR} "") - foreach(lib ${ARGN}) - string(REGEX REPLACE "^(debug|optimized|general)${CATKIN_BUILD_CONFIGURATION_KEYWORD_SEPARATOR}(.+)$" "\\1;\\2" lib "${lib}") - list(APPEND ${VAR} "${lib}") - endforeach() -endmacro() - - -if(@PROJECT_NAME@_CONFIG_INCLUDED) - return() -endif() -set(@PROJECT_NAME@_CONFIG_INCLUDED TRUE) - -# set variables for source/devel/install prefixes -if("@DEVELSPACE@" STREQUAL "TRUE") - set(@PROJECT_NAME@_SOURCE_PREFIX @CMAKE_CURRENT_SOURCE_DIR@) - set(@PROJECT_NAME@_DEVEL_PREFIX @CATKIN_DEVEL_PREFIX@) - set(@PROJECT_NAME@_INSTALL_PREFIX "") - set(@PROJECT_NAME@_PREFIX ${@PROJECT_NAME@_DEVEL_PREFIX}) -else() - set(@PROJECT_NAME@_SOURCE_PREFIX "") - set(@PROJECT_NAME@_DEVEL_PREFIX "") - set(@PROJECT_NAME@_INSTALL_PREFIX @CMAKE_INSTALL_PREFIX@) - set(@PROJECT_NAME@_PREFIX ${@PROJECT_NAME@_INSTALL_PREFIX}) -endif() - -# warn when using a deprecated package -if(NOT "@PROJECT_DEPRECATED@" STREQUAL "") - set(_msg "WARNING: package '@PROJECT_NAME@' is deprecated") - # append custom deprecation text if available - if(NOT "@PROJECT_DEPRECATED@" STREQUAL "TRUE") - set(_msg "${_msg} (@PROJECT_DEPRECATED@)") - endif() - message("${_msg}") -endif() - -# flag project as catkin-based to distinguish if a 
find_package()-ed project is a catkin project -set(@PROJECT_NAME@_FOUND_CATKIN_PROJECT TRUE) - -if(NOT "@PROJECT_CMAKE_CONFIG_INCLUDE_DIRS@" STREQUAL "") - set(@PROJECT_NAME@_INCLUDE_DIRS "") - set(_include_dirs "@PROJECT_CMAKE_CONFIG_INCLUDE_DIRS@") - foreach(idir ${_include_dirs}) - if(IS_ABSOLUTE ${idir} AND IS_DIRECTORY ${idir}) - set(include ${idir}) - elseif("${idir}" STREQUAL "@CATKIN_GLOBAL_INCLUDE_DESTINATION@") - get_filename_component(include "${@PROJECT_NAME@_DIR}/../../../@CATKIN_GLOBAL_INCLUDE_DESTINATION@" ABSOLUTE) - if(NOT IS_DIRECTORY ${include}) - message(FATAL_ERROR "Project '@PROJECT_NAME@' specifies '${idir}' as an include dir, which is not found. It does not exist in '${include}'. Ask the maintainer '@PROJECT_MAINTAINER@' to fix it.") - endif() - else() - message(FATAL_ERROR "Project '@PROJECT_NAME@' specifies '${idir}' as an include dir, which is not found. It does neither exist as an absolute directory nor in '@PKG_INCLUDE_PREFIX@/${idir}'. Ask the maintainer '@PROJECT_MAINTAINER@' to fix it.") - endif() - _list_append_unique(@PROJECT_NAME@_INCLUDE_DIRS ${include}) - endforeach() -endif() - -set(libraries "@PKG_CONFIG_LIBRARIES@") -foreach(library ${libraries}) - # keep build configuration keywords, target names and absolute libraries as-is - if("${library}" MATCHES "^debug|optimized|general$") - list(APPEND @PROJECT_NAME@_LIBRARIES ${library}) - elseif(TARGET ${library}) - list(APPEND @PROJECT_NAME@_LIBRARIES ${library}) - elseif(IS_ABSOLUTE ${library}) - list(APPEND @PROJECT_NAME@_LIBRARIES ${library}) - else() - set(lib_path "") - set(lib "${library}-NOTFOUND") - # since the path where the library is found is returned we have to iterate over the paths manually - foreach(path @PKG_CONFIG_LIB_PATHS@) - find_library(lib ${library} - PATHS ${path} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - if(lib) - set(lib_path ${path}) - break() - endif() - endforeach() - if(lib) - _list_append_unique(@PROJECT_NAME@_LIBRARY_DIRS ${lib_path}) - list(APPEND @PROJECT_NAME@_LIBRARIES ${lib}) - else() - # as a fall back for non-catkin libraries try to search globally - find_library(lib ${library}) - if(NOT lib) - message(FATAL_ERROR "Project '${PROJECT_NAME}' tried to find library '${library}'. The library is neither a target nor built/installed properly. Did you compile project '@PROJECT_NAME@'? 
Did you find_package() it before the subdirectory containing its code is included?") - endif() - list(APPEND @PROJECT_NAME@_LIBRARIES ${lib}) - endif() - endif() -endforeach() - -set(@PROJECT_NAME@_EXPORTED_TARGETS "@PKG_EXPORTED_TARGETS@") -# create dummy targets for exported code generation targets to make life of users easier -foreach(t ${@PROJECT_NAME@_EXPORTED_TARGETS}) - if(NOT TARGET ${t}) - add_custom_target(${t}) - endif() -endforeach() - -set(depends "@PROJECT_DEPENDENCIES@") -foreach(depend ${depends}) - string(REPLACE " " ";" depend_list ${depend}) - # the package name of the dependency must be kept in a unique variable so that it is not overwritten in recursive calls - list(GET depend_list 0 @PROJECT_NAME@_dep) - list(LENGTH depend_list count) - if(${count} EQUAL 1) - # simple dependencies must only be find_package()-ed once - if(NOT ${@PROJECT_NAME@_dep}_FOUND) - find_package(${@PROJECT_NAME@_dep} REQUIRED) - endif() - else() - # dependencies with components must be find_package()-ed again - list(REMOVE_AT depend_list 0) - find_package(${@PROJECT_NAME@_dep} REQUIRED ${depend_list}) - endif() - _list_append_unique(@PROJECT_NAME@_INCLUDE_DIRS ${${@PROJECT_NAME@_dep}_INCLUDE_DIRS}) - - # merge build configuration keywords with library names to correctly deduplicate - _pack_libraries_with_build_configuration(@PROJECT_NAME@_LIBRARIES ${@PROJECT_NAME@_LIBRARIES}) - _pack_libraries_with_build_configuration(_libraries ${${@PROJECT_NAME@_dep}_LIBRARIES}) - _list_append_deduplicate(@PROJECT_NAME@_LIBRARIES ${_libraries}) - # undo build configuration keyword merging after deduplication - _unpack_libraries_with_build_configuration(@PROJECT_NAME@_LIBRARIES ${@PROJECT_NAME@_LIBRARIES}) - - _list_append_unique(@PROJECT_NAME@_LIBRARY_DIRS ${${@PROJECT_NAME@_dep}_LIBRARY_DIRS}) - list(APPEND @PROJECT_NAME@_EXPORTED_TARGETS ${${@PROJECT_NAME@_dep}_EXPORTED_TARGETS}) -endforeach() - -set(pkg_cfg_extras "@PKG_CFG_EXTRAS@") -foreach(extra ${pkg_cfg_extras}) - if(NOT IS_ABSOLUTE ${extra}) - set(extra ${@PROJECT_NAME@_DIR}/${extra}) - endif() - include(${extra}) -endforeach() diff --git a/cmake/catkin/templates/python_distutils_install.bat.in b/cmake/catkin/templates/python_distutils_install.bat.in deleted file mode 100644 index fd9906a687..0000000000 --- a/cmake/catkin/templates/python_distutils_install.bat.in +++ /dev/null @@ -1,17 +0,0 @@ -@echo off - -if DEFINED DESTDIR ( - echo "Destdir.............%DESTDIR%" - set DESTDIR_ARG="--root=%DESTDIR%" -) - -cd "@INSTALL_CMD_WORKING_DIRECTORY@" - -cmd /V:on /C set PYTHONPATH="@CMAKE_INSTALL_PREFIX@/@PYTHON_INSTALL_DIR@;@CMAKE_BINARY_DIR@/@PYTHON_INSTALL_DIR@" ^ - && set CATKIN_BINARY_DIR="@CMAKE_BINARY_DIR@" ^ - && "@PYTHON_EXECUTABLE@" ^ - "@CMAKE_CURRENT_SOURCE_DIR@/setup.py" ^ - build --build-base "@CMAKE_CURRENT_BINARY_DIR@" ^ - install %DESTDIR_ARG% @SETUPTOOLS_ARG_EXTRA@ ^ - --prefix="@SETUPTOOLS_INSTALL_PREFIX@" ^ - --install-scripts="@SETUPTOOLS_INSTALL_PREFIX@\@CATKIN_GLOBAL_BIN_DESTINATION@" diff --git a/cmake/catkin/templates/python_distutils_install.sh.in b/cmake/catkin/templates/python_distutils_install.sh.in deleted file mode 100755 index e7b56c72d0..0000000000 --- a/cmake/catkin/templates/python_distutils_install.sh.in +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/sh -x - -if [ -n "$DESTDIR" ] ; then - case $DESTDIR in - /*) # ok - ;; - *) - /bin/echo "DESTDIR argument must be absolute... " - /bin/echo "otherwise python's distutils will bork things." 
- exit 1 - esac - DESTDIR_ARG="--root=$DESTDIR" -fi - -cd "@INSTALL_CMD_WORKING_DIRECTORY@" - -# Note that PYTHONPATH is pulled from the environment to support installing -# into one location when some dependencies were installed in another -# location, #123. -/usr/bin/env \ - PYTHONPATH="@CMAKE_INSTALL_PREFIX@/@PYTHON_INSTALL_DIR@:@CMAKE_BINARY_DIR@/@PYTHON_INSTALL_DIR@:$PYTHONPATH" \ - CATKIN_BINARY_DIR="@CMAKE_BINARY_DIR@" \ - "@PYTHON_EXECUTABLE@" \ - "@CMAKE_CURRENT_SOURCE_DIR@/setup.py" \ - build --build-base "@CMAKE_CURRENT_BINARY_DIR@" \ - install \ - $DESTDIR_ARG \ - @SETUPTOOLS_ARG_EXTRA@ --prefix="@CMAKE_INSTALL_PREFIX@" --install-scripts="@CMAKE_INSTALL_PREFIX@/@CATKIN_GLOBAL_BIN_DESTINATION@" diff --git a/cmake/catkin/templates/relay.py.in b/cmake/catkin/templates/relay.py.in deleted file mode 100644 index 5221f48dc7..0000000000 --- a/cmake/catkin/templates/relay.py.in +++ /dev/null @@ -1,4 +0,0 @@ -# creates a relay to a python script source file, acting as that file. -# The purpose is that of a symlink -with open("@PYTHON_SCRIPT@", 'r') as fh: - exec(fh.read()) diff --git a/cmake/catkin/templates/rosinstall.in b/cmake/catkin/templates/rosinstall.in deleted file mode 100644 index 17f78a2981..0000000000 --- a/cmake/catkin/templates/rosinstall.in +++ /dev/null @@ -1,2 +0,0 @@ -- setup-file: - local-name: @SETUP_DIR@/setup.sh diff --git a/cmake/catkin/templates/safe_execute_install.cmake.in b/cmake/catkin/templates/safe_execute_install.cmake.in deleted file mode 100644 index 643c6eb4c9..0000000000 --- a/cmake/catkin/templates/safe_execute_install.cmake.in +++ /dev/null @@ -1,5 +0,0 @@ -execute_process(COMMAND "@INSTALL_SCRIPT@" RESULT_VARIABLE res) - -if(NOT res EQUAL 0) - message(FATAL_ERROR "execute_process(@INSTALL_SCRIPT@) returned error code ${res}") -endif() diff --git a/cmake/catkin/templates/script.bash.in b/cmake/catkin/templates/script.bash.in deleted file mode 100755 index ab172b97b8..0000000000 --- a/cmake/catkin/templates/script.bash.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -# generated from catkin/cmake/templates/script.bash.in - -"@BASH_SCRIPT@" "$@" diff --git a/cmake/catkin/templates/script.in b/cmake/catkin/templates/script.in deleted file mode 100755 index 6c1be8d232..0000000000 --- a/cmake/catkin/templates/script.in +++ /dev/null @@ -1 +0,0 @@ -@SCRIPT@ diff --git a/cmake/catkin/templates/script.py.in b/cmake/catkin/templates/script.py.in deleted file mode 100755 index dc3a277371..0000000000 --- a/cmake/catkin/templates/script.py.in +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -# creates a relay to a python script source file, acting as that file. -# The purpose is that of a symlink -with open("@PYTHON_SCRIPT@", 'r') as fh: - exec(fh.read()) diff --git a/cmake/catkin/templates/script.sh.in b/cmake/catkin/templates/script.sh.in deleted file mode 100755 index 54e4e7a0a6..0000000000 --- a/cmake/catkin/templates/script.sh.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env sh -# generated from catkin/cmake/templates/script.sh.in - -"@EXECUTABLE@" "$@" diff --git a/cmake/catkin/templates/setup.bash.in b/cmake/catkin/templates/setup.bash.in deleted file mode 100644 index ff47af8f30..0000000000 --- a/cmake/catkin/templates/setup.bash.in +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -# generated from catkin/cmake/templates/setup.bash.in - -CATKIN_SHELL=bash - -# source setup.sh from same directory as this file -_CATKIN_SETUP_DIR=$(builtin cd "`dirname "${BASH_SOURCE[0]}"`" > /dev/null && pwd) -. 
"$_CATKIN_SETUP_DIR/setup.sh" diff --git a/cmake/catkin/templates/setup.bat.in b/cmake/catkin/templates/setup.bat.in deleted file mode 100644 index 1549181597..0000000000 --- a/cmake/catkin/templates/setup.bat.in +++ /dev/null @@ -1,59 +0,0 @@ -@echo off -REM generated from catkin/cmake/template/setup.bat.in - -REM Sets various environment variables and sources additional environment hooks. -REM It tries it's best to undo changes from a previously sourced setup file before. -REM Supported command line options: -REM --extend: skips the undoing of changes from a previously sourced setup file - -set _SETUP_UTIL="@SETUP_DIR@/_setup_util.py" - -if NOT EXIST "%_SETUP_UTIL%" ( - echo "Missing Python script: %_SETUP_UTIL%" - exit 22 -) - -REM set the Python executable -set _PYTHON="@PYTHON_EXECUTABLE@" - -REM generate pseudo random temporary filename -:GenerateTempFilename -REM replace leading space of time with zero -set _SETUP_TMP=%Time: =0% -REM remove time delimiters -set _SETUP_TMP=%_SETUP_TMP::=% -set _SETUP_TMP=%_SETUP_TMP:.=% -set _SETUP_TMP=%_SETUP_TMP:,=% -set _SETUP_TMP=%Temp%\setup.%_SETUP_TMP%.bat -if EXIST %_SETUP_TMP% do goto GenerateTempFilename -type NUL > "%_SETUP_TMP%" -if NOT EXIST %_SETUP_TMP% ( - echo "Could not create temporary file: %_SETUP_TMP%" - exit 1 -) - -REM invoke Python script to generate necessary exports of environment variables -%_PYTHON% %_SETUP_UTIL% %* > %_SETUP_TMP% -if NOT EXIST %_SETUP_TMP% ( - echo "Could not create temporary file: %_SETUP_TMP%" - return 1 -) -call %_SETUP_TMP% -del %_SETUP_TMP% - -REM source all environment hooks -for /F "tokens=* delims=;" %%a in ("%_CATKIN_ENVIRONMENT_HOOKS%") do ( - for %%b in (%%a) do ( - call "%%b" - ) -) - -REM 3rdparty packages often put dll's into lib (convention is bin) and -REM windows finds it's dll's via the PATH variable. Make that happen here! -set PATH=%LD_LIBRARY_PATH%;%PATH% - -REM unset temporary variables -set _SETUP_UTIL= -set _PYTHON= -set _SETUP_TMP= -set _CATKIN_ENVIRONMENT_HOOKS= diff --git a/cmake/catkin/templates/setup.sh.in b/cmake/catkin/templates/setup.sh.in deleted file mode 100644 index a47611bdd0..0000000000 --- a/cmake/catkin/templates/setup.sh.in +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env sh -# generated from catkin/cmake/template/setup.sh.in - -# Sets various environment variables and sources additional environment hooks. -# It tries it's best to undo changes from a previously sourced setup file before. -# Supported command line options: -# --extend: skips the undoing of changes from a previously sourced setup file - -# since this file is sourced either use the provided _CATKIN_SETUP_DIR -# or fall back to the destination set at configure time -: ${_CATKIN_SETUP_DIR:=@SETUP_DIR@} -_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py" -unset _CATKIN_SETUP_DIR - -if [ ! -f "$_SETUP_UTIL" ]; then - echo "Missing Python script: $_SETUP_UTIL" - return 22 -fi - -# detect if running on Darwin platform -_UNAME=`uname -s` -_IS_DARWIN=0 -if [ "$_UNAME" = "Darwin" ]; then - _IS_DARWIN=1 -fi -unset _UNAME - -# make sure to export all environment variables -export CMAKE_PREFIX_PATH -export CPATH -if [ $_IS_DARWIN -eq 0 ]; then - export LD_LIBRARY_PATH -else - export DYLD_LIBRARY_PATH -fi -unset _IS_DARWIN -export PATH -export PKG_CONFIG_PATH -export PYTHONPATH - -# remember type of shell if not already set -if [ -z "$CATKIN_SHELL" ]; then - CATKIN_SHELL=sh -fi - -# invoke Python script to generate necessary exports of environment variables -_SETUP_TMP=`mktemp /tmp/setup.sh.XXXXXXXXXX` -if [ $? 
-ne 0 -o ! -f "$_SETUP_TMP" ]; then - echo "Could not create temporary file: $_SETUP_TMP" - return 1 -fi -CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ > $_SETUP_TMP -_RC=$? -if [ $_RC -ne 0 ]; then - if [ $_RC -eq 2 ]; then - echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?" - else - echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC" - fi - unset _RC - unset _SETUP_UTIL - rm -f $_SETUP_TMP - unset _SETUP_TMP - return 1 -fi -unset _RC -unset _SETUP_UTIL -. $_SETUP_TMP -rm -f $_SETUP_TMP -unset _SETUP_TMP - -# source all environment hooks -_i=0 -while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do - eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i - unset _CATKIN_ENVIRONMENT_HOOKS_$_i - eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE - unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE - # set workspace for environment hook - CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace - . "$_envfile" - unset CATKIN_ENV_HOOK_WORKSPACE - _i=$((_i + 1)) -done -unset _i - -unset _CATKIN_ENVIRONMENT_HOOKS_COUNT diff --git a/cmake/catkin/templates/setup.zsh.in b/cmake/catkin/templates/setup.zsh.in deleted file mode 100644 index 9f780b7410..0000000000 --- a/cmake/catkin/templates/setup.zsh.in +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env zsh -# generated from catkin/cmake/templates/setup.zsh.in - -CATKIN_SHELL=zsh - -# source setup.sh from same directory as this file -_CATKIN_SETUP_DIR=$(builtin cd -q "`dirname "$0"`" > /dev/null && pwd) -emulate -R zsh -c 'source "$_CATKIN_SETUP_DIR/setup.sh"' diff --git a/cmake/catkin/test/catkin_download_test_data.cmake b/cmake/catkin/test/catkin_download_test_data.cmake deleted file mode 100644 index 9d39052607..0000000000 --- a/cmake/catkin/test/catkin_download_test_data.cmake +++ /dev/null @@ -1,52 +0,0 @@ -_generate_function_if_testing_is_disabled("catkin_download_test_data") - -# -# Download a file containing test data from a URL. -# -# It is commonly used to download larger data files for unit tests -# which should not be stored in the repository. -# -# .. note:: The target will be registered as a dependency -# of the "tests" target. -# -# .. note:: If the tests should be run on the ROS buildfarm the URL -# must be publically and reliably accessible. 
-# -# :param target: the target name -# :type target: string -# :param url: the url to download -# :type url: string - -# :param DESTINATION: the directory where the file is downloaded to -# (default: ${PROJECT_BINARY_DIR}) -# :type DESTINATION: string -# :param FILENAME: the filename of the downloaded file -# (default: the basename of the url) -# :type FILENAME: string -# :param MD5: the expected md5 hash to compare against -# (default: empty, skipping the check) -# :type MD5: string -# -# @public -function(catkin_download_test_data target url) - _warn_if_skip_testing("catkin_download_test_data") - - cmake_parse_arguments(ARG "" "DESTINATION;FILENAME;MD5" "" ${ARGN}) - if(ARG_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "catkin_download_test_data() called with unused arguments: ${ARG_UNPARSED_ARGUMENTS}") - endif() - if(NOT ARG_DESTINATION) - set(ARG_DESTINATION ${PROJECT_BINARY_DIR}) - endif() - if(NOT ARG_FILENAME) - get_filename_component(ARG_FILENAME ${url} NAME) - endif() - set(output "${ARG_DESTINATION}/${ARG_FILENAME}") - add_custom_command(OUTPUT ${output} - COMMAND ${PYTHON_EXECUTABLE} ${catkin_EXTRAS_DIR}/test/download_checkmd5.py ${url} ${output} ${ARG_MD5} - VERBATIM) - add_custom_target(${target} DEPENDS ${output}) - if(TARGET tests) - add_dependencies(tests ${target}) - endif() -endfunction() diff --git a/cmake/catkin/test/download_checkmd5.py b/cmake/catkin/test/download_checkmd5.py deleted file mode 100755 index 7394315b80..0000000000 --- a/cmake/catkin/test/download_checkmd5.py +++ /dev/null @@ -1,175 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function -import errno -import os -import sys -try: - from urllib.request import addinfourl, BaseHandler, build_opener, Request, URLError -except ImportError: - from urllib2 import addinfourl, BaseHandler, build_opener, Request, URLError -import hashlib -from argparse import ArgumentParser - -NAME = "download_checkmd5.py" - - -class HTTPRangeHandler(BaseHandler): - - def http_error_206(self, req, fp, code, msg, hdrs): - r = addinfourl(fp, hdrs, req.get_full_url()) - r.code = code - r.msg = msg - return r - - def http_error_416(self, req, fp, code, msg, hdrs): - raise URLError('Requested Range Not Satisfiable') - - -def download_with_resume(uri, dest): - handler = HTTPRangeHandler() - opener = build_opener(handler) - - offset = 0 - content_length = None - accept_ranges = False - while True: - req = Request(uri) - if offset: - req.add_header('Range', 'bytes=%d-' % offset) - src_file = None - try: - src_file = opener.open(req) - headers = src_file.info() - if not offset: - # on first connection check server capabilities - if 'Content-Length' in headers: - content_length = int(headers['Content-Length']) - if 'Accept-Ranges' in headers: - accept_ranges = headers['Accept-Ranges'] != 'none' - else: - # on resume verify that server understood range header and responded accordingly - if 'Content-Range' not in headers: - raise IOError('Download aborted and server does not support resuming download') - if int(headers['Content-Range'][len('bytes '):].split('-')[0]) != offset: - raise IOError('Download aborted because server replied with different content range then requested') - sys.stdout.write(' resume from %d...' 
% offset) - sys.stdout.flush() - with open(dest, 'ab' if offset else 'wb') as dst_file: - progress = False - while True: - data = src_file.read(8192) - if not data: - break - progress = True - dst_file.write(data) - offset += len(data) - if not progress: - # if no bytes have been received abort download - raise IOError("No progress when trying to download '%s'" % uri) - except: - if src_file: - src_file.close() - raise - - # when content length is unknown it is assumed that the download is complete - if content_length is None: - break - # or when enough data has been downloaded (> is especially a valid case) - if offset >= content_length: - break - if not accept_ranges: - raise IOError('Server does not accept ranges to resume download') - - -def download_md5(uri, dest): - """ - downloads file from uri to file dest - """ - # Create intermediate directories as necessary, #2970 - dirname = os.path.dirname(dest) - if len(dirname): - try: - os.makedirs(dirname) - except OSError as e: - if e.errno != errno.EEXIST: - raise - - sys.stdout.write('Downloading %s to %s...' % (uri, dest)) - sys.stdout.flush() - try: - download_with_resume(uri, dest) - sys.stdout.write(' done.\n') - except Exception as e: - # delete partially downloaded data - if os.path.exists(dest): - os.unlink(dest) - sys.stdout.write(' failed (%s)!\n' % e) - raise - - -def checkmd5(dest, md5sum=None): - """ - checks file at dest against md5. - :returns (boolean, hexdigest): True if dest contents matches md5sum - """ - if not os.path.exists(dest): - return False, 'null' - with open(dest, 'rb') as f: - md5value = hashlib.md5() - while True: - buf = f.read(4096) - if not buf: - break - md5value.update(buf) - hexdigest = md5value.hexdigest() - - print('Checking md5sum on %s' % (dest)) - return hexdigest == md5sum, hexdigest - - -def main(argv=sys.argv[1:]): - """ - Dowloads URI to file dest and checks md5 if given. - """ - parser = ArgumentParser(description='Dowloads URI to file dest. If md5sum is given, checks md5sum. If file existed and mismatch, downloads and checks again') - parser.add_argument('uri') - parser.add_argument('dest') - parser.add_argument('md5sum', nargs='?') - parser.add_argument('--ignore-error', action='store_true', help='Ignore download errors') - args = parser.parse_args(argv) - - uri = args.uri - if '://' not in uri: - uri = 'file://' + uri - - fresh = False - if not os.path.exists(args.dest): - try: - download_md5(uri, args.dest) - except Exception: - if args.ignore_error: - return 0 - raise - fresh = True - - if args.md5sum: - result, hexdigest = checkmd5(args.dest, args.md5sum) - if result is False and fresh is False: - print('WARNING: md5sum mismatch (%s != %s); re-downloading file %s' % (hexdigest, args.md5sum, args.dest)) - os.remove(args.dest) - try: - download_md5(uri, args.dest) - except Exception: - if args.ignore_error: - return 0 - raise - result, hexdigest = checkmd5(args.dest, args.md5sum) - if result is False: - return 'ERROR: md5sum mismatch (%s != %s) on %s; aborting' % (hexdigest, args.md5sum, args.dest) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/cmake/catkin/test/gtest.cmake b/cmake/catkin/test/gtest.cmake deleted file mode 100644 index 916f98f0c8..0000000000 --- a/cmake/catkin/test/gtest.cmake +++ /dev/null @@ -1,139 +0,0 @@ -_generate_function_if_testing_is_disabled("catkin_add_gtest") - -# -# Add a GTest based test target. -# -# An executable target is created with the source files, it is linked -# against GTest and added to the set of unit tests. -# -# .. 
note:: The test can be executed by calling the binary directly -# or using: ``make run_tests_${PROJECT_NAME}_gtest_${target}`` -# -# :param target: the target name -# :type target: string -# :param source_files: a list of source files used to build the test -# executable -# :type source_files: list of strings -# :param TIMEOUT: currently not supported -# :type TIMEOUT: integer -# :param WORKING_DIRECTORY: the working directory when executing the -# executable -# :type WORKING_DIRECTORY: string -# -# @public -# -function(catkin_add_gtest target) - _warn_if_skip_testing("catkin_add_gtest") - - if(NOT GTEST_FOUND AND NOT GTEST_FROM_SOURCE_FOUND) - message(WARNING "skipping gtest '${target}' in project '${PROJECT_NAME}'") - return() - endif() - - if(NOT DEFINED CMAKE_RUNTIME_OUTPUT_DIRECTORY) - message(FATAL_ERROR "catkin_add_gtest() must be called after catkin_package() so that default output directories for the test binaries are defined") - endif() - - # XXX look for optional TIMEOUT argument, #2645 - cmake_parse_arguments(_gtest "" "TIMEOUT;WORKING_DIRECTORY" "" ${ARGN}) - if(_gtest_TIMEOUT) - message(WARNING "TIMEOUT argument to catkin_add_gtest() is ignored") - endif() - - # create the executable, with basic + gtest build flags - include_directories(${GTEST_INCLUDE_DIRS}) - link_directories(${GTEST_LIBRARY_DIRS}) - add_executable(${target} EXCLUDE_FROM_ALL ${_gtest_UNPARSED_ARGUMENTS}) - assert(GTEST_LIBRARIES) - target_link_libraries(${target} ${GTEST_LIBRARIES} ${THREADS_LIBRARY}) - - # make sure gtest is built before the test target - add_dependencies(${target} gtest gtest_main) - # make sure the target is built before running tests - add_dependencies(tests ${target}) - - # XXX we DONT use rosunit to call the executable to get process control, #1629, #3112 - get_target_property(_target_path ${target} RUNTIME_OUTPUT_DIRECTORY) - set(cmd "${_target_path}/${target} --gtest_output=xml:${CATKIN_TEST_RESULTS_DIR}/${PROJECT_NAME}/gtest-${target}.xml") - catkin_run_tests_target("gtest" ${target} "gtest-${target}.xml" COMMAND ${cmd} DEPENDENCIES ${target} WORKING_DIRECTORY ${_gtest_WORKING_DIRECTORY}) -endfunction() - -find_package(GTest QUIET) -if(NOT GTEST_FOUND) - # only add gtest directory once per workspace - if(NOT TARGET gtest) - # fall back to system installed path (i.e. on Ubuntu) - set(_paths "/usr/src/gtest/src") - if(CATKIN_TOPLEVEL) - # search in the current workspace before - list(INSERT _paths 0 "${CMAKE_SOURCE_DIR}/gtest/src") - endif() - find_file(_CATKIN_GTEST_SRC "gtest.cc" - PATHS ${_paths} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - - # fall back to system installed path (i.e. 
on Ubuntu) - set(_paths "/usr/include/gtest") - if(CATKIN_TOPLEVEL) - # search in the current workspace before - list(INSERT _paths 0 "${CMAKE_SOURCE_DIR}/gtest/include/gtest") - endif() - find_file(_CATKIN_GTEST_INCLUDE "gtest.h" - PATHS ${_paths} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - - if(_CATKIN_GTEST_SRC) - get_filename_component(_CATKIN_GTEST_SOURCE_DIR ${_CATKIN_GTEST_SRC} PATH) - get_filename_component(_CATKIN_GTEST_BASE_DIR ${_CATKIN_GTEST_SOURCE_DIR} PATH) - # add CMakeLists.txt from gtest dir - set(_CATKIN_GTEST_BINARY_DIR ${CMAKE_BINARY_DIR}/gtest) - add_subdirectory(${_CATKIN_GTEST_BASE_DIR} ${_CATKIN_GTEST_BINARY_DIR}) - # mark gtest targets with EXCLUDE_FROM_ALL to only build when tests are built which depend on them - set_target_properties(gtest gtest_main PROPERTIES EXCLUDE_FROM_ALL 1) - get_filename_component(_CATKIN_GTEST_INCLUDE_DIR ${_CATKIN_GTEST_INCLUDE} PATH) - get_filename_component(_CATKIN_GTEST_INCLUDE_DIR ${_CATKIN_GTEST_INCLUDE_DIR} PATH) - # set from-source variables - set(GTEST_FROM_SOURCE_FOUND TRUE CACHE INTERNAL "") - set(GTEST_FROM_SOURCE_INCLUDE_DIRS ${_CATKIN_GTEST_INCLUDE_DIR} CACHE INTERNAL "") - set(GTEST_FROM_SOURCE_LIBRARY_DIRS ${_CATKIN_GTEST_BINARY_DIR} CACHE INTERNAL "") - set(GTEST_FROM_SOURCE_LIBRARIES "gtest" CACHE INTERNAL "") - set(GTEST_FROM_SOURCE_MAIN_LIBRARIES "gtest_main" CACHE INTERNAL "") - #message(STATUS "Found gtest sources under '${_CATKIN_GTEST_BASE_DIR}': gtests will be built") - endif() - if(NOT GTEST_FROM_SOURCE_FOUND) - if(CATKIN_TOPLEVEL) - message(STATUS "gtest not found, C++ tests can not be built. Please install the gtest headers globally in your system or checkout gtest (by running 'svn checkout http://googletest.googlecode.com/svn/tags/release-1.6.0 gtest' in the source space '${CMAKE_SOURCE_DIR}' of your workspace) to enable gtests") - else() - message(STATUS "gtest not found, C++ tests can not be built. 
Please install the gtest headers globally in your system to enable gtests") - endif() - endif() - endif() - if(GTEST_FROM_SOURCE_FOUND) - # set the same variables as find_package() - # do NOT set GTEST_FOUND in the cache since when using gtest from source - # we must always add the subdirectory to have the gtest targets defined - set(GTEST_FOUND ${GTEST_FROM_SOURCE_FOUND}) - set(GTEST_INCLUDE_DIRS ${GTEST_FROM_SOURCE_INCLUDE_DIRS}) - set(GTEST_LIBRARY_DIRS ${GTEST_FROM_SOURCE_LIBRARY_DIRS}) - set(GTEST_LIBRARIES ${GTEST_FROM_SOURCE_LIBRARIES}) - set(GTEST_MAIN_LIBRARIES ${GTEST_FROM_SOURCE_MAIN_LIBRARIES}) - set(GTEST_BOTH_LIBRARIES ${GTEST_LIBRARIES} ${GTEST_MAIN_LIBRARIES}) - endif() -else() - #message(STATUS "Found gtest: gtests will be built") - add_library(gtest SHARED IMPORTED) - set_target_properties(gtest PROPERTIES IMPORTED_LOCATION "${GTEST_LIBRARIES}") - add_library(gtest_main SHARED IMPORTED) - set_target_properties(gtest_main PROPERTIES IMPORTED_LOCATION "${GTEST_MAIN_LIBRARIES}") - set(GTEST_FOUND ${GTEST_FOUND} CACHE INTERNAL "") - set(GTEST_INCLUDE_DIRS ${GTEST_INCLUDE_DIRS} CACHE INTERNAL "") - set(GTEST_LIBRARIES ${GTEST_LIBRARIES} CACHE INTERNAL "") - set(GTEST_MAIN_LIBRARIES ${GTEST_MAIN_LIBRARIES} CACHE INTERNAL "") - set(GTEST_BOTH_LIBRARIES ${GTEST_BOTH_LIBRARIES} CACHE INTERNAL "") -endif() -# For Visual C++, need to increase variadic template size to build gtest -if(GTEST_FOUND) - if(WIN32) - add_definitions(/D _VARIADIC_MAX=10) - endif() -endif() diff --git a/cmake/catkin/test/nosetests.cmake b/cmake/catkin/test/nosetests.cmake deleted file mode 100644 index 874e46115f..0000000000 --- a/cmake/catkin/test/nosetests.cmake +++ /dev/null @@ -1,117 +0,0 @@ -_generate_function_if_testing_is_disabled("catkin_add_nosetests") - -# -# Add Python nose tests. -# -# Nose collects tests from the directory ``dir`` automatically. -# -# .. 
note:: The test can be executed by calling ``nosetests`` -# directly or using: -# `` make run_tests_${PROJECT_NAME}_nosetests_${dir}`` -# (where slashes in the ``dir`` are replaced with underscores) -# -# :param path: a relative or absolute directory to search for -# nosetests in or a relative or absolute file containing tests -# :type path: string -# :param DEPENDENCIES: the targets which must be built before executing -# the test -# :type DEPENDENCIES: list of strings -# :param TIMEOUT: the timeout for individual tests in seconds -# (default: 60) -# :type TIMEOUT: integer -# :param WORKING_DIRECTORY: the working directory when executing the -# tests -# :type WORKING_DIRECTORY: string -# -# @public -# -function(catkin_add_nosetests path) - _warn_if_skip_testing("catkin_add_nosetests") - - if(NOT NOSETESTS) - message(STATUS "skipping nosetests(${path}) in project '${PROJECT_NAME}'") - return() - endif() - - cmake_parse_arguments(_nose "" "TIMEOUT;WORKING_DIRECTORY" "DEPENDENCIES" ${ARGN}) - if(NOT _nose_TIMEOUT) - set(_nose_TIMEOUT 60) - endif() - if(NOT _nose_TIMEOUT GREATER 0) - message(FATAL_ERROR "nosetests() TIMEOUT argument must be a valid number of seconds greater than zero") - endif() - - # check that the directory exists - set(_path_name _path_name-NOTFOUND) - if(IS_ABSOLUTE ${path}) - set(_path_name ${path}) - else() - find_file(_path_name ${path} - PATHS ${CMAKE_CURRENT_SOURCE_DIR} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - if(NOT _path_name) - message(FATAL_ERROR "Can't find nosetests path '${path}'") - endif() - endif() - - # check if coverage reports are being requested - if("$ENV{CATKIN_TEST_COVERAGE}" STREQUAL "1") - set(_covarg " --with-coverage") - endif() - - # strip PROJECT_SOURCE_DIR and PROJECT_BINARY_DIR prefix from output_file_name - set(output_file_name ${path}) - _strip_path_prefix(output_file_name "${output_file_name}" "${PROJECT_SOURCE_DIR}") - _strip_path_prefix(output_file_name "${output_file_name}" "${PROJECT_BINARY_DIR}") - if("${output_file_name}" STREQUAL "") - set(output_file_name ".") - endif() - string(REPLACE "/" "." output_file_name ${output_file_name}) - string(REPLACE ":" "." 
output_file_name ${output_file_name}) - - set(output_path ${CATKIN_TEST_RESULTS_DIR}/${PROJECT_NAME}) - # make --xunit-file argument an absolute path (https://github.com/nose-devs/nose/issues/779) - get_filename_component(output_path "${output_path}" ABSOLUTE) - set(cmd "${CMAKE_COMMAND} -E make_directory ${output_path}") - if(IS_DIRECTORY ${_path_name}) - set(tests "--where=${_path_name}") - else() - set(tests "${_path_name}") - endif() - set(cmd ${cmd} "${NOSETESTS} -P --process-timeout=${_nose_TIMEOUT} ${tests} --with-xunit --xunit-file=${output_path}/nosetests-${output_file_name}.xml${_covarg}") - catkin_run_tests_target("nosetests" ${output_file_name} "nosetests-${output_file_name}.xml" COMMAND ${cmd} DEPENDENCIES ${_nose_DEPENDENCIES} WORKING_DIRECTORY ${_nose_WORKING_DIRECTORY}) -endfunction() - -find_program(NOSETESTS NAMES - "nosetests${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}" - "nosetests-${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}" - "nosetests${PYTHON_VERSION_MAJOR}" - "nosetests-${PYTHON_VERSION_MAJOR}" - "nosetests") -if(NOT NOSETESTS) - if("${PYTHON_VERSION_MAJOR}" STREQUAL "3") - message(WARNING "nosetests not found, Python tests can not be run (try installing package 'python3-nose')") - else() - message(WARNING "nosetests not found, Python tests can not be run (try installing package 'python-nose')") - endif() -endif() - -macro(_strip_path_prefix var value prefix) - if("${value}" STREQUAL "${prefix}" OR "${value}" STREQUAL "${prefix}/") - set(${var} "") - else() - set(${var} "${value}") - string(LENGTH "${prefix}/" prefix_length) - string(LENGTH "${value}" var_length) - if(${var_length} GREATER ${prefix_length}) - string(SUBSTRING "${value}" 0 ${prefix_length} var_prefix) - if("${var_prefix}" STREQUAL "${prefix}/") - # passing length -1 does not work for CMake < 2.8.5 - # http://public.kitware.com/Bug/view.php?id=10740 - string(LENGTH "${value}" _rest) - math(EXPR _rest "${_rest} - ${prefix_length}") - string(SUBSTRING "${value}" ${prefix_length} ${_rest} ${var}) - endif() - endif() - endif() -endmacro() diff --git a/cmake/catkin/test/run_tests.py b/cmake/catkin/test/run_tests.py deleted file mode 100755 index 9b93d55e2b..0000000000 --- a/cmake/catkin/test/run_tests.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import argparse -import os -import sys -import subprocess - -from catkin.test_results import ensure_junit_result_exist, remove_junit_result - - -def main(argv=sys.argv[1:]): - parser = argparse.ArgumentParser(description='Runs the test command passed as an argument and verifies that the expected result file has been generated.') - parser.add_argument('results', help='The path to the xunit result file') - parser.add_argument('command', nargs='+', help='The test command to execute') - parser.add_argument('--working-dir', nargs='?', help='The working directory for the executed command') - parser.add_argument('--return-code', action='store_true', default=False, help='Set the return code based on the success of the test command') - args = parser.parse_args(argv) - - remove_junit_result(args.results) - - work_dir_msg = ' with working directory "%s"' % args.working_dir if args.working_dir is not None else '' - cmds_msg = ''.join(['\n %s' % cmd for cmd in args.command]) - print('-- run_tests.py: execute commands%s%s' % (work_dir_msg, cmds_msg)) - - rc = 0 - for cmd in args.command: - rc = subprocess.call(cmd, cwd=args.working_dir, shell=True) - if rc: - break - - print('-- run_tests.py: verify result "%s"' % 
args.results) - no_errors = ensure_junit_result_exist(args.results) - if not no_errors: - rc = 1 - - if args.return_code: - return rc - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/cmake/catkin/test/tests.cmake b/cmake/catkin/test/tests.cmake deleted file mode 100644 index cccaec7641..0000000000 --- a/cmake/catkin/test/tests.cmake +++ /dev/null @@ -1,147 +0,0 @@ -option(CATKIN_ENABLE_TESTING "Catkin enable testing" ON) - -if(NOT DEFINED CATKIN_SKIP_TESTING) - option(CATKIN_SKIP_TESTING "Catkin skip testing" OFF) -endif() - -# check if testing is explicity skipped -if(CATKIN_SKIP_TESTING) - set(CATKIN_ENABLE_TESTING OFF) - #message(STATUS "Using CATKIN_SKIP_TESTING: ${CATKIN_SKIP_TESTING} (implying CATKIN_ENABLE_TESTING=${CATKIN_ENABLE_TESTING})") -else() - #message(STATUS "Using CATKIN_ENABLE_TESTING: ${CATKIN_ENABLE_TESTING}") -endif() - -# creates a dummy function in case testing has been explicitly disabled (and not only skipping) -# which outputs an error message when being invoked -macro(_generate_function_if_testing_is_disabled funcname) - if(DEFINED CATKIN_ENABLE_TESTING AND NOT CATKIN_ENABLE_TESTING AND NOT CATKIN_SKIP_TESTING) - function(${funcname}) - message(FATAL_ERROR - "${funcname}() is not available when tests are not enabled. The CMake code should only use it inside a conditional block which checks that testing is enabled:\n" - "if(CATKIN_ENABLE_TESTING)\n" - " ${funcname}(...)\n" - "endif()\n") - endfunction() - return() - endif() -endmacro() - -# checks if a function has been called while testing is skipped -# and outputs a warning message -macro(_warn_if_skip_testing funcname) - if(DEFINED CATKIN_ENABLE_TESTING AND NOT CATKIN_ENABLE_TESTING) - message(WARNING - "${funcname}() should only be used inside a conditional block which checks that testing is enabled:\n" - "if(CATKIN_ENABLE_TESTING)\n" - " ${funcname}(...)\n" - "endif()\n") - endif() -endmacro() - -if(DEFINED CATKIN_ENABLE_TESTING AND NOT CATKIN_ENABLE_TESTING AND NOT CATKIN_SKIP_TESTING) - return() -endif() - -# do not enable ctest's on the farm, since they are automatically executed by the current rules files -# and since the tests have not been build rostests would hang forever -if(NOT CATKIN_BUILD_BINARY_PACKAGE) - # do not enable ctest's for dry packages, since they have a custom test target which must not be overwritten - if(NOT ROSBUILD_init_called) - #message(STATUS "Call enable_testing()") - enable_testing() - else() - message(STATUS "Skip enable_testing() for dry packages") - endif() -else() - message(STATUS "Skip enable_testing() when building binary package") -endif() - -# allow overriding CATKIN_TEST_RESULTS_DIR when explicitly passed to CMake as a command line argument -if(DEFINED CATKIN_TEST_RESULTS_DIR) - set(CATKIN_TEST_RESULTS_DIR ${CATKIN_TEST_RESULTS_DIR} CACHE INTERNAL "") -else() - set(CATKIN_TEST_RESULTS_DIR ${CMAKE_BINARY_DIR}/test_results CACHE INTERNAL "") -endif() -#message(STATUS "Using CATKIN_TEST_RESULTS_DIR: ${CATKIN_TEST_RESULTS_DIR}") -file(MAKE_DIRECTORY ${CATKIN_TEST_RESULTS_DIR}) - -# create target to build tests -if(NOT TARGET tests) - add_custom_target(tests) -endif() - -# create target to run all tests -# it uses the dot-prefixed test targets to depend on building all tests and cleaning test results before the tests are executed -if(NOT TARGET run_tests) - add_custom_target(run_tests) -endif() - -# create target to clean test results -if(NOT TARGET clean_test_results) - add_custom_target(clean_test_results - COMMAND ${CMAKE_COMMAND} -E 
remove_directory ${CATKIN_TEST_RESULTS_DIR}) -endif() - -# -# Create a test target, integrate it with the run_tests infrastructure -# and post-process the junit result. -# -# All test results go under ${CATKIN_TEST_RESULTS_DIR}/${PROJECT_NAME}/.. -# -# This function is only used internally by the various -# catkin_add_*test() functions. -# -function(catkin_run_tests_target type name xunit_filename) - cmake_parse_arguments(_testing "" "WORKING_DIRECTORY" "COMMAND;DEPENDENCIES" ${ARGN}) - if(_testing_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "catkin_run_tests_target() called with unused arguments: ${_testing_UNPARSED_ARGUMENTS}") - endif() - - # create meta target to trigger all tests of a project - if(NOT TARGET run_tests_${PROJECT_NAME}) - add_custom_target(run_tests_${PROJECT_NAME}) - # create hidden meta target which depends on hidden test targets which depend on clean_test_results - add_custom_target(_run_tests_${PROJECT_NAME}) - # run_tests depends on this hidden target hierarchy to clear test results before running all tests - add_dependencies(run_tests _run_tests_${PROJECT_NAME}) - endif() - # create meta target to trigger all tests of a specific type of a project - if(NOT TARGET run_tests_${PROJECT_NAME}_${type}) - add_custom_target(run_tests_${PROJECT_NAME}_${type}) - add_dependencies(run_tests_${PROJECT_NAME} run_tests_${PROJECT_NAME}_${type}) - # hidden meta target which depends on hidden test targets which depend on clean_test_results - add_custom_target(_run_tests_${PROJECT_NAME}_${type}) - add_dependencies(_run_tests_${PROJECT_NAME} _run_tests_${PROJECT_NAME}_${type}) - endif() - if(NOT DEFINED CATKIN_ENABLE_TESTING OR CATKIN_ENABLE_TESTING) - # create target for test execution - set(results ${CATKIN_TEST_RESULTS_DIR}/${PROJECT_NAME}/${xunit_filename}) - if (_testing_WORKING_DIRECTORY) - set(working_dir_arg "--working-dir" ${_testing_WORKING_DIRECTORY}) - endif() - assert(CATKIN_ENV) - set(cmd_wrapper ${CATKIN_ENV} ${PYTHON_EXECUTABLE} - ${catkin_EXTRAS_DIR}/test/run_tests.py ${results} ${working_dir_arg}) - # for ctest the command needs to return non-zero if any test failed - set(cmd ${cmd_wrapper} "--return-code" ${_testing_COMMAND}) - add_test(NAME _ctest_${PROJECT_NAME}_${type}_${name} COMMAND ${cmd}) - # for the run_tests target the command needs to return zero so that testing is not aborted - set(cmd ${cmd_wrapper} ${_testing_COMMAND}) - add_custom_target(run_tests_${PROJECT_NAME}_${type}_${name} - COMMAND ${cmd}) - else() - # create empty dummy target - set(cmd "${CMAKE_COMMAND}" "-E" "echo" "Skipping test target \\'run_tests_${PROJECT_NAME}_${type}_${name}\\'. 
Enable testing via -DCATKIN_ENABLE_TESTING.") - add_custom_target(run_tests_${PROJECT_NAME}_${type}_${name} ${cmd}) - endif() - add_dependencies(run_tests_${PROJECT_NAME}_${type} run_tests_${PROJECT_NAME}_${type}_${name}) - if(_testing_DEPENDENCIES) - add_dependencies(run_tests_${PROJECT_NAME}_${type}_${name} ${_testing_DEPENDENCIES}) - endif() - # hidden test target which depends on building all tests and cleaning test results - add_custom_target(_run_tests_${PROJECT_NAME}_${type}_${name} - COMMAND ${cmd}) - add_dependencies(_run_tests_${PROJECT_NAME}_${type} _run_tests_${PROJECT_NAME}_${type}_${name}) - add_dependencies(_run_tests_${PROJECT_NAME}_${type}_${name} clean_test_results tests ${_testing_DEPENDENCIES}) -endfunction() diff --git a/cmake/catkin/tools/bz2.cmake b/cmake/catkin/tools/bz2.cmake deleted file mode 100644 index d5f129d2a3..0000000000 --- a/cmake/catkin/tools/bz2.cmake +++ /dev/null @@ -1,31 +0,0 @@ -# -# Copyright (c) 2011, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# - - -find_package(Bzip2) - diff --git a/cmake/catkin/tools/doxygen.cmake b/cmake/catkin/tools/doxygen.cmake deleted file mode 100644 index e8ed5d721f..0000000000 --- a/cmake/catkin/tools/doxygen.cmake +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright (c) 2011, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# - -## -# doxygen( ) -# TARGET_NAME -> The cmake target to create. -# SEARCH_DIRS -> a CMake List of directories to search for doxygenated files. -# -find_program(DOXYGEN_EXECUTABLE doxygen) - -if (DOXYGEN_EXECUTABLE) - set(DOXYGEN_FOUND TRUE CACHE BOOL "Doxygen found") -endif() - -if(NOT TARGET doxygen) - add_custom_target(doxygen) -endif() - -macro(catkin_doxygen TARGET_NAME SEARCH_DIRS) - foreach(dir ${SEARCH_DIRS}) - file(GLOB_RECURSE _doc_sources ${dir}/*) - list(APPEND doc_sources ${_doc_sources}) - endforeach() - - string(REPLACE ";" " " doc_sources "${doc_sources}") - - configure_file(${catkin_EXTRAS_DIR}/templates/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY) - - add_custom_target(${TARGET_NAME} - COMMENT "Generating API documentation with Doxygen" VERBATIM - ) - - add_custom_command(TARGET ${TARGET_NAME} - COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - ) - add_dependencies(doxygen ${TARGET_NAME}) - -endmacro() diff --git a/cmake/catkin/tools/libraries.cmake b/cmake/catkin/tools/libraries.cmake deleted file mode 100644 index 77e38d680e..0000000000 --- a/cmake/catkin/tools/libraries.cmake +++ /dev/null @@ -1,22 +0,0 @@ -# BUILD_SHARED_LIBS is a global cmake variable (usually defaults to on) -# that determines the build type of libraries: -# http://www.cmake.org/cmake/help/cmake-2-8-docs.html#variable:BUILD_SHARED_LIBS -# It defaults to shared. -# -# Our only current major use case for static libraries is -# via the mingw cross compiler, though embedded builds -# could be feasibly built this way also (largely untested). - -# Make sure this is already defined as a cached variable (@sa platform/windows.cmake) -if(NOT DEFINED BUILD_SHARED_LIBS) - option(BUILD_SHARED_LIBS "Build dynamically-linked binaries" ON) -endif() - -function(configure_shared_library_build_settings) - if(BUILD_SHARED_LIBS) - message(STATUS "BUILD_SHARED_LIBS is on") - add_definitions(-DROS_BUILD_SHARED_LIBS=1) - else() - message(STATUS "BUILD_SHARED_LIBS is off") - endif() -endfunction() diff --git a/cmake/catkin/tools/rt.cmake b/cmake/catkin/tools/rt.cmake deleted file mode 100644 index 3e5b708d8f..0000000000 --- a/cmake/catkin/tools/rt.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright (c) 2011, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# - -# message("CMAKE_LIBRARY_PATH: ${CMAKE_LIBRARY_PATH}") -# message("CMAKE_LIBRARY_ARCHITECTURE: ${CMAKE_LIBRARY_ARCHITECTURE}") -# message("CMAKE_SYSTEM_LIBRARY_PATH: ${CMAKE_SYSTEM_LIBRARY_PATH}") -# message("CMAKE_VERSION=${CMAKE_VERSION}") - -if(NOT (APPLE OR WIN32 OR MINGW OR ANDROID)) - if (${CMAKE_VERSION} VERSION_LESS 2.8.4) - # cmake later than 2.8.0 appears to have a better find_library - # that knows about the ABI of the compiler. For lucid we just - # depend on the linker to find it for us. - set(RT_LIBRARY rt CACHE FILEPATH "Hacked find of rt for cmake < 2.8.4") - else() - find_library(RT_LIBRARY rt) - assert_file_exists(${RT_LIBRARY} "RT Library") - endif() - #message(STATUS "RT_LIBRARY: ${RT_LIBRARY}") -endif() diff --git a/cmake/catkin/tools/threads.cmake b/cmake/catkin/tools/threads.cmake deleted file mode 100644 index 8087ad6fa1..0000000000 --- a/cmake/catkin/tools/threads.cmake +++ /dev/null @@ -1,31 +0,0 @@ -# -# Copyright (c) 2011, Willow Garage, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the Willow Garage, Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# - - -find_package(Threads) -set(THREADS_LIBRARY ${CMAKE_THREAD_LIBS_INIT} CACHE FILEPATH "Threads library") diff --git a/cmake/catkin/toplevel.cmake b/cmake/catkin/toplevel.cmake deleted file mode 100644 index 2978ef0654..0000000000 --- a/cmake/catkin/toplevel.cmake +++ /dev/null @@ -1,63 +0,0 @@ -# toplevel CMakeLists.txt for a catkin workspace -# catkin/cmake/toplevel.cmake - -cmake_minimum_required(VERSION 2.8.3) - -set(CATKIN_TOPLEVEL TRUE) - -# search for catkin within the workspace -set(_cmd "catkin_find_pkg" "catkin" "${CMAKE_SOURCE_DIR}") -execute_process(COMMAND ${_cmd} - RESULT_VARIABLE _res - OUTPUT_VARIABLE _out - ERROR_VARIABLE _err - OUTPUT_STRIP_TRAILING_WHITESPACE - ERROR_STRIP_TRAILING_WHITESPACE -) -if(NOT _res EQUAL 0 AND NOT _res EQUAL 2) - # searching fot catkin resulted in an error - string(REPLACE ";" " " _cmd_str "${_cmd}") - message(FATAL_ERROR "Search for 'catkin' in workspace failed (${_cmd_str}): ${_err}") -endif() - -# include catkin from workspace or via find_package() -if(_res EQUAL 0) - set(catkin_EXTRAS_DIR "${CMAKE_SOURCE_DIR}/${_out}/cmake") - # include all.cmake without add_subdirectory to let it operate in same scope - include(${catkin_EXTRAS_DIR}/all.cmake NO_POLICY_SCOPE) - add_subdirectory("${_out}") - -else() - # use either CMAKE_PREFIX_PATH explicitly passed to CMake as a command line argument - # or CMAKE_PREFIX_PATH from the environment - if(NOT DEFINED CMAKE_PREFIX_PATH) - if(NOT "$ENV{CMAKE_PREFIX_PATH}" STREQUAL "") - string(REPLACE ":" ";" CMAKE_PREFIX_PATH $ENV{CMAKE_PREFIX_PATH}) - endif() - endif() - - # list of catkin workspaces - set(catkin_search_path "") - foreach(path ${CMAKE_PREFIX_PATH}) - if(EXISTS "${path}/.catkin") - list(FIND catkin_search_path ${path} _index) - if(_index EQUAL -1) - list(APPEND catkin_search_path ${path}) - endif() - endif() - endforeach() - - # search for catkin in all workspaces - set(CATKIN_TOPLEVEL_FIND_PACKAGE TRUE) - find_package(catkin QUIET - NO_POLICY_SCOPE - PATHS ${catkin_search_path} - NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - unset(CATKIN_TOPLEVEL_FIND_PACKAGE) - - if(NOT catkin_FOUND) - message(FATAL_ERROR "find_package(catkin) failed. catkin was neither found in the workspace nor in the CMAKE_PREFIX_PATH. One reason may be that no ROS setup.sh was sourced before.") - endif() -endif() - -catkin_workspace() diff --git a/cmake/catkin2Config.cmake b/cmake/catkin2Config.cmake index f1212b16f3..90bd6f89ff 100644 --- a/cmake/catkin2Config.cmake +++ b/cmake/catkin2Config.cmake @@ -27,8 +27,9 @@ # :outvar _INCLUDE_DIRS/_LIBRARY_DIRS/_LIBRARY: # contains the include dirs / library dirs / libraries of the searched component . 
+find_package(catkin REQUIRED) # defines catkin_DIR if(CATKIN_TOPLEVEL_FIND_PACKAGE OR NOT CATKIN_TOPLEVEL) - set(catkin_EXTRAS_DIR ${CMAKE_CURRENT_LIST_DIR}/catkin) + set(catkin_EXTRAS_DIR "${catkin_DIR}") # prevent multiple inclusion from repeated find_package() calls in non-workspace context # as long as this variable is in the scope the variables from all.cmake are also, so no need to be evaluated again @@ -92,15 +93,24 @@ if(catkin2_FIND_COMPONENTS) endforeach() # find package component - if(catkin2_FIND_REQUIRED) - find_package(${component} REQUIRED NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH) - elseif(catkin2_FIND_QUIETLY) + if(catkin_FIND_REQUIRED) + # try without REQUIRED first + find_package(${component} NO_MODULE PATHS ${paths} + NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) + if(NOT ${component}_FOUND) + # show better message to help users with the CMake error message coming up + message(STATUS "Could not find the required component '${component}'. " + "The following CMake error indicates that you either need to install the package " + "with the same name or change your environment so that it can be found.") + find_package(${component} REQUIRED NO_MODULE PATHS ${paths} + NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) + endif() + elseif(catkin_FIND_QUIETLY) find_package(${component} QUIET NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH) + NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) else() find_package(${component} NO_MODULE PATHS ${paths} - NO_DEFAULT_PATH) + NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) endif() # ROS Packages give their library packages via hard coded diff --git a/cmake/package.xml b/cmake/package.xml deleted file mode 100644 index c0bf80b0d7..0000000000 --- a/cmake/package.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - catkin - 0.6.11 - Low-level build system macros and infrastructure for ROS. - Dirk Thomas - BSD - - http://www.ros.org/wiki/catkin - https://github.com/ros/catkin/issues - https://github.com/ros/catkin - - Troy Straszheim - Morten Kjaergaard - Brian Gerkey - Dirk Thomas - - cmake - cmake - - python-argparse - python-catkin-pkg - - python-empy - - gtest - python-empy - python-nose - - python-mock - python-nose - - - - - - diff --git a/communications/CMakeLists.txt b/communications/CMakeLists.txt deleted file mode 100644 index d8162a363c..0000000000 --- a/communications/CMakeLists.txt +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -add_subdirectory(ff_msgs) -add_subdirectory(ff_hw_msgs) - -if (USE_DDS) - add_subdirectory(dds_msgs) - add_subdirectory(dds_ros_bridge) -endif (USE_DDS) diff --git a/communications/dds_msgs/CMakeLists.txt b/communications/dds_msgs/CMakeLists.txt index 6d9c160e76..c593054aa4 100644 --- a/communications/dds_msgs/CMakeLists.txt +++ b/communications/dds_msgs/CMakeLists.txt @@ -15,4 +15,100 @@ # License for the specific language governing permissions and limitations # under the License. -add_subdirectory(idl) +cmake_minimum_required(VERSION 3.0) +project(dds_msgs) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +if (USE_DDS) + ## Find catkin macros and libraries + find_package(catkin2 REQUIRED COMPONENTS + ) + + if (USE_CTC) + set(SORACORE_ROOT_DIR ${ARM_CHROOT_DIR}/usr) + else (USE_CTC) + set(SORACORE_ROOT_DIR /usr) + endif (USE_CTC) + + set(MIRO_ROOT_DIR ${SORACORE_ROOT_DIR}) + + list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../cmake") + find_package(Qt4 4.6.0 REQUIRED QtXml) + + find_package(Miro REQUIRED) + find_package(RtiDds REQUIRED) + find_package(Soracore REQUIRED) + + catkin_package( + INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/.. + LIBRARIES + rapidExtAstrobee + ) + + ########### + ## Build ## + ########### + # Specify additional locations of header files + include_directories( + ${catkin_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} + ${RTIDDS_INCLUDE_DIR} + ${SORACORE_INCLUDE_DIRS} + ${CMAKE_SOURCE_DIR}/rapidDds + ${SORACORE_IDL_DIR}/rapidDds + ) + + # Copying IDL files + file(GLOB ASTROBEE_IDL_FILES + "idl/*.idl") + file(COPY ${ASTROBEE_IDL_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + + set(LIB_NAME "rapidExtAstrobee") + + include(CreateDdsMsgTargets) + + set(EXTRA_RTIDDS_IDL_ARGS -I${SORACORE_IDL_DIR}/rapidDds) + create_dds_msg_targets( + NAME ${LIB_NAME} + DIR ${CMAKE_CURRENT_BINARY_DIR} + INCLUDES "-I${SORACORE_IDL_DIR}/rapidDds") + + add_library(${LIB_NAME} ${rapidExtAstrobee_GENERATED}) + target_link_libraries(${LIB_NAME} + rapidDds + ) + target_compile_definitions(${LIB_NAME} PUBLIC ${RTIDDS_DEFINE_FLAGS}) + target_compile_options(${LIB_NAME} PUBLIC -Wno-write-strings) + target_compile_options(${LIB_NAME} PRIVATE -Wno-strict-aliasing) + target_include_directories(${LIB_NAME} PUBLIC + ${CMAKE_CURRENT_BINARY_DIR} + ${RTIDDS_INCLUDE_DIR} + ${SORACORE_INCLUDE_DIRS} + ${MIRO_INCLUDE_DIR}) + + ############# + ## Install ## + ############# + + # Mark libraries for installation + install(TARGETS ${LIB_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} + ) + + install(FILES ${rapidExtAstrobee_GENERATED_HEADERS} + DESTINATION include/${LIB_NAME}) + + # Mark cpp header files for installation + install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}/../ + FILES_MATCHING PATTERN "dds_msgs/*.h" + PATTERN ".svn" EXCLUDE + ) +else (USE_DDS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() +endif (USE_DDS) \ No newline at end of file diff --git a/communications/dds_msgs/idl/CMakeLists.txt b/communications/dds_msgs/idl/CMakeLists.txt deleted file mode 100644 index 0be60313b0..0000000000 --- a/communications/dds_msgs/idl/CMakeLists.txt +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. 
-# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -message(STATUS "Copying IDL files") -file(GLOB DDS_IDL_FILES - "${CMAKE_SOURCE_DIR}/communications/dds_msgs/idl/*.idl") - -file(COPY ${DDS_IDL_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) - -set(LIB_NAME "rapidExtAstrobee") - -include(CreateDdsMsgTargets) - -set(EXTRA_RTIDDS_IDL_ARGS -I${SORACORE_IDL_DIR}/rapidDds) -create_dds_msg_targets( - NAME ${LIB_NAME} - DIR ${CMAKE_CURRENT_BINARY_DIR} - INCLUDES "-I${SORACORE_IDL_DIR}/rapidDds") - -add_library(${LIB_NAME} ${rapidExtAstrobee_GENERATED}) -target_link_libraries(${LIB_NAME} - rapidDds - ) -target_compile_definitions(${LIB_NAME} PUBLIC ${RTIDDS_DEFINE_FLAGS}) -target_compile_options(${LIB_NAME} PUBLIC -Wno-write-strings) -target_compile_options(${LIB_NAME} PRIVATE -Wno-strict-aliasing) -target_include_directories(${LIB_NAME} PUBLIC - ${CMAKE_CURRENT_BINARY_DIR} - ${RTIDDS_INCLUDE_DIR} - ${SORACORE_INCLUDE_DIRS}) - -install(TARGETS ${LIB_NAME} DESTINATION lib) -install(FILES ${rapidExtAstrobee_GENERATED_HEADERS} - DESTINATION include/${LIB_NAME}) diff --git a/tools/rviz_visualizer/package.xml b/communications/dds_msgs/package.xml similarity index 52% rename from tools/rviz_visualizer/package.xml rename to communications/dds_msgs/package.xml index ac9325d0e6..0f35132e90 100644 --- a/tools/rviz_visualizer/package.xml +++ b/communications/dds_msgs/package.xml @@ -1,8 +1,10 @@ - rviz_visualizer + dds_msgs 0.0.0 - The rviz_visualizer package draws boxes in rviz. + + The dds_msgs package + Apache License, Version 2.0 @@ -12,11 +14,8 @@ Astrobee Flight Software + message_generation + message_runtime catkin - roscpp - visualization_msgs - roscpp - visualization_msgs - tf2_ros - + diff --git a/communications/dds_ros_bridge/CMakeLists.txt b/communications/dds_ros_bridge/CMakeLists.txt index 4e19586103..c0ee7aa106 100644 --- a/communications/dds_ros_bridge/CMakeLists.txt +++ b/communications/dds_ros_bridge/CMakeLists.txt @@ -15,50 +15,93 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(dds_ros_bridge) -catkin_package( - LIBRARIES dds_ros_bridge astrobee_astrobee_bridge - CATKIN_DEPENDS roscpp message_runtime std_msgs nav_msgs -) -set(DEPS +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +if (USE_DDS) +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + nodelet + config_reader + ff_util ff_msgs + ff_hw_msgs + dds_msgs ) -set(LIBS - ${Boost_IOSTREAMS_LIBRARY} - config_reader - ff_nodelet - Qt4::QtXml - rapidIo - rapidExtAstrobee +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) +find_package(Boost 1.54.0 QUIET REQUIRED COMPONENTS filesystem system iostreams thread program_options timer) + +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../cmake") + +if (USE_CTC) + set(SORACORE_ROOT_DIR ${ARM_CHROOT_DIR}/usr) +else (USE_CTC) + set(SORACORE_ROOT_DIR /usr) +endif (USE_CTC) + +set(MIRO_ROOT_DIR ${SORACORE_ROOT_DIR}) + +SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + +# just calls the normal one +find_package(Qt4 4.6.0 REQUIRED QtXml) +find_package(Miro REQUIRED) +find_package(RtiDds REQUIRED) +find_package(Soracore REQUIRED) + + +catkin_package( + LIBRARIES dds_ros_bridge astrobee_astrobee_bridge + CATKIN_DEPENDS nodelet config_reader ff_util ff_msgs ff_hw_msgs dds_msgs ) -set(INCLUDES - ${CMAKE_CURRENT_SOURCE_DIR}/include +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} + ${RTIDDS_INCLUDE_DIR} ${SORACORE_INCLUDE_DIRS} + ${MIRO_INCLUDE_DIR} + ${QT_INCLUDE_DIR} + ${QT_INCLUDE_DIR}/Qt + ${Boost_INCLUDE_DIRS} ) -create_library(TARGET dds_ros_bridge - LIBS ${LIBS} - INC ${INCLUDES} - DEPS ${DEPS} +file(GLOB cc_files + "src/*.cc" ) -create_library(TARGET astrobee_astrobee_bridge - LIBS ${LIBS} - INC ${INCLUDES} - DEPS ${DEPS} +# Declare C++ libraries +add_library(dds_ros_bridge + ${cc_files} ) +target_compile_definitions(dds_ros_bridge PUBLIC ${RTIDDS_DEFINE_FLAGS}) +add_dependencies(dds_ros_bridge ${catkin_EXPORTED_TARGETS}) +target_link_libraries(dds_ros_bridge rapidIo ${Boost_LIBRARIES} ${catkin_LIBRARIES}) + -create_tool_targets(DIR tools - LIBS dds_ros_bridge - INC ${INCLUDES} - DEPS ${DEPS} dds_ros_bridge +add_library(astrobee_astrobee_bridge + src/astrobee_astrobee_bridge.cc ) +target_compile_definitions(astrobee_astrobee_bridge PUBLIC ${RTIDDS_DEFINE_FLAGS}) +add_dependencies(astrobee_astrobee_bridge ${catkin_EXPORTED_TARGETS}) +target_link_libraries(astrobee_astrobee_bridge Qt4::QtXml rapidIo ${Boost_LIBRARIES} ${catkin_LIBRARIES}) + +############# +## Install ## +############# # Determine our module name get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) @@ -70,4 +113,28 @@ install(CODE "execute_process( ERROR_QUIET )") -install_launch_files() +# Mark libraries for installation +install(TARGETS dds_ros_bridge + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS astrobee_astrobee_bridge + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch 
files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) +else (USE_DDS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() +endif (USE_DDS) \ No newline at end of file diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_command_ros_command_plan.h b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_command_ros_command_plan.h index 821a2feca2..f3940d3d3e 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_command_ros_command_plan.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_command_ros_command_plan.h @@ -33,7 +33,7 @@ #include "ff_msgs/CommandArg.h" #include "ff_msgs/CommandStamped.h" -#include "AstrobeeCommandConstants.h" +#include "dds_msgs/AstrobeeCommandConstants.h" #include "knDds/DdsTypedSupplier.h" diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_compressed_file_ros_compressed_file.h b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_compressed_file_ros_compressed_file.h index 1938fe2cb2..5a22668a23 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_compressed_file_ros_compressed_file.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_compressed_file_ros_compressed_file.h @@ -31,9 +31,9 @@ #include "ff_msgs/CompressedFile.h" -#include "AstrobeeConstants.h" -#include "CompressedFile.h" -#include "CompressedFileSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/CompressedFile.h" +#include "dds_msgs/CompressedFileSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_ekf_ros_ekf.h b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_ekf_ros_ekf.h index 5a973a81bf..93a41ca65c 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_ekf_ros_ekf.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_ekf_ros_ekf.h @@ -32,11 +32,11 @@ #include "ff_msgs/EkfState.h" -#include "AstrobeeConstants.h" +#include "dds_msgs/AstrobeeConstants.h" #include "knDds/DdsTypedSupplier.h" -#include "EkfStateSupport.h" +#include "dds_msgs/EkfStateSupport.h" #include "rapidDds/RapidConstants.h" diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_gs_data_ros_gs_data.h b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_gs_data_ros_gs_data.h index 1b44ac9272..a11da1d691 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_gs_data_ros_gs_data.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/rapid_gs_data_ros_gs_data.h @@ -32,11 +32,11 @@ #include "ff_msgs/GuestScienceData.h" -#include "AstrobeeConstants.h" +#include "dds_msgs/AstrobeeConstants.h" #include "knDds/DdsTypedSupplier.h" -#include "GuestScienceDataSupport.h" +#include "dds_msgs/GuestScienceDataSupport.h" #include "rapidDds/RapidConstants.h" diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_agent_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_agent_state.h index 6c81a94006..d51fb94e8a 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_agent_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_agent_state.h @@ -35,9 +35,9 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeAgentStateSupport.h" -#include "AstrobeeConstants.h" -#include "MobilitySettingsStateSupport.h" +#include "dds_msgs/AstrobeeAgentStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include 
"dds_msgs/MobilitySettingsStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_arm_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_arm_state.h index edf88296b6..fadf46e604 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_arm_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_arm_state.h @@ -34,8 +34,8 @@ #include "rapidUtil/RapidHelper.h" -#include "ArmStateSupport.h" -#include "AstrobeeConstants.h" +#include "dds_msgs/ArmStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_battery_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_battery_state.h index d03251bc40..0083a3aaf3 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_battery_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_battery_state.h @@ -33,9 +33,9 @@ #include "sensor_msgs/BatteryState.h" #include "sensor_msgs/Temperature.h" -#include "AstrobeeConstants.h" -#include "EpsStateSupport.h" -#include "EpsConfigSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/EpsStateSupport.h" +#include "dds_msgs/EpsConfigSupport.h" #include "rapidUtil/RapidHelper.h" diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_command_config_rapid_command_config.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_command_config_rapid_command_config.h index 69d53f75e7..acc160f687 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_command_config_rapid_command_config.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_command_config_rapid_command_config.h @@ -36,7 +36,7 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeCommandConstants.h" +#include "dds_msgs/AstrobeeCommandConstants.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_ack.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_ack.h index 1dffa64233..1aa210e678 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_ack.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_ack.h @@ -33,8 +33,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "CompressedFileAckSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/CompressedFileAckSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_rapid_compressed_file.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_rapid_compressed_file.h index 34bc85c097..e3a957d9c1 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_rapid_compressed_file.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_compressed_file_rapid_compressed_file.h @@ -33,8 +33,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "CompressedFileSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/CompressedFileSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_cpu_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_cpu_state.h index acb06d0a1a..c820bc3f0e 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_cpu_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_cpu_state.h @@ -32,9 +32,9 @@ #include 
"rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "CpuConfigSupport.h" -#include "CpuStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/CpuConfigSupport.h" +#include "dds_msgs/CpuStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_data_to_disk.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_data_to_disk.h index 9fd42415d6..a58e4f13d3 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_data_to_disk.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_data_to_disk.h @@ -33,9 +33,9 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "DataToDiskStateSupport.h" -#include "DataTopicsListSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/DataToDiskStateSupport.h" +#include "dds_msgs/DataTopicsListSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_disk_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_disk_state.h index bb890ca8b2..b9250bdae1 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_disk_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_disk_state.h @@ -34,9 +34,9 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "DiskStateSupport.h" -#include "DiskConfigSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/DiskStateSupport.h" +#include "dds_msgs/DiskConfigSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_ekf_rapid_ekf.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_ekf_rapid_ekf.h index 5a754d1296..ff1ba346d2 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_ekf_rapid_ekf.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_ekf_rapid_ekf.h @@ -36,8 +36,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "EkfStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/EkfStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_config.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_config.h index c3cdea3110..aba113753a 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_config.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_config.h @@ -32,9 +32,9 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "FaultConfigSupport.h" -#include "FaultStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/FaultConfigSupport.h" +#include "dds_msgs/FaultStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_state.h index 149f9489fc..629c750dc7 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_fault_state.h @@ -32,8 +32,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "FaultStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/FaultStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_control_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_control_state.h index 729683cdfb..653c52847f 100644 --- 
a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_control_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_control_state.h @@ -18,8 +18,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "GncControlStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/GncControlStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_fam_cmd_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_fam_cmd_state.h index 49e8e451a1..210c312410 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_fam_cmd_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_gnc_fam_cmd_state.h @@ -35,8 +35,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "GncFamCmdStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/GncFamCmdStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_guest_science.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_guest_science.h index 3a861090d8..e54b9e2a1e 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_guest_science.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_guest_science.h @@ -37,10 +37,10 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "GuestScienceConfigSupport.h" -#include "GuestScienceDataSupport.h" -#include "GuestScienceStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/GuestScienceConfigSupport.h" +#include "dds_msgs/GuestScienceDataSupport.h" +#include "dds_msgs/GuestScienceStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_inertia.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_inertia.h index 34d4281db4..fa112aec69 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_inertia.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_inertia.h @@ -30,8 +30,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "InertialPropertiesSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/InertialPropertiesSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_log_sample.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_log_sample.h index 7968382dd7..a9b467e0e2 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_log_sample.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_log_sample.h @@ -18,8 +18,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "LogSampleSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/LogSampleSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_odom_rapid_position.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_odom_rapid_position.h index d1201e560d..6284e38b5e 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_odom_rapid_position.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_odom_rapid_position.h @@ -36,8 +36,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "EkfStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/EkfStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_payload_state.h 
b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_payload_state.h index 7ba4eaccef..73e3fee1d5 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_payload_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_payload_state.h @@ -33,9 +33,9 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "PayloadConfigSupport.h" -#include "PayloadStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/PayloadConfigSupport.h" +#include "dds_msgs/PayloadStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_plan_status_rapid_plan_status.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_plan_status_rapid_plan_status.h index 9dc1241916..36bdbf780d 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_plan_status_rapid_plan_status.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_plan_status_rapid_plan_status.h @@ -35,8 +35,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "PlanStatusSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/PlanStatusSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_pmc_cmd_state.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_pmc_cmd_state.h index 97a971e9a9..de30077fec 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_pmc_cmd_state.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_pmc_cmd_state.h @@ -18,8 +18,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "PmcCmdStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/PmcCmdStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_telemetry_rapid_telemetry.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_telemetry_rapid_telemetry.h index 09f3cf9e81..584b5db19f 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_telemetry_rapid_telemetry.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_telemetry_rapid_telemetry.h @@ -34,9 +34,9 @@ #include "ros/ros.h" -#include "AstrobeeConstants.h" -#include "TelemetryConfigSupport.h" -#include "TelemetryStateSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/TelemetryConfigSupport.h" +#include "dds_msgs/TelemetryStateSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_zones_rapid_compressed_file.h b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_zones_rapid_compressed_file.h index 4cc7c52efa..69f409455d 100644 --- a/communications/dds_ros_bridge/include/dds_ros_bridge/ros_zones_rapid_compressed_file.h +++ b/communications/dds_ros_bridge/include/dds_ros_bridge/ros_zones_rapid_compressed_file.h @@ -44,8 +44,8 @@ #include "rapidUtil/RapidHelper.h" -#include "AstrobeeConstants.h" -#include "CompressedFileSupport.h" +#include "dds_msgs/AstrobeeConstants.h" +#include "dds_msgs/CompressedFileSupport.h" namespace ff { diff --git a/communications/dds_ros_bridge/package.xml b/communications/dds_ros_bridge/package.xml index 50931e9e46..010526ba3c 100644 --- a/communications/dds_ros_bridge/package.xml +++ b/communications/dds_ros_bridge/package.xml @@ -15,22 +15,18 @@ Astrobee Flight Software catkin - cmake_modules - message_generation - ff_msgs - nav_msgs - roscpp - rossmbus - std_msgs nodelet - cmake_modules - message_runtime - ff_msgs - nav_msgs - roscpp - rossmbus - 
std_msgs + config_reader + ff_util + ff_msgs + ff_hw_msgs + dds_msgs nodelet + config_reader + ff_util + ff_msgs + ff_hw_msgs + dds_msgs diff --git a/communications/ff_hw_msgs/CMakeLists.txt b/communications/ff_hw_msgs/CMakeLists.txt index b2ad112ac7..27087b46b6 100644 --- a/communications/ff_hw_msgs/CMakeLists.txt +++ b/communications/ff_hw_msgs/CMakeLists.txt @@ -15,16 +15,34 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(ff_hw_msgs) -find_package(catkin2 REQUIRED COMPONENTS +find_package(catkin REQUIRED COMPONENTS message_generation - roscpp std_msgs sensor_msgs ) -create_msg_targets(DIR msg SDIR srv DEPS std_msgs sensor_msgs) +# Generate messages in the 'msg' folder +file(GLOB MSG_FILES msg/*.msg) +foreach(SRC ${MSG_FILES}) + get_filename_component(NAME ${SRC} NAME) + list(APPEND MSG_FILE_NAMES ${NAME}) +endforeach() +add_message_files(FILES ${MSG_FILE_NAMES}) + +# Generate services in the 'srv' folder +file(GLOB SRV_FILES srv/*.srv) +foreach(SRC ${SRV_FILES}) + get_filename_component(NAME ${SRC} NAME) + list(APPEND SRV_FILE_NAMES ${NAME}) +endforeach() +add_service_files(FILES ${SRV_FILE_NAMES}) + +generate_messages( + DEPENDENCIES std_msgs geometry_msgs sensor_msgs +) catkin_package( # INCLUDE_DIRS include @@ -35,4 +53,4 @@ catkin_package( include_directories( include ${catkin_INCLUDE_DIRS} -) \ No newline at end of file +) diff --git a/communications/ff_hw_msgs/package.xml b/communications/ff_hw_msgs/package.xml index 6ffefff800..0f1520d52b 100644 --- a/communications/ff_hw_msgs/package.xml +++ b/communications/ff_hw_msgs/package.xml @@ -16,10 +16,8 @@ catkin message_generation - ros_cpp std_msgs sensor_msgs - ros_cpp std_msgs sensor_msgs diff --git a/communications/ff_msgs/CMakeLists.txt b/communications/ff_msgs/CMakeLists.txt index 5040c836e1..37d9842fc3 100644 --- a/communications/ff_msgs/CMakeLists.txt +++ b/communications/ff_msgs/CMakeLists.txt @@ -15,9 +15,10 @@ # License for the specific language governing permissions and limitations # under the License. 
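Both `ff_hw_msgs` above and `ff_msgs` below replace the old `create_msg_targets()` macro with a glob over `msg/`, `srv/` (and, for `ff_msgs`, `action/`), stripping each path down to a bare file name because `add_message_files()` and its siblings expect names relative to those directories. Condensed into a hypothetical `example_msgs` package (name and dependency list illustrative), the pattern is:

```
cmake_minimum_required(VERSION 3.0)
project(example_msgs)

find_package(catkin REQUIRED COMPONENTS
  message_generation
  std_msgs
)

# add_message_files() expects bare file names, so glob and strip the paths.
file(GLOB MSG_FILES msg/*.msg)
foreach(SRC ${MSG_FILES})
  get_filename_component(NAME ${SRC} NAME)
  list(APPEND MSG_FILE_NAMES ${NAME})
endforeach()
add_message_files(FILES ${MSG_FILE_NAMES})

generate_messages(DEPENDENCIES std_msgs)

catkin_package(
  CATKIN_DEPENDS message_runtime std_msgs
)
```

One trade-off of globbing: CMake only re-scans the directories when it re-runs, so a newly added `.msg` file is not picked up until the package is reconfigured.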
+cmake_minimum_required(VERSION 3.0) project(ff_msgs) -find_package(catkin2 REQUIRED COMPONENTS +find_package(catkin REQUIRED COMPONENTS message_generation std_msgs geometry_msgs @@ -26,8 +27,33 @@ find_package(catkin2 REQUIRED COMPONENTS sensor_msgs ) -create_msg_targets(DIR msg SDIR srv ADIR action - DEPS std_msgs geometry_msgs actionlib_msgs trajectory_msgs sensor_msgs) +# Generate messages in the 'msg' folder +file(GLOB MSG_FILES msg/*.msg) +foreach(SRC ${MSG_FILES}) + get_filename_component(NAME ${SRC} NAME) + list(APPEND MSG_FILE_NAMES ${NAME}) +endforeach() +add_message_files(FILES ${MSG_FILE_NAMES}) + +# Generate services in the 'srv' folder +file(GLOB SRV_FILES srv/*.srv) +foreach(SRC ${SRV_FILES}) + get_filename_component(NAME ${SRC} NAME) + list(APPEND SRV_FILE_NAMES ${NAME}) +endforeach() +add_service_files(FILES ${SRV_FILE_NAMES}) + +# Generate actions in the 'action' folder +file(GLOB ACTION_FILES action/*.action) +foreach(SRC ${ACTION_FILES}) + get_filename_component(NAME ${SRC} NAME) + list(APPEND ACTION_FILE_NAMES ${NAME}) +endforeach() +add_action_files(FILES ${ACTION_FILE_NAMES}) + +generate_messages( + DEPENDENCIES std_msgs geometry_msgs actionlib_msgs trajectory_msgs sensor_msgs std_msgs +) catkin_package( # INCLUDE_DIRS include diff --git a/debian/changelog b/debian/changelog index 4b4b4606fd..5cd459ed68 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,13 @@ +astrobee (0.16.0) testing; urgency=medium + + * project compiles with catkin + * mapper performance improvement + * imu_augmentor performance improvement + * calibration improvements + * multiple other fixes + + -- Astrobee Flight Software Thu, 02 Dec 2021 13:57:39 -0800 + astrobee (0.15.2) testing; urgency=medium * added python linters black and isort in the CI pipeline diff --git a/debian/rules b/debian/rules index 9ac1033ac5..b36dc4a593 100755 --- a/debian/rules +++ b/debian/rules @@ -17,7 +17,7 @@ export EXTRA_CMAKE_OPTS export LIB_SEARCH_OPTIONS ifeq ($(DEB_HOST_ARCH),armhf) - export EXTRA_CMAKE_OPTS=-DCMAKE_TOOLCHAIN_FILE=../scripts/build/ubuntu_cross.cmake -DUSE_CTC=true +# export EXTRA_CMAKE_OPTS=-DCMAKE_TOOLCHAIN_FILE=../scripts/build/ubuntu_cross.cmake -DUSE_CTC=true # stripping doesn't work with the cross compile #export DEB_BUILD_OPTIONS += nostrip export LIB_SEARCH_OPTIONS = -l$(ARMHF_CHROOT_DIR)/opt/rti/ndds/lib/armv6vfphLinux3.xgcc4.7.2:$(ARMHF_CHROOT_DIR)/opt/ros/kinetic/lib:$(ARMHF_CHROOT_DIR)/opt/ros/kinetic/lib/arm-linux-gnueabihf:$(ARMHF_CHROOT_DIR)/lib:$(ARMHF_CHROOT_DIR)/usr/lib:$(ARMHF_CHROOT_DIR)/usr/lib/arm-linux-gnueabihf:$(ARMHF_CHROOT_DIR)/lib/arm-linux-gnueabihf -- --ignore-missing-info --admindir=$(ARMHF_CHROOT_DIR)/var/lib/dpkg @@ -36,8 +36,16 @@ build-indep: ; # we always need release, otherwise it is too slow override_dh_auto_configure: - mkdir -p build - cd build && cmake .. -DCMAKE_VERBOSE_MAKEFILE=ON -DCMAKE_INSTALL_PREFIX:PATH=/opt/astrobee -DCMAKE_BUILD_TYPE=Release $(EXTRA_CMAKE_OPTS) && cd .. + # configure the build + cd .. && \ + ./src/scripts/configure.sh -a -w src/debian/tmp && \ + catkin clean -y --force +override_dh_auto_build: ; + +# the install command is already invoked in the build +override_dh_auto_install: + cd .. 
&& \ + catkin build override_dh_install: dh_movefiles diff --git a/description/CMakeLists.txt b/description/CMakeLists.txt deleted file mode 100644 index 91d48bc92e..0000000000 --- a/description/CMakeLists.txt +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Check to see if correct version of media has been downloaded into tree. We -# dont do this if we are cross-compiling, as there is no need for it. We must -# also make sure the media is copied in a native install to a simulator. - -# Add the things that need to be on both the robot and the simulation -add_subdirectory(description) - -# Optional: install only for simulated context -if (NOT USE_CTC) - add_subdirectory(media/astrobee_granite) - add_subdirectory(media/astrobee_dock) - add_subdirectory(media/astrobee_iss) - add_subdirectory(media/astrobee_handrail_8_5) - add_subdirectory(media/astrobee_handrail_21_5) - add_subdirectory(media/astrobee_handrail_30) - add_subdirectory(media/astrobee_handrail_41_5) - add_subdirectory(media/astrobee_freeflyer) -endif (NOT USE_CTC) diff --git a/description/description/CMakeLists.txt b/description/description/CMakeLists.txt index 6da9a5ccfc..8212f71f21 100644 --- a/description/description/CMakeLists.txt +++ b/description/description/CMakeLists.txt @@ -19,8 +19,12 @@ # dont do this if we are cross-compiling, as there is no need for it. We must # also make sure the media is copied in a native install to a simulator. 
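With the move to catkin, the top-level `description/CMakeLists.txt` deleted above is no longer needed: `catkin build` discovers each media directory through its own `package.xml`, so nothing has to `add_subdirectory()` them. The `description` package that follows then reduces to a data-only package, whose minimal shape is roughly as below (project name illustrative; `urdf` is the directory actually installed in this change):

```
cmake_minimum_required(VERSION 3.0)
project(example_description)

find_package(catkin REQUIRED)
catkin_package()

# Nothing to compile; just ship the resource files with the package.
install(DIRECTORY urdf
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
```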
+cmake_minimum_required(VERSION 3.0) project(description) +find_package(catkin REQUIRED COMPONENTS +) + catkin_package() install(DIRECTORY urdf DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) diff --git a/description/media b/description/media index bf2057d4ab..e3561545c6 160000 --- a/description/media +++ b/description/media @@ -1 +1 @@ -Subproject commit bf2057d4ab02516878fd374f2f2e6601ac49510f +Subproject commit e3561545c61b6be5406839b9d959becbfe7b4100 diff --git a/doc/general_documentation/INSTALL.md b/doc/general_documentation/INSTALL.md index 72094b1858..ef25d418c3 100644 --- a/doc/general_documentation/INSTALL.md +++ b/doc/general_documentation/INSTALL.md @@ -22,28 +22,21 @@ support running Astrobee Robot Software on 32-bit systems.* ### Checkout the project source code At this point you need to decide where you'd like to put the source code -(`SOURCE_PATH`) on your machine: +(`ASTROBEE_WS`) on your machine: - export SOURCE_PATH=$HOME/astrobee + export ASTROBEE_WS=$HOME/astrobee First, clone the flight software repository and media: - git clone https://github.com/nasa/astrobee.git $SOURCE_PATH - pushd $SOURCE_PATH + git clone https://github.com/nasa/astrobee.git $ASTROBEE_WS/src + pushd $ASTROBEE_WS/src git submodule update --init --depth 1 description/media popd If you are planning to work with guest science code, you will also need the -`astrobee_android` repository. You should checkout the repository in the same -directory you checked out the source code in: - - export ANDROID_PATH="${SOURCE_PATH}_android" - - -Clone the android repository: - - git clone https://github.com/nasa/astrobee_android.git $ANDROID_PATH +`astrobee_android` repository. You should checkout the repository as a submodule: + git submodule update --init --depth 1 submodules/android ### Dependencies @@ -54,8 +47,8 @@ Next, install all required dependencies: *Note: Before running this please ensure that your system is completely updated by running 'sudo apt-get update' and then 'sudo apt-get upgrade'* - pushd $SOURCE_PATH - cd scripts/setup + pushd $ASTROBEE_WS + cd src/scripts/setup ./add_ros_repository.sh sudo apt-get update cd debians @@ -81,17 +74,21 @@ by running 'sudo apt-get update' and then 'sudo apt-get upgrade'* ## Configuring the build ### Note for the build setup -By default, the configure script uses the following paths: - - native build path: `$HOME/astrobee_build/native` - - native install path: `$HOME/astrobee_install/native` + +When compiling, the `$WORKSPACE_PATH` defines where the `devel`, `build`, `logs` and +`install` directories are created. If you want to customize the `install` path then the +`$INSTALL_PATH` can be defined. By default, the configure script uses the following paths: + + - native build path: `$ASTROBEE_WS/build` + - native install path: `$ASTROBEE_WS/install` If you are satisfied with these paths, you can invoke the `configure.sh` without -the `-p` and `-b` options. For the simplicity of the instructions below, -we assume that `$BUILD_PATH` and `$INSTALL_PATH` contain the location of the +the `-p` and `-w` options. For the simplicity of the instructions below, +we assume that `$WORKSPACE_PATH` and `$INSTALL_PATH` contain the location of the build and install path. 
For example: - export BUILD_PATH=$HOME/astrobee_build/native - export INSTALL_PATH=$HOME/astrobee_install/native + export WORKSPACE_PATH=$ASTROBEE_WS + export INSTALL_PATH=$ASTROBEE_WS/install ### Native build @@ -100,14 +97,22 @@ that `configure.sh` is simply a wrapper around CMake that provides an easy way of turning on and off options. To see which options are supported, simply run `configure.sh -h`. - pushd $SOURCE_PATH - ./scripts/configure.sh -l -F -D + pushd $ASTROBEE_WS + ./src/scripts/configure.sh -l -F -D + source ~/.bashrc popd -If you want to explicitly specify the build and install directories, use +The configure script modifies your ``.bashrc`` to source ``setup.bash`` for +the current ROS distribution and to set CMAKE_PREFIX_PATH. It is suggested +to examine it and see if all changes were made correctly. + +If you want to explicitly specify the workspace and install directories, use instead: - ./scripts/configure.sh -l -F -D -p $INSTALL_PATH -b $BUILD_PATH + ./src/scripts/configure.sh -l -F -D -p $INSTALL_PATH -w $WORKSPACE_PATH + +*Note: If a workspace is specified but not an explicit install distectory, +install location will be $WORKSPACE_PATH/install.* *Note: Make sure you use the -F and -D flags. If these flags are not used, the code will not compile. The -F flag is used to turn off building the Picoflex. @@ -118,13 +123,13 @@ simulator.* ## Building the code -To build, run `make` in the `$BUILD_PATH`. Note that depending on your host +To build, run `catkin build` in the `$WORKSPACE_PATH`. Note that depending on your host machine, this might take in the order of tens of minutes to complete the first time round. Future builds will be faster, as only changes to the code are rebuilt, and not the entire code base. - pushd $BUILD_PATH - make -j2 + pushd $ASTROBEE_WS + catkin build popd If you configured your virtual machine with more than the baseline resources, diff --git a/doc/general_documentation/NASA_INSTALL.md b/doc/general_documentation/NASA_INSTALL.md index 08ee58f3bb..2a515262a8 100644 --- a/doc/general_documentation/NASA_INSTALL.md +++ b/doc/general_documentation/NASA_INSTALL.md @@ -52,20 +52,17 @@ reach `astrobee.ndc.nasa.gov`: 1. Use VPN to act like if you were inside the ARC TI private network and obtain the correct kerberos credentials inside the VM with the following - command (note the capitalization): -``` -kinit $NDC_USERNAME@NDC.NASA.GOV` -``` + command (note the capitalization): `kinit $NDC_USERNAME@NDC.NASA.GOV` 2. setup your `.ssh/config` to do ssh forwarding. A tutorial on this method is available at: https://babelfish.arc.nasa.gov/trac/freeflyer/wiki/SSHSetup For either solution, please verify that you can SSH to `m.ndc.nasa.gov` without entering your password (`m` is used to tunnel to `astrobee.ndc.nasa.gov`): - ssh $NDC_USERNAME@m.ndc.nasa.gov + `ssh $NDC_USERNAME@m.ndc.nasa.gov` The command should succeed without entering your password. Once this is verified, -exit this session on `m` with +D. +exit this session on `m` with `+D`. - These notes apply to `install_desktop_16.04_packages.sh` and `make_xenial.sh` @@ -73,13 +70,13 @@ exit this session on `m` with +D. 
### Checkout the project source code At this point you need to decide where you'd like to put the source code -(`SOURCE_PATH`) on your machine: +(`ASTROBEE_WS`) on your machine: - export SOURCE_PATH=$HOME/astrobee + export ASTROBEE_WS=$HOME/astrobee First, clone the flight software repository: - git clone https://github.com/nasa/astrobee.git --branch develop $SOURCE_PATH + git clone https://github.com/nasa/astrobee.git --branch develop $ASTROBEE_WS/src git submodule update --init --depth 1 description/media git submodule update --init --depth 1 submodules/platform @@ -99,8 +96,8 @@ module is used when cross-compiling to test on the robot hardware. ### Dependencies Install dependencies: - pushd $SOURCE_PATH - cd scripts/setup + pushd $ASTROBEE_WS + cd src/scripts/setup ./add_local_repository.sh ./add_ros_repository.sh ./install_desktop_packages.sh @@ -134,8 +131,11 @@ Next, download the cross toolchain and install the chroot: mkdir -p $ARMHF_TOOLCHAIN cd $HOME/arm_cross - $SOURCE_PATH/submodules/platform/fetch_toolchain.sh - $SOURCE_PATH/submodules/platform/rootfs/make_xenial.sh dev $ARMHF_CHROOT_DIR + $ASTROBEE_WS/src/submodules/platform/fetch_toolchain.sh + $ASTROBEE_WS/src/submodules/platform/rootfs/make_xenial.sh dev $ARMHF_CHROOT_DIR + +*Note: The last script shown above needs the packages `qemu-user-static` (not +`qemu-arm-static`) and `multistrap` to be installed (can be installed through apt).* ## Configuring the build @@ -145,18 +145,20 @@ the code on the robot itself). Please skip to the relevant subsection. ### Note for both builds setup -By default, the configure script uses the following paths: +When compiling, the `$WORKSPACE_PATH` defines where the `devel`, `build`, `logs` and +`install` directories are created. If you want to customize the `install` path then the +`$INSTALL_PATH` can be defined. By default, the configure script uses the following paths: - - native build path (BUILD_PATH): `$HOME/astrobee_build/native` - - native install path (INSTALL_PATH): `$HOME/astrobee_install/native` - - armhf build path (BUILD_PATH): `$HOME/astrobee_build/armhf` - - armhf install path (INSTALL_PATH): `$HOME/astrobee_install/armhf` + - native build path (WORKSPACE_PATH): `$ASTROBEE_WS` + - native install path (INSTALL_PATH): `$ASTROBEE_WS/install` + - armhf build path (WORKSPACE_PATH): `$ASTROBEE_WS/armhf` + - armhf install path (INSTALL_PATH): `$ASTROBEE_WS/armhf/install` You should set these values in your shell. If you are satisfied with these paths, you can invoke the `configure.sh` without -the `-p` and `-b` options. For the simplicity of the instructions below, -we assume that `$BUILD_PATH` and `$INSTALL_PATH` contain the location of the +the `-p` and `-w` options. For the simplicity of the instructions below, +we assume that `$WORKSPACE_PATH` and `$INSTALL_PATH` contain the location of the build and install path for either `native` or `armhf` platforms. ### Native build @@ -166,45 +168,57 @@ that `configure.sh` is simply a wrapper around CMake that provides an easy way of turning on and off options. To see which options are supported, simply run `configure.sh -h`. - pushd $SOURCE_PATH - ./scripts/configure.sh -l + pushd $ASTROBEE_WS + ./src/scripts/configure.sh -l + source ~/.bashrc popd -If you want to explicitly specify the build and install directories, use +The configure script modifies your ``.bashrc`` to source ``setup.bash`` for +the current ROS distribution and to set CMAKE_PREFIX_PATH. It is suggested +to examine it and see if all changes were made correctly. 
+ +If you want to explicitly specify the workspace and/or install directories, use instead: - ./scripts/configure.sh -l -p $INSTALL_PATH -b $BUILD_PATH + ./scripts/configure.sh -l -p $INSTALL_PATH -w $WORKSPACE_PATH + +*Note: If a workspace is specified but not an explicit install distectory, +install location will be $WORKSPACE_PATH/install.* ### Cross-compile build Cross compiling for the robot follows the same process, except the configure script takes a `-a` flag instead of `-l`. - pushd $SOURCE_PATH - ./scripts/configure.sh -a + pushd $ASTROBEE_WS + ./src/scripts/configure.sh -a popd Or with explicit build and install paths: - ./scripts/configure.sh -a -p $INSTALL_PATH -b $BUILD_PATH + ./scripts/configure.sh -a -p $INSTALL_PATH -w $WORKSPACE_PATH -*Warning: `$INSTALL_PATH` and `$BUILD_PATH` used for cross compiling HAVE to be +*Warning: `$INSTALL_PATH` and `$WORKSPACE_PATH` used for cross compiling HAVE to be different than the paths for native build! See above for the default values for these.* ## Building the code -To build, run `make` in the `$BUILD_PATH`. Note that depending on your host +To build, run `catkin build` in the `$WORKSPACE_PATH`. Note that depending on your host machine, this might take in the order of tens of minutes to complete the first time round. Future builds will be faster, as only changes to the code are rebuilt, and not the entire code base. - pushd $BUILD_PATH - make -j6 + pushd $ASTROBEE_WS + catkin build popd -*Note: `$BUILD_PATH` above is either the path for native build or armhf build, -whatever you currently are doing.* +## Switching build profiles + +To alternate between native and armhf profiles: + + catkin profile set native + catkin profile set armhf ## Running a simulation @@ -212,7 +226,7 @@ In order to run a simulation you must have build natively. You will need to first setup your environment, so that ROS knows about the new packages provided by Astrobee flight software: - pushd $BUILD_PATH + pushd $ASTROBEE_WS source devel/setup.bash popd @@ -234,21 +248,17 @@ For more information on running the simulator and moving the robot, please see t ## Running the code on a real robot In order to do this, you will need to have followed the cross-compile build -instructions. Once the code has been built, you also need to install the code to +instructions. Once the code has been built, it also installs the code to a singular location. CMake remembers what `$INSTALL_PATH` you specified, and will copy all products into this directory. - pushd $BUILD_PATH - make install - popd - Once the installation has completed, copy the install directory to the robot. This script assumes that you are connected to the Astrobee network, as it uses rsync to copy the install directory to `~/armhf` on the two processors. It takes the robot name as an argument. Here we use `p4d'. - pushd $SOURCE_PATH - ./scripts/install_to_astrobee.sh $INSTALL_PATH p4d + pushd $ASTROBEE_WS + ./src/scripts/install_to_astrobee.sh $INSTALL_PATH p4d popd Here, p4d is the name of the robot, which may be different in your case. @@ -256,8 +266,8 @@ Here, p4d is the name of the robot, which may be different in your case. You are now ready to run the code. This code launches a visualization tool, which starts the flight software as a background process. 
- pushd $SOURCE_PATH - python ./tools/gnc_visualizer/scripts/visualizer --proto4 + pushd $ASTROBEE_WS + python ./src/tools/gnc_visualizer/scripts/visualizer --proto4 popd # Further information diff --git a/doc/general_documentation/code_style.md b/doc/general_documentation/code_style.md new file mode 100644 index 0000000000..1e8d8f8dfd --- /dev/null +++ b/doc/general_documentation/code_style.md @@ -0,0 +1,3 @@ +\page astrobee-code-style Astrobee code style + +The Astrobee code uses [cpplint](https://en.wikipedia.org/wiki/Cpplint) to enforce a consistent and uniform coding style. Code which fails this tool cannot be committed with Git. It is suggested that the [clang-format-8](https://launchpad.net/ubuntu/bionic/+package/clang-format-8) program be installed on your machine. The *git commit* command will invoke this tool which will fix most (but not all) of the formatting errors using as a guide the ``.clang-format`` style file at the base of the Astrobee repository. diff --git a/doc/general_documentation/flight_release.md b/doc/general_documentation/flight_release.md index e547cc231a..18ebc2f7e7 100644 --- a/doc/general_documentation/flight_release.md +++ b/doc/general_documentation/flight_release.md @@ -1,7 +1,5 @@ \page release Creating a Flight Release -# Creating a Flight Release - This page describes the steps required to complete a flight release. It assumes that your environment is configured to cross-compile the fsw for the `armhf` architecture. diff --git a/doc/general_documentation/readme.md b/doc/general_documentation/readme.md deleted file mode 100644 index 9abf7bca4e..0000000000 --- a/doc/general_documentation/readme.md +++ /dev/null @@ -1,95 +0,0 @@ -\page doc Documentation - -# Tools used for FSW documentation - -## Code documentation - -Code should be documented using doxygen using the latest version. - -## UML diagraming - -Astrobee UML diagrams are created using [plantuml](http://plantuml.com/) that -transform text files into beautiful UML 2 compliant diagrams. - -Specific notations used in the diagrams to represent ROS communication schemes -is described in: -[Astrobee UML Notations](doc/diagrams/notations.png) - -Despite the ads overloaded website, plantuml is free and [open -source](https://github.com/plantuml/plantuml). The plantuml [Reference -Guide](http://plantuml.com/PlantUML_Language_Reference_Guide.pdf) walks to all -the diagrams types. - -Plantuml relies on [Graphviz](http://www.graphviz.org/) that should be already -installed since ROS is also using this tool to render runtime node/topic graphs. -There are many ways to write/generate plantuml diagrams: -http://plantuml.com/running. Choose the integration that best suit you, but vim + -command line java works perfectly. - -Note: some diagrams are not generated correctly when using graphviz 2.40 (layout -stretched vertically). So for now please use graphviz 2.38. - -Note: to re-active the 2.38 that keep being updated on Mac: - - sudo port activate graphviz @2.38.0_3+pangocairo - -To generate the diagrams from the `doc\diagrams` directory: -``` -make -# or to get PNG versions: -make png -``` - -## ROS Messages, Services and Actions - -Our toolchain incudes Doxygen python filters to interpret `*.msg`, `*.action` -and `*.srv` files. Internally, the filters convert the messages, topics and -action files to markdown, and renders them in a hierarchy of Doxygen 'module' -pages. So, they will appear in the same structure as the written markdown -documentation, and not as classes / structs or other types. 
- -In order for the files to be interpreted correctly, one needs to abide by a -couple of simple rules. -1. All header lines must begin with a '#' comment character. The end of the - header is signified by an empty line. All comments following the newline will - be treated as leading comments for the first declared variable. -1. Any header lines containing the word 'copyright' will be stripped from the - documentation. All hashes ans newlines will also be stripped from the - documentation. You are therefore discouraged from including any comment that - relies on formatting using tabs or spaces. -1. You may document a variable above (leading) or in-line (to the right of) it's - declaration. However, you cannot do both! An inline comment will mute a - leading comment. - -# ROS Unit Tests - -The ROS test framework (rostest) is build upon the Google test framework. The -idea behind a ROS test is that it first launches a collection of ROS nodes, and -then starts the unit test, which runs tests against the ROS nodes. - -Writing a unit test for a package involves modifying the package's -`CMakeLists.txt` and `package.xml` files, and writing a collection of test -cases. To see an example, have a look in the `./tools/visualeyez` folder. - -To build all ROS tests, use the following: - - make -j6 tests - -To execute all ROS tests run the following: - - make -j6 run_tests - -Note that by default all ROS console messages (ROS_INFO, ROS_WARN, etc.) called -from within the test cases are suppressed. To print all messages, as well as a -more comprehensive test result, add the `--text` argument: -``` -rostest <*.test> --text -``` - -Sometimes you want to debug your unit tests, but it's unclear on which port -rostest started the ROS master. A simpler approach is to manually start the ROS -master using `roscore` and then latch the rostest to this master with the -`reuse-master` argument as follows: -``` -rostest <*.test> --reuse-master -``` \ No newline at end of file diff --git a/external/CATKIN_IGNORE b/external/CATKIN_IGNORE deleted file mode 100644 index 8d1c8b69c3..0000000000 --- a/external/CATKIN_IGNORE +++ /dev/null @@ -1 +0,0 @@ - diff --git a/external/CMakeLists.txt b/external/CMakeLists.txt deleted file mode 100644 index 09e17b861b..0000000000 --- a/external/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -# all the external packages have lots of warnings, ignore them -#STRING(REGEX REPLACE "-Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") -#STRING(REGEX REPLACE "-Wall" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") -#STRING(REGEX REPLACE "-Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") -#STRING(REGEX REPLACE "-Werror" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") -#set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w") -#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w") - -# gtest is needed because no binaries distributed with ubuntu!!! -add_subdirectory(gtest) -set(GTEST_LIBRARIES ${GTEST_LIBRARIES} PARENT_SCOPE) -set(GTEST_INCLUDE_DIRS ${GTEST_INCLUDE_DIRS} PARENT_SCOPE) - diff --git a/gnc/CMakeLists.txt b/gnc/CMakeLists.txt deleted file mode 100644 index a4cca4df8e..0000000000 --- a/gnc/CMakeLists.txt +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -add_subdirectory(ctl) -add_subdirectory(ekf) -add_subdirectory(fam) -add_subdirectory(gnc_autocode) -add_subdirectory(sim_wrapper) diff --git a/gnc/ctl/CMakeLists.txt b/gnc/ctl/CMakeLists.txt index cd8dca1eff..b0e43276e2 100644 --- a/gnc/ctl/CMakeLists.txt +++ b/gnc/ctl/CMakeLists.txt @@ -15,18 +15,54 @@ # License for the specific language governing permissions and limitations # under the License. 
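The GNC packages converted below (`ctl`, then `ekf`, `fam`, `gnc_autocode`, and `sim_wrapper`) all follow the same recipe: `find_package(catkin2 ...)` for the component dependencies, an explicit `catkin_package()` export, a plain `add_library()` in place of the old `create_library()` macro, and explicit `install()` rules for the library, `nodelet_plugins.xml`, headers, and launch files. Boiled down to one hypothetical nodelet package (`example_nodelet_pkg` and its source files are illustrative), the skeleton is:

```
cmake_minimum_required(VERSION 3.0)
project(example_nodelet_pkg)

## Compile as C++14, supported in ROS Kinetic and newer
add_compile_options(-std=c++14)

find_package(catkin2 REQUIRED COMPONENTS roscpp nodelet ff_util config_reader)

catkin_package(
  LIBRARIES example_nodelet_pkg
  CATKIN_DEPENDS roscpp nodelet ff_util config_reader
)

include_directories(include ${catkin_INCLUDE_DIRS})

# One library holding both the implementation and its nodelet wrapper.
add_library(example_nodelet_pkg src/example.cc src/example_nodelet.cc)
add_dependencies(example_nodelet_pkg ${catkin_EXPORTED_TARGETS})
target_link_libraries(example_nodelet_pkg ${catkin_LIBRARIES})

install(TARGETS example_nodelet_pkg
  ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
  LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
  RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION})
install(FILES nodelet_plugins.xml
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
install(DIRECTORY launch/
  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch
  PATTERN ".svn" EXCLUDE)
```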
+cmake_minimum_required(VERSION 3.0) project(ctl) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_msgs + ff_util + config_reader + gnc_autocode +) +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + catkin_package( - LIBRARIES ctl - CATKIN_DEPENDS roscpp sensor_msgs nav_msgs - DEPENDS ff_msgs ff_hw_msgs ff_common + LIBRARIES + ctl + CATKIN_DEPENDS + roscpp + nodelet + ff_msgs + ff_util + config_reader + gnc_autocode ) -create_library(TARGET ctl - LIBS ${catkin_LIBRARIES} ${EIGEN_LIBRARIES} gnc_autocode msg_conversions ff_common config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs ff_hw_msgs) +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(ctl + src/ctl.cc + src/ctl_nodelet.cc +) +add_dependencies(ctl ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ctl ${catkin_LIBRARIES}) if(CATKIN_ENABLE_TESTING AND ENABLE_INTEGRATION_TESTING) find_package(rostest REQUIRED) @@ -38,4 +74,32 @@ if(CATKIN_ENABLE_TESTING AND ENABLE_INTEGRATION_TESTING) ) endif() -install_launch_files() + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/gnc/ctl/include/ctl/ctl.h b/gnc/ctl/include/ctl/ctl.h index 3e1554dffb..1503009cec 100644 --- a/gnc/ctl/include/ctl/ctl.h +++ b/gnc/ctl/include/ctl/ctl.h @@ -184,6 +184,7 @@ class Ctl { std::string name_; bool inertia_received_; bool control_enabled_; + bool flight_enabled_; bool use_truth_; float stopping_vel_thresh_squared_; float stopping_omega_thresh_squared_; diff --git a/gnc/ctl/package.xml b/gnc/ctl/package.xml index 0d84445096..6a4eade279 100644 --- a/gnc/ctl/package.xml +++ b/gnc/ctl/package.xml @@ -16,18 +16,18 @@ catkin roscpp - sensor_msgs - vpp_msgs - nav_msgs - ff_msgs nodelet + ff_msgs + ff_util + config_reader + gnc_autocode roscpp - rt_analyze - sensor_msgs - vpp_msgs - nav_msgs - ff_msgs nodelet + ff_msgs + ff_util + ff_msgs + config_reader + gnc_autocode diff --git a/gnc/ctl/src/ctl.cc b/gnc/ctl/src/ctl.cc index 5c25f7bd99..4191f16bcb 100644 --- a/gnc/ctl/src/ctl.cc +++ b/gnc/ctl/src/ctl.cc @@ -42,7 +42,7 @@ namespace ctl { // Nodehandle for /gnc/wrapper Ctl::Ctl(ros::NodeHandle* nh, std::string const& name) : fsm_(WAITING, std::bind(&Ctl::UpdateCallback, this, std::placeholders::_1, std::placeholders::_2)), - name_(name), inertia_received_(false), control_enabled_(false) { + name_(name), inertia_received_(false), control_enabled_(true), flight_enabled_(false) { // Add the state transition lambda functions - refer to the FSM diagram // [0] fsm_.Add(WAITING, 
@@ -332,7 +332,7 @@ void Ctl::FlightModeCallback(const ff_msgs::FlightMode::ConstPtr& mode) { mc::ros_to_array_vector(mode->pos_ki, input.pos_ki); mc::ros_to_array_vector(mode->vel_kd, input.vel_kd); input.speed_gain_cmd = mode->speed; - control_enabled_ = + flight_enabled_ = (input.speed_gain_cmd > 0 ? mode->control_enabled : false); } @@ -510,7 +510,7 @@ bool Ctl::Step(void) { NODELET_DEBUG_STREAM_THROTTLE(10, "GNC step waiting for inertia"); return false; } - if (!control_enabled_) { + if (!flight_enabled_ || !control_enabled_) { NODELET_DEBUG_STREAM_THROTTLE(10, "GNC control disabled in flight mode"); return false; } diff --git a/gnc/ekf/CMakeLists.txt b/gnc/ekf/CMakeLists.txt index 1a3769a6ea..5a74453bb7 100644 --- a/gnc/ekf/CMakeLists.txt +++ b/gnc/ekf/CMakeLists.txt @@ -15,17 +15,71 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(ekf) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + ff_util + config_reader + gnc_autocode + camera +) + catkin_package( + INCLUDE_DIRS include LIBRARIES ekf - CATKIN_DEPENDS roscpp sensor_msgs nav_msgs - DEPENDS ff_msgs ff_hw_msgs ff_common + CATKIN_DEPENDS roscpp ff_msgs ff_util config_reader gnc_autocode camera +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(ekf + src/ekf.cc + src/ekf_nodelet.cc + src/ekf_wrapper.cc +) +add_dependencies(ekf ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ekf ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_library(TARGET ekf - LIBS ${catkin_LIBRARIES} ${EIGEN_LIBRARIES} gnc_autocode msg_conversions camera ff_common config_reader ff_nodelet ff_flight - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs ff_hw_msgs) +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/gnc/ekf/package.xml b/gnc/ekf/package.xml index f65c8d463f..0062940cee 100644 --- a/gnc/ekf/package.xml +++ b/gnc/ekf/package.xml @@ -16,18 +16,18 @@ catkin roscpp - sensor_msgs - vpp_msgs - nav_msgs ff_msgs - nodelet + ff_util + config_reader + gnc_autocode + camera roscpp - rt_analyze - sensor_msgs - vpp_msgs - nav_msgs ff_msgs - nodelet + ff_util + ff_msgs + config_reader + gnc_autocode + camera diff --git a/gnc/fam/CMakeLists.txt b/gnc/fam/CMakeLists.txt index b51661889c..0c3ee5e304 100644 --- a/gnc/fam/CMakeLists.txt +++ b/gnc/fam/CMakeLists.txt @@ -15,17 +15,77 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(fam) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_msgs + ff_hw_msgs + ff_util + config_reader + gnc_autocode +) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + catkin_package( - LIBRARIES fam - CATKIN_DEPENDS roscpp sensor_msgs nav_msgs - DEPENDS ff_msgs ff_hw_msgs ff_common + LIBRARIES + fam + CATKIN_DEPENDS + roscpp + nodelet + ff_msgs + ff_hw_msgs + ff_util + config_reader + gnc_autocode ) -create_library(TARGET fam - LIBS ${catkin_LIBRARIES} ${EIGEN_LIBRARIES} gnc_autocode msg_conversions ff_common config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs ff_hw_msgs) +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(fam + src/fam.cc + src/fam_nodelet.cc +) +add_dependencies(fam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(fam ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + -install_launch_files() diff --git a/gnc/fam/package.xml b/gnc/fam/package.xml index c556cf684f..1a4392849d 100644 --- a/gnc/fam/package.xml +++ b/gnc/fam/package.xml @@ -16,18 +16,20 @@ catkin roscpp - sensor_msgs - vpp_msgs - nav_msgs - ff_msgs nodelet + ff_msgs + ff_hw_msgs + ff_util + config_reader + gnc_autocode roscpp - rt_analyze - sensor_msgs - vpp_msgs - nav_msgs - ff_msgs nodelet + ff_msgs + ff_util + ff_msgs + ff_hw_msgs + config_reader + gnc_autocode diff --git a/gnc/gnc_autocode/CMakeLists.txt b/gnc/gnc_autocode/CMakeLists.txt index a43881d83a..b06e34b52e 100644 --- a/gnc/gnc_autocode/CMakeLists.txt +++ b/gnc/gnc_autocode/CMakeLists.txt @@ -15,12 +15,25 @@ # License for the specific language governing permissions and limitations # under the License. 
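`gnc_autocode` below is the one GNC package that must also export the Simulink-generated include directories; it does this by listing them under `catkin_package(INCLUDE_DIRS ...)`, so downstream packages (`ctl`, `ekf`, `fam`, `sim_wrapper`) see the generated headers through `${catkin_INCLUDE_DIRS}` instead of locating the auto-generated tree themselves. A stripped-down sketch of that export, with a placeholder generated-code path and illustrative target/source names:

```
cmake_minimum_required(VERSION 3.0)
project(example_autocode)

find_package(catkin2 REQUIRED COMPONENTS roscpp)

# Placeholder for an externally generated source/header tree.
set(GEN_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../matlab/code_generation)

catkin_package(
  # Exporting the generated directory here is what lets dependent
  # packages pick these headers up via ${catkin_INCLUDE_DIRS}.
  INCLUDE_DIRS include ${GEN_DIR}
  LIBRARIES example_autocode
  CATKIN_DEPENDS roscpp
)

include_directories(include ${catkin_INCLUDE_DIRS} ${GEN_DIR})

# Wrap the generated sources in a normal catkin library target.
file(GLOB GEN_SOURCES ${GEN_DIR}/*.c)
add_library(example_autocode src/wrapper.cc ${GEN_SOURCES})
add_dependencies(example_autocode ${catkin_EXPORTED_TARGETS})
target_link_libraries(example_autocode ${catkin_LIBRARIES})
```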
+cmake_minimum_required(VERSION 3.0) project(gnc_autocode) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_util + config_reader + msg_conversions +) + # find gnc directories find_path ( GNC_ROOT_DIR ctl_controller0_ert_rtw/ctl_controller0.h - PATHS ${CMAKE_SOURCE_DIR}/gnc/matlab/code_generation - HINTS ${CMAKE_SOURCE_DIR}/gnc/matlab/code_generation + PATHS ${CMAKE_SOURCE_DIR}/../matlab/code_generation + HINTS ${CMAKE_SOURCE_DIR}/../matlab/code_generation NO_CMAKE_FIND_ROOT_PATH ) @@ -34,7 +47,7 @@ if (GNC_ROOT_DIR) set (GNC_BL1_DIR ${GNC_ROOT_DIR}/bpm_blower_1_propulsion_module_ert_rtw) set (GNC_BL2_DIR ${GNC_ROOT_DIR}/bpm_blower_2_propulsion_module_ert_rtw) set (GNC_UTIL_DIR ${GNC_ROOT_DIR}/sharedutils) - set (GNC_SPD_DIR ${GNC_ROOT_DIR}/esc_electronic_speed_controller0_ert_rtw) + #set (GNC_SPD_DIR ${GNC_ROOT_DIR}/esc_electronic_speed_controller0_ert_rtw) set (GNC_CXX_DIR ${GNC_ROOT_DIR}/../cxx_functions) set (GNC_INCLUDE_DIRS ${GNC_ADDITIONAL_DIR} ${GNC_CTL_DIR} ${GNC_EST_DIR} ${GNC_FAM_DIR}) else (GNC_ROOT_DIR) @@ -91,9 +104,57 @@ STRING(REGEX REPLACE "-Werror[^ ]*" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w -DPORTABLE_WORDSIZES -ffast-math -funsafe-math-optimizations") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w -DPORTABLE_WORDSIZES -ffast-math -funsafe-math-optimizations") -create_library(TARGET gnc_autocode - LIBS ${EIGEN_LIBRARIES} ff_nodelet config_reader msg_conversions - INC ${GNC_INCLUDES} ${EIGEN_INCLUDE_DIRS} - ADD_SRCS ${GNC_SOURCES} +catkin_package( + INCLUDE_DIRS include ${GNC_INCLUDES} + LIBRARIES gnc_autocode + CATKIN_DEPENDS + roscpp + ff_util + config_reader + msg_conversions +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${GNC_INCLUDES} ) +# Declare C++ libraries +add_library(gnc_autocode + src/autocode.cc + src/constants.cc + src/ekf.cc + src/fam.cc + src/sim_csv.cc + src/blowers.cc + src/ctl.cc + src/ekf_csv.cc + src/sim.cc + ${GNC_SOURCES} +) +add_dependencies(gnc_autocode ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gnc_autocode ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/tools/simulator/dock_sim/package.xml b/gnc/gnc_autocode/package.xml similarity index 53% rename from tools/simulator/dock_sim/package.xml rename to gnc/gnc_autocode/package.xml index 5f5f5aa010..dfc9e816dd 100644 --- a/tools/simulator/dock_sim/package.xml +++ b/gnc/gnc_autocode/package.xml @@ -1,12 +1,12 @@ - dock_sim + gnc_autocode 0.0.0 - - The dock simulator simulates the dock and undock action. + Calls the GNC simulink auto-generated EKF code, and passes + inputs and outputs to and from ros messages. 
- Apache License, Version 2.0 + Apache License, Version 2.0 Astrobee Flight Software @@ -16,11 +16,13 @@ catkin roscpp - ff_msgs - nodelet - ff_msgs + ff_util + config_reader + msg_conversions roscpp - nodelet + ff_util + config_reader + msg_conversions diff --git a/gnc/sim_wrapper/CMakeLists.txt b/gnc/sim_wrapper/CMakeLists.txt index d4fb7d17ca..7bdc66b067 100644 --- a/gnc/sim_wrapper/CMakeLists.txt +++ b/gnc/sim_wrapper/CMakeLists.txt @@ -15,21 +15,96 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(sim_wrapper) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + ff_hw_msgs + ff_util + config_reader + gnc_autocode + camera + sparse_mapping +) + catkin_package( - LIBRARIES sim_wrapper - CATKIN_DEPENDS roscpp geometry_msgs ff_util nodelet pluginlib + LIBRARIES + sim_wrapper + CATKIN_DEPENDS + roscpp + ff_msgs + ff_hw_msgs + ff_util + config_reader + gnc_autocode + camera + sparse_mapping +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET sim_wrapper - LIBS ${catkin_LIBRARIES} gnc_autocode sparse_mapping msg_conversions camera ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +# Declare C++ libraries +add_library(sim_wrapper + src/sim.cc + src/sim_nodelet.cc ) +add_dependencies(sim_wrapper ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sim_wrapper ${catkin_LIBRARIES}) -create_tool_targets(DIR tools - LIBS sim_wrapper +## Declare a C++ executable: sim_node +add_executable(sim_node tools/sim_node.cc) +add_dependencies(sim_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sim_node + sim_wrapper gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: sim_node +add_executable(test_sim tools/test_sim.cc) +add_dependencies(test_sim ${catkin_EXPORTED_TARGETS}) +target_link_libraries(test_sim + sim_wrapper ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() \ No newline at end of file +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS sim_node DESTINATION bin) +install(TARGETS test_sim DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/sim_node share/${PROJECT_NAME} + COMMAND ln -s ../../bin/test_sim share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/gnc/sim_wrapper/package.xml b/gnc/sim_wrapper/package.xml index f3c0e624ce..3eded9c9ed 100644 --- a/gnc/sim_wrapper/package.xml +++ b/gnc/sim_wrapper/package.xml @@ -15,20 +15,22 @@ Astrobee Flight Software catkin - nodelet - pluginlib + roscpp ff_msgs + ff_hw_msgs ff_util - roscpp - sensor_msgs - geometry_msgs + config_reader + gnc_autocode + camera + sparse_mapping roscpp - nodelet - pluginlib ff_msgs + ff_hw_msgs ff_util - sensor_msgs - geometry_msgs 
+ config_reader + gnc_autocode + camera + sparse_mapping diff --git a/hardware/eps_driver/CMakeLists.txt b/hardware/eps_driver/CMakeLists.txt index 74baa86642..ad23c586b0 100644 --- a/hardware/eps_driver/CMakeLists.txt +++ b/hardware/eps_driver/CMakeLists.txt @@ -15,9 +15,24 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(eps_driver) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + ff_hw_msgs + i2c +) + catkin_package( + INCLUDE_DIRS include LIBRARIES eps_driver CATKIN_DEPENDS @@ -26,41 +41,90 @@ catkin_package( pluginlib ff_util ff_hw_msgs + i2c ) -# ROS agnostic proxy library and tool +########### +## Build ## +########### -create_library( - DIR src/eps_driver - TARGET eps_driver - LIBS i2c +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR tools/eps_driver_tool - LIBS eps_driver ff_common +# Declare C++ libraries +add_library(eps_driver + src/eps_driver/eps_driver.cc ) +add_dependencies(eps_driver ${catkin_EXPORTED_TARGETS}) +target_link_libraries(eps_driver ${catkin_LIBRARIES}) -# ROS node -create_library( - DIR src/eps_driver_node - TARGET eps_driver_node - LIBS ${catkin_LIBRARIES} eps_driver config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +add_library(eps_driver_node + src/eps_driver_node/eps_driver_node.cc ) +add_dependencies(eps_driver_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(eps_driver_node eps_driver ${catkin_LIBRARIES}) + +## Declare a C++ executable: eps_driver_tool +add_executable(eps_driver_tool tools/eps_driver_tool/eps_driver_tool.cc) +add_dependencies(eps_driver_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(eps_driver_tool + eps_driver gflags ${catkin_LIBRARIES}) -# Simulator +## Declare a C++ executable: eps_simulator +add_executable(eps_simulator tools/eps_simulator/eps_simulator.cc) +add_dependencies(eps_simulator ${catkin_EXPORTED_TARGETS}) +target_link_libraries(eps_simulator + eps_driver gflags ${catkin_LIBRARIES}) -create_tool_targets( - DIR tools/eps_simulator - LIBS ${catkin_LIBRARIES} eps_driver ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_node + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Install C++ executables +install(TARGETS eps_driver_tool DESTINATION bin) +install(TARGETS eps_simulator DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/eps_driver_tool share/${PROJECT_NAME} + COMMAND ln -s ../../bin/eps_simulator share/${PROJECT_NAME} + WORKING_DIRECTORY 
${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + install(FILES tools/eps_aux.py DESTINATION share/${PROJECT_NAME}) diff --git a/hardware/eps_driver/package.xml b/hardware/eps_driver/package.xml index f1c49b11d1..1a809c5486 100644 --- a/hardware/eps_driver/package.xml +++ b/hardware/eps_driver/package.xml @@ -18,11 +18,13 @@ pluginlib ff_util ff_hw_msgs + i2c roscpp nodelet pluginlib ff_util ff_hw_msgs + i2c diff --git a/hardware/epson_imu/CMakeLists.txt b/hardware/epson_imu/CMakeLists.txt index 65c492fb90..635c1137e4 100644 --- a/hardware/epson_imu/CMakeLists.txt +++ b/hardware/epson_imu/CMakeLists.txt @@ -15,19 +15,72 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(epson_imu) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + rospy + nodelet + sensor_msgs + ff_util +) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-result") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-result") catkin_package( LIBRARIES epson_imu - CATKIN_DEPENDS roscpp rospy sensor_msgs + CATKIN_DEPENDS roscpp rospy nodelet sensor_msgs ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(epson_imu + src/epson_imu_nodelet.cc + src/G362P.cc + src/GPIO.cc +) +add_dependencies(epson_imu ${catkin_EXPORTED_TARGETS}) +target_link_libraries(epson_imu ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -create_library(TARGET epson_imu - LIBS ${catkin_LIBRARIES} ff_nodelet config_reader - INC ${catkin_INCLUDE_DIRS} +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/epson_imu/package.xml b/hardware/epson_imu/package.xml index 2c6eab0869..9e9928e77e 100644 --- a/hardware/epson_imu/package.xml +++ b/hardware/epson_imu/package.xml @@ -15,12 +15,14 @@ catkin roscpp rospy - sensor_msgs nodelet + sensor_msgs + ff_util roscpp rospy - sensor_msgs nodelet + sensor_msgs + ff_util diff --git a/hardware/fam_cmd_i2c/CMakeLists.txt b/hardware/fam_cmd_i2c/CMakeLists.txt index 60884dc8ad..0cbdf2941c 100644 --- a/hardware/fam_cmd_i2c/CMakeLists.txt +++ b/hardware/fam_cmd_i2c/CMakeLists.txt @@ -15,16 +15,64 @@ # License for the specific language governing permissions and limitations # under the License. 
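eps_driver above, and most tool-carrying packages after it, repeat the same two-step install for command-line tools: the binary goes to bin/ and a relative symlink is dropped into the package share/ directory so the tool can be located through the package. If that boilerplate were ever factored out, a helper along these lines would cover it (install_tool_symlink is a hypothetical name; nothing in this PR defines it):

    # Hypothetical helper mirroring the install(TARGETS ...) plus
    # install(CODE "execute_process(ln -s ...)") blocks written out by hand throughout this diff.
    function(install_tool_symlink tool_target)
      install(TARGETS ${tool_target} DESTINATION bin)
      install(CODE "execute_process(
        COMMAND ln -s ../../bin/${tool_target} share/${PROJECT_NAME}
        WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}
        OUTPUT_QUIET
        ERROR_QUIET
        )")
    endfunction()

    # Usage, e.g.: install_tool_symlink(eps_driver_tool)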
+cmake_minimum_required(VERSION 3.0) project(fam_cmd_i2c) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + rospy + ff_msgs + std_msgs + i2c +) + catkin_package( - CATKIN_DEPENDS roscpp rospy ff_msgs std_msgs + CATKIN_DEPENDS roscpp rospy ff_msgs std_msgs i2c ) -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} i2c - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs std_msgs +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -install_launch_files() +## Declare a C++ executable: inspection_tool +add_executable(fam_cmd_gen tools/fam_cmd_gen.cc) +add_dependencies(fam_cmd_gen ${catkin_EXPORTED_TARGETS}) +target_link_libraries(fam_cmd_gen + ${catkin_LIBRARIES}) + +## Declare a C++ executable: inspection_tool +add_executable(fam_cmd_i2c_node tools/fam_cmd_i2c_node.cc) +add_dependencies(fam_cmd_i2c_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(fam_cmd_i2c_node + ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Install C++ executables +install(TARGETS fam_cmd_gen DESTINATION bin) +install(TARGETS fam_cmd_i2c_node DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/fam_cmd_gen share/${PROJECT_NAME} + COMMAND ln -s ../../bin/fam_cmd_i2c_node share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/hardware/fam_cmd_i2c/package.xml b/hardware/fam_cmd_i2c/package.xml index c9c89e41e3..ac3ebc363a 100644 --- a/hardware/fam_cmd_i2c/package.xml +++ b/hardware/fam_cmd_i2c/package.xml @@ -18,9 +18,11 @@ rospy ff_msgs std_msgs + i2c roscpp rospy ff_msgs std_msgs + i2c diff --git a/hardware/ff_serial/CMakeLists.txt b/hardware/ff_serial/CMakeLists.txt index 58d30a1be4..87355d7a07 100644 --- a/hardware/ff_serial/CMakeLists.txt +++ b/hardware/ff_serial/CMakeLists.txt @@ -15,10 +15,58 @@ # License for the specific language governing permissions and limitations # under the License. 
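fam_cmd_i2c, like every other package in this diff, replaces the old install_launch_files() call with an explicit directory install. Judging only by what the rewritten files now contain, the macro reduced to roughly the following (an inference from the diff, not the macro's actual definition):

    # What install_launch_files() appears to have expanded to:
    install(DIRECTORY launch/
      DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch
      PATTERN ".svn" EXCLUDE)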
+cmake_minimum_required(VERSION 3.0) project(ff_serial) -create_library( - TARGET ff_serial - DIR src - LIBS ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY} +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +find_package(catkin REQUIRED COMPONENTS +) + +catkin_package( + LIBRARIES ff_serial + INCLUDE_DIRS include +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(ff_serial + src/serial.cc +) +add_dependencies(ff_serial ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ff_serial ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) diff --git a/hardware/flashlight/CMakeLists.txt b/hardware/flashlight/CMakeLists.txt index 2b70e3bedf..868a45dcd2 100644 --- a/hardware/flashlight/CMakeLists.txt +++ b/hardware/flashlight/CMakeLists.txt @@ -15,34 +15,104 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(flashlight) -# Core library -create_library(TARGET flashlight - INC ${GLOG_INCLUDE_DIRS} - LIBS ${GLOG_LIBRARIES} i2c +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + roslib + nodelet + ff_hw_msgs + i2c + ff_util + config_reader +) + +catkin_package( + LIBRARIES + flashlight + CATKIN_DEPENDS + roscpp + roslib + nodelet + ff_hw_msgs + i2c + ff_util + config_reader ) -# Standalone test programs -create_tool_targets(DIR tools - INC ${GFLAGS_INCLUDE_DIRS} - LIBS ${GFLAGS_LIBRARIES} flashlight ff_common +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -# Create ROS nodes if we are using ROS -if(USE_ROS) - catkin_package( - LIBRARIES flashlight - CATKIN_DEPENDS roscpp roslib nodelet - DEPENDS ff_hw_msgs + # Declare C++ librarie - Core library + add_library(flashlight + src/flashlight.cc ) + add_dependencies(flashlight ${catkin_EXPORTED_TARGETS}) + target_link_libraries(flashlight glog ${catkin_LIBRARIES}) - create_library(TARGET flashlight_nodelet - DIR src/ros - LIBS ${roscpp_LIBRARIES} ${nodelet_LIBRARIES} flashlight config_reader - ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs + # Declare C++ librarie + add_library(flashlight_nodelet + src/ros/flashlight_nodelet.cc ) - install_launch_files() -endif(USE_ROS) + add_dependencies(flashlight_nodelet ${catkin_EXPORTED_TARGETS}) + target_link_libraries(flashlight_nodelet flashlight ${catkin_LIBRARIES}) + +## Declare a C++ executable: flashlight_test +add_executable(flashlight_test tools/flashlight_test.cc) +add_dependencies(flashlight_test ${catkin_EXPORTED_TARGETS}) +target_link_libraries(flashlight_test 
+ flashlight gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Install C++ executables +install(TARGETS flashlight_test DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/flashlight_test share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/flashlight/package.xml b/hardware/flashlight/package.xml index 035c062fcc..d2756af322 100644 --- a/hardware/flashlight/package.xml +++ b/hardware/flashlight/package.xml @@ -18,11 +18,16 @@ roslib nodelet ff_hw_msgs + i2c + ff_util + config_reader roscpp + roslib nodelet ff_hw_msgs - roslib - message_runtime + i2c + ff_util + config_reader diff --git a/hardware/flycapture/CMakeLists.txt b/hardware/flycapture/CMakeLists.txt index 71b7762520..c558af0e75 100644 --- a/hardware/flycapture/CMakeLists.txt +++ b/hardware/flycapture/CMakeLists.txt @@ -15,15 +15,22 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(flycapture) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + # Need to think of a better way to get flycapture find_path(FLYCAPTURE_INCLUDE_DIR flycapture/FlyCapture2Defs.h HINTS ${FLYCAPTURE_ROOT_DIR}/include /usr) find_library(FLYCAPTURE_LIBRARY NAMES flycapture libflycapture HINTS /usr/lib ${FLYCAPTURE_ROOT_DIR}/lib) -find_package_handle_standard_args(FLYCAPTURE DEFAULT_MSG - FLYCAPTURE_INCLUDE_DIR FLYCAPTURE_LIBRARY) +# find_package_handle_standard_args(FLYCAPTURE DEFAULT_MSG +# FLYCAPTURE_INCLUDE_DIR FLYCAPTURE_LIBRARY) + +find_package(catkin REQUIRED COMPONENTS +) catkin_package( LIBRARIES flycapture @@ -32,10 +39,11 @@ catkin_package( if (FLYCAPTURE_FOUND) - create_tool_targets(DIR tools - LIBS ${FLYCAPTURE_LIBRARY} ${catkin_LIBRARIES} ff_nodelet - INC ${FLYCAPTURE_INCLUDE_DIR} ${catkin_INCLUDE_DIRS} - ) + ## Declare a C++ executable: fly_node + add_executable(fly_node tools/fly_node.cc) + add_dependencies(fly_node ${catkin_EXPORTED_TARGETS}) + target_link_libraries(fly_node + gflags ${catkin_LIBRARIES}) else() message(STATUS "Not building fly_node") endif() diff --git a/hardware/gpio/CMakeLists.txt b/hardware/gpio/CMakeLists.txt index 7fb5761e9c..78cc53f436 100644 --- a/hardware/gpio/CMakeLists.txt +++ b/hardware/gpio/CMakeLists.txt @@ -15,4 +15,44 @@ # License for the specific language governing permissions and limitations # under the License. 
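flycapture above keeps its tool optional by probing for the vendor SDK directly instead of through a catkin component. A stripped-down sketch of that guard (SOME_SDK is a placeholder, not a real dependency of this repository); the sketch tests the two cache variables directly, since a *_FOUND variable would only be set by a find_package_handle_standard_args call like the one this diff comments out in the flycapture file:

    # Locate a vendor SDK that may or may not be installed on the build machine.
    find_path(SOME_SDK_INCLUDE_DIR some_sdk/api.h
              HINTS ${SOME_SDK_ROOT_DIR}/include /usr)
    find_library(SOME_SDK_LIBRARY NAMES some_sdk
                 HINTS ${SOME_SDK_ROOT_DIR}/lib /usr/lib)

    if (SOME_SDK_INCLUDE_DIR AND SOME_SDK_LIBRARY)
      add_executable(some_sdk_node tools/some_sdk_node.cc)
      target_include_directories(some_sdk_node PRIVATE ${SOME_SDK_INCLUDE_DIR})
      target_link_libraries(some_sdk_node ${SOME_SDK_LIBRARY} ${catkin_LIBRARIES})
    else()
      message(STATUS "SOME_SDK not found; skipping some_sdk_node")
    endif()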
-create_library(TARGET gpio) +cmake_minimum_required(VERSION 3.0) +project(gpio) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +find_package(catkin REQUIRED COMPONENTS +) + +catkin_package( + LIBRARIES + gpio +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(gpio + src/gpio.cc +) +add_dependencies(gpio ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gpio ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) \ No newline at end of file diff --git a/hardware/i2c/CMakeLists.txt b/hardware/i2c/CMakeLists.txt index 3f73f4b024..1ba70cf931 100644 --- a/hardware/i2c/CMakeLists.txt +++ b/hardware/i2c/CMakeLists.txt @@ -15,8 +15,52 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(i2c) -create_library(TARGET i2c - LIBS ${GLOG_LIBRARIES} - INC ${GLOG_INCLUDE_DIRS}) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +find_package(catkin REQUIRED COMPONENTS +) + +catkin_package( + LIBRARIES i2c + INCLUDE_DIRS include +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(i2c + src/i2c.cc + src/i2c_new.cc +) +add_dependencies(i2c ${catkin_EXPORTED_TARGETS}) +target_link_libraries(i2c glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/hardware/i2c/package.xml b/hardware/i2c/package.xml index 0dcc2828d7..00136cafd4 100644 --- a/hardware/i2c/package.xml +++ b/hardware/i2c/package.xml @@ -15,7 +15,5 @@ Astrobee Flight Software catkin - roscpp - roscpp diff --git a/hardware/is_camera/CMakeLists.txt b/hardware/is_camera/CMakeLists.txt index 32b6d3c1c9..b3e399b4fe 100644 --- a/hardware/is_camera/CMakeLists.txt +++ b/hardware/is_camera/CMakeLists.txt @@ -15,8 +15,21 @@ # License for the specific language governing permissions and limitations # under the License. 
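The i2c and gpio rewrites above turn what used to be bare create_library targets into self-contained catkin packages, which is what lets the hardware drivers later in this diff (eps_driver, flashlight, laser, pmc_actuator, temp_monitor) pull them in by component name alone. Roughly, the two sides of that handshake look like this; the names are the real ones from this diff, and the snippet is a sketch of the flow rather than new build logic:

    # Exporter side (hardware/i2c): advertise the headers and the library to catkin.
    catkin_package(
      LIBRARIES i2c
      INCLUDE_DIRS include
    )

    # Consumer side (e.g. hardware/eps_driver): list i2c as a component and rely on
    # the aggregated catkin variables; no manual include or link paths are needed.
    find_package(catkin2 REQUIRED COMPONENTS roscpp i2c)
    include_directories(include ${catkin_INCLUDE_DIRS})
    add_library(eps_driver src/eps_driver/eps_driver.cc)
    target_link_libraries(eps_driver ${catkin_LIBRARIES})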
+cmake_minimum_required(VERSION 3.0) project(is_camera) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + image_transport + nodelet + ff_util + config_reader +) + # Need to find libv4l2 find_path(V4L2_INCLUDE_DIR libv4l2.h linux/videodev2.h HINTS /usr ${V4L2_ROOT_DIR}) @@ -25,22 +38,56 @@ find_library(V4L2_LIBRARY NAMES libv4l2 v4l2 find_package_handle_standard_args(V4L2 DEFAULT_MSG V4L2_INCLUDE_DIR V4L2_LIBRARY) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + catkin_package( LIBRARIES is_camera - CATKIN_DEPENDS roscpp image_transport nodelet + CATKIN_DEPENDS + roscpp + image_transport + nodelet + ff_util + config_reader ) -create_library(TARGET is_camera - LIBS ${catkin_LIBRARIES} ${V4L2_LIBRARY} config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${V4L2_INCLUDE_DIR} - DEPS ff_msgs) +########### +## Build ## +########### -create_tool_targets(DIR tools - LIBS is_camera - ) +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(is_camera + src/camera.cc +) +add_dependencies(is_camera ${catkin_EXPORTED_TARGETS}) +target_link_libraries(is_camera ${V4L2_LIBRARY} ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) -# I'm not sure who installed package.xml for us, but we should do the same for -# nodelet_plugins.xml -install(FILES nodelet_plugins.xml DESTINATION share/is_camera) +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/is_camera/package.xml b/hardware/is_camera/package.xml index ee9bbd1cf2..1b8858acbe 100644 --- a/hardware/is_camera/package.xml +++ b/hardware/is_camera/package.xml @@ -15,12 +15,16 @@ Astrobee Flight Software catkin + image_transport roscpp nodelet - image_transport + ff_util + config_reader roscpp - nodelet image_transport + nodelet + ff_util + config_reader diff --git a/hardware/laser/CMakeLists.txt b/hardware/laser/CMakeLists.txt index b064c43921..e8534aee9c 100644 --- a/hardware/laser/CMakeLists.txt +++ b/hardware/laser/CMakeLists.txt @@ -15,33 +15,102 @@ # License for the specific language governing permissions and limitations # under the License. 
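is_camera above picks up a pinned OpenCV through the repository's own find module by pointing CMAKE_MODULE_PATH at the in-tree cmake/ directory (the OpenCV331 module itself lives there and is not part of this diff). The pattern, reduced to its moving parts; OpenCV_LIBRARIES is an assumption about what that module sets, only OpenCV_INCLUDE_DIRS is visible in the diff:

    # Make the repository's custom Find*.cmake modules visible, then use one of them.
    set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake")
    find_package(OpenCV331 REQUIRED)

    include_directories(${OpenCV_INCLUDE_DIRS})
    # target_link_libraries(<target> ${OpenCV_LIBRARIES})  # assumed variable; add when a target links OpenCV

smart_dock later in this diff appends to CMAKE_MODULE_PATH with list(APPEND ...) instead of overwriting it, which is the friendlier choice when other modules may already be on the path.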
+cmake_minimum_required(VERSION 3.0) project(laser) -# Core library -create_library(TARGET laser - INC ${GLOG_INCLUDE_DIRS} - LIBS ${GLOG_LIBRARIES} i2c - DEPS i2c) - -# Standalone test programs -create_tool_targets(DIR tools - INC ${GFLAGS_INCLUDE_DIRS} - LIBS ${GFLAGS_LIBRARIES} laser ff_common - DEPS laser) - -# Create ROS nodes if we are using ROS -if(USE_ROS) - catkin_package( - LIBRARIES laser - CATKIN_DEPENDS roscpp roslib nodelet - DEPENDS ff_hw_msgs - ) - - create_library(TARGET laser_nodelet - DIR src/ros - LIBS ${catkin_LIBRARIES} laser config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs - ) - install_launch_files() -endif(USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + roslib + nodelet + cv_bridge + ff_util + ff_common + ff_hw_msgs + i2c + config_reader +) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES laser + CATKIN_DEPENDS + roscpp + roslib + nodelet + cv_bridge + ff_util + ff_common + ff_hw_msgs + i2c + config_reader +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(laser + src/laser.cc +) +add_dependencies(laser ${catkin_EXPORTED_TARGETS}) +target_link_libraries(laser ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +# Declare C++ libraries +add_library(laser_nodelet + src/ros/laser_nodelet.cc +) +add_dependencies(laser_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(laser_nodelet laser ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(laser_test tools/laser_test.cc) +add_dependencies(laser_test ${catkin_EXPORTED_TARGETS}) +target_link_libraries(laser_test + laser gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS laser_test DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/laser_test share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/laser/package.xml b/hardware/laser/package.xml index 653cf7196c..63332e9b2b 100644 --- a/hardware/laser/package.xml +++ b/hardware/laser/package.xml @@ -15,12 +15,21 @@ roscpp roslib nodelet + cv_bridge + ff_util + ff_common ff_hw_msgs + i2c + config_reader roscpp + roslib nodelet + cv_bridge + ff_util + ff_common ff_hw_msgs - roslib - message_runtime + i2c + config_reader diff --git a/hardware/perching_arm/CMakeLists.txt b/hardware/perching_arm/CMakeLists.txt index 2784bdf506..6f1e67d3e5 100644 --- a/hardware/perching_arm/CMakeLists.txt +++ 
b/hardware/perching_arm/CMakeLists.txt @@ -15,8 +15,22 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(perching_arm) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + ff_serial + ff_hw_msgs +) + catkin_package( LIBRARIES perching_arm @@ -25,21 +39,61 @@ catkin_package( nodelet pluginlib ff_util + ff_serial ff_hw_msgs - sensor_msgs ) -create_library( - TARGET perching_arm - LIBS ${catkin_LIBRARIES} ff_serial config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS perching_arm_gencpp +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ) -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} perching_arm ff_common - INC ${catkin_INCLUDE_DIRS} - DEPS ff_common +# Declare C++ libraries +add_library(perching_arm + src/perching_arm.cc + src/perching_arm_node.cc ) +add_dependencies(perching_arm ${catkin_EXPORTED_TARGETS}) +target_link_libraries(perching_arm ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(perching_arm_tool tools/perching_arm_tool.cc) +add_dependencies(perching_arm_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(perching_arm_tool + perching_arm gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS perching_arm_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/perching_arm_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/perching_arm/package.xml b/hardware/perching_arm/package.xml index 60601af3e8..921d8ba8e2 100644 --- a/hardware/perching_arm/package.xml +++ b/hardware/perching_arm/package.xml @@ -17,14 +17,14 @@ roscpp nodelet pluginlib - sensor_msgs ff_util + ff_serial ff_hw_msgs roscpp nodelet pluginlib - sensor_msgs ff_util + ff_serial ff_hw_msgs diff --git a/hardware/pico_driver/CMakeLists.txt b/hardware/pico_driver/CMakeLists.txt index ba5f87099d..18bdfc62cd 100644 --- a/hardware/pico_driver/CMakeLists.txt +++ b/hardware/pico_driver/CMakeLists.txt @@ -15,22 +15,64 @@ # License for the specific language governing permissions and limitations # under the License. 
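flashlight, laser and perching_arm above all follow the same nodelet recipe: compile the nodelet class into its own shared library, link it against the core driver library, and install nodelet_plugins.xml into the package share directory so that pluginlib can resolve the class at runtime. Condensed to its essentials, with foo standing in for the package at hand:

    # Nodelet class compiled into its own shared library.
    add_library(foo_nodelet src/ros/foo_nodelet.cc)
    add_dependencies(foo_nodelet ${catkin_EXPORTED_TARGETS})
    target_link_libraries(foo_nodelet foo ${catkin_LIBRARIES})

    # The plugin description file maps the nodelet class name to libfoo_nodelet;
    # installing it next to package.xml keeps the plugin export usable after install.
    install(FILES nodelet_plugins.xml
      DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
    )
    install(TARGETS foo_nodelet
      ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
      LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
      RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
    )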
+cmake_minimum_required(VERSION 3.0) project(pico_driver) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +if (ENABLE_PICOFLEXX) +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_msgs + ff_util + cv_bridge +) + +# System dependencies are found with CMake's conventions +find_package(royale REQUIRED) + catkin_package( LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS roscpp nodelet ff_msgs - DEPENDS ${PROJECT_NAME} + CATKIN_DEPENDS roscpp nodelet ff_msgs ff_util cv_bridge ) -link_directories(${royale_LIB_DIR}) +########### +## Build ## +########### -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${royale_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${royale_INCLUDE_DIRS} +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${royale_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} ) +# Declare C++ libraries +add_library(pico_driver + src/pico_driver.cc +) +add_dependencies(pico_driver ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pico_driver ${royale_LIBRARIES} ${EIGEN_LIBRARIES} ${catkin_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(pico_proxy tools/pico_proxy.cc) +add_dependencies(pico_proxy ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pico_proxy + pico_driver gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(pico_tool tools/pico_tool.cc) +add_dependencies(pico_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pico_tool + pico_driver gflags glog ${royale_LIBRARIES} ${catkin_LIBRARIES}) + if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) # Pico flexx initialization fault tester add_rostest_gtest(test_init_pico_flexx test/test_init_pico_flexx.test @@ -38,14 +80,43 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_pico_flexx - ${catkin_LIBRARIES} config_reader ff_nodelet + glog ${catkin_LIBRARIES} ) endif() -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} ${royale_LIBRARIES} uvc - INC ${catkin_INCLUDE_DIRS} ${royale_INCLUDE_DIRS} - DEPS ff_msgs opencv +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() +# Install C++ executables +install(TARGETS pico_proxy DESTINATION bin) +install(TARGETS pico_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/pico_proxy share/${PROJECT_NAME} + COMMAND ln -s ../../bin/pico_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + +else (ENABLE_PICOFLEXX) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() +endif (ENABLE_PICOFLEXX) diff --git a/hardware/pico_driver/package.xml b/hardware/pico_driver/package.xml index 00b2e7634a..d789d1c0cb 100644 --- a/hardware/pico_driver/package.xml +++ b/hardware/pico_driver/package.xml @@ -17,11 +17,15 @@ catkin roscpp nodelet - ff_msgs + ff_msgs + ff_util + cv_bridge roscpp nodelet tf2_ros ff_msgs + 
ff_util + cv_bridge diff --git a/hardware/pmc_actuator/CMakeLists.txt b/hardware/pmc_actuator/CMakeLists.txt index 823883697b..438e4e1ed4 100644 --- a/hardware/pmc_actuator/CMakeLists.txt +++ b/hardware/pmc_actuator/CMakeLists.txt @@ -15,8 +15,24 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(pmc_actuator) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + std_msgs + ff_msgs + ff_hw_msgs + i2c +) + catkin_package( LIBRARIES pmc_actuator @@ -28,36 +44,103 @@ catkin_package( std_msgs ff_msgs ff_hw_msgs + i2c ) -# ROS agnostic driver and tool +########### +## Build ## +########### -create_library( - DIR src/pmc_actuator - TARGET pmc_actuator - LIBS i2c +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ) -create_tool_targets(DIR tools/pmc_actuator_tool - LIBS pmc_actuator +# ROS agnostic driver and tool + +# Declare C++ libraries +add_library(pmc_actuator + src/pmc_actuator/pmc_actuator.cc ) +add_dependencies(pmc_actuator ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_actuator ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(pmc_actuator_tool tools/pmc_actuator_tool/pmc_actuator_tool.cc) +add_dependencies(pmc_actuator_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_actuator_tool + pmc_actuator gflags glog ${catkin_LIBRARIES}) # ROS nodelet and tools -create_library( - DIR src/pmc_actuator_nodelet - TARGET pmc_actuator_nodelet - LIBS ${catkin_LIBRARIES} pmc_actuator config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +# Declare C++ libraries +add_library(pmc_actuator_nodelet + src/pmc_actuator_nodelet/pmc_actuator_nodelet.cc +) +add_dependencies(pmc_actuator_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_actuator_nodelet pmc_actuator ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +## Declare a C++ executable: pmc_actuator_cmd_test +add_executable(pmc_actuator_cmd_test tools/pmc_actuator_cmd_test.cc) +add_dependencies(pmc_actuator_cmd_test ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_actuator_cmd_test + pmc_actuator gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: pmc_actuator_feeder +add_executable(pmc_actuator_feeder tools/pmc_actuator_feeder.cc) +add_dependencies(pmc_actuator_feeder ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_actuator_feeder + pmc_actuator gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: pmc_autotrim_node +add_executable(pmc_autotrim_node tools/pmc_autotrim_node.cc) +add_dependencies(pmc_autotrim_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pmc_autotrim_node + pmc_actuator gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} pmc_actuator ff_nodelet - INC 
${catkin_INCLUDE_DIRS} +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() +# Install C++ executables +install(TARGETS pmc_actuator_tool DESTINATION bin) +install(TARGETS pmc_actuator_cmd_test DESTINATION bin) +install(TARGETS pmc_actuator_feeder DESTINATION bin) +install(TARGETS pmc_autotrim_node DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/pmc_actuator_tool share/${PROJECT_NAME} + COMMAND ln -s ../../bin/pmc_actuator_cmd_test share/${PROJECT_NAME} + COMMAND ln -s ../../bin/pmc_actuator_feeder share/${PROJECT_NAME} + COMMAND ln -s ../../bin/pmc_autotrim_node share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + install(FILES tools/pmc_currents.py tools/pmc_status.py diff --git a/hardware/pmc_actuator/tools/pmc_actuator_tool/pmc_actuator_tool.cc b/hardware/pmc_actuator/tools/pmc_actuator_tool/pmc_actuator_tool.cc index 96576ddf0d..eb86a9dc9c 100644 --- a/hardware/pmc_actuator/tools/pmc_actuator_tool/pmc_actuator_tool.cc +++ b/hardware/pmc_actuator/tools/pmc_actuator_tool/pmc_actuator_tool.cc @@ -24,7 +24,7 @@ #include #include -// C++11 STL includes +// C++ STL includes #include #include #include diff --git a/hardware/signal_lights/CMakeLists.txt b/hardware/signal_lights/CMakeLists.txt index 7a6ba0ae3a..ce53bd52e6 100644 --- a/hardware/signal_lights/CMakeLists.txt +++ b/hardware/signal_lights/CMakeLists.txt @@ -15,8 +15,23 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(signal_lights) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + std_msgs + ff_msgs + ff_hw_msgs +) + catkin_package( LIBRARIES signal_lights @@ -30,32 +45,80 @@ catkin_package( ff_hw_msgs ) -# ROS agnostic driver and tool -create_library( - DIR src/custom_i2c - TARGET custom_i2c +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(custom_i2c + src/custom_i2c/i2c.cc ) +add_dependencies(custom_i2c ${catkin_EXPORTED_TARGETS}) +target_link_libraries(custom_i2c ${catkin_LIBRARIES}) -create_library( - DIR src/signal_lights - TARGET signal_lights - LIBS custom_i2c +# Declare C++ libraries +add_library(signal_lights + src/signal_lights/signal_lights.cc ) +add_dependencies(signal_lights ${catkin_EXPORTED_TARGETS}) +target_link_libraries(signal_lights custom_i2c ${catkin_LIBRARIES}) -create_tool_targets( - DIR tools - LIBS signal_lights ff_common custom_i2c +# Declare C++ libraries +add_library(signal_lights_nodelet + src/signal_lights_node/signal_lights_nodelet.cc ) +add_dependencies(signal_lights_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(signal_lights_nodelet signal_lights ${catkin_LIBRARIES}) -# ROS nodelet and tools +## Declare a C++ executable: inspection_tool +add_executable(signal_lights_tool tools/signal_lights_tool.cc) +add_dependencies(signal_lights_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(signal_lights_tool + signal_lights gflags ${catkin_LIBRARIES}) -create_library( - DIR 
src/signal_lights_node - TARGET signal_lights_nodelet - LIBS ${catkin_LIBRARIES} signal_lights config_reader ff_nodelet custom_i2c - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS custom_i2c + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS signal_lights + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS signal_lights_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS signal_lights_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/signal_lights_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/smart_dock/CMakeLists.txt b/hardware/smart_dock/CMakeLists.txt index 8a71ce8c23..33ab1059e3 100644 --- a/hardware/smart_dock/CMakeLists.txt +++ b/hardware/smart_dock/CMakeLists.txt @@ -15,42 +15,117 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(smart_dock) -set(DEPS - rapidExtAstrobee -) - -set(LIBS - Qt4::QtXml - rapidIo - rapidExtAstrobee - ff_common) - -set(INCLUDES - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${SORACORE_INCLUDE_DIRS} -) - -create_library(TARGET smart_dock - LIBS ${LIBS} i2c eps_driver - INC ${INCLUDES} - DEPS ${DEPS} -) - -create_tool_targets(DIR tools - LIBS ${LIBS} smart_dock config_reader - INC ${INCLUDES} - DEPS ${DEPS} -) - -# Determine our module name -get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) - -install(CODE "execute_process( - COMMAND mkdir -p share/${MODULE_NAME} - COMMAND ln -s ../../bin/dds_ros_bridge share/${MODULE_NAME} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} - OUTPUT_QUIET - ERROR_QUIET - )") +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +if (USE_DDS) + ## Find catkin macros and libraries + find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + i2c + eps_driver + dds_msgs + ) + + # System dependencies are found with CMake's conventions + find_package(Boost 1.54.0 QUIET REQUIRED COMPONENTS filesystem system iostreams thread program_options timer) + + list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../cmake") + + if (USE_CTC) + set(SORACORE_ROOT_DIR ${ARM_CHROOT_DIR}/usr) + else (USE_CTC) + set(SORACORE_ROOT_DIR /usr) + endif (USE_CTC) + + set(MIRO_ROOT_DIR ${SORACORE_ROOT_DIR}) + + SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + + # just calls the normal one + find_package(Qt4 4.6.0 REQUIRED QtXml) + find_package(Miro REQUIRED) + find_package(RtiDds REQUIRED) + find_package(Soracore REQUIRED) + catkin_package( + LIBRARIES + signal_lights + CATKIN_DEPENDS + roscpp + nodelet + i2c + eps_driver + dds_msgs + ) 
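pico_driver earlier and smart_dock here both gate the entire package on a feature flag and fall back to an empty catkin package when the flag is off, so a workspace without the optional dependency (the royale SDK in one case, the DDS stack in the other) still configures cleanly. The skeleton, with SOME_FLAG standing in for ENABLE_PICOFLEXX or USE_DDS:

    if (SOME_FLAG)
      find_package(catkin2 REQUIRED COMPONENTS roscpp nodelet)
      catkin_package(LIBRARIES ${PROJECT_NAME} CATKIN_DEPENDS roscpp nodelet)
      # ... normal build, test and install rules ...
    else (SOME_FLAG)
      # Stub registration: the package is known to catkin but builds nothing.
      find_package(catkin REQUIRED COMPONENTS)
      catkin_package()
    endif (SOME_FLAG)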
+ + ########### + ## Build ## + ########### + + # Specify additional locations of header files + include_directories( + include + ${catkin_INCLUDE_DIRS} + ${RTIDDS_INCLUDE_DIR} + ${SORACORE_INCLUDE_DIRS} + ${MIRO_INCLUDE_DIR} + ${QT_INCLUDE_DIR} + ${QT_INCLUDE_DIR}/Qt + ) + + + # Declare C++ libraries + add_library(smart_dock + src/smart_dock.cc + src/smart_dock_node.cc + ) + target_compile_definitions(smart_dock PUBLIC ${RTIDDS_DEFINE_FLAGS}) + add_dependencies(smart_dock ${catkin_EXPORTED_TARGETS}) + target_link_libraries(smart_dock rapidIo ${catkin_LIBRARIES}) + + ## Declare a C++ executable: smart_dock_service + add_executable(smart_dock_service tools/smart_dock_service.cc) + add_dependencies(smart_dock_service ${catkin_EXPORTED_TARGETS}) + target_link_libraries(smart_dock_service + smart_dock gflags ${catkin_LIBRARIES}) + + ## Declare a C++ executable: smart_dock_tool + add_executable(smart_dock_tool tools/smart_dock_tool.cc) + add_dependencies(smart_dock_tool ${catkin_EXPORTED_TARGETS}) + target_link_libraries(smart_dock_tool + smart_dock gflags ${catkin_LIBRARIES}) + + # Determine our module name + get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) + + ############# + ## Install ## + ############# + + # Mark libraries for installation + install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} + ) + + # Install C++ executables + install(TARGETS smart_dock_service DESTINATION bin) + install(TARGETS smart_dock_tool DESTINATION bin) + install(CODE "execute_process( + COMMAND ln -s ../../bin/smart_dock_service share/${PROJECT_NAME} + COMMAND ln -s ../../bin/smart_dock_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +else (USE_DDS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() +endif (USE_DDS) \ No newline at end of file diff --git a/hardware/smart_dock/include/smart_dock/smart_dock_node.h b/hardware/smart_dock/include/smart_dock/smart_dock_node.h index 4bdc4362c2..a04e0c31cc 100644 --- a/hardware/smart_dock/include/smart_dock/smart_dock_node.h +++ b/hardware/smart_dock/include/smart_dock/smart_dock_node.h @@ -28,11 +28,11 @@ #include #include -#include "AstrobeeConstants.h" +#include "dds_msgs/AstrobeeConstants.h" #include "ff_common/init.h" -#include "DockStateSupport.h" +#include "dds_msgs/DockStateSupport.h" #include "knDds/DdsEntitiesFactory.h" #include "knDds/DdsEntitiesFactorySvc.h" diff --git a/tools/simulator/eps_sim/package.xml b/hardware/smart_dock/package.xml similarity index 62% rename from tools/simulator/eps_sim/package.xml rename to hardware/smart_dock/package.xml index 113ca79308..91f95b596c 100644 --- a/tools/simulator/eps_sim/package.xml +++ b/hardware/smart_dock/package.xml @@ -1,9 +1,8 @@ - - eps_sim - 0.0.0 + smart_dock + 0.0.1 - The eps simulator simulates the astrobee eps. 
+ Package to control the signal lights Apache License, Version 2.0 @@ -16,17 +15,19 @@ catkin roscpp - ff_hw_msgs - ff_util nodelet - message_generation - ff_hw_msgs - ff_util + i2c + eps_driver + ff_util + ff_msgs + ff_hw_msgs + dds_msgs roscpp nodelet - message_generation - message_runtime - - - + i2c + eps_driver + ff_util + ff_msgs + ff_hw_msgs + dds_msgs diff --git a/hardware/speed_cam/CMakeLists.txt b/hardware/speed_cam/CMakeLists.txt index 840a9170c0..1a0a5aa3f2 100644 --- a/hardware/speed_cam/CMakeLists.txt +++ b/hardware/speed_cam/CMakeLists.txt @@ -15,29 +15,59 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(speed_cam) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_util + ff_hw_msgs + ff_serial + mavlink +) + catkin_package( LIBRARIES speed_cam CATKIN_DEPENDS roscpp nodelet - pluginlib - sensor_msgs - geometry_msgs ff_util ff_hw_msgs - mavlink + ff_serial ) -create_library( - TARGET speed_cam - LIBS ${catkin_LIBRARIES} ff_serial config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS mavlink speed_cam_gencpp +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) +# Declare C++ libraries +# Declare C++ libraries +add_library(speed_cam + src/speed_cam.cc + src/speed_cam_node.cc +) +add_dependencies(speed_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(speed_cam ${catkin_LIBRARIES}) + +## Declare a C++ executable: speed_cam_tool +add_executable(speed_cam_tool tools/speed_cam_tool.cc) +add_dependencies(speed_cam_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(speed_cam_tool + speed_cam gflags ${catkin_LIBRARIES}) + + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) # Speed cam initialization fault tester @@ -47,13 +77,37 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_speed_cam - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} speed_cam - INC ${catkin_INCLUDE_DIRS} +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() +# Install C++ executables +install(TARGETS speed_cam_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/speed_cam_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/hardware/speed_cam/package.xml b/hardware/speed_cam/package.xml index 5a47339720..985cc8b6d5 100644 --- a/hardware/speed_cam/package.xml +++ b/hardware/speed_cam/package.xml @@ -16,19 +16,15 @@ catkin roscpp nodelet - pluginlib - sensor_msgs - geometry_msgs ff_util ff_hw_msgs + ff_serial mavlink roscpp nodelet - pluginlib - sensor_msgs - geometry_msgs ff_util ff_hw_msgs + ff_serial mavlink diff --git 
a/hardware/temp_monitor/CMakeLists.txt b/hardware/temp_monitor/CMakeLists.txt index ac4e787f8f..e74a1eff61 100644 --- a/hardware/temp_monitor/CMakeLists.txt +++ b/hardware/temp_monitor/CMakeLists.txt @@ -15,30 +15,86 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(temp_monitor) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + i2c + config_reader + ff_hw_msgs + ff_msgs + ff_util +) + catkin_package( LIBRARIES temp_monitor CATKIN_DEPENDS roscpp nodelet - pluginlib + i2c + config_reader ff_util ff_hw_msgs - sensor_msgs ) -create_library( - TARGET temp_monitor - LIBS ${catkin_LIBRARIES} i2c config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS config_reader i2c +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR tools - LIBS temp_monitor +# Declare C++ libraries +add_library(temp_monitor + src/ADT7410.cc + src/TCN75A.cc + src/temp_monitor.cc + src/temp_monitor_node.cc ) +add_dependencies(temp_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(temp_monitor ${catkin_LIBRARIES}) + +## Declare a C++ executable: temp_monitor_tool +add_executable(temp_monitor_tool tools/temp_monitor_tool.cc) +add_dependencies(temp_monitor_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(temp_monitor_tool + temp_monitor gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS temp_monitor_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/temp_monitor_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() \ No newline at end of file +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/temp_monitor/package.xml b/hardware/temp_monitor/package.xml index 5be91546e6..00b8076e65 100644 --- a/hardware/temp_monitor/package.xml +++ b/hardware/temp_monitor/package.xml @@ -17,16 +17,16 @@ catkin roscpp nodelet - pluginlib + i2c ff_util ff_hw_msgs - sensor_msgs + config_reader roscpp nodelet - pluginlib + i2c ff_util ff_hw_msgs - sensor_msgs + config_reader diff --git a/hardware/vive/CMakeLists.txt b/hardware/vive/CMakeLists.txt index 128153502a..9a21e664d4 100644 --- a/hardware/vive/CMakeLists.txt +++ b/hardware/vive/CMakeLists.txt @@ -15,11 +15,23 @@ # License for the specific language governing permissions and limitations # under the License. 
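The initialization fault tests kept in pico_driver and speed_cam above go through rostest's gtest integration; the wiring is small enough to restate as a sketch (test_init_foo and its .test launch file are placeholders):

    if(CATKIN_ENABLE_TESTING)
      find_package(rostest REQUIRED)
      # Builds a gtest executable and registers the .test launch file with rostest.
      add_rostest_gtest(test_init_foo
        test/test_init_foo.test
        test/test_init_foo.cc
      )
      target_link_libraries(test_init_foo ${catkin_LIBRARIES})
    endif()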
-# Declare the project +cmake_minimum_required(VERSION 3.0) project(vive) -# We need standard math -find_library(CMATH_LIBRARIES NAMES m) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_util + ff_hw_msgs + config_reader +) + +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) # declare the catkin package catkin_package( @@ -29,36 +41,86 @@ catkin_package( CATKIN_DEPENDS roscpp nodelet - pluginlib ff_util ff_hw_msgs + config_reader ) -# ROS agnostic proxy library and tool +########### +## Build ## +########### -create_library( - DIR src/vive_driver - TARGET vive_driver - INC ${USB_INCLUDE_DIRS} ${JSONC_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIRS} - LIBS ${USB_LIBRARIES} ${JSONC_LIBRARIES} ${ZLIB_LIBRARIES} ${CMATH_LIBRARIES} +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets( - DIR tools/vive_tool - INT ${ARGTABLE2_INCLUDE_DIRS} - LIBS vive_driver ${ARGTABLE2_LIBRARIES} +# Declare C++ libraries +add_library(vive_driver + src/vive_driver/vive.c + src/vive_driver/vive_data_button.c + src/vive_driver/vive_data_imu.c + src/vive_driver/vive_data_light.c + src/vive_driver/vive_data_ootx.c + src/vive_driver/vive_dev_tracker.c + src/vive_driver/vive_dev_watchman.c + src/vive_driver/vive_usb.c ) +add_dependencies(vive_driver ${catkin_EXPORTED_TARGETS}) +target_link_libraries(vive_driver json-c usb-1.0 z ${catkin_LIBRARIES}) + +# Declare C++ libraries +add_library(vive_nodelet + src/vive_nodelet/vive_nodelet.cc +) +add_dependencies(vive_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(vive_nodelet vive_driver ${catkin_LIBRARIES}) + +## Declare a C++ executable: vive_tool +add_executable(vive_tool tools/vive_tool/vive_tool.c) +add_dependencies(vive_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(vive_tool + vive_driver argtable2 ${catkin_LIBRARIES}) -# ROS nodelet +############# +## Install ## +############# -create_library( - DIR src/vive_nodelet - TARGET vive_nodelet - LIBS ${catkin_LIBRARIES} vive_driver config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_hw_msgs +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME}_driver + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME}_nodelet + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -# Launch files +# Install C++ executables +install(TARGETS vive_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/vive_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/hardware/vive/package.xml 
b/hardware/vive/package.xml index 03448f9c7a..6fcbe6babb 100644 --- a/hardware/vive/package.xml +++ b/hardware/vive/package.xml @@ -15,14 +15,14 @@ catkin roscpp nodelet - pluginlib ff_util ff_hw_msgs + config_reader roscpp nodelet - pluginlib ff_util ff_hw_msgs + config_reader diff --git a/localization/CMakeLists.txt b/localization/CMakeLists.txt index 7e207b201f..936c385cda 100644 --- a/localization/CMakeLists.txt +++ b/localization/CMakeLists.txt @@ -21,6 +21,7 @@ add_subdirectory(imu_integration) add_subdirectory(interest_point) add_subdirectory(localization_common) add_subdirectory(localization_measurements) +add_subdirectory(optimization_common) add_subdirectory(sparse_mapping) if (USE_ROS) diff --git a/localization/camera/CMakeLists.txt b/localization/camera/CMakeLists.txt index c8ee371895..3683d83894 100644 --- a/localization/camera/CMakeLists.txt +++ b/localization/camera/CMakeLists.txt @@ -15,43 +15,71 @@ # License for the specific language governing permissions and limitations # under the License. -cmake_minimum_required(VERSION 2.8.3) +cmake_minimum_required(VERSION 3.0) project(camera) -set(LIBS - config_reader - ${GLOG_LIBRARIES} - ${GFLAGS_LIBRARIES} - ${OpenCV_LIBRARIES} - ${EIGEN_LIBRARIES} - ${Boost_SYSTEM_LIBRARY} - ${Boost_FILESYSTEM_LIBRARY} - ) -set(INCLUDES +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + std_msgs + sensor_msgs + cv_bridge + image_transport + rosbag + ff_common + ff_msgs + nodelet config_reader - ${GFLAGS_INCLUDE_DIRS} - ${GLOG_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${OpenCV_INCLUDE_DIRS} - ${Boost_INCLUDE_DIRS} ) +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) +find_package(Boost REQUIRED COMPONENTS system) +find_package(Eigen3 REQUIRED) + catkin_package( INCLUDE_DIRS include LIBRARIES camera CATKIN_DEPENDS -# DEPENDS system_lib + roscpp + std_msgs + sensor_msgs + cv_bridge + image_transport + rosbag + ff_common + ff_msgs + nodelet + config_reader ) -create_library(TARGET camera - LIBS ${LIBS} - INC ${INCLUDES} +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ) -create_tool_targets(DIR tools - LIBS camera config_reader - INC ${INCLUDES} +# Declare C++ libraries +add_library(camera + src/camera_model.cc + src/camera_params.cc ) +add_dependencies(camera ${catkin_EXPORTED_TARGETS}) +target_link_libraries(camera ${catkin_LIBRARIES} ${EIGEN_LIBRARIES}) + +## Declare a C++ executable: undistort_image +add_executable(undistort_image tools/undistort_image.cc) +add_dependencies(undistort_image ${catkin_EXPORTED_TARGETS}) +target_link_libraries(undistort_image + camera gflags glog ${catkin_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -74,4 +102,29 @@ if(CATKIN_ENABLE_TESTING) endif() -set(CAMERA_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/include PARENT_SCOPE) +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Install C++ executables +install(TARGETS undistort_image DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/undistort_image share/${PROJECT_NAME} + WORKING_DIRECTORY 
${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/localization/camera/package.xml b/localization/camera/package.xml index 412f360bd7..fa69774761 100644 --- a/localization/camera/package.xml +++ b/localization/camera/package.xml @@ -21,9 +21,11 @@ cv_bridge image_transport rosbag + ff_common ff_msgs nodelet tf2 + config_reader roscpp message_runtime std_msgs @@ -31,7 +33,9 @@ cv_bridge image_transport rosbag + ff_common ff_msgs nodelet tf2 + config_reader diff --git a/localization/camera/readme.md b/localization/camera/readme.md index a928647d89..8615710bce 100644 --- a/localization/camera/readme.md +++ b/localization/camera/readme.md @@ -1,4 +1,4 @@ -\page camera Camera Library +\page camera Camera library This library handles camera calibration parameters and transformations, providing functions to handle between undistorted and @@ -21,6 +21,28 @@ Usage: undistort_image input_dir/*[0-9].jpg --output_directory output_dir \ --robot_camera nav_cam -Other options, useful with the dense mapper, are --save_bgr, ---undistorted_crop_win, -scale, and --image_list. See -undistort_image.cc for more information. +Options: + + --image_list: A file having the list of images to undistort, one per + line. If not specified, it is assumed they are passed in directly on + the command line. + + --output_directory: Output directory. If not specified, undistorted + images will be saved in the same directory as the inputs. + + --scale: Undistort images at different resolution, with their width + being a multiple of this scale compared to the camera model. The + default is 1. + + --undistorted_crop_win: After undistorting, apply a crop window of + these dimensions centered at the undistorted image center. The + adjusted dimensions and optical center will be printed on screen. + Specify as: 'crop_x crop_y'. + + --save_bgr: Save the undistorted images as BGR instead of + grayscale. (Some tools expect BGR.) + + --histogram_equalization: If true, do histogram equalization. + + --robot_camera: Which of bot's cameras to use. Tested with nav_cam + and sci_cam. The default is nav_cam. 
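As a sketch of how the options above can be combined (the file names, the --scale value, and the crop window here are placeholders, and --output_list and --undistorted_intrinsics are the new flags added to undistort_image.cc later in this patch):

  undistort_image --image_list distorted_images.txt \
    --output_list undistorted_images.txt \
    --undistorted_intrinsics undistorted_intrinsics.txt \
    --scale 0.5 --undistorted_crop_win '1100 776' \
    --robot_camera sci_cam

With --image_list given, no input images need to be passed on the command line; with --output_list given, the --output_directory option is not needed.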
diff --git a/localization/camera/test/test_camera_model.cc b/localization/camera/test/test_camera_model.cc index 0b95949778..2ae5888e6e 100644 --- a/localization/camera/test/test_camera_model.cc +++ b/localization/camera/test/test_camera_model.cc @@ -176,3 +176,9 @@ TEST(camera_model, DistortionCoeffs) { EXPECT_NEAR(3.92, optical_center[0], 1e-3); EXPECT_NEAR(0.827, optical_center[1], 1e-3); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/camera/test/test_camera_params.cc b/localization/camera/test/test_camera_params.cc index 2021953974..6f417afd29 100644 --- a/localization/camera/test/test_camera_params.cc +++ b/localization/camera/test/test_camera_params.cc @@ -195,3 +195,9 @@ TEST(camera_params, conversion) { EXPECT_NEAR(input[0], output2[0], 1e-6); EXPECT_NEAR(input[1], output2[1], 1e-6); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/camera/tools/undistort_image.cc b/localization/camera/tools/undistort_image.cc index 5e27612b32..1101bb0fc7 100644 --- a/localization/camera/tools/undistort_image.cc +++ b/localization/camera/tools/undistort_image.cc @@ -45,19 +45,24 @@ separate directory is specified. */ -DEFINE_string(image_list, "", "The list of images to undistort, one per line. If not specified, " - "it is assumed they are passed in directly on the command line."); +DEFINE_string(image_list, "", "A file having the list of images to undistort, one per line. " + "If not specified, it is assumed they are passed in directly on the command line."); -DEFINE_string(output_directory, "", "If not specified, undistorted images will " +DEFINE_string(output_directory, "", "Output directory. If not specified, undistorted images will " "saved in the same directory as the inputs."); +DEFINE_string(output_list, "", "Save the undistorted images with names given in this list, " + "instead of using the output directory."); + +DEFINE_string(undistorted_intrinsics, "", "Save to this file the undistorted camera intrinsics."); + DEFINE_double(scale, 1.0, "Undistort images at different resolution, with their width " "being a multiple of this scale compared to the camera model."); DEFINE_string(undistorted_crop_win, "", "After undistorting, apply a crop window of these dimensions " "centered at the undistorted image center. The adjusted " - "dimensions and optical center will be displayed below. " + "dimensions and optical center will be printed on screen. " "Specify as: 'crop_x crop_y'."); DEFINE_bool(save_bgr, false, @@ -67,7 +72,7 @@ DEFINE_bool(histogram_equalization, false, "If true, do histogram equalization."); DEFINE_string(robot_camera, "nav_cam", - "Which of bot's cameras to use. Anything except nav_cam is experimental."); + "Which of bot's cameras to use. 
Tested with nav_cam and sci_cam."); int main(int argc, char ** argv) { ff_common::InitFreeFlyerApplication(&argc, &argv); @@ -87,6 +92,18 @@ int main(int argc, char ** argv) { if (images.empty()) LOG(FATAL) << "Expecting at least one input image."; + std::vector undist_images; + if (FLAGS_output_list != "") { + std::ifstream ifs(FLAGS_output_list); + std::string image; + while (ifs >> image) + undist_images.push_back(image); + + if (undist_images.size() != images.size()) + LOG(FATAL) << "There must be as many output undistorted " + << "images as input distorted images.\n"; + } + // Read the camera config_reader::ConfigReader config; config.AddFile("cameras.config"); @@ -243,7 +260,10 @@ int main(int argc, char ** argv) { // The output file name std::string undist_file; - if (!FLAGS_output_directory.empty()) { + if (!undist_images.empty()) { + // Was specified via a list + undist_file = undist_images[i]; + } else if (!FLAGS_output_directory.empty()) { // A separate output directory was specified undist_file = FLAGS_output_directory + "/" + ff_common::basename(filename); } else { @@ -256,7 +276,11 @@ int main(int argc, char ** argv) { cv::Mat bgr_image; if (FLAGS_save_bgr && undist_image.channels() == 1) { // Convert from grayscale to color if needed - cvtColor(undist_image, bgr_image, CV_GRAY2BGR); + #if (CV_VERSION_MAJOR >= 4) + cvtColor(undist_image, bgr_image, cv::COLOR_GRAY2BGR); + #else + cvtColor(undist_image, bgr_image, CV_GRAY2BGR); + #endif undist_image = bgr_image; } cv::imwrite(undist_file, undist_image); @@ -268,15 +292,18 @@ int main(int argc, char ** argv) { std::cout << "Focal length: " << focal_length << "\n"; std::cout << "Undistorted optical center: " << optical_center.transpose() << "\n"; - if (!FLAGS_output_directory.empty()) { - std::string intrinsics_file = FLAGS_output_directory + "/undistorted_intrinsics.txt"; + std::string intrinsics_file = FLAGS_undistorted_intrinsics; + if (intrinsics_file.empty() && !FLAGS_output_directory.empty()) + intrinsics_file = FLAGS_output_directory + "/undistorted_intrinsics.txt"; + + if (!intrinsics_file.empty()) { + std::cout << "Writing: " << intrinsics_file << std::endl; std::ofstream ofs(intrinsics_file.c_str()); ofs.precision(17); ofs << "# Unidistored width and height, focal length, undistorted optical center\n"; ofs << undist_size.transpose() << " " << focal_length << " " << optical_center.transpose() << "\n"; ofs.close(); - std::cout << "Wrote: " << intrinsics_file << std::endl; } return 0; diff --git a/localization/graph_localizer/CMakeLists.txt b/localization/graph_localizer/CMakeLists.txt index 499d41542c..e44fb1955a 100644 --- a/localization/graph_localizer/CMakeLists.txt +++ b/localization/graph_localizer/CMakeLists.txt @@ -15,23 +15,74 @@ #License for the specific language governing permissions and limitations #under the License. 
+cmake_minimum_required(VERSION 3.0) project(graph_localizer) -if (USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +# Find GTSAM +find_package(GTSAM REQUIRED) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + nodelet + camera + config_reader + ff_util + ff_msgs + graph_optimizer + imu_integration + localization_common + localization_measurements + msg_conversions +) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) # include ff_nodelet to get ff_util header files since these aren't exposed elsewhere catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} camera config_reader ff_nodelet graph_optimizer imu_integration localization_common localization_measurements msg_conversions + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} INCLUDE_DIRS include ${GLOG_INCLUDE_DIRS} ${GTSAM_INCLUDE_DIR} - CATKIN_DEPENDS roscpp - DEPENDS gtsam ff_msgs + CATKIN_DEPENDS + nodelet + camera + config_reader + ff_util + ff_msgs + graph_optimizer + imu_integration + localization_common + localization_measurements + msg_conversions +) + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} +) + +file(GLOB cc_files + "src/*.cc" ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} gtsam camera config_reader ff_nodelet graph_optimizer imu_integration localization_common localization_measurements msg_conversions - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs +# Declare C++ libraries +add_library(${PROJECT_NAME} + ${cc_files} ) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} ${catkin_LIBRARIES}) + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -40,52 +91,77 @@ if(CATKIN_ENABLE_TESTING) test/test_rotation_factor_adder.cc ) target_link_libraries(test_rotation_factor_adder - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) add_rostest_gtest(test_rotation_factor test/test_rotation_factor.test test/test_rotation_factor.cc ) target_link_libraries(test_rotation_factor - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) add_rostest_gtest(test_point_to_line_factor test/test_point_to_line_factor.test test/test_point_to_line_factor.cc ) target_link_libraries(test_point_to_line_factor - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) add_rostest_gtest(test_point_to_line_segment_factor test/test_point_to_line_segment_factor.test test/test_point_to_line_segment_factor.cc ) target_link_libraries(test_point_to_line_segment_factor - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) add_rostest_gtest(test_point_to_plane_factor test/test_point_to_plane_factor.test test/test_point_to_plane_factor.cc ) target_link_libraries(test_point_to_plane_factor - graph_localizer localization_measurements gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) add_rostest_gtest(test_point_to_handrail_endpoint_factor test/test_point_to_handrail_endpoint_factor.test test/test_point_to_handrail_endpoint_factor.cc ) target_link_libraries(test_point_to_handrail_endpoint_factor - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} 
glog ) add_rostest_gtest(test_silu test/test_silu.test test/test_silu.cc ) target_link_libraries(test_silu - graph_localizer gtsam + graph_localizer gtsam ${catkin_LIBRARIES} glog ) endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) -endif (USE_ROS) diff --git a/localization/graph_localizer/include/graph_localizer/graph_localizer_nodelet.h b/localization/graph_localizer/include/graph_localizer/graph_localizer_nodelet.h index 0b1dd016e5..8ebea49c9c 100644 --- a/localization/graph_localizer/include/graph_localizer/graph_localizer_nodelet.h +++ b/localization/graph_localizer/include/graph_localizer/graph_localizer_nodelet.h @@ -122,6 +122,7 @@ class GraphLocalizerNodelet : public ff_util::FreeFlyerNodelet { ros::Time last_time_tf_dock_; ros::Time last_time_tf_handrail_; + ros::Time last_heartbeat_time_; // Timers localization_common::RosTimer vl_timer_ = localization_common::RosTimer("VL msg"); diff --git a/wdock/tools/wdock_main.cc b/localization/graph_localizer/include/graph_localizer/test_utilities.h similarity index 68% rename from wdock/tools/wdock_main.cc rename to localization/graph_localizer/include/graph_localizer/test_utilities.h index 0133164bd8..dfd2ba4d1a 100644 --- a/wdock/tools/wdock_main.cc +++ b/localization/graph_localizer/include/graph_localizer/test_utilities.h @@ -1,14 +1,14 @@ /* Copyright (c) 2017, United States Government, as represented by the * Administrator of the National Aeronautics and Space Administration. - * + * * All rights reserved. - * + * * The Astrobee platform is licensed under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the @@ -16,23 +16,12 @@ * under the License. 
*/ +#ifndef GRAPH_LOCALIZER_TEST_UTILITIES_H_ +#define GRAPH_LOCALIZER_TEST_UTILITIES_H_ -#include -#include -#include -#include +#include -#include "wdock/wdock.h" - -int main(int argc, char** argv) { - w_dock::WDock wan_dock(argc, argv, "RapidCommand"); - - wan_dock.SetBerthOne(); - wan_dock.SetBerthTwo(); - while (true) { - wan_dock.ProcessDdsEventLoop(); - sleep(1); - } - - return 1; -} +namespace graph_localizer { +localization_measurements::Plane RandomPlane(); +} // namespace graph_localizer +#endif // GRAPH_LOCALIZER_TEST_UTILITIES_H_ diff --git a/localization/graph_localizer/package.xml b/localization/graph_localizer/package.xml index 784d51ba15..764dd85b33 100644 --- a/localization/graph_localizer/package.xml +++ b/localization/graph_localizer/package.xml @@ -14,12 +14,26 @@ Astrobee Flight Software catkin - ff_msgs nodelet - roscpp - ff_msgs + camera + config_reader + ff_util + ff_msgs + graph_optimizer + imu_integration + localization_common + localization_measurements + msg_conversions nodelet - roscpp + camera + config_reader + ff_util + ff_msgs + graph_optimizer + imu_integration + localization_common + localization_measurements + msg_conversions diff --git a/localization/graph_localizer/src/graph_localizer_nodelet.cc b/localization/graph_localizer/src/graph_localizer_nodelet.cc index bf9f26939f..5d32234e9d 100644 --- a/localization/graph_localizer/src/graph_localizer_nodelet.cc +++ b/localization/graph_localizer/src/graph_localizer_nodelet.cc @@ -43,6 +43,7 @@ GraphLocalizerNodelet::GraphLocalizerNodelet() : ff_util::FreeFlyerNodelet(NODE_ LogFatal("Failed to read config files."); } LoadGraphLocalizerNodeletParams(config, params_); + last_heartbeat_time_ = ros::Time::now(); } void GraphLocalizerNodelet::Initialize(ros::NodeHandle* nh) { @@ -263,8 +264,7 @@ void GraphLocalizerNodelet::PublishWorldTDockTF() { const auto world_T_dock_tf = lc::PoseToTF(world_T_dock, "world", "dock/body", lc::TimeFromRosTime(ros::Time::now()), platform_name_); // If the rate is higher than the sim time, prevent repeated timestamps - if (world_T_dock_tf.header.stamp == last_time_tf_dock_) - return; + if (world_T_dock_tf.header.stamp == last_time_tf_dock_) return; last_time_tf_dock_ = world_T_dock_tf.header.stamp; transform_pub_.sendTransform(world_T_dock_tf); } @@ -275,8 +275,7 @@ void GraphLocalizerNodelet::PublishWorldTHandrailTF() { const auto world_T_handrail_tf = lc::PoseToTF(world_T_handrail->pose, "world", "handrail/body", lc::TimeFromRosTime(ros::Time::now()), platform_name_); // If the rate is higher than the sim time, prevent repeated timestamps - if (world_T_handrail_tf.header.stamp == last_time_tf_handrail_) - return; + if (world_T_handrail_tf.header.stamp == last_time_tf_handrail_) return; last_time_tf_handrail_ = world_T_handrail_tf.header.stamp; transform_pub_.sendTransform(world_T_handrail_tf); } @@ -288,7 +287,9 @@ void GraphLocalizerNodelet::PublishReset() const { void GraphLocalizerNodelet::PublishHeartbeat() { heartbeat_.header.stamp = ros::Time::now(); + if ((heartbeat_.header.stamp - last_heartbeat_time_).toSec() < 1.0) return; heartbeat_pub_.publish(heartbeat_); + last_heartbeat_time_ = heartbeat_.header.stamp; } void GraphLocalizerNodelet::PublishGraphMessages() { diff --git a/localization/graph_localizer/src/test_utilities.cc b/localization/graph_localizer/src/test_utilities.cc new file mode 100644 index 0000000000..e51fe70cd3 --- /dev/null +++ b/localization/graph_localizer/src/test_utilities.cc @@ -0,0 +1,31 @@ +/* Copyright (c) 2017, United States Government, as 
represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#include +#include + +#include + +namespace graph_localizer { +namespace lc = localization_common; + +localization_measurements::Plane RandomPlane() { + gtsam::Point3 point = lc::RandomVector(); + gtsam::Vector3 normal = lc::RandomVector().normalized(); + return localization_measurements::Plane(point, normal); +} +} // namespace graph_localizer diff --git a/localization/graph_localizer/test/test_point_to_handrail_endpoint_factor.cc b/localization/graph_localizer/test/test_point_to_handrail_endpoint_factor.cc index 7f78f8f994..0f0d477dfc 100644 --- a/localization/graph_localizer/test/test_point_to_handrail_endpoint_factor.cc +++ b/localization/graph_localizer/test/test_point_to_handrail_endpoint_factor.cc @@ -16,9 +16,9 @@ * under the License. */ -#include "test_utilities.h" // NOLINT #include #include +#include #include #include @@ -26,15 +26,15 @@ #include -namespace gl = graph_localizer; +namespace lc = localization_common; namespace sym = gtsam::symbol_shorthand; TEST(PointToHandrailEndpointFactorTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const gtsam::Point3 sensor_t_point = gl::RandomVector(); - const gtsam::Point3 world_t_handrail_endpoint_a = gl::RandomVector(); - const gtsam::Point3 world_t_handrail_endpoint_b = gl::RandomVector(); - const gtsam::Pose3 body_T_sensor = gl::RandomPose(); - const gtsam::Pose3 world_T_body = gl::RandomPose(); + const gtsam::Point3 sensor_t_point = lc::RandomVector(); + const gtsam::Point3 world_t_handrail_endpoint_a = lc::RandomVector(); + const gtsam::Point3 world_t_handrail_endpoint_b = lc::RandomVector(); + const gtsam::Pose3 body_T_sensor = lc::RandomPose(); + const gtsam::Pose3 world_T_body = lc::RandomPose(); // Ignore case where sensor point is directly between two endpoints as this leads to a known // discontinuity in the Jacobian. { @@ -79,3 +79,9 @@ TEST(PointToHandrailEndpointFactorTester, SelectingCorrectEndpoint) { EXPECT_TRUE(error.isApprox(gtsam::Vector3(-0.4, 0, 0), 1e-6)); } } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_point_to_line_factor.cc b/localization/graph_localizer/test/test_point_to_line_factor.cc index 0cc55984cb..a04e8e3a5d 100644 --- a/localization/graph_localizer/test/test_point_to_line_factor.cc +++ b/localization/graph_localizer/test/test_point_to_line_factor.cc @@ -16,9 +16,9 @@ * under the License. 
*/ -#include "test_utilities.h" // NOLINT #include #include +#include #include #include @@ -26,14 +26,14 @@ #include -namespace gl = graph_localizer; +namespace lc = localization_common; namespace sym = gtsam::symbol_shorthand; TEST(PointToLineFactorTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const gtsam::Point3 sensor_t_point = gl::RandomVector(); - const gtsam::Pose3 world_T_line = gl::RandomPose(); - const gtsam::Pose3 body_T_sensor = gl::RandomPose(); - const gtsam::Pose3 world_T_body = gl::RandomPose(); + const gtsam::Point3 sensor_t_point = lc::RandomVector(); + const gtsam::Pose3 world_T_line = lc::RandomPose(); + const gtsam::Pose3 body_T_sensor = lc::RandomPose(); + const gtsam::Pose3 world_T_body = lc::RandomPose(); const auto noise = gtsam::noiseModel::Unit::Create(2); const gtsam::PointToLineFactor factor(sensor_t_point, world_T_line, body_T_sensor, noise, sym::P(0)); gtsam::Matrix H; @@ -137,3 +137,9 @@ TEST(PointToLineFactorTester, ZeroErrorForZeroOffest) { error_norm = (factor.evaluateError(world_T_body)).norm(); EXPECT_DOUBLE_EQ(error_norm, 0); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_point_to_line_segment_factor.cc b/localization/graph_localizer/test/test_point_to_line_segment_factor.cc index cc8d5d1072..eb07677c0b 100644 --- a/localization/graph_localizer/test/test_point_to_line_segment_factor.cc +++ b/localization/graph_localizer/test/test_point_to_line_segment_factor.cc @@ -16,9 +16,9 @@ * under the License. */ -#include "test_utilities.h" // NOLINT #include #include +#include #include #include @@ -26,15 +26,15 @@ #include -namespace gl = graph_localizer; +namespace lc = localization_common; namespace sym = gtsam::symbol_shorthand; TEST(PointToLineSegmentFactorTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const gtsam::Point3 sensor_t_point = gl::RandomVector(); - const gtsam::Pose3 world_T_line = gl::RandomPose(); - const gtsam::Pose3 body_T_sensor = gl::RandomPose(); - const gtsam::Pose3 world_T_body = gl::RandomPose(); - const double line_length = gl::RandomPositiveDouble(); + const gtsam::Point3 sensor_t_point = lc::RandomVector(); + const gtsam::Pose3 world_T_line = lc::RandomPose(); + const gtsam::Pose3 body_T_sensor = lc::RandomPose(); + const gtsam::Pose3 world_T_body = lc::RandomPose(); + const double line_length = lc::RandomPositiveDouble(); const auto noise = gtsam::noiseModel::Unit::Create(2); const gtsam::PointToLineSegmentFactor factor(sensor_t_point, world_T_line, body_T_sensor, line_length, noise, sym::P(0)); @@ -50,11 +50,11 @@ TEST(PointToLineSegmentFactorTester, Jacobian) { TEST(PointToLineSegmentFactorTester, JacobianWithSilu) { for (int i = 0; i < 500; ++i) { - const gtsam::Point3 sensor_t_point = gl::RandomVector(); - const gtsam::Pose3 world_T_line = gl::RandomPose(); - const gtsam::Pose3 body_T_sensor = gl::RandomPose(); - const gtsam::Pose3 world_T_body = gl::RandomPose(); - const double line_length = gl::RandomPositiveDouble(); + const gtsam::Point3 sensor_t_point = lc::RandomVector(); + const gtsam::Pose3 world_T_line = lc::RandomPose(); + const gtsam::Pose3 body_T_sensor = lc::RandomPose(); + const gtsam::Pose3 world_T_body = lc::RandomPose(); + const double line_length = lc::RandomPositiveDouble(); const auto noise = gtsam::noiseModel::Unit::Create(2); const bool use_silu = true; const gtsam::PointToLineSegmentFactor factor(sensor_t_point, world_T_line, 
body_T_sensor, line_length, noise, @@ -311,3 +311,9 @@ TEST(PointToLineSegmentFactorTester, IncreaseErrorWithIncreasedYDistanceWithSilu } EXPECT_LT(error_1_norm, error_2_norm); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_point_to_plane_factor.cc b/localization/graph_localizer/test/test_point_to_plane_factor.cc index 996de5475b..39194501bc 100644 --- a/localization/graph_localizer/test/test_point_to_plane_factor.cc +++ b/localization/graph_localizer/test/test_point_to_plane_factor.cc @@ -16,9 +16,10 @@ * under the License. */ -#include "test_utilities.h" // NOLINT #include +#include #include +#include #include #include @@ -28,14 +29,15 @@ #include namespace gl = graph_localizer; +namespace lc = localization_common; namespace lm = localization_measurements; namespace sym = gtsam::symbol_shorthand; TEST(PointToPlaneFactorTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const gtsam::Point3 sensor_t_point = gl::RandomVector(); + const gtsam::Point3 sensor_t_point = lc::RandomVector(); const lm::Plane world_T_handrail_plane = gl::RandomPlane(); - const gtsam::Pose3 body_T_sensor = gl::RandomPose(); - const gtsam::Pose3 world_T_body = gl::RandomPose(); + const gtsam::Pose3 body_T_sensor = lc::RandomPose(); + const gtsam::Pose3 world_T_body = lc::RandomPose(); const auto noise = gtsam::noiseModel::Unit::Create(1); const gtsam::PointToPlaneFactor factor(sensor_t_point, world_T_handrail_plane, body_T_sensor, noise, sym::P(0)); gtsam::Matrix H; @@ -47,3 +49,9 @@ TEST(PointToPlaneFactorTester, Jacobian) { ASSERT_TRUE(numerical_H.isApprox(H.matrix(), 1e-6)); } } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_rotation_factor.cc b/localization/graph_localizer/test/test_rotation_factor.cc index 98abb72b3e..8444005e01 100644 --- a/localization/graph_localizer/test/test_rotation_factor.cc +++ b/localization/graph_localizer/test/test_rotation_factor.cc @@ -51,3 +51,9 @@ TEST(RotationFactorTester, Jacobian) { world_T_body_1, world_T_perturbed_body_2, 1e-5); ASSERT_TRUE(numerical_H2.isApprox(factor_H2.matrix(), 1e-6)); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_rotation_factor_adder.cc b/localization/graph_localizer/test/test_rotation_factor_adder.cc index 740c9de49c..236615cd8b 100644 --- a/localization/graph_localizer/test/test_rotation_factor_adder.cc +++ b/localization/graph_localizer/test/test_rotation_factor_adder.cc @@ -151,3 +151,9 @@ TEST_F(RotationFactorAdderTester, CorrectOptimization) { ASSERT_TRUE(optimized_world_T_body_1.equals(world_T_body_1, 1e-6)); ASSERT_TRUE(optimized_world_T_body_2.equals(world_T_body_2, 1e-6)); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_silu.cc b/localization/graph_localizer/test/test_silu.cc index fc35450982..3bd5332550 100644 --- a/localization/graph_localizer/test/test_silu.cc +++ b/localization/graph_localizer/test/test_silu.cc @@ -16,18 +16,19 @@ * under the License. 
*/ -#include "test_utilities.h" // NOLINT #include #include +#include #include #include namespace gl = graph_localizer; +namespace lc = localization_common; TEST(SiluTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const double x = gl::RandomDouble(); + const double x = lc::RandomDouble(); gtsam::Matrix H; gl::Silu(x, H); const auto numerical_H = gtsam::numericalDerivative11( @@ -38,8 +39,8 @@ TEST(SiluTester, Jacobian) { TEST(SiluWithOffsetTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const double x = gl::RandomDouble(); - const double offset = gl::RandomPositiveDouble(); + const double x = lc::RandomDouble(); + const double offset = lc::RandomPositiveDouble(); gtsam::Matrix H; gl::SiluWithOffset(x, offset, H); const auto numerical_H = gtsam::numericalDerivative21( @@ -93,8 +94,8 @@ TEST(SiluWithOffsetTester, XMuchLessThanOffset) { TEST(SiluWithOffsetTwoWayTester, Jacobian) { for (int i = 0; i < 500; ++i) { - const double x = gl::RandomDouble(); - const double offset = gl::RandomPositiveDouble(); + const double x = lc::RandomDouble(); + const double offset = lc::RandomPositiveDouble(); gtsam::Matrix H; gl::SiluWithOffsetTwoWay(x, offset, H); const auto numerical_H = gtsam::numericalDerivative21( @@ -180,3 +181,9 @@ TEST(SiluWithOffsetTwoWayTester, NegativeXMuchGreaterThanOffset) { const double silu_x = gl::SiluWithOffsetTwoWay(x, offset); EXPECT_NEAR(0.0, silu_x, 1e-6); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/graph_localizer/test/test_utilities.h b/localization/graph_localizer/test/test_utilities.h deleted file mode 100644 index b3ca3600d4..0000000000 --- a/localization/graph_localizer/test/test_utilities.h +++ /dev/null @@ -1,60 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef GRAPH_LOCALIZER_TEST_UTILITIES_H_ // NOLINT -#define GRAPH_LOCALIZER_TEST_UTILITIES_H_ // NOLINT - -#include - -#include - -#include - -namespace graph_localizer { -double RandomDouble() { - std::random_device dev; - std::mt19937 rng(dev()); - return std::uniform_real_distribution(-100, 100)(rng); -} - -double RandomPositiveDouble() { - std::random_device dev; - std::mt19937 rng(dev()); - return std::uniform_real_distribution(0, 100)(rng); -} - -gtsam::Vector3 RandomVector() { - // Eigen::Vector3 is constrained to [-1, 1] - return RandomDouble() * Eigen::Vector3d::Random(); -} - -gtsam::Pose3 RandomPose() { - std::random_device dev; - std::mt19937 rng(dev()); - gtsam::Rot3 rot = gtsam::Rot3::Random(rng); - gtsam::Point3 trans = RandomVector(); - return gtsam::Pose3(rot, trans); -} - -localization_measurements::Plane RandomPlane() { - gtsam::Point3 point = RandomVector(); - gtsam::Vector3 normal = RandomVector().normalized(); - return localization_measurements::Plane(point, normal); -} -} // namespace graph_localizer -#endif // GRAPH_LOCALIZER_TEST_UTILITIES_H_ // NOLINT diff --git a/localization/graph_optimizer/CMakeLists.txt b/localization/graph_optimizer/CMakeLists.txt index cbc0a62a79..604d1dc2ad 100644 --- a/localization/graph_optimizer/CMakeLists.txt +++ b/localization/graph_optimizer/CMakeLists.txt @@ -15,18 +15,67 @@ #License for the specific language governing permissions and limitations #under the License. +cmake_minimum_required(VERSION 3.0) project(graph_optimizer) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + config_reader + localization_common + localization_measurements + msg_conversions +) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + # include ff_nodelet to get ff_util header files since these aren't exposed elsewhere catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} config_reader localization_common localization_measurements msg_conversions - INCLUDE_DIRS include ${GLOG_INCLUDE_DIRS} ${GTSAM_INCLUDE_DIR} - CATKIN_DEPENDS - DEPENDS gtsam + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} + INCLUDE_DIRS include ${GLOG_INCLUDE_DIRS} + CATKIN_DEPENDS config_reader localization_common localization_measurements msg_conversions ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} gtsam config_reader localization_common localization_measurements msg_conversions - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ${GLOG_INCLUDE_DIRS} ) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/graph_optimizer.cc + src/graph_values.cc + src/utilities.cc + src/graph_stats.cc + src/parameter_reader.cc +) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING 
PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/localization/graph_optimizer/package.xml b/localization/graph_optimizer/package.xml index 81b9009955..10b279e9d5 100644 --- a/localization/graph_optimizer/package.xml +++ b/localization/graph_optimizer/package.xml @@ -14,4 +14,16 @@ Astrobee Flight Software catkin + roscpp + nodelet + config_reader + localization_common + localization_measurements + msg_conversions + roscpp + nodelet + config_reader + localization_common + localization_measurements + msg_conversions diff --git a/localization/ground_truth_localizer/CMakeLists.txt b/localization/ground_truth_localizer/CMakeLists.txt index 9d1d878271..d1bea27238 100644 --- a/localization/ground_truth_localizer/CMakeLists.txt +++ b/localization/ground_truth_localizer/CMakeLists.txt @@ -15,24 +15,74 @@ #License for the specific language governing permissions and limitations #under the License. +cmake_minimum_required(VERSION 3.0) project(ground_truth_localizer) if (USE_ROS) + + ## Compile as C++14, supported in ROS Kinetic and newer + add_compile_options(-std=c++14) -# include ff_nodelet to get ff_util header files since these aren't exposed elsewhere -catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} config_reader ff_nodelet localization_common - INCLUDE_DIRS include ${GLOG_INCLUDE_DIRS} - CATKIN_DEPENDS roscpp - DEPENDS ff_msgs -) + ## Find catkin macros and libraries + find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_msgs + ff_util + localization_common + ) + + # System dependencies are found with CMake's conventions + find_package(Eigen3 REQUIRED) + + # include ff_nodelet to get ff_util header files since these aren't exposed elsewhere + catkin_package( + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} + INCLUDE_DIRS include ${GLOG_INCLUDE_DIRS} + CATKIN_DEPENDS roscpp ff_msgs ff_util localization_common + ) + + ########### + ## Build ## + ########### + # Specify additional locations of header files + include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ) + + # Declare C++ libraries + add_library(${PROJECT_NAME} + src/ground_truth_localizer_nodelet.cc + src/utilities.cc + ) + add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES} ${QT_LIBRARIES}) + + + ############# + ## Install ## + ############# + + # Mark libraries for installation + install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} + ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} config_reader ff_nodelet localization_common - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() + # Mark launch files for installation + install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) +else (USE_ROS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() endif (USE_ROS) diff --git a/localization/ground_truth_localizer/package.xml b/localization/ground_truth_localizer/package.xml index 842b4c5d22..7d9c801f35 100644 --- a/localization/ground_truth_localizer/package.xml +++ b/localization/ground_truth_localizer/package.xml @@ -14,12 +14,16 @@ Astrobee Flight Software catkin - ff_msgs - nodelet roscpp 
- ff_msgs - nodelet + nodelet + ff_msgs + ff_util + localization_common roscpp + nodelet + ff_msgs + ff_util + localization_common diff --git a/localization/handrail_detect/CMakeLists.txt b/localization/handrail_detect/CMakeLists.txt index 9887b0ecf1..36f2706e3c 100644 --- a/localization/handrail_detect/CMakeLists.txt +++ b/localization/handrail_detect/CMakeLists.txt @@ -15,30 +15,66 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(handrail_detect) -if (USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + tf2 + nodelet + visualization_msgs + ff_msgs + ff_util +) + +# Find OpenCV3 +find_package(OpenCV 3.3.1 REQUIRED) catkin_package( LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS tf2 visualization_msgs roscpp ff_msgs nodelet - DEPENDS ${PROJECT_NAME} + CATKIN_DEPENDS roscpp tf2 nodelet visualization_msgs ff_msgs ff_util ) -set(CMAKE_CXX_FLAGS_HACKY "${CMAKE_CXX_FLAGS} -Wno-return-type") +########### +## Build ## +########### -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${OROCOS_KDL_LIBRARIES} ff_common config_reader msg_conversions ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs +# Specify additional locations of header files +include_directories( + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} ) -set_target_properties(${PROJECT_NAME} PROPERTIES COMPILE_FLAGS ${CMAKE_CXX_FLAGS_HACKY}) +# Declare C++ libraries +add_library(handrail_detect + src/handrail_detect.cc +) +add_dependencies(handrail_detect ${catkin_EXPORTED_TARGETS}) +target_link_libraries(handrail_detect ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) -create_tool_targets(DIR tools - LIBS ${PROJECT_NAME} +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) -endif (USE_ROS) +install(DIRECTORY config/ DESTINATION config) diff --git a/localization/handrail_detect/package.xml b/localization/handrail_detect/package.xml index 7ca3c4bf22..a4bb30a8ae 100644 --- a/localization/handrail_detect/package.xml +++ b/localization/handrail_detect/package.xml @@ -17,7 +17,6 @@ catkin roscpp nodelet - pluginlib tf2 tf2_ros tf2_geometry_msgs @@ -29,7 +28,6 @@ ff_util roscpp nodelet - pluginlib tf2 tf2_ros tf2_geometry_msgs diff --git a/localization/handrail_detect/src/handrail_detect.cc b/localization/handrail_detect/src/handrail_detect.cc index 1fb3bef0d6..b2942d3731 100644 --- a/localization/handrail_detect/src/handrail_detect.cc +++ b/localization/handrail_detect/src/handrail_detect.cc @@ -52,7 +52,7 @@ #include #include -// Eigen incliudes +// Eigen includes #include #include #include diff --git a/localization/imu_augmentor/CMakeLists.txt b/localization/imu_augmentor/CMakeLists.txt index 2039fa6dee..de41cf6c1f 100644 --- a/localization/imu_augmentor/CMakeLists.txt +++ b/localization/imu_augmentor/CMakeLists.txt @@ -15,23 +15,105 @@ #License for the specific language governing 
permissions and limitations #under the License. +cmake_minimum_required(VERSION 3.0) project(imu_augmentor) -if (USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +# Find GTSAM +find_package(GTSAM REQUIRED) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + nodelet + ff_util + imu_integration + localization_common + localization_measurements +) catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} ff_nodelet imu_integration localization_common localization_measurements INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} - CATKIN_DEPENDS - DEPENDS gtsam + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} gtsam + CATKIN_DEPENDS + nodelet + ff_util + imu_integration + localization_common + localization_measurements +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${GTSAM_INCLUDE_DIR} + ${EIGEN3_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/imu_augmentor.cc + src/imu_augmentor_nodelet.cc + src/imu_augmentor_wrapper.cc +) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + add_rostest_gtest(test_imu_augmentor + test/test_imu_augmentor.test + test/test_imu_augmentor.cc + test/test_utilities.cc + ) + target_link_libraries(test_imu_augmentor + imu_augmentor ${catkin_LIBRARIES} + ) + find_package(rostest REQUIRED) + add_rostest_gtest(test_imu_augmentor_wrapper + test/test_imu_augmentor_wrapper.test + test/test_imu_augmentor_wrapper.cc + test/test_utilities.cc + ) + target_link_libraries(test_imu_augmentor_wrapper + imu_augmentor ${catkin_LIBRARIES} + ) +endif() + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ff_nodelet imu_integration localization_common localization_measurements - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS gtsam ff_msgs +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) -endif (USE_ROS) diff --git a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor.h b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor.h index eefb45b064..3c7ccd2c30 100644 --- a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor.h +++ b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor.h @@ -30,8 +30,8 @@ class ImuAugmentor : public imu_integration::ImuIntegrator { public: explicit ImuAugmentor(const ImuAugmentorParams& params); - boost::optional PimPredict( - const localization_common::CombinedNavState& 
combined_nav_state); + void PimPredict(const localization_common::CombinedNavState& latest_combined_nav_state, + localization_common::CombinedNavState& latest_imu_augmented_combined_nav_state); }; } // namespace imu_augmentor diff --git a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_nodelet.h b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_nodelet.h index 85a6a550b5..a8f2609124 100644 --- a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_nodelet.h +++ b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_nodelet.h @@ -68,6 +68,8 @@ class ImuAugmentorNodelet : public ff_util::FreeFlyerNodelet { ff_msgs::Heartbeat heartbeat_; tf2_ros::TransformBroadcaster transform_pub_; ros::Time last_time_; + ros::Time last_heartbeat_time_; + boost::optional last_state_msg_time_; }; } // namespace imu_augmentor diff --git a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_wrapper.h b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_wrapper.h index 6b51fbbefe..420f7e8da0 100644 --- a/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_wrapper.h +++ b/localization/imu_augmentor/include/imu_augmentor/imu_augmentor_wrapper.h @@ -39,6 +39,8 @@ class ImuAugmentorWrapper { public: explicit ImuAugmentorWrapper(const std::string& graph_config_path_prefix = ""); + explicit ImuAugmentorWrapper(const ImuAugmentorParams& params); + void LocalizationStateCallback(const ff_msgs::GraphState& loc_msg); void ImuCallback(const sensor_msgs::Imu& imu_msg); @@ -51,6 +53,8 @@ class ImuAugmentorWrapper { boost::optional LatestImuAugmentedLocalizationMsg(); private: + void Initialize(const ImuAugmentorParams& params); + bool LatestImuAugmentedCombinedNavStateAndCovariances( localization_common::CombinedNavState& latest_imu_augmented_combined_nav_state, localization_common::CombinedNavStateCovariances& latest_imu_augmented_covariances); @@ -61,6 +65,7 @@ class ImuAugmentorWrapper { std::unique_ptr imu_augmentor_; boost::optional latest_combined_nav_state_; + boost::optional latest_imu_augmented_combined_nav_state_; boost::optional latest_covariances_; boost::optional latest_loc_msg_; std::unique_ptr preintegration_helper_; diff --git a/localization/imu_augmentor/package.xml b/localization/imu_augmentor/package.xml index 0e0156dca1..503996e830 100644 --- a/localization/imu_augmentor/package.xml +++ b/localization/imu_augmentor/package.xml @@ -14,13 +14,16 @@ Astrobee Flight Software catkin - catkin - ff_msgs nodelet - roscpp - ff_msgs + ff_util + imu_integration + localization_common + localization_measurements nodelet - roscpp + ff_util + imu_integration + localization_common + localization_measurements diff --git a/localization/imu_augmentor/src/imu_augmentor.cc b/localization/imu_augmentor/src/imu_augmentor.cc index abefad9fdb..49fce03489 100644 --- a/localization/imu_augmentor/src/imu_augmentor.cc +++ b/localization/imu_augmentor/src/imu_augmentor.cc @@ -26,28 +26,30 @@ namespace lc = localization_common; namespace lm = localization_measurements; ImuAugmentor::ImuAugmentor(const ImuAugmentorParams& params) : ii::ImuIntegrator(params) {} -boost::optional ImuAugmentor::PimPredict(const lc::CombinedNavState& combined_nav_state) { - if (Empty()) return combined_nav_state; +void ImuAugmentor::PimPredict(const lc::CombinedNavState& latest_combined_nav_state, + lc::CombinedNavState& latest_imu_augmented_combined_nav_state) { + if (Empty()) return; // Start with least upper bound measurement // Don't add measurements with same timestamp 
as start_time // since these would have a dt of 0 (wrt the pim start time) and cause errors for the pim - auto measurement_it = measurements().upper_bound(combined_nav_state.timestamp()); - if (measurement_it == measurements().cend()) return combined_nav_state; - auto last_predicted_combined_nav_state = combined_nav_state; - auto pim = ii::Pim(last_predicted_combined_nav_state.bias(), pim_params()); + auto measurement_it = measurements().upper_bound(latest_imu_augmented_combined_nav_state.timestamp()); + if (measurement_it == measurements().cend()) return; + // Always use the biases from the latest combined nav state + auto pim = ii::Pim(latest_combined_nav_state.bias(), pim_params()); int num_measurements_added = 0; // Create new pim each time since pim uses beginning orientation and velocity for // gravity integration and initial velocity integration. for (; measurement_it != measurements().cend(); ++measurement_it) { - pim.resetIntegrationAndSetBias(last_predicted_combined_nav_state.bias()); - auto time = last_predicted_combined_nav_state.timestamp(); + pim.resetIntegrationAndSetBias(latest_combined_nav_state.bias()); + auto time = latest_imu_augmented_combined_nav_state.timestamp(); ii::AddMeasurement(measurement_it->second, time, pim); - last_predicted_combined_nav_state = ii::PimPredict(last_predicted_combined_nav_state, pim); + latest_imu_augmented_combined_nav_state = ii::PimPredict(latest_imu_augmented_combined_nav_state, pim); ++num_measurements_added; } - RemoveOldMeasurements(combined_nav_state.timestamp()); + // Only remove measurements up to latest combined nav state so that when a new nav state is received IMU data is still + // available for extrapolation + RemoveOldMeasurements(latest_combined_nav_state.timestamp()); LogDebug("PimPredict: Added " << num_measurements_added << " measurements."); - return last_predicted_combined_nav_state; } } // namespace imu_augmentor diff --git a/localization/imu_augmentor/src/imu_augmentor_nodelet.cc b/localization/imu_augmentor/src/imu_augmentor_nodelet.cc index d70cf903b1..dfba453ef2 100644 --- a/localization/imu_augmentor/src/imu_augmentor_nodelet.cc +++ b/localization/imu_augmentor/src/imu_augmentor_nodelet.cc @@ -33,6 +33,7 @@ ImuAugmentorNodelet::ImuAugmentorNodelet() : ff_util::FreeFlyerNodelet(NODE_IMU_ heartbeat_.node = GetName(); heartbeat_.nodelet_manager = ros::this_node::getName(); last_time_ = ros::Time::now(); + last_heartbeat_time_ = ros::Time::now(); } void ImuAugmentorNodelet::Initialize(ros::NodeHandle* nh) { @@ -76,7 +77,10 @@ boost::optional ImuAugmentorNodelet::PublishLatestImuAugmente LogDebugEveryN(100, "PublishLatestImuAugmentedLocalizationState: Failed to get latest imu augmented loc msg."); return boost::none; } + // Avoid sending repeat messages + if (last_state_msg_time_ && (latest_imu_augmented_loc_msg->header.stamp == *last_state_msg_time_)) return boost::none; state_pub_.publish(*latest_imu_augmented_loc_msg); + last_state_msg_time_ = latest_imu_augmented_loc_msg->header.stamp; return latest_imu_augmented_loc_msg; } @@ -112,7 +116,9 @@ void ImuAugmentorNodelet::PublishPoseAndTwistAndTransform(const ff_msgs::EkfStat void ImuAugmentorNodelet::PublishHeartbeat() { heartbeat_.header.stamp = ros::Time::now(); + if ((heartbeat_.header.stamp - last_heartbeat_time_).toSec() < 1.0) return; heartbeat_pub_.publish(heartbeat_); + last_heartbeat_time_ = heartbeat_.header.stamp; } void ImuAugmentorNodelet::Run() { diff --git a/localization/imu_augmentor/src/imu_augmentor_wrapper.cc 
b/localization/imu_augmentor/src/imu_augmentor_wrapper.cc index 4c0141c8d7..7373e4035a 100644 --- a/localization/imu_augmentor/src/imu_augmentor_wrapper.cc +++ b/localization/imu_augmentor/src/imu_augmentor_wrapper.cc @@ -41,8 +41,16 @@ ImuAugmentorWrapper::ImuAugmentorWrapper(const std::string& graph_config_path_pr LogFatal("Failed to read config files."); } - ii::LoadImuIntegratorParams(config, params_); - params_.standstill_enabled = mc::LoadBool(config, "imu_augmentor_standstill"); + ImuAugmentorParams params; + ii::LoadImuIntegratorParams(config, params); + params.standstill_enabled = mc::LoadBool(config, "imu_augmentor_standstill"); + Initialize(params); +} + +ImuAugmentorWrapper::ImuAugmentorWrapper(const ImuAugmentorParams& params) { Initialize(params); } + +void ImuAugmentorWrapper::Initialize(const ImuAugmentorParams& params) { + params_ = params; imu_augmentor_.reset(new ImuAugmentor(params_)); // Preintegration_helper_ is only being used to frame change and remove centrifugal acceleration, so body_T_imu is the @@ -61,6 +69,8 @@ void ImuAugmentorWrapper::LocalizationStateCallback(const ff_msgs::GraphState& l latest_combined_nav_state_ = lc::CombinedNavStateFromMsg(loc_msg); latest_covariances_ = lc::CombinedNavStateCovariancesFromMsg(loc_msg); + // Reset imu augmented state so IMU data is added starting with the most recent combined nav state's biases + latest_imu_augmented_combined_nav_state_ = latest_combined_nav_state_; latest_loc_msg_ = loc_msg; standstill_ = loc_msg.standstill; } @@ -83,7 +93,8 @@ void ImuAugmentorWrapper::FlightModeCallback(const ff_msgs::FlightMode& flight_m boost::optional> ImuAugmentorWrapper::LatestImuAugmentedCombinedNavStateAndCovariances() { - if (!latest_combined_nav_state_ || !latest_covariances_ || !imu_augmentor_) { + if (!latest_combined_nav_state_ || !latest_covariances_ || !imu_augmentor_ || + !latest_imu_augmented_combined_nav_state_) { LogError( "LatestImuAugmentedCombinedNavStateAndCovariances: Not enough information available to create desired data."); return boost::none; @@ -91,17 +102,24 @@ ImuAugmentorWrapper::LatestImuAugmentedCombinedNavStateAndCovariances() { if (standstill()) { LogDebugEveryN(100, "LatestImuAugmentedCombinedNavStateAndCovariances: Standstill."); - return std::pair{*latest_combined_nav_state_, + const auto latest_timestamp = imu_augmentor_->LatestTime(); + if (!latest_timestamp) { + LogError("LatestImuAugmentedCombinedNavStateAndCovariances: Failed to get latest timestamp."); + return boost::none; + } + const lc::CombinedNavState latest_timestamp_combined_nav_state( + latest_combined_nav_state_->nav_state(), latest_combined_nav_state_->bias(), *latest_timestamp); + return std::pair{latest_timestamp_combined_nav_state, *latest_covariances_}; } - const auto latest_imu_augmented_combined_nav_state = imu_augmentor_->PimPredict(*latest_combined_nav_state_); - if (!latest_imu_augmented_combined_nav_state) { + imu_augmentor_->PimPredict(*latest_combined_nav_state_, *latest_imu_augmented_combined_nav_state_); + if (!latest_imu_augmented_combined_nav_state_) { LogError("LatestImuAugmentedCombinedNavSTateAndCovariances: Failed to pim predict combined nav state."); return boost::none; } // TODO(rsoussan): propogate uncertainties from imu augmentor - return std::pair{*latest_imu_augmented_combined_nav_state, + return std::pair{*latest_imu_augmented_combined_nav_state_, *latest_covariances_}; } diff --git a/localization/imu_augmentor/test/test_imu_augmentor.cc b/localization/imu_augmentor/test/test_imu_augmentor.cc new file mode 
100644 index 0000000000..1997bbe30b --- /dev/null +++ b/localization/imu_augmentor/test/test_imu_augmentor.cc @@ -0,0 +1,204 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include "test_utilities.h" // NOLINT +#include +#include +#include +#include + +#include + +namespace ia = imu_augmentor; +namespace lc = localization_common; +namespace lm = localization_measurements; + +class ConstantAccelerationTest : public ::testing::Test { + public: + // Start at time increment so first IMU measurement is after starting combined nav state time + ConstantAccelerationTest() + : acceleration_i_(0.1), + acceleration_(acceleration_i_, acceleration_i_, acceleration_i_), + time_increment_(1.0 / 125.0), + start_time_(time_increment_), + num_measurements_(20) { + const ia::ImuAugmentorParams params = ia::DefaultImuAugmentorParams(); + imu_augmentor_.reset(new ia::ImuAugmentor(params)); + } + + void SetUp() final { + const std::vector imu_measurements = + ia::ConstantAccelerationMeasurements(acceleration_, num_measurements_, start_time_, time_increment_); + for (const auto& imu_measurement : imu_measurements) { + imu_augmentor_->BufferImuMeasurement(imu_measurement); + } + } + + ia::ImuAugmentor& imu_augmentor() { return *imu_augmentor_; } + + double acceleration_i() { return acceleration_i_; } + + const Eigen::Vector3d& acceleration() { return acceleration_; } + + double time_increment() { return time_increment_; } + + double start_time() { return start_time_; } + + int num_measurements() { return num_measurements_; } + + private: + std::unique_ptr imu_augmentor_; + const double acceleration_i_; + const Eigen::Vector3d acceleration_; + const double time_increment_; + const lc::Time start_time_; + const int num_measurements_; +}; + +class ConstantAngularVelocityTest : public ::testing::Test { + public: + // Start at time increment so first IMU measurement is after starting combined nav state time + ConstantAngularVelocityTest() + : angular_velocity_i_(0.1), + angular_velocity_(angular_velocity_i_, angular_velocity_i_, angular_velocity_i_), + time_increment_(1.0 / 125.0), + start_time_(time_increment_), + num_measurements_(20) { + const ia::ImuAugmentorParams params = ia::DefaultImuAugmentorParams(); + imu_augmentor_.reset(new ia::ImuAugmentor(params)); + } + + void SetUp() final { + imu_measurements_ = + ia::ConstantAngularVelocityMeasurements(angular_velocity_, num_measurements_, start_time_, time_increment_); + for (const auto& imu_measurement : imu_measurements_) { + imu_augmentor_->BufferImuMeasurement(imu_measurement); + } + } + + ia::ImuAugmentor& imu_augmentor() { return *imu_augmentor_; } + + double angular_velocity_i() { return angular_velocity_i_; } + + const Eigen::Vector3d& angular_velocity() { return angular_velocity_; } + + double time_increment() { return time_increment_; } + + 
double start_time() { return start_time_; } + + int num_measurements() { return num_measurements_; } + + const std::vector& imu_measurements() { return imu_measurements_; } + + private: + std::unique_ptr imu_augmentor_; + const double angular_velocity_i_; + const Eigen::Vector3d angular_velocity_; + const double time_increment_; + const lc::Time start_time_; + const int num_measurements_; + std::vector imu_measurements_; +}; + +TEST_F(ConstantAccelerationTest, AddAllMeasurements) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + lc::CombinedNavState imu_augmented_state = initial_state; + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + const double expected_velocity_i = acceleration_i() * num_measurements() * time_increment(); + const gtsam::Vector3 expected_velocity(expected_velocity_i, expected_velocity_i, expected_velocity_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.velocity().matrix(), expected_velocity.matrix()); + // x = 1/2*a*t^2 + const double expected_position_i = acceleration_i() * 0.5 * std::pow(num_measurements() * time_increment(), 2); + const gtsam::Vector3 expected_position(expected_position_i, expected_position_i, expected_position_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().translation().matrix(), expected_position.matrix()); +} + +TEST_F(ConstantAccelerationTest, AddHalfOfMeasurements) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + const lc::Time imu_augmented_state_start_time = num_measurements() / 2 * time_increment(); + lc::CombinedNavState imu_augmented_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), imu_augmented_state_start_time); + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + const double expected_velocity_i = acceleration_i() * num_measurements() / 2 * time_increment(); + const gtsam::Vector3 expected_velocity(expected_velocity_i, expected_velocity_i, expected_velocity_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.velocity().matrix(), expected_velocity.matrix()); + // x = 1/2*a*t^2 + const double expected_position_i = acceleration_i() * 0.5 * std::pow(num_measurements() / 2 * time_increment(), 2); + const gtsam::Vector3 expected_position(expected_position_i, expected_position_i, expected_position_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().translation().matrix(), expected_position.matrix()); +} + +TEST_F(ConstantAccelerationTest, AddAllMeasurementsWithAccelBias) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(acceleration(), gtsam::Vector3::Zero()), 0); + lc::CombinedNavState imu_augmented_state = initial_state; + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.velocity().matrix(), gtsam::Vector3::Zero().matrix()); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().translation().matrix(), + gtsam::Vector3::Zero().matrix()); +} + +TEST_F(ConstantAngularVelocityTest, 
AddAllMeasurements) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + lc::CombinedNavState imu_augmented_state = initial_state; + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + gtsam::Rot3 expected_orientation = + ia::IntegrateAngularVelocities(imu_measurements(), gtsam::Rot3::identity(), initial_state.timestamp()); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().rotation().matrix(), expected_orientation.matrix()); +} + +TEST_F(ConstantAngularVelocityTest, AddHalfOfMeasurements) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + const lc::Time imu_augmented_state_start_time = num_measurements() / 2 * time_increment(); + lc::CombinedNavState imu_augmented_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), imu_augmented_state_start_time); + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + gtsam::Rot3 expected_orientation = + ia::IntegrateAngularVelocities(imu_measurements(), gtsam::Rot3::identity(), imu_augmented_state_start_time); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().rotation().matrix(), expected_orientation.matrix()); +} + +TEST_F(ConstantAngularVelocityTest, AddAllMeasurementsWithAccelBias) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(gtsam::Vector3::Zero(), angular_velocity()), 0); + lc::CombinedNavState imu_augmented_state = initial_state; + imu_augmentor().PimPredict(initial_state, imu_augmented_state); + + EXPECT_NEAR(imu_augmented_state.timestamp(), num_measurements() * time_increment(), 1e-6); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state.pose().rotation().matrix(), gtsam::Rot3::identity().matrix()); +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/rviz_visualizer/launch/granite_table_visualizer.launch b/localization/imu_augmentor/test/test_imu_augmentor.test similarity index 91% rename from tools/rviz_visualizer/launch/granite_table_visualizer.launch rename to localization/imu_augmentor/test/test_imu_augmentor.test index b2fa6c62f0..4e44c50ef7 100644 --- a/tools/rviz_visualizer/launch/granite_table_visualizer.launch +++ b/localization/imu_augmentor/test/test_imu_augmentor.test @@ -16,7 +16,5 @@ - - + diff --git a/localization/imu_augmentor/test/test_imu_augmentor_wrapper.cc b/localization/imu_augmentor/test/test_imu_augmentor_wrapper.cc new file mode 100644 index 0000000000..7b3977ca73 --- /dev/null +++ b/localization/imu_augmentor/test/test_imu_augmentor_wrapper.cc @@ -0,0 +1,176 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include "test_utilities.h" // NOLINT +#include +#include +#include +#include +#include + +#include + +namespace ia = imu_augmentor; +namespace lc = localization_common; +namespace lm = localization_measurements; + +class ConstantAccelerationTest : public ::testing::Test { + public: + // Start at time increment so first IMU measurement is after starting combined nav state time + ConstantAccelerationTest() + : acceleration_i_(0.1), + acceleration_(acceleration_i_, acceleration_i_, acceleration_i_), + time_increment_(1.0 / 125.0), + start_time_(time_increment_), + num_measurements_(20) { + const ia::ImuAugmentorParams params = ia::DefaultImuAugmentorParams(); + imu_augmentor_wrapper_.reset(new ia::ImuAugmentorWrapper(params)); + } + + void SetUp() final { + const std::vector imu_measurements = + ia::ConstantAccelerationMeasurements(acceleration_, num_measurements_, start_time_, time_increment_); + AddMeasurements(imu_measurements); + } + + void AddMeasurements(const std::vector& imu_measurements) { + for (const auto& imu_measurement : imu_measurements) { + const auto imu_msg = ia::ImuMsg(imu_measurement); + imu_augmentor_wrapper_->ImuCallback(imu_msg); + } + } + + void AddCombinedNavState(const lc::CombinedNavState& state) { + ff_msgs::GraphState loc_msg; + lc::CombinedNavStateToMsg(state, loc_msg); + imu_augmentor_wrapper_->LocalizationStateCallback(loc_msg); + } + + ia::ImuAugmentorWrapper& imu_augmentor_wrapper() { return *imu_augmentor_wrapper_; } + + double acceleration_i() { return acceleration_i_; } + + const Eigen::Vector3d& acceleration() { return acceleration_; } + + double time_increment() { return time_increment_; } + + double start_time() { return start_time_; } + + int num_measurements() { return num_measurements_; } + + double TotalDuration() { return num_measurements_ * time_increment_; } + + private: + std::unique_ptr imu_augmentor_wrapper_; + const double acceleration_i_; + const Eigen::Vector3d acceleration_; + const double time_increment_; + const lc::Time start_time_; + const int num_measurements_; +}; + +TEST_F(ConstantAccelerationTest, AddAllMeasurements) { + const lc::CombinedNavState initial_state(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + AddCombinedNavState(initial_state); + const auto imu_augmented_state = imu_augmentor_wrapper().LatestImuAugmentedCombinedNavStateAndCovariances(); + ASSERT_TRUE(imu_augmented_state != boost::none); + EXPECT_NEAR(imu_augmented_state->first.timestamp(), TotalDuration(), 1e-6); + const double expected_velocity_i = acceleration_i() * TotalDuration(); + const gtsam::Vector3 expected_velocity(expected_velocity_i, expected_velocity_i, expected_velocity_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.velocity().matrix(), expected_velocity.matrix()); + // x = 1/2*a*t^2 + const double expected_position_i = acceleration_i() * 0.5 * std::pow(TotalDuration(), 2); + const gtsam::Vector3 expected_position(expected_position_i, expected_position_i, expected_position_i); + ASSERT_PRED2(lc::MatrixEquality<6>, 
imu_augmented_state->first.pose().translation().matrix(), + expected_position.matrix()); +} + +TEST_F(ConstantAccelerationTest, AddAllMeasurementsWithHalfwayBias) { + // Now add a new combined nav state halfway through with an acceleration bias equal to the constant acceleration + // The new latest imu augmented state should have the same pose and velocity as the halfway state + const lc::CombinedNavState halfway_state_with_bias( + gtsam::Pose3(gtsam::Rot3::identity(), gtsam::Point3(1, 2, 3)), gtsam::Velocity3(4, 5, 6), + gtsam::imuBias::ConstantBias(acceleration(), gtsam::Vector3::Zero()), num_measurements() / 2 * time_increment()); + AddCombinedNavState(halfway_state_with_bias); + const auto imu_augmented_state = imu_augmentor_wrapper().LatestImuAugmentedCombinedNavStateAndCovariances(); + + ASSERT_TRUE(imu_augmented_state != boost::none); + EXPECT_NEAR(imu_augmented_state->first.timestamp(), TotalDuration(), 1e-6); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.velocity().matrix(), + halfway_state_with_bias.velocity().matrix()); + const gtsam::Point3 expected_translation = + halfway_state_with_bias.pose().translation() + + halfway_state_with_bias.velocity() * num_measurements() / 2 * time_increment(); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.pose().translation().matrix(), expected_translation); +} + +TEST_F(ConstantAccelerationTest, AddAllMeasurementsTieredBiases) { + { + const lc::CombinedNavState state_a_no_bias(gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(), 0); + AddCombinedNavState(state_a_no_bias); + const lc::CombinedNavState state_b_half_bias_quarter_time( + gtsam::Pose3::identity(), gtsam::Velocity3::Zero(), + gtsam::imuBias::ConstantBias(acceleration() / 2.0, gtsam::Vector3::Zero()), + num_measurements() / 4 * time_increment()); + AddCombinedNavState(state_b_half_bias_quarter_time); + const auto imu_augmented_state = imu_augmentor_wrapper().LatestImuAugmentedCombinedNavStateAndCovariances(); + ASSERT_TRUE(imu_augmented_state != boost::none); + EXPECT_NEAR(imu_augmented_state->first.timestamp(), TotalDuration(), 1e-6); + // Expect half the acceleration for the last three quarters of the imu msgs + const double expected_velocity_i = acceleration_i() / 2 * TotalDuration() * 0.75; + const gtsam::Vector3 expected_velocity(expected_velocity_i, expected_velocity_i, expected_velocity_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.velocity().matrix(), expected_velocity.matrix()); + // x = 1/2*a*t^2 + const double expected_position_i = acceleration_i() / 2.0 * 0.5 * std::pow(TotalDuration() * 0.75, 2); + const gtsam::Vector3 expected_position(expected_position_i, expected_position_i, expected_position_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.pose().translation().matrix(), + expected_position.matrix()); + } + + { + const lc::CombinedNavState state_c_quarter_bias_half_time_initial_pose_and_velocity( + gtsam::Pose3(gtsam::Rot3::identity(), gtsam::Point3(1, 2, 3)), gtsam::Velocity3(4, 5, 6), + gtsam::imuBias::ConstantBias(acceleration() / 4.0, gtsam::Vector3::Zero()), TotalDuration() / 2.0); + AddCombinedNavState(state_c_quarter_bias_half_time_initial_pose_and_velocity); + const auto imu_augmented_state = imu_augmentor_wrapper().LatestImuAugmentedCombinedNavStateAndCovariances(); + ASSERT_TRUE(imu_augmented_state != boost::none); + EXPECT_NEAR(imu_augmented_state->first.timestamp(), TotalDuration(), 1e-6); + // Expect 3/4 of the acceleration for the last half of 
the imu msgs + const double expected_velocity_i = acceleration_i() * 0.75 * TotalDuration() * 0.5; + const gtsam::Vector3 expected_velocity = + state_c_quarter_bias_half_time_initial_pose_and_velocity.velocity() + + gtsam::Vector3(expected_velocity_i, expected_velocity_i, expected_velocity_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.velocity().matrix(), expected_velocity.matrix()); + // x = x_0 + v_0*t + 1/2*a*t^2 + const double expected_position_i = acceleration_i() * 0.75 * 0.5 * std::pow(TotalDuration() * 0.5, 2); + const gtsam::Vector3 expected_position = + state_c_quarter_bias_half_time_initial_pose_and_velocity.pose().translation() + + state_c_quarter_bias_half_time_initial_pose_and_velocity.velocity() * TotalDuration() * 0.5 + + gtsam::Vector3(expected_position_i, expected_position_i, expected_position_i); + ASSERT_PRED2(lc::MatrixEquality<6>, imu_augmented_state->first.pose().translation().matrix(), + expected_position.matrix()); + } +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/simulator/arm_sim/launch/arm_sim.launch b/localization/imu_augmentor/test/test_imu_augmentor_wrapper.test similarity index 86% rename from tools/simulator/arm_sim/launch/arm_sim.launch rename to localization/imu_augmentor/test/test_imu_augmentor_wrapper.test index e8f017c573..fede20d3b6 100644 --- a/tools/simulator/arm_sim/launch/arm_sim.launch +++ b/localization/imu_augmentor/test/test_imu_augmentor_wrapper.test @@ -16,6 +16,5 @@ - - + diff --git a/localization/imu_augmentor/test/test_utilities.cc b/localization/imu_augmentor/test/test_utilities.cc new file mode 100644 index 0000000000..de12c0c5db --- /dev/null +++ b/localization/imu_augmentor/test/test_utilities.cc @@ -0,0 +1,95 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT +#include +#include +#include + +namespace imu_augmentor { +namespace lc = localization_common; +namespace lm = localization_measurements; +namespace mc = msg_conversions; + +ImuAugmentorParams DefaultImuAugmentorParams() { + ImuAugmentorParams params; + params.gravity = gtsam::Vector3::Zero(); + params.body_T_imu = gtsam::Pose3::identity(); + // Filer params are already default none + params.filter = imu_integration::ImuFilterParams(); + params.gyro_sigma = 0.1; + params.accel_sigma = 0.1; + params.accel_bias_sigma = 0.1; + params.gyro_bias_sigma = 0.1; + params.integration_variance = 0.1; + params.bias_acc_omega_int = 0.1; + params.standstill_enabled = true; + return params; +} + +std::vector ConstantMeasurements(const Eigen::Vector3d& acceleration, + const Eigen::Vector3d& angular_velocity, + const int num_measurements, const lc::Time start_time, + const double time_increment) { + std::vector imu_measurements; + for (int i = 0; i < num_measurements; ++i) { + const lc::Time time = start_time + i * time_increment; + imu_measurements.emplace_back(lm::ImuMeasurement(acceleration, angular_velocity, time)); + } + return imu_measurements; +} + +std::vector ConstantAccelerationMeasurements(const Eigen::Vector3d& acceleration, + const int num_measurements, const lc::Time start_time, + const double time_increment) { + const Eigen::Vector3d zero_angular_velocity(Eigen::Vector3d::Zero()); + return ConstantMeasurements(acceleration, zero_angular_velocity, num_measurements, start_time, time_increment); +} + +std::vector ConstantAngularVelocityMeasurements(const Eigen::Vector3d& angular_velocity, + const int num_measurements, + const lc::Time start_time, + const double time_increment) { + const Eigen::Vector3d zero_acceleration(Eigen::Vector3d::Zero()); + return ConstantMeasurements(zero_acceleration, angular_velocity, num_measurements, start_time, time_increment); +} + +gtsam::Rot3 IntegrateAngularVelocities(const std::vector& imu_measurements, + const gtsam::Rot3& starting_orientation, + const localization_common::Time starting_time) { + gtsam::Rot3 integrated_orientation = starting_orientation; + lc::Time integrated_time = starting_time; + for (const auto& imu_measurement : imu_measurements) { + if (imu_measurement.timestamp <= integrated_time) continue; + const double dt = imu_measurement.timestamp - integrated_time; + integrated_time = imu_measurement.timestamp; + // TODO(rsoussan): subtract ang vel bias first!! add this as param!! + const gtsam::Rot3 orientation_update = gtsam::Rot3::Expmap(imu_measurement.angular_velocity * dt); + integrated_orientation = integrated_orientation * orientation_update; + } + return integrated_orientation; +} + +sensor_msgs::Imu ImuMsg(const localization_measurements::ImuMeasurement& imu_measurement) { + sensor_msgs::Imu imu_msg; + msg_conversions::VectorToMsg(imu_measurement.acceleration, imu_msg.linear_acceleration); + msg_conversions::VectorToMsg(imu_measurement.angular_velocity, imu_msg.angular_velocity); + lc::TimeToHeader(imu_measurement.timestamp, imu_msg.header); + return imu_msg; +} +} // namespace imu_augmentor diff --git a/localization/imu_augmentor/test/test_utilities.h b/localization/imu_augmentor/test/test_utilities.h new file mode 100644 index 0000000000..6a9397c339 --- /dev/null +++ b/localization/imu_augmentor/test/test_utilities.h @@ -0,0 +1,53 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. 
+ * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#ifndef IMU_AUGMENTOR_TEST_UTILITIES_H_ // NOLINT +#define IMU_AUGMENTOR_TEST_UTILITIES_H_ // NOLINT + +#include +#include +#include +#include + +#include + +namespace imu_augmentor { +ImuAugmentorParams DefaultImuAugmentorParams(); + +std::vector ConstantMeasurements(const Eigen::Vector3d& acceleration, + const Eigen::Vector3d& angular_velocity, + const int num_measurements, + const localization_common::Time start_time, + const double time_increment); + +std::vector ConstantAccelerationMeasurements( + const Eigen::Vector3d& acceleration, const int num_measurements, const localization_common::Time start_time, + const double time_increment); + +std::vector ConstantAngularVelocityMeasurements( + const Eigen::Vector3d& angular_velocity, const int num_measurements, const localization_common::Time start_time, + const double time_increment); + +gtsam::Rot3 IntegrateAngularVelocities(const std::vector& imu_measurements, + const gtsam::Rot3& starting_orientation, + const localization_common::Time starting_time); + +sensor_msgs::Imu ImuMsg(const localization_measurements::ImuMeasurement& imu_measurement); +} // namespace imu_augmentor + +#endif // IMU_AUGMENTOR_TEST_UTILITIES_H_ // NOLINT diff --git a/localization/imu_integration/CMakeLists.txt b/localization/imu_integration/CMakeLists.txt index 7d07b77bed..1a7b581447 100644 --- a/localization/imu_integration/CMakeLists.txt +++ b/localization/imu_integration/CMakeLists.txt @@ -15,17 +15,67 @@ #License for the specific language governing permissions and limitations #under the License. 
+cmake_minimum_required(VERSION 3.0) project(imu_integration) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + config_reader + localization_common + localization_measurements +) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} config_reader localization_common localization_measurements + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} - CATKIN_DEPENDS - DEPENDS gtsam + CATKIN_DEPENDS config_reader localization_common localization_measurements +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ${GLOG_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/dynamic_imu_filter.cc + src/identity_filter.cc + src/imu_filter.cc + src/imu_integrator.cc + src/latest_imu_integrator.cc + src/utilities.cc +) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) + + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} config_reader localization_common localization_measurements - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS gtsam +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) diff --git a/localization/imu_integration/package.xml b/localization/imu_integration/package.xml index dde9b9886c..f4bccb1e22 100644 --- a/localization/imu_integration/package.xml +++ b/localization/imu_integration/package.xml @@ -14,4 +14,11 @@ Astrobee Flight Software catkin + config_reader + localization_common + localization_measurements + config_reader + localization_common + localization_measurements + diff --git a/localization/interest_point/CMakeLists.txt b/localization/interest_point/CMakeLists.txt index 58ceb35cdb..2989c725ad 100644 --- a/localization/interest_point/CMakeLists.txt +++ b/localization/interest_point/CMakeLists.txt @@ -15,48 +15,55 @@ # License for the specific language governing permissions and limitations # under the License. 
-cmake_minimum_required(VERSION 2.8.3) +cmake_minimum_required(VERSION 3.0) project(interest_point) -set(LIBS - ${GLOG_LIBRARIES} - ${GFLAGS_LIBRARIES} - ${OpenCV_LIBRARIES} - ${OPENMVG_LIBRARIES} - ${CERES_LIBRARY} - ) -set(INCLUDES - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${CMAKE_SOURCE_DIR}/common/include - ${CERES_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${OpenCV_INCLUDE_DIRS} - ${OPENMVG_INCLUDE_DIRS} - ${GFLAGS_INCLUDE_DIRS} - ${GLOG_INCLUDE_DIRS} +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp ) +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) +find_package(Eigen3 REQUIRED) +find_package(OpenMVG QUIET REQUIRED) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + catkin_package( INCLUDE_DIRS include - LIBRARIES interest_point + LIBRARIES interest_point ${OpenCV_LIBRARIES} CATKIN_DEPENDS # DEPENDS system_lib ) -create_library(TARGET interest_point - LIBS ${LIBS} - INC ${INCLUDES} -) +########### +## Build ## +########### -set(INTEREST_LIBS - interest_point - ff_common - ${LIBS}) +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${OpenCV_INCLUDE_DIRS} + ${OPENMVG_INCLUDE_DIRS} +) -create_tool_targets(DIR tools - LIBS ${INTEREST_LIBS} - INC ${INCLUDES} +# Declare C++ libraries +add_library(interest_point + src/agast_score.cc + src/brisk.cc + src/essential.cc + src/matching.cc ) +add_dependencies(interest_point ${catkin_EXPORTED_TARGETS}) +target_link_libraries(interest_point ${OPENMVG_LIBRARIES} ${catkin_LIBRARIES} ${OpenCV_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -66,10 +73,25 @@ if(CATKIN_ENABLE_TESTING) test/test_matching.cc ) target_link_libraries(test_matching - interest_point + interest_point glog ) endif() -set(INTEREST_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/include PARENT_SCOPE) +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/localization/interest_point/test/test_matching.cc b/localization/interest_point/test/test_matching.cc index 6f2ce0d3d1..d623f8c6ed 100644 --- a/localization/interest_point/test/test_matching.cc +++ b/localization/interest_point/test/test_matching.cc @@ -73,3 +73,9 @@ TEST_F(MatchingTest, ORGBRISK) { EXPECT_EQ(64, descriptor1.cols); EXPECT_LT(50u, matches.size()); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/lk_optical_flow/CMakeLists.txt b/localization/lk_optical_flow/CMakeLists.txt index 5ff2d04f17..79b87e341a 100644 --- a/localization/lk_optical_flow/CMakeLists.txt +++ b/localization/lk_optical_flow/CMakeLists.txt @@ -15,22 +15,70 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(lk_optical_flow) -if (USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + cv_bridge + image_transport + nodelet + camera + ff_util +) catkin_package( + INCLUDE_DIRS include LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS roscpp cv_bridge image_transport nodelet - DEPENDS lk_optical_flow + CATKIN_DEPENDS roscpp cv_bridge image_transport nodelet camera ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET lk_optical_flow - LIBS ff_nodelet camera config_reader ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs config_reader camera +# Declare C++ libraries +add_library(lk_optical_flow + src/lk_optical_flow.cc + src/lk_optical_flow_nodelet.cc ) +add_dependencies(lk_optical_flow ${catkin_EXPORTED_TARGETS}) +target_link_libraries(lk_optical_flow ${catkin_LIBRARIES}) + +############# +## Install ## +############# -install_launch_files() +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) -endif (USE_ROS) +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/localization/lk_optical_flow/package.xml b/localization/lk_optical_flow/package.xml index 4ae2b03065..33799fcf96 100644 --- a/localization/lk_optical_flow/package.xml +++ b/localization/lk_optical_flow/package.xml @@ -19,11 +19,15 @@ roscpp nodelet ff_msgs + camera + ff_util cv_bridge image_transport roscpp nodelet ff_msgs + camera + ff_util diff --git a/localization/localization_common/CMakeLists.txt b/localization/localization_common/CMakeLists.txt index 953e1db213..f804327bff 100644 --- a/localization/localization_common/CMakeLists.txt +++ b/localization/localization_common/CMakeLists.txt @@ -15,17 +15,88 @@ #License for the specific language governing permissions and limitations #under the License. 
+cmake_minimum_required(VERSION 3.0) project(localization_common) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + camera + config_reader + msg_conversions + ff_msgs +) + +# Find GTSAM +find_package(GTSAM REQUIRED) + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + catkin_package( - LIBRARIES ${PROJECT_NAME} ${GTSAM_LIBRARIES} camera config_reader msg_conversions - INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} ${Boost_INCLUDE_DIRS} - CATKIN_DEPENDS - DEPENDS gtsam + INCLUDE_DIRS include + LIBRARIES ${PROJECT_NAME} ${GTSAM_LIBRARIES} gtsam + CATKIN_DEPENDS + camera + config_reader + msg_conversions + ff_msgs +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${GTSAM_INCLUDE_DIR} + ${EIGEN3_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/averager.cc + src/combined_nav_state.cc + src/combined_nav_state_covariances.cc + src/rate_timer.cc + src/ros_timer.cc + src/time.cc + src/timer.cc + src/test_utilities.cc + src/utilities.cc ) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES} gtsam) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + add_rostest_gtest(test_utilities_localization_common + test/test_utilities.test + test/test_utilities.cc + ) + target_link_libraries(test_utilities_localization_common + ${PROJECT_NAME} + ) +endif() -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} camera config_reader gtsam msg_conversions - INC ${catkin_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} - DEPS +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) diff --git a/localization/localization_common/include/localization_common/image_correspondences.h b/localization/localization_common/include/localization_common/image_correspondences.h new file mode 100644 index 0000000000..192d4659f9 --- /dev/null +++ b/localization/localization_common/include/localization_common/image_correspondences.h @@ -0,0 +1,43 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef LOCALIZATION_COMMON_IMAGE_CORRESPONDENCES_H_ +#define LOCALIZATION_COMMON_IMAGE_CORRESPONDENCES_H_ + +#include + +#include + +namespace localization_common { +struct ImageCorrespondences { + ImageCorrespondences(const std::vector& image_points, const std::vector& points_3d) + : image_points(image_points), points_3d(points_3d) {} + ImageCorrespondences() {} + + void AddCorrespondence(const Eigen::Vector2d& image_point, const Eigen::Vector3d& point_3d) { + image_points.emplace_back(image_point); + points_3d.emplace_back(point_3d); + } + + size_t size() const { return image_points.size(); } + + std::vector image_points; + std::vector points_3d; +}; +} // namespace localization_common + +#endif // LOCALIZATION_COMMON_IMAGE_CORRESPONDENCES_H_ diff --git a/localization/localization_common/include/localization_common/pose_with_covariance.h b/localization/localization_common/include/localization_common/pose_with_covariance.h new file mode 100644 index 0000000000..67de44e0c9 --- /dev/null +++ b/localization/localization_common/include/localization_common/pose_with_covariance.h @@ -0,0 +1,37 @@ + +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#ifndef LOCALIZATION_COMMON_POSE_WITH_COVARIANCE_H_ +#define LOCALIZATION_COMMON_POSE_WITH_COVARIANCE_H_ + +#include + +namespace localization_common { +using PoseCovariance = Eigen::Matrix; +struct PoseWithCovariance { + PoseWithCovariance(const Eigen::Isometry3d& pose, const PoseCovariance& covariance) + : pose(pose), covariance(covariance) {} + Eigen::Isometry3d pose; + PoseCovariance covariance; + + EIGEN_MAKE_ALIGNED_OPERATOR_NEW +}; +} // namespace localization_common + +#endif // LOCALIZATION_COMMON_POSE_WITH_COVARIANCE_H_ diff --git a/localization/localization_common/include/localization_common/test_utilities.h b/localization/localization_common/include/localization_common/test_utilities.h new file mode 100644 index 0000000000..20b03e5237 --- /dev/null +++ b/localization/localization_common/include/localization_common/test_utilities.h @@ -0,0 +1,96 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#ifndef LOCALIZATION_COMMON_TEST_UTILITIES_H_ +#define LOCALIZATION_COMMON_TEST_UTILITIES_H_ + +#include + +namespace localization_common { +class Sampler { + public: + Sampler(const double min, const double max, const double count); + double Sample(const int increment) const; + + private: + const double scale_; + const double min_; +}; + +// Selected from [-100, 100] +double RandomDouble(); + +// Selected from [0, 100] +double RandomPositiveDouble(); + +// Selected from [min, max] +double RandomDouble(const double min, const double max); + +double RandomGaussianDouble(const double mean, const double stddev); + +bool RandomBool(); + +// Returns white noise with set stddev +double Noise(const double stddev); + +// Each index ranges from [-100, 100] +Eigen::Vector3d RandomVector(); + +// Translation ranges from [-100, 100] +// Rotation spans all possible rotations +gtsam::Pose3 RandomPose(); + +Eigen::Isometry3d RandomIsometry3d(); + +Eigen::Affine3d RandomAffine3d(); + +// Focal lengths and principal points selected from [0.1, 1000] +Eigen::Matrix3d RandomIntrinsics(); + +Eigen::Isometry3d AddNoiseToIsometry3d(const Eigen::Isometry3d& pose, const double translation_stddev, + const double rotation_stddev); + +template +Eigen::Matrix AddNoiseToVector(const Eigen::Matrix& vector, const double noise_stddev); + +// Template on tolerance power so this can be used with gtest's ASSERT_PRED2. +// If the tolerance were passed as an argument, this would no longer be a binary function valid for use with +// ASSERT_PRED2. +template <int TolerancePower> +bool MatrixEquality(const Eigen::MatrixXd& lhs, const Eigen::MatrixXd& rhs); + +template +Eigen::Matrix AddNoiseToVector(const Eigen::Matrix& vector, const double noise_stddev) { + Eigen::Matrix noisy_vector = vector; + for (int i = 0; i < vector.rows(); ++i) { + noisy_vector(i, 0) += Noise(noise_stddev); + } + return noisy_vector; +} + +template <int TolerancePower> +bool MatrixEquality(const Eigen::MatrixXd& lhs, const Eigen::MatrixXd& rhs) { + constexpr double tolerance = std::pow(10, -1.0 * TolerancePower); + // Separately check for zero matrices since isApprox fails for these + if (lhs.isZero(tolerance) || rhs.isZero(tolerance)) { + return lhs.isZero(tolerance) && rhs.isZero(tolerance); + } + return lhs.isApprox(rhs, tolerance); +} +} // namespace localization_common +#endif // LOCALIZATION_COMMON_TEST_UTILITIES_H_ diff --git a/localization/localization_common/include/localization_common/utilities.h b/localization/localization_common/include/localization_common/utilities.h index 112fb75b5b..0ebd379b46 100644 --- a/localization/localization_common/include/localization_common/utilities.h +++ b/localization/localization_common/include/localization_common/utilities.h @@ -38,6 +38,7 @@ #include #include +#include namespace localization_common { gtsam::Pose3 LoadTransform(config_reader::ConfigReader& config, const std::string& transform_config_name); @@ -120,6 +121,21 @@ void CombinedNavStateCovariancesToMsg(const CombinedNavStateCovariances& covaria msg_conversions::VariancesToCovDiag(covariances.position_variances(), &loc_msg.cov_diag[12]); } +Eigen::Isometry3d Isometry3d(const Eigen::Vector3d& translation, const Eigen::Matrix3d& rotation); + +double Deg2Rad(const double degrees); + +double Rad2Deg(const double radians); + +// Assumes angles in degrees, ordered as rho, phi, z +Eigen::Vector3d CylindricalToCartesian(const Eigen::Vector3d& cylindrical_coordinates); + +// Uses Euler Angles in intrinsic ypr representation in degrees +Eigen::Matrix3d RotationFromEulerAngles(const double yaw, const double
pitch, const double roll); + +Eigen::Vector2d FocalLengths(const Eigen::Matrix3d& intrinsics); + +Eigen::Vector2d PrincipalPoints(const Eigen::Matrix3d& intrinsics); } // namespace localization_common #endif // LOCALIZATION_COMMON_UTILITIES_H_ diff --git a/localization/localization_common/package.xml b/localization/localization_common/package.xml index defe706423..2663824daa 100644 --- a/localization/localization_common/package.xml +++ b/localization/localization_common/package.xml @@ -14,4 +14,12 @@ Astrobee Flight Software catkin + camera + config_reader + msg_conversions + ff_msgs + camera + config_reader + msg_conversions + ff_msgs diff --git a/localization/localization_common/src/test_utilities.cc b/localization/localization_common/src/test_utilities.cc new file mode 100644 index 0000000000..1943e386dc --- /dev/null +++ b/localization/localization_common/src/test_utilities.cc @@ -0,0 +1,109 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include +#include + +#include + +namespace localization_common { +Sampler::Sampler(const double min, const double max, const double count) + : min_(min), scale_((max - min) / static_cast(count - 1)) {} + +double Sampler::Sample(const int increment) const { return min_ + increment * scale_; } + +double RandomDouble() { return RandomDouble(-100, 100); } + +double RandomPositiveDouble() { return RandomDouble(0, 100); } + +double RandomDouble(const double min, const double max) { + std::random_device dev; + std::mt19937 rng(dev()); + return std::uniform_real_distribution(min, max)(rng); +} + +double RandomGaussianDouble(const double mean, const double stddev) { + std::random_device dev; + std::mt19937 rng(dev()); + std::normal_distribution distribution(mean, stddev); + return distribution(rng); +} + +bool RandomBool() { return RandomDouble(0, 1) < 0.5; } + +double Noise(const double stddev) { return RandomGaussianDouble(0.0, stddev); } + +Eigen::Vector3d RandomVector() { + // Eigen::Vector3 is constrained to [-1, 1] + return RandomDouble() * Eigen::Vector3d::Random(); +} + +gtsam::Pose3 RandomPose() { + std::random_device dev; + std::mt19937 rng(dev()); + gtsam::Rot3 rot = gtsam::Rot3::Random(rng); + gtsam::Point3 trans = RandomVector(); + return gtsam::Pose3(rot, trans); +} + +Eigen::Isometry3d RandomIsometry3d() { + const gtsam::Pose3 random_pose = RandomPose(); + return EigenPose(random_pose); +} + +Eigen::Affine3d RandomAffine3d() { + const Eigen::Isometry3d random_pose = RandomIsometry3d(); + const double scale = RandomPositiveDouble(); + Eigen::Affine3d random_affine3d = Eigen::Affine3d::Identity(); + random_affine3d.translation() = random_pose.translation(); + random_affine3d.linear() = scale * random_pose.linear(); + return random_affine3d; +} + +Eigen::Matrix3d RandomIntrinsics() { + Eigen::Matrix3d intrinsics = 
Eigen::Matrix3d::Identity(); + const double f_x = RandomGaussianDouble(500, 20); + // Ensure that focal lengths are quite similar + const double f_y = RandomGaussianDouble(f_x, 5); + const double p_x = RandomGaussianDouble(640, 50); + const double p_y = RandomGaussianDouble(480, 50); + + intrinsics(0, 0) = f_x; + intrinsics(1, 1) = f_y; + intrinsics(0, 2) = p_x; + intrinsics(1, 2) = p_y; + return intrinsics; +} + +Eigen::Isometry3d AddNoiseToIsometry3d(const Eigen::Isometry3d& pose, const double translation_stddev, + const double rotation_stddev) { + const double mean = 0.0; + std::random_device dev; + std::mt19937 rng(dev()); + std::normal_distribution translation_distribution(mean, translation_stddev); + std::normal_distribution rotation_distribution(mean, rotation_stddev); + + const Eigen::Vector3d translation_noise(translation_distribution(rng), translation_distribution(rng), + translation_distribution(rng)); + const Eigen::Matrix3d rotation_noise( + RotationFromEulerAngles(rotation_distribution(rng), rotation_distribution(rng), rotation_distribution(rng))); + + Eigen::Isometry3d pose_noise = Isometry3d(translation_noise, rotation_noise); + return pose * pose_noise; +} +} // namespace localization_common diff --git a/localization/localization_common/src/utilities.cc b/localization/localization_common/src/utilities.cc index 6386db115e..6cb275d65b 100644 --- a/localization/localization_common/src/utilities.cc +++ b/localization/localization_common/src/utilities.cc @@ -146,4 +146,41 @@ gtsam::Vector3 RemoveGravityFromAccelerometerMeasurement(const gtsam::Vector3& g // Add gravity correction to measurement to offset negatively measured gravity return (uncorrected_accelerometer_measurement + imu_F_gravity); } + +Eigen::Isometry3d Isometry3d(const Eigen::Vector3d& translation, const Eigen::Matrix3d& rotation) { + Eigen::Isometry3d pose = Eigen::Isometry3d::Identity(); + pose.translation() = translation; + pose.linear() = rotation; + return pose; +} + +double Deg2Rad(const double degrees) { return M_PI / 180.0 * degrees; } + +double Rad2Deg(const double radians) { return 180.0 / M_PI * radians; } + +Eigen::Vector3d CylindricalToCartesian(const Eigen::Vector3d& cylindrical_coordinates) { + Eigen::Vector3d cartesian_coordinates; + cartesian_coordinates.x() = cylindrical_coordinates[0] * std::cos(Deg2Rad(cylindrical_coordinates[1])); + cartesian_coordinates.y() = cylindrical_coordinates[0] * std::sin(Deg2Rad(cylindrical_coordinates[1])); + cartesian_coordinates.z() = cylindrical_coordinates[2]; + return cartesian_coordinates; +} + +Eigen::Matrix3d RotationFromEulerAngles(const double yaw, const double pitch, const double roll) { + const Eigen::AngleAxisd yaw_aa = Eigen::AngleAxisd(Deg2Rad(yaw), Eigen::Vector3d::UnitZ()); + const Eigen::AngleAxisd pitch_aa = Eigen::AngleAxisd(Deg2Rad(pitch), Eigen::Vector3d::UnitY()); + const Eigen::AngleAxisd roll_aa = Eigen::AngleAxisd(Deg2Rad(roll), Eigen::Vector3d::UnitX()); + // For intrinsics euler angle convention, yaw, pitch, then roll in intrinsic body frame is equivalent to + // roll, pitch, then yaw in extrinsic global frame + const Eigen::Matrix3d rotation(roll_aa * pitch_aa * yaw_aa); + return rotation; +} + +Eigen::Vector2d FocalLengths(const Eigen::Matrix3d& intrinsics) { + return Eigen::Vector2d(intrinsics(0, 0), intrinsics(1, 1)); +} + +Eigen::Vector2d PrincipalPoints(const Eigen::Matrix3d& intrinsics) { + return Eigen::Vector2d(intrinsics(0, 2), intrinsics(1, 2)); +} } // namespace localization_common diff --git 
a/localization/localization_common/test/test_utilities.cc b/localization/localization_common/test/test_utilities.cc new file mode 100644 index 0000000000..b871e0465d --- /dev/null +++ b/localization/localization_common/test/test_utilities.cc @@ -0,0 +1,53 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include +#include +#include + +#include + +namespace lc = localization_common; +TEST(UtilitiesTester, Deg2Rad2Deg) { + for (int i = 0; i < 500; ++i) { + const double degrees = lc::RandomDouble(-360.0, 360.0); + const double radians = lc::Deg2Rad(degrees); + const double degrees_again = lc::Rad2Deg(radians); + ASSERT_NEAR(degrees, degrees_again, 1e-6); + } +} + +TEST(UtilitiesTester, EulerAngles) { + for (int i = 0; i < 500; ++i) { + const double yaw = lc::RandomDouble(-360.0, 360.0); + const double pitch = lc::RandomDouble(-360.0, 360.0); + const double roll = lc::RandomDouble(-360.0, 360.0); + const Eigen::Matrix3d rotation = lc::RotationFromEulerAngles(yaw, pitch, roll); + // ypr intrinsics equivalent to rpy extrinsics, and Eigen uses extrinsics convention + const Eigen::Vector3d angles = rotation.eulerAngles(0, 1, 2); + const Eigen::Matrix3d rotation_again = + lc::RotationFromEulerAngles(lc::Rad2Deg(angles[2]), lc::Rad2Deg(angles[1]), lc::Rad2Deg(angles[0])); + ASSERT_TRUE(rotation.matrix().isApprox(rotation_again.matrix(), 1e-6)); + } +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/simulator/eps_sim/launch/eps_sim.launch b/localization/localization_common/test/test_utilities.test similarity index 86% rename from tools/simulator/eps_sim/launch/eps_sim.launch rename to localization/localization_common/test/test_utilities.test index 7cd4e5c75e..ff23a5eec9 100644 --- a/tools/simulator/eps_sim/launch/eps_sim.launch +++ b/localization/localization_common/test/test_utilities.test @@ -16,6 +16,5 @@ - - + diff --git a/localization/localization_manager/CMakeLists.txt b/localization/localization_manager/CMakeLists.txt index 17964c6dfb..a8d82d33ca 100644 --- a/localization/localization_manager/CMakeLists.txt +++ b/localization/localization_manager/CMakeLists.txt @@ -15,8 +15,24 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(localization_manager) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + actionlib + nodelet + pluginlib + ff_msgs + std_msgs + std_srvs + ff_util +) + catkin_package( LIBRARIES localization_manager @@ -28,12 +44,44 @@ catkin_package( ff_msgs std_msgs std_srvs + ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(localization_manager + src/localization_manager_nodelet.cc +) +add_dependencies(localization_manager ${catkin_EXPORTED_TARGETS}) +target_link_libraries(localization_manager ${catkin_LIBRARIES}) + + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_library(TARGET localization_manager - LIBS ${catkin_LIBRARIES} ff_common ff_nodelet config_server - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/localization/localization_manager/include/localization_manager/localization_pipeline.h b/localization/localization_manager/include/localization_manager/localization_pipeline.h index 0f6911a90f..060652725d 100644 --- a/localization/localization_manager/include/localization_manager/localization_pipeline.h +++ b/localization/localization_manager/include/localization_manager/localization_pipeline.h @@ -29,7 +29,7 @@ #include #include -// C++11 +// C++ includes #include #include #include diff --git a/localization/localization_manager/package.xml b/localization/localization_manager/package.xml index bf0d3efd51..d52b6f709e 100644 --- a/localization/localization_manager/package.xml +++ b/localization/localization_manager/package.xml @@ -22,6 +22,7 @@ actionlib nodelet pluginlib + ff_util roscpp std_msgs std_srvs @@ -30,6 +31,7 @@ planner nodelet pluginlib + ff_util diff --git a/localization/localization_measurements/CMakeLists.txt b/localization/localization_measurements/CMakeLists.txt index c688bcfe03..648695cab9 100644 --- a/localization/localization_measurements/CMakeLists.txt +++ b/localization/localization_measurements/CMakeLists.txt @@ -15,21 +15,57 @@ #License for the specific language governing permissions and limitations #under the License. 
+cmake_minimum_required(VERSION 3.0) project(localization_measurements) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + ff_msgs + config_reader + localization_common + msg_conversions +) + + +# System dependencies are found with CMake's conventions +find_package(Eigen3 REQUIRED) + catkin_package( - LIBRARIES ${PROJECT_NAME} ${GTSAM_LIBRARIES} ff_common localization_common - INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} - CATKIN_DEPENDS - DEPENDS gtsam ff_msgs + LIBRARIES ${PROJECT_NAME} + INCLUDE_DIRS include + CATKIN_DEPENDS ff_msgs config_reader localization_common msg_conversions +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ff_common gtsam localization_common - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs +# create_library(TARGET ${PROJECT_NAME} +# LIBS ${catkin_LIBRARIES} ff_common gtsam localization_common +# INC ${catkin_INCLUDE_DIRS} +# DEPS ff_msgs +# ) + + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/measurement_conversions.cc ) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) + + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) add_rostest_gtest(test_plane @@ -49,3 +85,21 @@ if(CATKIN_ENABLE_TESTING) ) endif() + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/localization/localization_measurements/package.xml b/localization/localization_measurements/package.xml index d1ec591cc3..27acc9a3a3 100644 --- a/localization/localization_measurements/package.xml +++ b/localization/localization_measurements/package.xml @@ -15,5 +15,11 @@ catkin ff_msgs + config_reader + localization_common + msg_conversions ff_msgs + config_reader + localization_common + msg_conversions diff --git a/localization/localization_measurements/test/test_measurement_conversions.cc b/localization/localization_measurements/test/test_measurement_conversions.cc index 31657681f4..2d853c4846 100644 --- a/localization/localization_measurements/test/test_measurement_conversions.cc +++ b/localization/localization_measurements/test/test_measurement_conversions.cc @@ -89,3 +89,9 @@ TEST(MeasurementsConversionTester, MakeHandrailEndpoints) { EXPECT_NEAR(trans.z(), endpoints.second.z(), 1e-6); } } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/localization_measurements/test/test_plane.cc b/localization/localization_measurements/test/test_plane.cc index 9b8abd72a2..3e049390d0 100644 --- a/localization/localization_measurements/test/test_plane.cc +++ b/localization/localization_measurements/test/test_plane.cc @@ -158,3 +158,9 @@ TEST(PlaneTester, PointToPlaneDistanceZAxisPlane) { EXPECT_NEAR(0, distance, 1e-6); } } + +// 
Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/localization_node/CMakeLists.txt b/localization/localization_node/CMakeLists.txt index bac91ae60f..d5f6478a2a 100644 --- a/localization/localization_node/CMakeLists.txt +++ b/localization/localization_node/CMakeLists.txt @@ -15,23 +15,129 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(localization_node) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + cv_bridge + image_transport + nodelet + camera + ff_util + sparse_mapping + msg_conversions +) + +# System dependencies are found with CMake's conventions +find_package(OpenMVG QUIET REQUIRED) +find_package(Eigen3 REQUIRED) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + catkin_package( + INCLUDE_DIRS include LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS roscpp message_runtime std_msgs sensor_msgs cv_bridge image_transport rosbag ff_msgs tf2 sparse_mapping + CATKIN_DEPENDS + roscpp + message_runtime + std_msgs + sensor_msgs + cv_bridge + image_transport + rosbag + ff_msgs + tf2 + sparse_mapping + msg_conversions +) + + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(localization_node + src/localization.cc + src/localization_nodelet.cc +) +add_dependencies(localization_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(localization_node + ${catkin_LIBRARIES} + ${OpenCV_LIBRARIES} + ${OPENMVG_LIBRARIES} +) + +## Declare a C++ executable: extract_image_bag +add_executable(extract_image_bag tools/extract_image_bag.cc) +add_dependencies(extract_image_bag ${catkin_EXPORTED_TARGETS}) +target_link_libraries(extract_image_bag + localization_node gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: interest_points_test_bag +add_executable(interest_points_test_bag tools/interest_points_test_bag.cc) +add_dependencies(interest_points_test_bag ${catkin_EXPORTED_TARGETS}) +target_link_libraries(interest_points_test_bag + localization_node gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: merge_bags +add_executable(merge_bags tools/merge_bags.cc) +add_dependencies(merge_bags ${catkin_EXPORTED_TARGETS}) +target_link_libraries(merge_bags + localization_node gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_library(TARGET localization_node - LIBS ${SPARSE_MAPPING_LIBRARIES} ff_nodelet msg_conversions ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} - DEPS sparse_mapping ff_msgs +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -create_tool_targets(DIR tools - LIBS ${SPARSE_MAPPING_LIBRARIES} localization_node ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} - DEPS localization_node +# Mark cpp header files for installation +install(DIRECTORY 
include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) -install_launch_files() +# Install C++ executables +install(TARGETS extract_image_bag DESTINATION bin) +install(TARGETS interest_points_test_bag DESTINATION bin) +install(TARGETS merge_bags DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/extract_image_bag share/${PROJECT_NAME} + COMMAND ln -s ../../bin/interest_points_test_bag share/${PROJECT_NAME} + COMMAND ln -s ../../bin/merge_bags share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/localization/localization_node/package.xml b/localization/localization_node/package.xml index 197f369b21..8af7f9450e 100644 --- a/localization/localization_node/package.xml +++ b/localization/localization_node/package.xml @@ -25,6 +25,7 @@ nodelet tf2 sparse_mapping + msg_conversions roscpp message_runtime std_msgs @@ -36,6 +37,7 @@ nodelet tf2 sparse_mapping + msg_conversions diff --git a/localization/marker_tracking/CMakeLists.txt b/localization/marker_tracking/CMakeLists.txt index 2c2042b311..24e0b677ab 100644 --- a/localization/marker_tracking/CMakeLists.txt +++ b/localization/marker_tracking/CMakeLists.txt @@ -15,35 +15,180 @@ # License for the specific language governing permissions and limitations # under the License. -set(LIBS - ${ALVAR_LIBRARIES} - ${OpenCV_LIBRARIES} - camera - ff_common - config_reader - ${GLOG_LIBRARIES} - ${GFLAGS_LIBRARIES} - ) -set(INCLUDES +cmake_minimum_required(VERSION 3.0) +project(marker_tracking) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_util + message_runtime + std_msgs + geometry_msgs + cv_bridge + image_transport + camera msg_conversions +) + +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) +find_package(Eigen3 REQUIRED) +find_package(Alvar REQUIRED) +find_package(Alvar REQUIRED) + + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + +catkin_package( + LIBRARIES ${PROJECT_NAME} + CATKIN_DEPENDS roscpp ff_util message_runtime std_msgs geometry_msgs cv_bridge image_transport camera msg_conversions +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ${ALVAR_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - ${GLOG_INCLUDE_DIRS} - ${GFLAGS_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(marker_tracking + src/marker_tracking/arconfigio.cc + src/marker_tracking/arxmlio.cc + src/marker_tracking/labelling_method.cc + src/marker_tracking/marker_detector.cc +) +add_dependencies(marker_tracking ${catkin_EXPORTED_TARGETS}) +target_link_libraries(marker_tracking ${catkin_LIBRARIES} ${ALVAR_LIBRARIES}) + +## Declare a C++ executable: generate_ar_target +add_executable(generate_ar_target tools/marker_tracking/generate_ar_target.cc) +add_dependencies(generate_ar_target ${catkin_EXPORTED_TARGETS}) +target_link_libraries(generate_ar_target + marker_tracking gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: generate_multiscale_target +add_executable(generate_multiscale_target 
tools/marker_tracking/generate_multiscale_target.cc) +add_dependencies(generate_multiscale_target ${catkin_EXPORTED_TARGETS}) +target_link_libraries(generate_multiscale_target + marker_tracking gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: generate_svg_markers +add_executable(generate_svg_markers tools/marker_tracking/generate_svg_markers.cc) +add_dependencies(generate_svg_markers ${catkin_EXPORTED_TARGETS}) +target_link_libraries(generate_svg_markers + marker_tracking gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: identify_marker_in_image +add_executable(identify_marker_in_image tools/marker_tracking/identify_marker_in_image.cc) +add_dependencies(identify_marker_in_image ${catkin_EXPORTED_TARGETS}) +target_link_libraries(identify_marker_in_image + marker_tracking gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: marker_config2xml +add_executable(marker_config2xml tools/marker_tracking/marker_config2xml.cc) +add_dependencies(marker_config2xml ${catkin_EXPORTED_TARGETS}) +target_link_libraries(marker_config2xml + marker_tracking gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: markers_to_Kalibr +add_executable(markers_to_Kalibr tools/marker_tracking/markers_to_Kalibr.cc) +add_dependencies(markers_to_Kalibr ${catkin_EXPORTED_TARGETS}) +target_link_libraries(markers_to_Kalibr + marker_tracking gflags glog yaml-cpp ${catkin_LIBRARIES}) + +# Declare C++ libraries +add_library(marker_tracking_node + src/marker_tracking_node/marker_tracker.cc + src/marker_tracking_node/marker_tracking_nodelet.cc +) +add_dependencies(marker_tracking_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(marker_tracking_node marker_tracking ${catkin_LIBRARIES}) + +## Declare a C++ executable: overhead_tracking_node +add_executable(overhead_tracking_node tools/marker_tracking_node/overhead_tracking_node.cc) +add_dependencies(overhead_tracking_node ${catkin_EXPORTED_TARGETS}) +target_link_libraries(overhead_tracking_node + marker_tracking_node gflags glog ${catkin_LIBRARIES}) + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + add_rostest_gtest(test_arxmlio test/test_arxmlio.test + test/test_arxmlio.cc + ) + target_link_libraries(test_arxmlio + marker_tracking + ) + + add_rostest_gtest(test_marker_config_loader test/test_marker_config_loader.test + test/test_marker_config_loader.cc + ) + target_link_libraries(test_marker_config_loader + marker_tracking ) -set(MARKER_TRACKING_LIBRARIES marker_tracking ${LIBS}) + add_rostest_gtest(test_marker_detector test/test_marker_detector.test + test/test_marker_detector.cc + ) + target_link_libraries(test_marker_detector + marker_tracking + ) +endif() + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS marker_tracking_node + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Install C++ executables 
+install(TARGETS overhead_tracking_node DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/overhead_tracking_node share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -create_library(TARGET marker_tracking - LIBS ${LIBS} - INC ${INCLUDES} - DEPS camera) +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) -if (YAMLCPP_FOUND) - create_tool_targets(DIR tools - LIBS marker_tracking ${YAMLCPP_LIBRARIES} - INC ${INCLUDES}) -endif (YAMLCPP_FOUND) +install(FILES + tools/marker_tracking_node/features_counter.py + DESTINATION share/${PROJECT_NAME}) -if (USE_ROS) - add_subdirectory(ros) -endif (USE_ROS) diff --git a/localization/marker_tracking/ros/include/marker_tracking_node/marker_tracker.h b/localization/marker_tracking/include/marker_tracking_node/marker_tracker.h similarity index 100% rename from localization/marker_tracking/ros/include/marker_tracking_node/marker_tracker.h rename to localization/marker_tracking/include/marker_tracking_node/marker_tracker.h diff --git a/localization/marker_tracking/ros/include/marker_tracking_node/marker_tracking_nodelet.h b/localization/marker_tracking/include/marker_tracking_node/marker_tracking_nodelet.h similarity index 100% rename from localization/marker_tracking/ros/include/marker_tracking_node/marker_tracking_nodelet.h rename to localization/marker_tracking/include/marker_tracking_node/marker_tracking_nodelet.h diff --git a/localization/marker_tracking/ros/launch/body_tags.xml b/localization/marker_tracking/launch/body_tags.xml similarity index 100% rename from localization/marker_tracking/ros/launch/body_tags.xml rename to localization/marker_tracking/launch/body_tags.xml diff --git a/localization/marker_tracking/ros/launch/dock_targets.m b/localization/marker_tracking/launch/dock_targets.m similarity index 100% rename from localization/marker_tracking/ros/launch/dock_targets.m rename to localization/marker_tracking/launch/dock_targets.m diff --git a/localization/marker_tracking/ros/launch/granite_lab_tags.xml b/localization/marker_tracking/launch/granite_lab_tags.xml similarity index 100% rename from localization/marker_tracking/ros/launch/granite_lab_tags.xml rename to localization/marker_tracking/launch/granite_lab_tags.xml diff --git a/localization/marker_tracking/ros/launch/granitetable_1280_960.xml b/localization/marker_tracking/launch/granitetable_1280_960.xml similarity index 100% rename from localization/marker_tracking/ros/launch/granitetable_1280_960.xml rename to localization/marker_tracking/launch/granitetable_1280_960.xml diff --git a/localization/marker_tracking/ros/launch/marker_tracking.launch b/localization/marker_tracking/launch/marker_tracking.launch similarity index 100% rename from localization/marker_tracking/ros/launch/marker_tracking.launch rename to localization/marker_tracking/launch/marker_tracking.launch diff --git a/localization/marker_tracking/ros/launch/marker_tracking_overhead.launch b/localization/marker_tracking/launch/marker_tracking_overhead.launch similarity index 100% rename from localization/marker_tracking/ros/launch/marker_tracking_overhead.launch rename to localization/marker_tracking/launch/marker_tracking_overhead.launch diff --git a/localization/marker_tracking/ros/launch/new_dock_targets.m b/localization/marker_tracking/launch/new_dock_targets.m similarity index 100% rename from localization/marker_tracking/ros/launch/new_dock_targets.m 
rename to localization/marker_tracking/launch/new_dock_targets.m diff --git a/localization/marker_tracking/ros/nodelet_plugins.xml b/localization/marker_tracking/nodelet_plugins.xml similarity index 84% rename from localization/marker_tracking/ros/nodelet_plugins.xml rename to localization/marker_tracking/nodelet_plugins.xml index 44e2ded10b..faf1b6d995 100644 --- a/localization/marker_tracking/ros/nodelet_plugins.xml +++ b/localization/marker_tracking/nodelet_plugins.xml @@ -1,4 +1,4 @@ - + diff --git a/localization/marker_tracking/ros/package.xml b/localization/marker_tracking/package.xml similarity index 80% rename from localization/marker_tracking/ros/package.xml rename to localization/marker_tracking/package.xml index 322f434e6e..993bfb7e8a 100644 --- a/localization/marker_tracking/ros/package.xml +++ b/localization/marker_tracking/package.xml @@ -15,7 +15,8 @@ catkin roscpp - message_generation + ff_util + message_runtime std_msgs geometry_msgs sensor_msgs @@ -23,7 +24,10 @@ image_transport ff_msgs nodelet + camera + msg_conversions roscpp + ff_util message_runtime std_msgs geometry_msgs @@ -32,6 +36,8 @@ image_transport ff_msgs nodelet + camera + msg_conversions diff --git a/localization/marker_tracking/ros/CMakeLists.txt b/localization/marker_tracking/ros/CMakeLists.txt deleted file mode 100644 index 13dca84b4f..0000000000 --- a/localization/marker_tracking/ros/CMakeLists.txt +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -project(marker_tracking) - -catkin_package( - LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS roscpp message_runtime std_msgs geometry_msgs cv_bridge image_transport - DEPENDS marker_tracking -) - -create_library(TARGET rosmarkertracking - LIBS ${MARKER_TRACKING_LIBRARIES} ff_nodelet msg_conversions config_reader ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} - DEPS marker_tracking ff_msgs -) - -create_tool_targets(DIR tools - LIBS ${MARKER_TRACKING_LIBRARIES} rosmarkertracking ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} - DEPS rosmarkertracking -) - -if(CATKIN_ENABLE_TESTING) - find_package(rostest REQUIRED) - add_rostest_gtest(test_arxmlio test/test_arxmlio.test - test/test_arxmlio.cc - ) - target_link_libraries(test_arxmlio - marker_tracking - ) - - add_rostest_gtest(test_marker_config_loader test/test_marker_config_loader.test - test/test_marker_config_loader.cc - ) - target_link_libraries(test_marker_config_loader - marker_tracking - ) - - add_rostest_gtest(test_marker_detector test/test_marker_detector.test - test/test_marker_detector.cc - ) - target_link_libraries(test_marker_detector - marker_tracking - ) -endif() - -install_launch_files() -install(FILES - tools/features_counter.py - DESTINATION share/${PROJECT_NAME}) - diff --git a/localization/marker_tracking/src/arconfigio.cc b/localization/marker_tracking/src/marker_tracking/arconfigio.cc similarity index 100% rename from localization/marker_tracking/src/arconfigio.cc rename to localization/marker_tracking/src/marker_tracking/arconfigio.cc diff --git a/localization/marker_tracking/src/arxmlio.cc b/localization/marker_tracking/src/marker_tracking/arxmlio.cc similarity index 100% rename from localization/marker_tracking/src/arxmlio.cc rename to localization/marker_tracking/src/marker_tracking/arxmlio.cc diff --git a/localization/marker_tracking/src/labelling_method.cc b/localization/marker_tracking/src/marker_tracking/labelling_method.cc similarity index 100% rename from localization/marker_tracking/src/labelling_method.cc rename to localization/marker_tracking/src/marker_tracking/labelling_method.cc diff --git a/localization/marker_tracking/src/marker_detector.cc b/localization/marker_tracking/src/marker_tracking/marker_detector.cc similarity index 100% rename from localization/marker_tracking/src/marker_detector.cc rename to localization/marker_tracking/src/marker_tracking/marker_detector.cc diff --git a/localization/marker_tracking/ros/src/marker_tracker.cc b/localization/marker_tracking/src/marker_tracking_node/marker_tracker.cc similarity index 100% rename from localization/marker_tracking/ros/src/marker_tracker.cc rename to localization/marker_tracking/src/marker_tracking_node/marker_tracker.cc diff --git a/localization/marker_tracking/ros/src/marker_tracking_nodelet.cc b/localization/marker_tracking/src/marker_tracking_node/marker_tracking_nodelet.cc similarity index 100% rename from localization/marker_tracking/ros/src/marker_tracking_nodelet.cc rename to localization/marker_tracking/src/marker_tracking_node/marker_tracking_nodelet.cc diff --git a/localization/marker_tracking/ros/test/data/IMG_20141217_160451.small.opt.jpg b/localization/marker_tracking/test/data/IMG_20141217_160451.small.opt.jpg similarity index 100% rename from localization/marker_tracking/ros/test/data/IMG_20141217_160451.small.opt.jpg rename to localization/marker_tracking/test/data/IMG_20141217_160451.small.opt.jpg diff --git a/localization/marker_tracking/ros/test/data/IMG_20141217_160458.small.opt.jpg 
b/localization/marker_tracking/test/data/IMG_20141217_160458.small.opt.jpg similarity index 100% rename from localization/marker_tracking/ros/test/data/IMG_20141217_160458.small.opt.jpg rename to localization/marker_tracking/test/data/IMG_20141217_160458.small.opt.jpg diff --git a/localization/marker_tracking/ros/test/data/IMG_20141217_160509.small.opt.jpg b/localization/marker_tracking/test/data/IMG_20141217_160509.small.opt.jpg similarity index 100% rename from localization/marker_tracking/ros/test/data/IMG_20141217_160509.small.opt.jpg rename to localization/marker_tracking/test/data/IMG_20141217_160509.small.opt.jpg diff --git a/localization/marker_tracking/ros/test/data/ar_lab_tags.xml b/localization/marker_tracking/test/data/ar_lab_tags.xml similarity index 100% rename from localization/marker_tracking/ros/test/data/ar_lab_tags.xml rename to localization/marker_tracking/test/data/ar_lab_tags.xml diff --git a/localization/marker_tracking/ros/test/data/dock_xml_markers.config b/localization/marker_tracking/test/data/dock_xml_markers.config similarity index 100% rename from localization/marker_tracking/ros/test/data/dock_xml_markers.config rename to localization/marker_tracking/test/data/dock_xml_markers.config diff --git a/localization/marker_tracking/ros/test/data/markers_sample.config b/localization/marker_tracking/test/data/markers_sample.config similarity index 100% rename from localization/marker_tracking/ros/test/data/markers_sample.config rename to localization/marker_tracking/test/data/markers_sample.config diff --git a/localization/marker_tracking/ros/test/test_arxmlio.cc b/localization/marker_tracking/test/test_arxmlio.cc similarity index 92% rename from localization/marker_tracking/ros/test/test_arxmlio.cc rename to localization/marker_tracking/test/test_arxmlio.cc index c0ed1d223b..6261655284 100644 --- a/localization/marker_tracking/ros/test/test_arxmlio.cc +++ b/localization/marker_tracking/test/test_arxmlio.cc @@ -49,3 +49,9 @@ TEST(AR_XML_IO, Reading) { EXPECT_NEAR(-0.993, ar_tags[29].row(BL)[0], 1e-3); EXPECT_NEAR(-0.824, ar_tags[29].row(BL)[2], 1e-3); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/marker_tracking/ros/test/test_arxmlio.test b/localization/marker_tracking/test/test_arxmlio.test similarity index 100% rename from localization/marker_tracking/ros/test/test_arxmlio.test rename to localization/marker_tracking/test/test_arxmlio.test diff --git a/localization/marker_tracking/ros/test/test_marker_config_loader.cc b/localization/marker_tracking/test/test_marker_config_loader.cc similarity index 93% rename from localization/marker_tracking/ros/test/test_marker_config_loader.cc rename to localization/marker_tracking/test/test_marker_config_loader.cc index 3a6a7101f2..33e2f81151 100644 --- a/localization/marker_tracking/ros/test/test_marker_config_loader.cc +++ b/localization/marker_tracking/test/test_marker_config_loader.cc @@ -58,3 +58,9 @@ TEST(MarkerDetector, ConfigLoader) { EXPECT_NEAR(diag1(1), diag2(1), 1E-2); EXPECT_NEAR(diag1(2), diag2(2), 1E-2); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/marker_tracking/ros/test/test_marker_config_loader.test b/localization/marker_tracking/test/test_marker_config_loader.test similarity index 100% rename from 
localization/marker_tracking/ros/test/test_marker_config_loader.test rename to localization/marker_tracking/test/test_marker_config_loader.test diff --git a/localization/marker_tracking/ros/test/test_marker_detector.cc b/localization/marker_tracking/test/test_marker_detector.cc similarity index 87% rename from localization/marker_tracking/ros/test/test_marker_detector.cc rename to localization/marker_tracking/test/test_marker_detector.cc index ee8367d913..682313702e 100644 --- a/localization/marker_tracking/ros/test/test_marker_detector.cc +++ b/localization/marker_tracking/test/test_marker_detector.cc @@ -55,7 +55,11 @@ TEST(MarkerDetector, TestDetection) { IplImage ipl_image; std::vector >::iterator expected_it = expected_corner_loc.begin(); for (std::string const& image_filename : image_filenames) { - image = cv::imread(data_dir + image_filename, CV_LOAD_IMAGE_GRAYSCALE); + #if (CV_VERSION_MAJOR >= 4) + image = cv::imread(data_dir + image_filename, cv::IMREAD_GRAYSCALE); + #else + image = cv::imread(data_dir + image_filename, CV_LOAD_IMAGE_GRAYSCALE); + #endif ipl_image = image; detector.Detect(&ipl_image, 0.08, 0.2); EXPECT_EQ(1u, detector.NumMarkers()); @@ -68,3 +72,9 @@ TEST(MarkerDetector, TestDetection) { expected_it++; } } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/marker_tracking/ros/test/test_marker_detector.test b/localization/marker_tracking/test/test_marker_detector.test similarity index 100% rename from localization/marker_tracking/ros/test/test_marker_detector.test rename to localization/marker_tracking/test/test_marker_detector.test diff --git a/localization/marker_tracking/tools/generate_ar_target.cc b/localization/marker_tracking/tools/marker_tracking/generate_ar_target.cc similarity index 99% rename from localization/marker_tracking/tools/generate_ar_target.cc rename to localization/marker_tracking/tools/marker_tracking/generate_ar_target.cc index f2a471b23e..bcb9132199 100644 --- a/localization/marker_tracking/tools/generate_ar_target.cc +++ b/localization/marker_tracking/tools/marker_tracking/generate_ar_target.cc @@ -16,7 +16,8 @@ * under the License. */ -#include +#include + #include #include diff --git a/localization/marker_tracking/tools/generate_multiscale_target.cc b/localization/marker_tracking/tools/marker_tracking/generate_multiscale_target.cc similarity index 99% rename from localization/marker_tracking/tools/generate_multiscale_target.cc rename to localization/marker_tracking/tools/marker_tracking/generate_multiscale_target.cc index 887edd9a09..70c463cbcd 100644 --- a/localization/marker_tracking/tools/generate_multiscale_target.cc +++ b/localization/marker_tracking/tools/marker_tracking/generate_multiscale_target.cc @@ -16,7 +16,8 @@ * under the License. 
*/ -#include +#include + #include #include diff --git a/localization/marker_tracking/tools/generate_svg_markers.cc b/localization/marker_tracking/tools/marker_tracking/generate_svg_markers.cc similarity index 99% rename from localization/marker_tracking/tools/generate_svg_markers.cc rename to localization/marker_tracking/tools/marker_tracking/generate_svg_markers.cc index 5b3b075293..df063523bc 100644 --- a/localization/marker_tracking/tools/generate_svg_markers.cc +++ b/localization/marker_tracking/tools/marker_tracking/generate_svg_markers.cc @@ -19,7 +19,7 @@ #include -#include +#include #include #include diff --git a/localization/marker_tracking/tools/identify_marker_in_image.cc b/localization/marker_tracking/tools/marker_tracking/identify_marker_in_image.cc similarity index 92% rename from localization/marker_tracking/tools/identify_marker_in_image.cc rename to localization/marker_tracking/tools/marker_tracking/identify_marker_in_image.cc index 96b6577b2d..e9534d6e93 100644 --- a/localization/marker_tracking/tools/identify_marker_in_image.cc +++ b/localization/marker_tracking/tools/marker_tracking/identify_marker_in_image.cc @@ -47,7 +47,12 @@ int main(int argc, char** argv) { IplImage ipl_image; for (int i = 1; i < argc; i++) { std::string image_filename(argv[i]); - image = cv::imread(image_filename, CV_LOAD_IMAGE_GRAYSCALE); + + #if (CV_VERSION_MAJOR >= 4) + image = cv::imread(image_filename, cv::IMREAD_GRAYSCALE); + #else + image = cv::imread(image_filename, CV_LOAD_IMAGE_GRAYSCALE); + #endif ipl_image = image; detector.Detect(&ipl_image, 0.08, 0.2); LOG(INFO) << image_filename << " : Detected " << detector.NumMarkers() << " markers."; diff --git a/localization/marker_tracking/tools/marker_config2xml.cc b/localization/marker_tracking/tools/marker_tracking/marker_config2xml.cc similarity index 100% rename from localization/marker_tracking/tools/marker_config2xml.cc rename to localization/marker_tracking/tools/marker_tracking/marker_config2xml.cc diff --git a/localization/marker_tracking/tools/markers_to_Kalibr.cc b/localization/marker_tracking/tools/marker_tracking/markers_to_Kalibr.cc similarity index 99% rename from localization/marker_tracking/tools/markers_to_Kalibr.cc rename to localization/marker_tracking/tools/marker_tracking/markers_to_Kalibr.cc index fb7a406ac7..fce59fb266 100644 --- a/localization/marker_tracking/tools/markers_to_Kalibr.cc +++ b/localization/marker_tracking/tools/marker_tracking/markers_to_Kalibr.cc @@ -17,7 +17,7 @@ */ #include -#include +#include #include #include diff --git a/localization/marker_tracking/ros/tools/features_counter.py b/localization/marker_tracking/tools/marker_tracking_node/features_counter.py similarity index 100% rename from localization/marker_tracking/ros/tools/features_counter.py rename to localization/marker_tracking/tools/marker_tracking_node/features_counter.py diff --git a/localization/marker_tracking/ros/tools/overhead_tracking_node.cc b/localization/marker_tracking/tools/marker_tracking_node/overhead_tracking_node.cc similarity index 100% rename from localization/marker_tracking/ros/tools/overhead_tracking_node.cc rename to localization/marker_tracking/tools/marker_tracking_node/overhead_tracking_node.cc diff --git a/localization/optimization_common/CMakeLists.txt b/localization/optimization_common/CMakeLists.txt new file mode 100644 index 0000000000..2961c5543d --- /dev/null +++ b/localization/optimization_common/CMakeLists.txt @@ -0,0 +1,99 @@ +#Copyright(c) 2017, United States Government, as represented by the 
+#Administrator of the National Aeronautics and Space Administration. +# +#All rights reserved. +# +#The Astrobee platform is licensed under the Apache License, Version 2.0 +#(the "License"); you may not use this file except in compliance with the +#License.You may obtain a copy of the License at +# +#http: // www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +#WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.See the +#License for the specific language governing permissions and limitations +#under the License. + +cmake_minimum_required(VERSION 3.0) +project(optimization_common) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + localization_common +) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + +find_package(Eigen3 REQUIRED) + +# Non-linear optimizer +find_package(Ceres REQUIRED) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES ${PROJECT_NAME} ${CERES_LIBRARIES} ${OpenCV_LIBRARIES} + CATKIN_DEPENDS localization_common +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ${CERES_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/utilities.cc +) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES} ${CERES_LIBRARIES} ${OpenCV_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + add_rostest_gtest(test_utilities_optimization_common + test/test_utilities.test + test/test_utilities.cc + ) + target_link_libraries(test_utilities_optimization_common + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) + find_package(rostest REQUIRED) + add_rostest_gtest(test_se3_local_parameterization + test/test_se3_local_parameterization.test + test/test_se3_local_parameterization.cc + ) + target_link_libraries(test_se3_local_parameterization + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) +endif() + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/localization/optimization_common/include/optimization_common/distorter.h b/localization/optimization_common/include/optimization_common/distorter.h new file mode 100644 index 0000000000..880a8c8272 --- /dev/null +++ b/localization/optimization_common/include/optimization_common/distorter.h @@ -0,0 +1,55 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef OPTIMIZATION_COMMON_DISTORTER_H_ +#define OPTIMIZATION_COMMON_DISTORTER_H_ + +#include + +#include + +#include + +namespace optimization_common { +template +class Distorter { + public: + Eigen::Vector2d Distort(const Eigen::VectorXd& distortion, const Eigen::Matrix3d& intrinsics, + const Eigen::Vector2d& undistorted_point) const { + return static_cast(this)->Distort(distortion.data(), intrinsics, undistorted_point); + } + + virtual Eigen::Vector2d Undistort(const Eigen::Vector2d& distorted_point, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const = 0; + + std::vector Undistort(const std::vector& distorted_points, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion) const { + std::vector undistorted_points; + for (const auto& distorted_point : distorted_points) { + undistorted_points.emplace_back(Undistort(distorted_point, intrinsics, distortion)); + } + return undistorted_points; + } + + virtual cv::Mat Undistort(const cv::Mat& distorted_image, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const = 0; + + static constexpr int kNumParams = NUM_PARAMS; +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_DISTORTER_H_ diff --git a/localization/optimization_common/include/optimization_common/fov_distorter.h b/localization/optimization_common/include/optimization_common/fov_distorter.h new file mode 100644 index 0000000000..b9d1ef8a97 --- /dev/null +++ b/localization/optimization_common/include/optimization_common/fov_distorter.h @@ -0,0 +1,93 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
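The Distorter base class above uses the curiously recurring template pattern: its double-precision Distort overload forwards to the derived class's templated Distort (so the same code path works with Ceres Jet types), and kNumParams tells the calling cost functors how many distortion parameters to expect. A stripped-down, self-contained sketch of that pattern; the class names here are illustrative, not part of the patch:

#include <Eigen/Core>

#include <iostream>

template <int NUM_PARAMS, typename DERIVED>
class DistorterBase {
 public:
  static constexpr int kNumParams = NUM_PARAMS;
  // Forwards to the derived class's templated Distort with T = double.
  Eigen::Vector2d Distort(const Eigen::VectorXd& distortion, const Eigen::Matrix3d& intrinsics,
                          const Eigen::Vector2d& undistorted_point) const {
    return static_cast<const DERIVED*>(this)->Distort(distortion.data(), intrinsics, undistorted_point);
  }
};

// A do-nothing distorter, analogous in spirit to IdentityDistorter in this patch.
class NoOpDistorter : public DistorterBase<1, NoOpDistorter> {
 public:
  using DistorterBase<1, NoOpDistorter>::Distort;
  template <typename T>
  Eigen::Matrix<T, 2, 1> Distort(const T* /*distortion*/, const Eigen::Matrix<T, 3, 3>& /*intrinsics*/,
                                 const Eigen::Matrix<T, 2, 1>& undistorted_point) const {
    return undistorted_point;
  }
};

int main() {
  NoOpDistorter distorter;
  const Eigen::Vector2d point = distorter.Distort(Eigen::VectorXd::Zero(1), Eigen::Matrix3d::Identity(),
                                                  Eigen::Vector2d(10.0, 20.0));
  std::cout << "params: " << NoOpDistorter::kNumParams << ", point: " << point.transpose() << std::endl;
  return 0;
}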
+ */ +#ifndef OPTIMIZATION_COMMON_FOV_DISTORTER_H_ +#define OPTIMIZATION_COMMON_FOV_DISTORTER_H_ + +#include +#include + +#include + +#include + +#include + +namespace optimization_common { +class FovDistorter : public Distorter<1, FovDistorter> { + public: + using Distorter<1, FovDistorter>::Distort; + using Distorter<1, FovDistorter>::Undistort; + + template + Eigen::Matrix Distort(const T* distortion, const Eigen::Matrix& intrinsics, + const Eigen::Matrix& undistorted_point) const { + // Distortion model expects image coordinates to be in relative coordinates + const Eigen::Matrix relative_coordinates = RelativeCoordinates(undistorted_point, intrinsics); + const T& relative_x = relative_coordinates[0]; + const T& relative_y = relative_coordinates[1]; + + // Squared norm + const T r2 = relative_x * relative_x + relative_y * relative_y; + const T& w = distortion[0]; + T warping; + if (r2 > 1e-5) { + const T a = 2.0 * ceres::tan(w / 2.0); + const T b = ceres::atan(r2 * a) / w; + warping = b / r2; + } else { + warping = T(1.0); + } + const T distorted_relative_x = warping * relative_x; + const T distorted_relative_y = warping * relative_y; + return AbsoluteCoordinates(Eigen::Matrix(distorted_relative_x, distorted_relative_y), intrinsics); + } + + cv::Mat Undistort(const cv::Mat& distorted_image, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + cv::Mat gray_distorted_image; + cv::cvtColor(distorted_image, gray_distorted_image, cv::COLOR_BGRA2GRAY); + cv::Mat undistorted_image(distorted_image.size(), CV_8UC1, cv::Scalar(0)); + for (int y = 0; y < undistorted_image.rows; ++y) { + for (int x = 0; x < undistorted_image.cols; ++x) { + const uchar pixel_val = gray_distorted_image.at(y, x); + const Eigen::Vector2d undistorted_point = Undistort(Eigen::Vector2d(x, y), intrinsics, distortion); + const Eigen::Vector2i undistorted_rounded_point(std::round(undistorted_point[0]), + std::round(undistorted_point[1])); + if (undistorted_rounded_point.x() >= undistorted_image.cols || undistorted_rounded_point.x() < 0) continue; + if (undistorted_rounded_point.y() >= undistorted_image.rows || undistorted_rounded_point.y() < 0) continue; + undistorted_image.at(undistorted_rounded_point.y(), undistorted_rounded_point.x()) = pixel_val; + } + } + return undistorted_image; + } + + Eigen::Vector2d Undistort(const Eigen::Vector2d& distorted_point, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + const Eigen::Vector2d relative_distorted_point = RelativeCoordinates(distorted_point, intrinsics); + const double rd = relative_distorted_point.norm(); + const double a = 2.0 * std::tan(distortion[0] / 2.0); + const double ru = std::tan(rd * distortion[0]) / a; + const double unwarping = rd > 1e-5 ? 
ru / rd : 1.0; + const Eigen::Vector2d relative_undistorted_point = unwarping * relative_distorted_point; + const Eigen::Vector2d undistorted_point = AbsoluteCoordinates(relative_undistorted_point, intrinsics); + return undistorted_point; + } +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_FOV_DISTORTER_H_ diff --git a/localization/optimization_common/include/optimization_common/identity_distorter.h b/localization/optimization_common/include/optimization_common/identity_distorter.h new file mode 100644 index 0000000000..45962298e4 --- /dev/null +++ b/localization/optimization_common/include/optimization_common/identity_distorter.h @@ -0,0 +1,49 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef OPTIMIZATION_COMMON_IDENTITY_DISTORTER_H_ +#define OPTIMIZATION_COMMON_IDENTITY_DISTORTER_H_ + +#include + +namespace optimization_common { +// TODO(rsoussan): This actually takes 0 params but ceres needs at least 1 to form +// valid parameter blocks. Change to 0 when this is avoided. +class IdentityDistorter : public Distorter<1, IdentityDistorter> { + public: + using Distorter<1, IdentityDistorter>::Distort; + using Distorter<1, IdentityDistorter>::Undistort; + + template + Eigen::Matrix Distort(const T* distortion, const Eigen::Matrix& intrinsics, + const Eigen::Matrix& undistorted_point) const { + return undistorted_point; + } + + cv::Mat Undistort(const cv::Mat& distorted_image, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + return distorted_image; + } + + Eigen::Vector2d Undistort(const Eigen::Vector2d& distorted_point, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + return distorted_point; + } +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_IDENTITY_DISTORTER_H_ diff --git a/localization/optimization_common/include/optimization_common/rad_distorter.h b/localization/optimization_common/include/optimization_common/rad_distorter.h new file mode 100644 index 0000000000..7b2b94f6dc --- /dev/null +++ b/localization/optimization_common/include/optimization_common/rad_distorter.h @@ -0,0 +1,72 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
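In scalar form, the FOV model that the Undistort method above inverts maps an undistorted radius r_u (in normalized, intrinsics-free coordinates) to a distorted radius r_d = atan(r_u * a) / w with a = 2 * tan(w / 2), and back via r_u = tan(r_d * w) / a. A self-contained numerical check of that forward/inverse pair, with an arbitrarily chosen value of w:

#include <cassert>
#include <cmath>

int main() {
  const double w = 0.9;  // example FOV distortion parameter, radians
  const double a = 2.0 * std::tan(w / 2.0);
  for (double ru = 0.05; ru < 1.0; ru += 0.05) {
    const double rd = std::atan(ru * a) / w;       // forward (distort)
    const double ru_again = std::tan(rd * w) / a;  // inverse (undistort)
    assert(std::abs(ru - ru_again) < 1e-9);
  }
  return 0;
}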
See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef OPTIMIZATION_COMMON_RAD_DISTORTER_H_ +#define OPTIMIZATION_COMMON_RAD_DISTORTER_H_ + +#include +#include +#include + +#include + +#include + +namespace optimization_common { +class RadDistorter : public Distorter<2, RadDistorter> { + public: + using Distorter<2, RadDistorter>::Distort; + using Distorter<2, RadDistorter>::Undistort; + + template + Eigen::Matrix Distort(const T* distortion, const Eigen::Matrix& intrinsics, + const Eigen::Matrix& undistorted_point) const { + T radtan_distortion[4]; + radtan_distortion[0] = distortion[0]; + radtan_distortion[1] = distortion[1]; + radtan_distortion[2] = T(0.0); + radtan_distortion[3] = T(0.0); + return radtan_distorter_.Distort(radtan_distortion, intrinsics, undistorted_point); + } + + cv::Mat Undistort(const cv::Mat& distorted_image, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + const Eigen::VectorXd radtan_distortion = RadTanDistortionVector(distortion); + return radtan_distorter_.Undistort(distorted_image, intrinsics, radtan_distortion); + } + + Eigen::Vector2d Undistort(const Eigen::Vector2d& distorted_point, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + const Eigen::VectorXd radtan_distortion = RadTanDistortionVector(distortion); + return radtan_distorter_.Undistort(distorted_point, intrinsics, radtan_distortion); + } + + private: + Eigen::VectorXd RadTanDistortionVector(const Eigen::VectorXd& rad_distortion) const { + Eigen::VectorXd radtan_distortion(4); + radtan_distortion[0] = rad_distortion[0]; + radtan_distortion[1] = rad_distortion[1]; + radtan_distortion[2] = 0; + radtan_distortion[3] = 0; + return radtan_distortion; + } + + RadTanDistorter radtan_distorter_; +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_RAD_DISTORTER_H_ diff --git a/localization/optimization_common/include/optimization_common/radtan_distorter.h b/localization/optimization_common/include/optimization_common/radtan_distorter.h new file mode 100644 index 0000000000..37c97c280c --- /dev/null +++ b/localization/optimization_common/include/optimization_common/radtan_distorter.h @@ -0,0 +1,94 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef OPTIMIZATION_COMMON_RADTAN_DISTORTER_H_ +#define OPTIMIZATION_COMMON_RADTAN_DISTORTER_H_ + +#include +#include + +#include + +#include + +#include + +namespace optimization_common { +class RadTanDistorter : public Distorter<4, RadTanDistorter> { + public: + using Distorter<4, RadTanDistorter>::Distort; + using Distorter<4, RadTanDistorter>::Undistort; + + template + Eigen::Matrix Distort(const T* distortion, const Eigen::Matrix& intrinsics, + const Eigen::Matrix& undistorted_point) const { + const T& k1 = distortion[0]; + const T& k2 = distortion[1]; + const T& p1 = distortion[2]; + const T& p2 = distortion[3]; + // TODO(rsoussan): Support 5 distortion params + const T k3(0.0); + + const Eigen::Matrix relative_coordinates = RelativeCoordinates(undistorted_point, intrinsics); + const T& relative_x = relative_coordinates[0]; + const T& relative_y = relative_coordinates[1]; + // Squared norm + const T r2 = relative_x * relative_x + relative_y * relative_y; + + // Apply radial distortion + const T radial_distortion_coeff = 1.0 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2; + T distorted_relative_x = relative_x * radial_distortion_coeff; + T distorted_relative_y = relative_y * radial_distortion_coeff; + + // Apply tangential distortion + distorted_relative_x = + distorted_relative_x + (2.0 * p1 * relative_x * relative_y + p2 * (r2 + 2.0 * relative_x * relative_x)); + distorted_relative_y = + distorted_relative_y + (p1 * (r2 + 2.0 * relative_y * relative_y) + 2.0 * p2 * relative_x * relative_y); + + return AbsoluteCoordinates(Eigen::Matrix(distorted_relative_x, distorted_relative_y), intrinsics); + } + + cv::Mat Undistort(const cv::Mat& distorted_image, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + cv::Mat undistorted_image; + cv::Mat intrinsics_mat; + cv::eigen2cv(intrinsics, intrinsics_mat); + cv::Mat distortion_vector; + cv::eigen2cv(distortion, distortion_vector); + cv::undistort(distorted_image, undistorted_image, intrinsics_mat, distortion_vector); + return undistorted_image; + } + + Eigen::Vector2d Undistort(const Eigen::Vector2d& distorted_point, const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) const final { + cv::Mat intrinsics_mat; + cv::eigen2cv(intrinsics, intrinsics_mat); + cv::Mat distortion_vector; + cv::eigen2cv(distortion, distortion_vector); + cv::Mat distorted_point_vector; + cv::eigen2cv(distorted_point, distorted_point_vector); + cv::Mat undistorted_point_vector; + cv::undistort(distorted_point_vector, undistorted_point_vector, intrinsics_mat, distortion_vector); + Eigen::Vector2d undistorted_point; + cv::cv2eigen(undistorted_point_vector, undistorted_point); + return undistorted_point; + } +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_RADTAN_DISTORTER_H_ diff --git a/localization/optimization_common/include/optimization_common/residuals.h b/localization/optimization_common/include/optimization_common/residuals.h new file mode 100644 index 0000000000..270f743930 --- /dev/null +++ b/localization/optimization_common/include/optimization_common/residuals.h @@ -0,0 +1,218 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. 
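Written out for one normalized point (x, y), the radial-tangential model applied by RadTanDistorter::Distort above is x_d = x * c + 2 p1 x y + p2 (r^2 + 2 x^2) and y_d = y * c + p1 (r^2 + 2 y^2) + 2 p2 x y, with r^2 = x^2 + y^2 and c = 1 + k1 r^2 + k2 r^4 (k3 is pinned to zero in this patch). A minimal sketch with made-up coefficients:

#include <Eigen/Core>

#include <iostream>

// Radial-tangential distortion of a point already in normalized coordinates.
Eigen::Vector2d DistortRadTan(const Eigen::Vector2d& point, double k1, double k2, double p1, double p2) {
  const double x = point.x(), y = point.y();
  const double r2 = x * x + y * y;
  const double radial = 1.0 + k1 * r2 + k2 * r2 * r2;  // k3 term omitted, as in the patch
  return Eigen::Vector2d(x * radial + 2.0 * p1 * x * y + p2 * (r2 + 2.0 * x * x),
                         y * radial + p1 * (r2 + 2.0 * y * y) + 2.0 * p2 * x * y);
}

int main() {
  // Example coefficients only; real values come from camera calibration.
  std::cout << DistortRadTan(Eigen::Vector2d(0.1, -0.2), -0.3, 0.1, 0.001, -0.002).transpose() << std::endl;
  return 0;
}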
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef OPTIMIZATION_COMMON_RESIDUALS_H_ +#define OPTIMIZATION_COMMON_RESIDUALS_H_ + +#include + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace optimization_common { +class PointToPointError { + public: + PointToPointError(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point) + : source_point_(source_point), target_point_(target_point) {} + + template + bool operator()(const T* relative_transform_data, T* point_to_point_error) const { + const auto relative_transform = Isometry3(relative_transform_data); + // Compute error + const Eigen::Matrix transformed_source_point = relative_transform * source_point_.cast(); + point_to_point_error[0] = transformed_source_point[0] - target_point_[0]; + point_to_point_error[1] = transformed_source_point[1] - target_point_[1]; + point_to_point_error[2] = transformed_source_point[2] - target_point_[2]; + return true; + } + + static void AddCostFunction(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point, + Eigen::Matrix& relative_transform, ceres::Problem& problem) { + ceres::LossFunction* huber_loss = new ceres::HuberLoss(1.345); + ceres::CostFunction* point_to_point_cost_function = + new ceres::AutoDiffCostFunction(new PointToPointError(source_point, target_point)); + problem.AddResidualBlock(point_to_point_cost_function, huber_loss, relative_transform.data()); + } + + private: + Eigen::Vector3d source_point_; + Eigen::Vector3d target_point_; +}; + +class PointToPlaneError { + public: + PointToPlaneError(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point, + const Eigen::Vector3d& target_normal) + : source_point_(source_point), target_point_(target_point), target_normal_(target_normal) {} + + template + bool operator()(const T* relative_transform_data, T* point_to_plane_error) const { + const auto relative_transform = Isometry3(relative_transform_data); + // Compute error + const Eigen::Matrix transformed_source_point = relative_transform * source_point_.cast(); + const Eigen::Matrix transformed_source_point_to_target_point = + transformed_source_point - target_point_.cast(); + point_to_plane_error[0] = transformed_source_point_to_target_point.dot(target_normal_.cast()); + return true; + } + + static void AddCostFunction(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point, + const Eigen::Vector3d& target_normal, Eigen::Matrix& relative_transform, + ceres::Problem& problem) { + ceres::LossFunction* huber_loss = new ceres::HuberLoss(1.345); + ceres::CostFunction* point_to_plane_cost_function = new ceres::AutoDiffCostFunction( + new PointToPlaneError(source_point, target_point, target_normal)); + problem.AddResidualBlock(point_to_plane_cost_function, huber_loss, relative_transform.data()); + } + + private: + Eigen::Vector3d source_point_; + Eigen::Vector3d target_point_; + Eigen::Vector3d target_normal_; +}; + +class SymmetricPointToPlaneError { + public: + SymmetricPointToPlaneError(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point, + const Eigen::Vector3d& source_normal, const 
Eigen::Vector3d& target_normal) + : source_point_(source_point), + target_point_(target_point), + source_normal_(source_normal), + target_normal_(target_normal) {} + + template + bool operator()(const T* relative_transform_data, T* symmetric_point_to_plane_error) const { + const auto relative_transform = Isometry3(relative_transform_data); + // Compute error + const Eigen::Matrix transformed_source_point = relative_transform * source_point_.cast(); + const Eigen::Matrix transformed_source_point_to_target_point = + transformed_source_point - target_point_.cast(); + symmetric_point_to_plane_error[0] = transformed_source_point_to_target_point.dot(source_normal_.cast()); + symmetric_point_to_plane_error[1] = transformed_source_point_to_target_point.dot(target_normal_.cast()); + return true; + } + + static void AddCostFunction(const Eigen::Vector3d& source_point, const Eigen::Vector3d& target_point, + const Eigen::Vector3d& source_normal, const Eigen::Vector3d& target_normal, + Eigen::Matrix& relative_transform, ceres::Problem& problem) { + ceres::LossFunction* huber_loss = new ceres::HuberLoss(1.345); + ceres::CostFunction* symmetric_point_to_plane_cost_function = + new ceres::AutoDiffCostFunction( + new SymmetricPointToPlaneError(source_point, target_point, source_normal, target_normal)); + problem.AddResidualBlock(symmetric_point_to_plane_cost_function, huber_loss, relative_transform.data()); + } + + private: + Eigen::Vector3d source_point_; + Eigen::Vector3d target_point_; + Eigen::Vector3d source_normal_; + Eigen::Vector3d target_normal_; +}; + +template +class AffineReprojectionError { + public: + AffineReprojectionError(const Eigen::Vector2d& image_point, const Eigen::Vector3d& depth_cloud_F_point_3d) + : image_point_(image_point), depth_cloud_F_point_3d_(depth_cloud_F_point_3d) {} + + template + bool operator()(const T* depth_image_A_depth_cloud_data, const T* intrinsics_data, const T* distortion_data, + T* reprojection_error) const { + // Handle type conversions + const auto intrinsics = Intrinsics(intrinsics_data); + const auto depth_image_A_depth_cloud = Affine3(depth_image_A_depth_cloud_data); + + // Compute error + const Eigen::Matrix depth_image_F_point_3d = depth_image_A_depth_cloud * depth_cloud_F_point_3d_.cast(); + const Eigen::Matrix undistorted_reprojected_point_3d = (intrinsics * depth_image_F_point_3d).hnormalized(); + const Eigen::Matrix distorted_reprojected_point_3d = + distorter_.Distort(distortion_data, intrinsics, undistorted_reprojected_point_3d); + + reprojection_error[0] = image_point_[0] - distorted_reprojected_point_3d[0]; + reprojection_error[1] = image_point_[1] - distorted_reprojected_point_3d[1]; + return true; + } + + static void AddCostFunction(const Eigen::Vector2d& image_point, const Eigen::Vector3d& point_3d, + Eigen::Matrix& depth_image_A_depth_cloud_vector, + Eigen::Matrix& intrinsics_vector, Eigen::VectorXd& distortion, + ceres::Problem& problem) { + ceres::LossFunction* huber_loss = new ceres::HuberLoss(1.345); + ceres::CostFunction* reprojection_cost_function = + new ceres::AutoDiffCostFunction, 2, 7, 4, DISTORTER::kNumParams>( + new AffineReprojectionError(image_point, point_3d)); + problem.AddResidualBlock(reprojection_cost_function, huber_loss, depth_image_A_depth_cloud_vector.data(), + intrinsics_vector.data(), distortion.data()); + } + + private: + Eigen::Vector2d image_point_; + Eigen::Vector3d depth_cloud_F_point_3d_; + DISTORTER distorter_; +}; + +template +class ReprojectionError { + public: + ReprojectionError(const Eigen::Vector2d& 
image_point, const Eigen::Vector3d& target_t_point_3d) + : image_point_(image_point), target_t_point_3d_(target_t_point_3d) {} + + template + bool operator()(const T* camera_T_target_data, const T* focal_lengths_data, const T* principal_points_data, + const T* distortion_data, T* reprojection_error) const { + // Handle type conversions + const auto intrinsics = Intrinsics(focal_lengths_data, principal_points_data); + const auto camera_T_target = Isometry3(camera_T_target_data); + // Compute error + const Eigen::Matrix camera_t_point_3d = camera_T_target * target_t_point_3d_.cast(); + const Eigen::Matrix undistorted_reprojected_point_3d = (intrinsics * camera_t_point_3d).hnormalized(); + const Eigen::Matrix distorted_reprojected_point_3d = + distorter_.Distort(distortion_data, intrinsics, undistorted_reprojected_point_3d); + + reprojection_error[0] = image_point_[0] - distorted_reprojected_point_3d[0]; + reprojection_error[1] = image_point_[1] - distorted_reprojected_point_3d[1]; + return true; + } + + static void AddCostFunction(const Eigen::Vector2d& image_point, const Eigen::Vector3d& point_3d, + Eigen::Matrix& camera_T_target, Eigen::Vector2d& focal_lengths, + Eigen::Vector2d& principal_points, Eigen::VectorXd& distortion, ceres::Problem& problem, + const double scale_factor = 1, const double huber_threshold = 1.345) { + ceres::LossFunction* huber_loss = new ceres::HuberLoss(huber_threshold); + ceres::LossFunction* scaled_huber_loss = new ceres::ScaledLoss(huber_loss, scale_factor, ceres::TAKE_OWNERSHIP); + ceres::CostFunction* reprojection_cost_function = + new ceres::AutoDiffCostFunction, 2, 6, 2, 2, DISTORTER::kNumParams>( + new ReprojectionError(image_point, point_3d)); + problem.AddResidualBlock(reprojection_cost_function, scaled_huber_loss, camera_T_target.data(), + focal_lengths.data(), principal_points.data(), distortion.data()); + } + + private: + Eigen::Vector2d image_point_; + Eigen::Vector3d target_t_point_3d_; + DISTORTER distorter_; +}; +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_RESIDUALS_H_ diff --git a/localization/optimization_common/include/optimization_common/se3_local_parameterization.h b/localization/optimization_common/include/optimization_common/se3_local_parameterization.h new file mode 100644 index 0000000000..8ce8c7f246 --- /dev/null +++ b/localization/optimization_common/include/optimization_common/se3_local_parameterization.h @@ -0,0 +1,45 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */
+#ifndef OPTIMIZATION_COMMON_SE3_LOCAL_PARAMETERIZATION_H_
+#define OPTIMIZATION_COMMON_SE3_LOCAL_PARAMETERIZATION_H_
+
+#include <optimization_common/utilities.h>
+
+#include <Eigen/Core>
+
+#include <ceres/autodiff_local_parameterization.h>
+
+namespace optimization_common {
+struct SE3Plus {
+  template <typename T>
+  bool operator()(const T* x, const T* delta, T* x_plus_delta) const {
+    const Eigen::Transform<T, 3, Eigen::Isometry> pose = Isometry3(x);
+    const Eigen::Transform<T, 3, Eigen::Isometry> pose_delta = Isometry3(delta);
+    const Eigen::Transform<T, 3, Eigen::Isometry> updated_pose = pose * pose_delta;
+    const Eigen::Matrix<T, 6, 1> updated_pose_vector = VectorFromIsometry3(updated_pose);
+    for (int i = 0; i < 6; ++i) {
+      x_plus_delta[i] = updated_pose_vector[i];
+    }
+    return true;
+  }
+};
+
+using SE3LocalParameterization = ceres::AutoDiffLocalParameterization<SE3Plus, 6, 6>;
+}  // namespace optimization_common
+
+#endif  // OPTIMIZATION_COMMON_SE3_LOCAL_PARAMETERIZATION_H_
diff --git a/localization/optimization_common/include/optimization_common/utilities.h b/localization/optimization_common/include/optimization_common/utilities.h
new file mode 100644
index 0000000000..fb95552d7e
--- /dev/null
+++ b/localization/optimization_common/include/optimization_common/utilities.h
@@ -0,0 +1,157 @@
+/* Copyright (c) 2017, United States Government, as represented by the
+ * Administrator of the National Aeronautics and Space Administration.
+ *
+ * All rights reserved.
+ *
+ * The Astrobee platform is licensed under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */ +#ifndef OPTIMIZATION_COMMON_UTILITIES_H_ +#define OPTIMIZATION_COMMON_UTILITIES_H_ + +#include + +#include +#include + +#include + +namespace optimization_common { +// Assumes compact angle axis (3d vector where norm gives the angle) parameterization for rotations +// First 3 values of isometry_data are the compact angle axis, next 3 are the translation +template +Eigen::Matrix VectorFromIsometry3(const Eigen::Transform& isometry_3); + +Eigen::Matrix VectorFromIsometry3d(const Eigen::Isometry3d& isometry_3d); +// Assumes compact angle axis (3d vector where norm gives the angle) parameterization for rotations +// First 3 values of isometry_data are the compact angle axis, next 3 are the translation, last is scale +Eigen::Matrix VectorFromAffine3d(const Eigen::Affine3d& affine_3d); +// Assumes compact angle axis (3d vector where norm gives the angle) parameterization for rotations +// First 3 values of isometry_data are the compact angle axis, next 3 are the translation +template +Eigen::Transform Isometry3(const T* isometry_data); +Eigen::Matrix3d Intrinsics(const Eigen::Vector2d& focal_lengths, const Eigen::Vector2d& principal_points); +// Assumes compact angle axis (3d vector where norm gives the angle) parameterization for rotations +// First 3 values of isometry_data are the compact angle axis, next 3 are the translation, last is scale +template +Eigen::Transform Affine3(const T* affine_data); + +Eigen::Isometry3d Isometry3d(const Eigen::Matrix& isometry_vector); + +Eigen::Affine3d Affine3d(const Eigen::Matrix& affine_vector); + +// Assumes storage as focal lengths followed by principal points +template +Eigen::Matrix Intrinsics(const T* intrinsics_data); + +template +Eigen::Matrix Intrinsics(const T* focal_lengths, const T* principal_points); + +template +Eigen::Matrix RelativeCoordinates(const Eigen::Matrix& absolute_point, + const Eigen::Matrix& intrinsics); +template +Eigen::Matrix AbsoluteCoordinates(const Eigen::Matrix& relative_point, + const Eigen::Matrix& intrinsics); + +void AddParameterBlock(const int num_parameters, double* const parameters, ceres::Problem& problem, + const bool set_constant = false); + +void AddConstantParameterBlock(const int num_parameters, double* const parameters, ceres::Problem& problem); + +void AddConstantParameterBlock(const int num_parameters, double const* const parameters, ceres::Problem& problem); + +double ResidualNorm(const std::vector& residual, const int index, const int residual_size); + +// Assumes each residual is the same size +void CheckResiduals(const int residual_size, ceres::Problem& problem, const double outlier_threshold = 0.99); + +template +Eigen::Matrix VectorFromIsometry3(const Eigen::Transform& isometry_3) { + // Isometry3d linear().data() returns the data pointer to the full Isometry3d matrix rather than just the rotation + const Eigen::Matrix rotation = isometry_3.linear(); + Eigen::Matrix isometry_3_vector; + ceres::RotationMatrixToAngleAxis(rotation.data(), &(isometry_3_vector.data()[0])); + isometry_3_vector[3] = isometry_3.translation().x(); + isometry_3_vector[4] = isometry_3.translation().y(); + isometry_3_vector[5] = isometry_3.translation().z(); + return isometry_3_vector; +} + +template +Eigen::Transform Isometry3(const T* isometry_data) { + Eigen::Matrix rotation; + ceres::AngleAxisToRotationMatrix(isometry_data, rotation.data()); + Eigen::Map> translation(&isometry_data[3]); + Eigen::Transform isometry_3(Eigen::Transform::Identity()); + isometry_3.linear() = rotation; + isometry_3.translation() = 
translation; + return isometry_3; +} + +template +Eigen::Transform Affine3(const T* affine_data) { + const Eigen::Transform isometry_3 = Isometry3(affine_data); + const T scale = affine_data[6]; + Eigen::Transform affine_3; + affine_3.linear() = scale * isometry_3.linear(); + affine_3.translation() = isometry_3.translation(); + return affine_3; +} + +template +Eigen::Matrix Intrinsics(const T* intrinsics_data) { + Eigen::Matrix intrinsics(Eigen::Matrix::Identity()); + intrinsics(0, 0) = intrinsics_data[0]; + intrinsics(1, 1) = intrinsics_data[1]; + intrinsics(0, 2) = intrinsics_data[2]; + intrinsics(1, 2) = intrinsics_data[3]; + return intrinsics; +} + +template +Eigen::Matrix Intrinsics(const T* focal_lengths, const T* principal_points) { + Eigen::Matrix intrinsics(Eigen::Matrix::Identity()); + intrinsics(0, 0) = focal_lengths[0]; + intrinsics(1, 1) = focal_lengths[1]; + intrinsics(0, 2) = principal_points[0]; + intrinsics(1, 2) = principal_points[1]; + return intrinsics; +} + +template +Eigen::Matrix RelativeCoordinates(const Eigen::Matrix& absolute_point, + const Eigen::Matrix& intrinsics) { + const T& f_x = intrinsics(0, 0); + const T& f_y = intrinsics(1, 1); + const T& p_x = intrinsics(0, 2); + const T& p_y = intrinsics(1, 2); + + const T& x = absolute_point[0]; + const T& y = absolute_point[1]; + const T relative_x = (x - p_x) / f_x; + const T relative_y = (y - p_y) / f_y; + return Eigen::Matrix(relative_x, relative_y); +} + +template +Eigen::Matrix AbsoluteCoordinates(const Eigen::Matrix& relative_point, + const Eigen::Matrix& intrinsics) { + const T& f_x = intrinsics(0, 0); + const T& f_y = intrinsics(1, 1); + const T& p_x = intrinsics(0, 2); + const T& p_y = intrinsics(1, 2); + return Eigen::Matrix(relative_point[0] * f_x + p_x, relative_point[1] * f_y + p_y); +} +} // namespace optimization_common + +#endif // OPTIMIZATION_COMMON_UTILITIES_H_ diff --git a/localization/optimization_common/package.xml b/localization/optimization_common/package.xml new file mode 100644 index 0000000000..6812b0e767 --- /dev/null +++ b/localization/optimization_common/package.xml @@ -0,0 +1,19 @@ + + optimization_common + 1.0.0 + + The optimization_common package + + + Apache License, Version 2.0 + + + Astrobee Flight Software + + + Astrobee Flight Software + + catkin + localization_common + localization_common + diff --git a/localization/optimization_common/src/utilities.cc b/localization/optimization_common/src/utilities.cc new file mode 100644 index 0000000000..26b78e4878 --- /dev/null +++ b/localization/optimization_common/src/utilities.cc @@ -0,0 +1,115 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#include +#include + +namespace optimization_common { +Eigen::Matrix VectorFromIsometry3d(const Eigen::Isometry3d& isometry_3d) { + return VectorFromIsometry3(isometry_3d); +} + +Eigen::Matrix VectorFromAffine3d(const Eigen::Affine3d& affine_3d) { + Eigen::Matrix3d rotation; + Eigen::Matrix3d scale_matrix; + affine_3d.computeRotationScaling(&rotation, &scale_matrix); + // Assumes uniform scaling, which is the case for Affine3d + const double scale = scale_matrix(0, 0); + Eigen::Matrix affine_3d_vector; + ceres::RotationMatrixToAngleAxis(rotation.data(), &(affine_3d_vector.data()[0])); + affine_3d_vector.block<3, 1>(3, 0) = affine_3d.translation(); + affine_3d_vector(6, 0) = scale; + return affine_3d_vector; +} + +Eigen::Isometry3d Isometry3d(const Eigen::Matrix& isometry_vector) { + return Isometry3(isometry_vector.data()); +} + +Eigen::Affine3d Affine3d(const Eigen::Matrix& affine_vector) { return Affine3(affine_vector.data()); } + +Eigen::Matrix3d Intrinsics(const Eigen::Vector2d& focal_lengths, const Eigen::Vector2d& principal_points) { + Eigen::Matrix3d intrinsics(Eigen::Matrix3d::Identity()); + intrinsics(0, 0) = focal_lengths(0); + intrinsics(1, 1) = focal_lengths(1); + intrinsics(0, 2) = principal_points(0); + intrinsics(1, 2) = principal_points(1); + return intrinsics; +} + +void AddParameterBlock(const int num_parameters, double* const parameters, ceres::Problem& problem, + const bool set_constant) { + problem.AddParameterBlock(parameters, num_parameters); + if (set_constant) problem.SetParameterBlockConstant(parameters); +} + +void AddConstantParameterBlock(const int num_parameters, double* const parameters, ceres::Problem& problem) { + AddParameterBlock(num_parameters, parameters, problem, true); +} + +void AddConstantParameterBlock(const int num_parameters, double const* const parameters, ceres::Problem& problem) { + // Even though parameter doesn't change ceres requires a non const data type + AddConstantParameterBlock(num_parameters, const_cast(parameters), problem); +} + +double ResidualNorm(const std::vector& residual, const int index, const int residual_size) { + double norm = 0; + for (int i = 0; i < residual_size; ++i) { + norm += residual[index * residual_size + i]; + } + return norm; +} + +// Assumes each residual is the same size +void CheckResiduals(const int residual_size, ceres::Problem& problem, const double outlier_threshold) { + double total_cost = 0.0; + std::vector residual_norms; + std::vector residuals; + ceres::Problem::EvaluateOptions eval_options; + eval_options.num_threads = 1; + eval_options.apply_loss_function = false; + problem.Evaluate(eval_options, &total_cost, &residuals, NULL, NULL); + double total_cost_huber = 0.0; + + std::vector residual_norms_huber; + std::vector residuals_huber; + ceres::Problem::EvaluateOptions eval_options_huber; + eval_options_huber.num_threads = 1; + eval_options_huber.apply_loss_function = true; + problem.Evaluate(eval_options_huber, &total_cost_huber, &residuals_huber, NULL, NULL); + const int num_residuals = residuals.size() / residual_size; + double mean_residual = 0.0; + int huber_outliers = 0; + for (int index = 0; index < num_residuals; ++index) { + double norm = ResidualNorm(residuals, index, residual_size); + mean_residual += norm; + residual_norms.push_back(norm); + + double norm_huber = ResidualNorm(residuals_huber, index, residual_size); + residual_norms_huber.push_back(norm_huber); + if (norm_huber / norm < outlier_threshold) { + ++huber_outliers; + } + } + mean_residual /= num_residuals; + 
std::sort(residual_norms.begin(), residual_norms.end()); + LogInfo("Residual min, mean, median and max error: " << residual_norms[0] << ' ' << mean_residual << ' ' + << residual_norms[residual_norms.size() / 2] << ' ' + << residual_norms.back()); + LogInfo("Huber outlier percentage: " << huber_outliers / static_cast(residuals.size())); +} +} // namespace optimization_common diff --git a/localization/optimization_common/test/test_se3_local_parameterization.cc b/localization/optimization_common/test/test_se3_local_parameterization.cc new file mode 100644 index 0000000000..d5fb35b5cd --- /dev/null +++ b/localization/optimization_common/test/test_se3_local_parameterization.cc @@ -0,0 +1,46 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include +#include +#include + +#include + +namespace lc = localization_common; +namespace oc = optimization_common; +TEST(SE3LocalParameterizationTester, SE3Plus) { + oc::SE3Plus se3_plus; + for (int i = 0; i < 500; ++i) { + const Eigen::Isometry3d pose = lc::RandomIsometry3d(); + const Eigen::Isometry3d delta = lc::RandomIsometry3d(); + const Eigen::Isometry3d updated_pose = pose * delta; + const Eigen::Matrix pose_vector = oc::VectorFromIsometry3d(pose); + const Eigen::Matrix delta_vector = oc::VectorFromIsometry3d(delta); + Eigen::Matrix updated_pose_vector; + se3_plus(pose_vector.data(), delta_vector.data(), updated_pose_vector.data()); + const Eigen::Isometry3d updated_pose_again = oc::Isometry3d(updated_pose_vector); + ASSERT_PRED2(lc::MatrixEquality<6>, updated_pose.matrix(), updated_pose_again.matrix()); + } +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/simulator/dock_sim/launch/dock_sim.launch b/localization/optimization_common/test/test_se3_local_parameterization.test similarity index 86% rename from tools/simulator/dock_sim/launch/dock_sim.launch rename to localization/optimization_common/test/test_se3_local_parameterization.test index 80b7662fb3..acbcc417a0 100644 --- a/tools/simulator/dock_sim/launch/dock_sim.launch +++ b/localization/optimization_common/test/test_se3_local_parameterization.test @@ -16,6 +16,5 @@ - - + diff --git a/localization/optimization_common/test/test_utilities.cc b/localization/optimization_common/test/test_utilities.cc new file mode 100644 index 0000000000..713642ec01 --- /dev/null +++ b/localization/optimization_common/test/test_utilities.cc @@ -0,0 +1,49 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. 
+ * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include +#include +#include + +#include + +namespace lc = localization_common; +namespace oc = optimization_common; +TEST(UtilitiesTester, Isometry3dToVectorToIsometry3d) { + for (int i = 0; i < 500; ++i) { + const Eigen::Isometry3d pose = lc::RandomIsometry3d(); + const Eigen::Matrix pose_vector = oc::VectorFromIsometry3d(pose); + const Eigen::Isometry3d pose_again = oc::Isometry3d(pose_vector); + ASSERT_TRUE(pose_again.matrix().isApprox(pose.matrix(), 1e-6)); + } +} + +TEST(UtilitiesTester, VectorToAffine3dToVector) { + for (int i = 0; i < 500; ++i) { + const Eigen::Affine3d affine_3d = lc::RandomAffine3d(); + const Eigen::Matrix affine_3d_vector = oc::VectorFromAffine3d(affine_3d); + const Eigen::Affine3d affine_3d_again = oc::Affine3d(affine_3d_vector); + ASSERT_TRUE(affine_3d_again.matrix().isApprox(affine_3d.matrix(), 1e-6)); + } +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/optimization_common/test/test_utilities.test b/localization/optimization_common/test/test_utilities.test new file mode 100644 index 0000000000..18a1ea2417 --- /dev/null +++ b/localization/optimization_common/test/test_utilities.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/localization/sparse_mapping/CMakeLists.txt b/localization/sparse_mapping/CMakeLists.txt index 1d21f64a30..493ce9bda1 100644 --- a/localization/sparse_mapping/CMakeLists.txt +++ b/localization/sparse_mapping/CMakeLists.txt @@ -15,57 +15,169 @@ # License for the specific language governing permissions and limitations # under the License. 
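Taken together, the residual classes, the pose-vector utilities, and the SE(3) local parameterization added above form a small Ceres-based registration API. A minimal usage sketch, assuming the `AddCostFunction` and conversion signatures shown in those headers and a Ceres release that still ships `LocalParameterization`; the point data and variable names are made up for illustration:

    // Minimal sketch: align a few source points to target points with
    // PointToPointError, parameterizing the transform as a 6-vector
    // (compact angle axis + translation) kept on SE(3) during updates.
    #include <optimization_common/residuals.h>
    #include <optimization_common/se3_local_parameterization.h>
    #include <optimization_common/utilities.h>

    #include <ceres/ceres.h>

    #include <Eigen/Core>
    #include <Eigen/Geometry>

    #include <iostream>
    #include <vector>

    int main() {
      namespace oc = optimization_common;

      // Initial guess stored as compact angle axis (3) + translation (3).
      Eigen::Matrix<double, 6, 1> target_T_source = oc::VectorFromIsometry3d(Eigen::Isometry3d::Identity());

      // Made-up correspondences, purely for illustration.
      std::vector<Eigen::Vector3d> source_points, target_points;
      source_points.push_back(Eigen::Vector3d(0, 0, 1));
      source_points.push_back(Eigen::Vector3d(1, 0, 1));
      source_points.push_back(Eigen::Vector3d(0, 1, 2));
      for (const Eigen::Vector3d& p : source_points) target_points.push_back(p + Eigen::Vector3d(0.1, 0, 0));

      // One Huber-robustified point-to-point residual per correspondence.
      ceres::Problem problem;
      for (size_t i = 0; i < source_points.size(); ++i)
        oc::PointToPointError::AddCostFunction(source_points[i], target_points[i], target_T_source, problem);

      // Keep increments on the SE(3) manifold during optimization.
      problem.SetParameterization(target_T_source.data(), new oc::SE3LocalParameterization());

      ceres::Solver::Options options;
      ceres::Solver::Summary summary;
      ceres::Solve(options, &problem, &summary);

      // Recover the optimized transform as an Eigen isometry.
      const Eigen::Isometry3d target_T_source_estimate = oc::Isometry3d(target_T_source);
      std::cout << "estimated translation: " << target_T_source_estimate.translation().transpose() << std::endl;
      return 0;
    }

The same pattern applies to the point-to-plane and reprojection residuals: each class owns its measurement, and its static `AddCostFunction` wires an auto-differentiated, Huber-robustified residual block onto the caller's parameter vectors.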
-cmake_minimum_required(VERSION 2.8.12) # Need get_filename_component DIRECTORY +cmake_minimum_required(VERSION 3.0) project(sparse_mapping) -PROTOBUF_GENERATE_CPP(PROTO_SRCS PROTO_HDRS protobuf/sparse_map.proto) -get_filename_component(PROTO_HDR_DIR ${PROTO_HDRS} DIRECTORY) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) -set(LIBS +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + ff_common camera - ${DBOW2_LIBRARIES} - ${OpenCV_LIBRARIES} - ${OPENMVG_LIBRARIES} - ${CERES_LIBRARIES} - ${PROTOBUF_LIBRARIES} interest_point config_reader - ff_common ) -set(INCLUDES - ${CERES_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${GFLAGS_INCLUDE_DIRS} - ${GLOG_INCLUDE_DIRS} - ${NGRAPH_INCLUDE_DIRS} + +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) +find_package(Eigen3 REQUIRED) +find_package(OpenMVG QUIET REQUIRED) +find_package(Protobuf REQUIRED) +find_package(dbow2 REQUIRED) + + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + + +catkin_package( + INCLUDE_DIRS include + LIBRARIES sparse_mapping + CATKIN_DEPENDS ff_msgs ff_common camera interest_point config_reader +) + +########### +## Build ## +########### + +# Build the protobuf message +PROTOBUF_GENERATE_CPP(PROTO_SRCS PROTO_HDRS protobuf/sparse_map.proto) +get_filename_component(PROTO_HDR_DIR ${PROTO_HDRS} DIRECTORY) + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ${OpenCV_INCLUDE_DIRS} ${OPENMVG_INCLUDE_DIRS} ${PROTOBUF_INCLUDE_DIRS} ${PROTO_HDR_DIR} ${DBOW2_INCLUDE_DIRS} ) -set(SPARSE_MAPPING_LIBRARIES sparse_mapping ${LIBS}) -catkin_package( - INCLUDE_DIRS include - LIBRARIES sparse_mapping - CATKIN_DEPENDS camera interest_point config_reader ff_common +# Declare C++ libraries +add_library(sparse_mapping + src/ransac.cc + src/reprojection.cc + src/sparse_map.cc + src/sparse_mapping.cc + src/tensor.cc + src/vocab_tree.cc + ${PROTO_SRCS} +) +add_dependencies(sparse_mapping ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sparse_mapping + ${catkin_LIBRARIES} + ${DBOW2_LIBRARIES} + ${OpenCV_LIBRARIES} + ${OPENMVG_LIBRARIES} + ${CERES_LIBRARIES} + ${PROTOBUF_LIBRARIES} ) -create_library(TARGET sparse_mapping - LIBS ${LIBS} - INC include ${INCLUDES} - DEPS ${DEPS} - ADD_SRCS ${PROTO_SRCS} ) if (NOT USE_CTC) - create_tool_targets(DIR tools - LIBS ${LIBS} sparse_mapping - INC ${INCLUDES} - DEPS ${DEPS} - EXCLUDE ${EXCLUDE}) +## Declare a C++ executable: build_map +add_executable(build_map tools/build_map.cc) +add_dependencies(build_map ${catkin_EXPORTED_TARGETS}) +target_link_libraries(build_map + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: evaluate_camera +add_executable(evaluate_camera tools/evaluate_camera.cc) +add_dependencies(evaluate_camera ${catkin_EXPORTED_TARGETS}) +target_link_libraries(evaluate_camera + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: evaluate_localization +add_executable(evaluate_localization tools/evaluate_localization.cc) +add_dependencies(evaluate_localization ${catkin_EXPORTED_TARGETS}) +target_link_libraries(evaluate_localization + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: extract_camera_info +add_executable(extract_camera_info tools/extract_camera_info.cc) +add_dependencies(extract_camera_info ${catkin_EXPORTED_TARGETS}) 
+target_link_libraries(extract_camera_info + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: extract_submap +add_executable(extract_submap tools/extract_submap.cc) +add_dependencies(extract_submap ${catkin_EXPORTED_TARGETS}) +target_link_libraries(extract_submap + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: generate_localization_test +add_executable(generate_localization_test tools/generate_localization_test.cc) +add_dependencies(generate_localization_test ${catkin_EXPORTED_TARGETS}) +target_link_libraries(generate_localization_test + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: gen_trajectory +add_executable(gen_trajectory tools/gen_trajectory.cc) +add_dependencies(gen_trajectory ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gen_trajectory + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: import_map +add_executable(import_map tools/import_map.cc) +add_dependencies(import_map ${catkin_EXPORTED_TARGETS}) +target_link_libraries(import_map + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: localize_cams +add_executable(localize_cams tools/localize_cams.cc) +add_dependencies(localize_cams ${catkin_EXPORTED_TARGETS}) +target_link_libraries(localize_cams + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: localize +add_executable(localize tools/localize.cc) +add_dependencies(localize ${catkin_EXPORTED_TARGETS}) +target_link_libraries(localize + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: merge_maps +add_executable(merge_maps tools/merge_maps.cc) +add_dependencies(merge_maps ${catkin_EXPORTED_TARGETS}) +target_link_libraries(merge_maps + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: nvm_visualize +add_executable(nvm_visualize tools/nvm_visualize.cc) +add_dependencies(nvm_visualize ${catkin_EXPORTED_TARGETS}) +target_link_libraries(nvm_visualize + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: parse_cam +add_executable(parse_cam tools/parse_cam.cc) +add_dependencies(parse_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(parse_cam + sparse_mapping gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: select_images +add_executable(select_images tools/select_images.cc) +add_dependencies(select_images ${catkin_EXPORTED_TARGETS}) +target_link_libraries(select_images + sparse_mapping gflags glog ${catkin_LIBRARIES}) endif (NOT USE_CTC) + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -74,7 +186,7 @@ if(CATKIN_ENABLE_TESTING) test/test_build_db.cc ) target_link_libraries(test_build_db - sparse_mapping + sparse_mapping glog ) add_rostest_gtest(test_nvm_fileio @@ -106,17 +218,25 @@ if(CATKIN_ENABLE_TESTING) test/test_sparse_map.cc ) target_link_libraries(test_sparse_map - sparse_mapping + sparse_mapping glog ) endif() -set(SPARSE_MAPPING_INCLUDE_DIRS - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${INTEREST_INCLUDE_DIRS} - ${CAMERA_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${CERES_INCLUDE_DIRS}) +############# +## Install ## +############# -set(SPARSE_MAPPING_LIBRARIES ${SPARSE_MAPPING_LIBRARIES} PARENT_SCOPE) -set(SPARSE_MAPPING_INCLUDE_DIRS ${SPARSE_MAPPING_INCLUDE_DIRS} PARENT_SCOPE) +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION 
${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/localization/sparse_mapping/build_map.md b/localization/sparse_mapping/build_map.md index 216c821687..8a774e8fa3 100644 --- a/localization/sparse_mapping/build_map.md +++ b/localization/sparse_mapping/build_map.md @@ -4,9 +4,9 @@ Here we describe how to build a map. ## Summary -1. Reduce the number of images. +1. Set up the environment. -2. Set up the environment. +2. Reduce the number of images. 3. Build the map. @@ -16,6 +16,48 @@ Here we describe how to build a map. # Detailed explanation +## Setup the environment + +In the first step, one needs to set some environmental variables, as +follows: + + export ASTROBEE_SOURCE_PATH=$HOME/astrobee/src + export ASTROBEE_BUILD_PATH=$HOME/astrobee + export ASTROBEE_RESOURCE_DIR=$ASTROBEE_SOURCE_PATH/astrobee/resources + export ASTROBEE_CONFIG_DIR=$ASTROBEE_SOURCE_PATH/astrobee/config + export ASTROBEE_ROBOT=p4d + export ASTROBEE_WORLD=granite + +The source and build paths need to be adjusted for your particular +setup. + +Also consider setting: + + export PATH=$ASTROBEE_BUILD_PATH/devel/lib/sparse_mapping:$PATH + +to have the ``build_map`` and other related tools in your path. + +Above, ``p4d`` is the robot being used to take pictures, and the world +is the granite table. These may need to change, depending on your +goals. + +Under the hood, the following configuration files will be read: + + $ASTROBEE_CONFIG_DIR/cameras.config + +which contains the image width and height (the camera we use is +the nav cam) and + + $ASTROBEE_CONFIG_DIR/robots/$ASTROBEE_ROBOT.config + +having nav cam's intrinsics. If your camera is not the nav cam on p4d, +and none of the other available config files apply, you can just +temporarily modify the above files to reflect your camera's parameters +(without checking in your changes). + +More details on these and other environmental variables can be found +in the \subpage astrobee configuration documentation. + ## Reduce the number of images Here, we delete the images that overlap highly. (This tool is, like all @@ -30,14 +72,14 @@ The higher the value of the density factor, the more images will be kept. Some experimentation with this number is necessary. A value of 1.4 seems to work well. It may be needed to decrease this to 1.0 if images appear to be too dense. Ideally the images should have perhaps -on the order of 2/3 to 3/4 of overlap. This tool is not perfect. One -should inspect the images in the `eog` viewer, and delete redundant +on the order of 3/4 to 4/5 of overlap. This tool is not perfect. One +should inspect the images in the ``eog`` viewer, and delete redundant ones from it manually, using the Delete key. -The images can also be inspected and deleted with nvm_visualize, a -tool included with this software. See readme.md for details. This -tool, unlike eog, echoes each image name as it is displayed, which can -be useful with image manipulation tasks. +The images can also be inspected and deleted with ``nvm_visualize``, a +tool included with this software. See \subpage sparsemapping for +details. This tool, unlike ``eog``, echoes each image name as it is +displayed, which can be useful with image manipulation tasks. 
If the images do not have enough overlap, the selection tool needs to be run again with a different value of this factor, or otherwise @@ -55,35 +97,6 @@ images, as then the map could be of poor quality. Hence, the robot should have some translation motion (in addition to any rotation) when the data is acquired. -## Setup the environment - -In the first step, one needs to set some environmental variables, as -follows: - - export ASTROBEE_RESOURCE_DIR=$SOURCE_PATH/astrobee/resources - export ASTROBEE_CONFIG_DIR=$SOURCE_PATH/astrobee/config - export ASTROBEE_ROBOT=p4d - export ASTROBEE_WORLD=granite - -Here, `p4d` is the robot being used to take pictures, and the world is -the granite table. These may need to change, depending on your -goals. Under the hood, the following configuration files will be read: - - $ASTROBEE_CONFIG_DIR/cameras.config - -which contains the image width and height (the camera we use is -the nav cam) and - - $ASTROBEE_CONFIG_DIR/robots/$ASTROBEE_ROBOT.config - -having nav cam's intrinsics. If your camera is not the nav cam on p4d, -and none of the other available config files apply, you can just -temporarily modify the above files to reflect your camera's parameters -(without checking in your changes). - -More details on these and other environmental variables can be found in - - $SOURCE_PATH/astrobee/readme.md ## Building a map @@ -110,8 +123,8 @@ before doing feature detection. It was shown to create maps that are more robust to illumination changes. In practice, the map is build in pieces, and then merged. Then the -above process needs to be modified. See readme.md in this directory -for how this approach should go. +above process needs to be modified. See \subpage sparsemapping for the +procedure. ### Map building pipeline @@ -157,11 +170,12 @@ consistently across multiple frames. build_map -bundle_adjustment -histogram_equalization - Adjust the initial transformations to minimize error with bundle adjustment. +Adjust the initial transformations to minimize error with bundle +adjustment. -If the options: +If the options: - -first_ba_index and -last_ba_index + -first_ba_index and -last_ba_index are specified, only cameras with indices between these (including both endpoints) will be optimized during bundle adjustment. @@ -198,8 +212,8 @@ would drift from each other. If it is desired to take out images from the map, it should happen at this stage, before the vocabulary database and pruning happens at the -next step. See readme.md when it comes to such operations, where the -script grow_map.py is used. +next step. See \subpage sparsemapping when it comes to such +operations, where the script grow_map.py is used. #### Vocabulary database @@ -290,8 +304,8 @@ The xyz locations of the control points for the granite lab, the ISS and MGTF are mentioned below. If a set of world coordinates needs to be acquired, one can use the -Total Station, as described in the [total station](total_station.md) -documentation. +\subpage total_station. (Alternatively one can can try the +\subpage faro instrument but that is more technically involved.) Register the map with the command: @@ -352,7 +366,7 @@ new image set. ### Registration in the granite lab See the xyz coordinates of the control points used for registration in -[granite_lab_registration.md](granite_lab_registration.md) +the \subpage granite_lab_registration section. 
### Registration on the ISS @@ -363,8 +377,12 @@ be visualized in the ISS as follows: Open two terminals, and in each one type: - export BUILD_PATH=$HOME/astrobee_build/native - source $BUILD_PATH/devel/setup.bash + export ASTROBEE_BUILD_PATH=$HOME/astrobee + source $ASTROBEE_BUILD_PATH/devel/setup.bash + +The Astrobee directory above must have ``src`` and ``devel`` +subdirectories, and needs to be adjusted given its location on your +disk. In the first terminal start the simulator: @@ -372,8 +390,8 @@ In the first terminal start the simulator: In the second, run: - python $SOURCE_PATH/localization/sparse_mapping/tools/view_control_points.py \ - $SOURCE_PATH/localization/sparse_mapping/iss_registration.txt + python $ASTROBEE_SOURCE_PATH/localization/sparse_mapping/tools/view_control_points.py \ + $ASTROBEE_SOURCE_PATH/localization/sparse_mapping/iss_registration.txt Go back to the simulated ISS and examine the registration points. If the Rviz display looks too cluttered, most topics can be turned off. @@ -409,9 +427,9 @@ Python command will refresh them. ### Registration in the MGTF A set of 10 registration points were measured in the MGTF with the -Total Station. They are in the file +\subpage total_station. They are in the file: - $SOURCE_PATH/localization/sparse_mapping/mgtf_registration.txt + $ASTROBEE_SOURCE_PATH/localization/sparse_mapping/mgtf_registration.txt Two of these are on the back wall, and the rest are on the metal columns on the side walls, with four on each wall. Half of the points @@ -427,7 +445,8 @@ be needed to identify them. A registered and bundle-adjusted map can be used to study how well it predicts the computed 3D locations for an independently acquired set of control points and 3D measurements. These are in the same format as -for registration. The map is not modified in any way during this step. +for registration. The map is not modified in any way during this step, +The command is: build_map -verification @@ -459,7 +478,7 @@ To test how the map may perform on the robot, do the following: ### Stage the feature counter utility (should be added to the install at one point): - scp $SOURCE_PATH/localization/marker_tracking/ros/tools/features_counter.py mlp: + scp $ASTROBEE_SOURCE_PATH/localization/marker_tracking/ros/tools/features_counter.py mlp: ### Launch the localization node on LLP @@ -474,7 +493,8 @@ this robot will give wrong results for other users. Then launch localization: ssh llp - roslaunch astrobee astrobee.launch llp:=disabled mlp:=mlp nodes:=framestore,dds_ros_bridge,localization_node + roslaunch astrobee astrobee.launch llp:=disabled mlp:=mlp \ + nodes:=framestore,dds_ros_bridge,localization_node ### Enable localization and the mapped landmark production (on MLP) @@ -529,9 +549,9 @@ computations may differ. Set up the environment in every terminal that is used. Ensure that you use the correct robot name below. 
- source $BUILD_PATH/devel/setup.bash - export ASTROBEE_RESOURCE_DIR=$SOURCE_PATH/astrobee/resources - export ASTROBEE_CONFIG_DIR=$SOURCE_PATH/astrobee/config + source $ASTROBEE_BUILD_PATH/devel/setup.bash + export ASTROBEE_RESOURCE_DIR=$ASTROBEE_SOURCE_PATH/astrobee/resources + export ASTROBEE_CONFIG_DIR=$ASTROBEE_SOURCE_PATH/astrobee/config export ASTROBEE_WORLD=iss export ASTROBEE_ROBOT=bumble # your robot's name may be different export ROS_MASTER_URI=http://127.0.0.1:11311/ @@ -542,9 +562,9 @@ Examine the localization configuration file: Sym link the map to test: - mkdir -p $SOURCE_PATH/astrobee/resources/maps - rm -fv $SOURCE_PATH/astrobee/resources/maps/iss.map - ln -s $(pwd)/mymap.map $SOURCE_PATH/astrobee/resources/maps/iss.map + mkdir -p $ASTROBEE_SOURCE_PATH/astrobee/resources/maps + rm -fv $ASTROBEE_SOURCE_PATH/astrobee/resources/maps/iss.map + ln -s $(pwd)/mymap.map $ASTROBEE_SOURCE_PATH/astrobee/resources/maps/iss.map Start the localization node: @@ -570,9 +590,9 @@ and compared to the old ones via: ## Evaluating the map without running the localization node -See astrobee/tools/ekf_bag/readme.md for how to run the -sparse_map_eval tool that takes as inputs a bag and a BRISK map and -prints the number of detected features. +See the \subpage ekfbag page for how to run the ``sparse_map_eval`` +tool that takes as inputs a bag and a BRISK map and prints the number +of detected features. Note that this approach may give slightly different results than using the localization node, and even with using this node, things can diff --git a/localization/sparse_mapping/granite_xyz_controlPoints.txt b/localization/sparse_mapping/granite_xyz_controlPoints.txt old mode 100755 new mode 100644 diff --git a/localization/sparse_mapping/include/sparse_mapping/reprojection.h b/localization/sparse_mapping/include/sparse_mapping/reprojection.h index 7fd0982eee..3382ff9d98 100644 --- a/localization/sparse_mapping/include/sparse_mapping/reprojection.h +++ b/localization/sparse_mapping/include/sparse_mapping/reprojection.h @@ -26,6 +26,7 @@ #include #include #include +#include namespace camera { class CameraModel; @@ -69,8 +70,8 @@ void BundleAdjust(std::vector > const& pid_to_cid_fid, ceres::Solver::Options const& options, ceres::Solver::Summary* summary, int first = 0, int last = std::numeric_limits::max(), - bool fix_cameras = false, bool fix_all_xyz = false); - + bool fix_cameras = false, + std::set const& fixed_cameras = std::set()); /** * Perform bundle adjustment. @@ -79,15 +80,14 @@ void BundleAdjust(std::vector > const& pid_to_cid_fid, * meant to be used to do 2 or 3 camera refinements however it can do * N cameras just fine. 
* - **/ -void BundleAdjust(std::vector const& features_n, - double focal_length, - std::vector * cam_t_global_n, - Eigen::Matrix3Xd * pid_to_xyz, - ceres::LossFunction * loss, - ceres::Solver::Options const& options, - ceres::Solver::Summary * summary); +void BundleAdjustSmallSet(std::vector const& features_n, + double focal_length, + std::vector * cam_t_global_n, + Eigen::Matrix3Xd * pid_to_xyz, + ceres::LossFunction * loss, + ceres::Solver::Options const& options, + ceres::Solver::Summary * summary); // Random integer between min (inclusive) and max (exclusive) int RandomInt(int min, int max); diff --git a/localization/sparse_mapping/include/sparse_mapping/sparse_map.h b/localization/sparse_mapping/include/sparse_mapping/sparse_map.h index 10f55a4b2d..f31491ac3b 100644 --- a/localization/sparse_mapping/include/sparse_mapping/sparse_map.h +++ b/localization/sparse_mapping/include/sparse_mapping/sparse_map.h @@ -305,6 +305,9 @@ struct SparseMap { SparseMap(); SparseMap(SparseMap &); SparseMap& operator=(const SparseMap&); + + // Reorder the images in the map and the rest of the data accordingly + void reorderMap(std::map const& old_cid_to_new_cid); }; } // namespace sparse_mapping diff --git a/localization/sparse_mapping/include/sparse_mapping/tensor.h b/localization/sparse_mapping/include/sparse_mapping/tensor.h index d2f7501323..f9bdc0a11b 100644 --- a/localization/sparse_mapping/include/sparse_mapping/tensor.h +++ b/localization/sparse_mapping/include/sparse_mapping/tensor.h @@ -85,14 +85,16 @@ namespace sparse_mapping { * Improve the map with bundle adjustment. Vary only the cameras * between given indices. **/ - void BundleAdjust(bool fix_cameras, sparse_mapping::SparseMap * map); + void BundleAdjust(bool fix_all_cameras, sparse_mapping::SparseMap * map, + std::set const& fixed_cameras = std::set()); void BundleAdjustment(sparse_mapping::SparseMap * s, ceres::LossFunction * loss, const ceres::Solver::Options & options, ceres::Solver::Summary * summary, int first = 0, int last = std::numeric_limits::max(), - bool fix_cameras = false); + bool fix_all_cameras = false, + std::set const& fixed_cameras = std::set()); /** Append map file. @@ -100,7 +102,7 @@ namespace sparse_mapping { void AppendMapFile(std::string const& mapOut, std::string const& mapIn, int num_image_overlaps_at_endpoints, double outlier_factor, - bool bundle_adjust); + bool bundle_adjust, bool fix_first_map); /** Merge two maps. diff --git a/localization/sparse_mapping/package.xml b/localization/sparse_mapping/package.xml index 8e8801bda8..79fab36473 100644 --- a/localization/sparse_mapping/package.xml +++ b/localization/sparse_mapping/package.xml @@ -14,15 +14,17 @@ Astrobee Flight Software catkin + ff_msgs + ff_common camera roscpp interest_point config_reader - ff_common roscpp + ff_msgs + ff_common camera interest_point config_reader - ff_common diff --git a/localization/sparse_mapping/readme.md b/localization/sparse_mapping/readme.md index 9e9aa4e4cc..10164f5c98 100644 --- a/localization/sparse_mapping/readme.md +++ b/localization/sparse_mapping/readme.md @@ -1,4 +1,4 @@ -\page sparsemapping Sparse Mapping +\page sparsemapping Sparse mapping # Creation of sparse maps for robot localization @@ -14,19 +14,35 @@ Maps are stored as protobuf files. ## ROS node -The ROS node takes images and a map as input, and outputs visual features -detected in the image and their 3D coordinates. +The ROS node takes images and a map as input, and outputs visual +features detected in the image and their 3D coordinates. 
### Inputs -* `/hw/cam_nav`: Camera images -* The map file. See [build_map](build_map.md) at the bottom for its assumed location. +* `/hw/cam_nav`: Camera images The map file. See the \subpage + map_building section (towards the bottom) for its assumed location + on the robot. ### Outputs * `/localization/mapped_landmarks/features` * `/localization/mapped_landmarks/registration` +## The environment + +It will be convenient in this document to set these environmental +variables, pointing to the source and build directories. They may need +to be adjusted given your setup. + + export ASTROBEE_SOURCE_PATH=$HOME/astrobee/src + export ASTROBEE_BUILD_PATH=$HOME/astrobee + +To access many of the localization tools, such as ``build_map``, +``merge_maps``, etc., without specifying a full path, also consider +setting: + + export PATH=$ASTROBEE_BUILD_PATH/devel/lib/sparse_mapping:$PATH + ## Tools and procedures ### Record a bag @@ -50,19 +66,21 @@ Usually the bags are acquired at a very high frame rate, and they are huge. A preliminary filtering of the bag images while still on the robot can be done with the command: - rosbag filter input.bag output.bag "(topic == '/hw/cam_nav') and (float(t.nsecs)/1e+9 <= 0.1)" + rosbag filter input.bag output.bag \ + "(topic == '/hw/cam_nav') and (float(t.nsecs)/1e+9 <= 0.1)" Here, for every second of recorded data, we keep only the first tenth of a second. This number may need to be adjusted. Later, a further selection of the images can be done. -### Copy the bag from the robot: +### Copy the bag from the robot From the local machine, fetch the bag: rsync -avzP astrobee@10.42.0.32:/data/bagfile.bag . -Here, the IP address of P4D was used, which may differ from your robot's IP address. +Here, the IP address of P4D was used, which may differ from your +robot's IP address. ### Merging bags @@ -70,26 +88,26 @@ The bags created on the ISS are likely split into many smaller bags, for easy and reliability of transfer. Those can be merged into one bag as follows: - astrobee_build/devel/lib/localization_node/merge_bags \ + $ASTROBEE_BUILD_PATH/devel/lib/localization_node/merge_bags \ -output_bag ### Extracting images To extract images from a bag file: - extract_image_bag -use_timestamp_as_image_name \ - -image_topic /hw/cam_nav -output_directory + $ASTROBEE_BUILD_PATH/devel/lib/localization_node/extract_image_bag \ + -use_timestamp_as_image_name \ + -image_topic /hw/cam_nav -output_directory -The above assumes that the software was built with ROS on. This tool should -exist in astrobee_build/native. +The above assumes that the software was built with ROS on. Please check using 'rosbag info' the nav cam topic in the bag, as its name can change. ### Building a map -The `build_map` tools aids in constructing a map. See -[build_map](build_map.md) for further details. +The ``build_map`` tools is used to construct a map. See \subpage +map_building for further details. ### Visualization @@ -135,7 +153,7 @@ collecting a subset of the images. (After clicking, a bug in OpenCV disables the arrow keys, then one can navigate with the "Ins" and "Del" keys on the numpad.) -This tool can be invoked to just look at images, without any map being +This tool can be invoked to look at just images, without any map being built. It can also delete images in this mode, with the 'Delete' and 'x' keys, if invoked as: @@ -144,7 +162,7 @@ built. 
It can also delete images in this mode, with the 'Delete' and ### Localize a single frame All the commands below assume that the environment was set up, -as specified in build_map.md. +as specified in the \subpage map_building section. To test localization of a single frame, use the command: @@ -187,19 +205,15 @@ This functionality is implemented in the localize_cams tool. Usage: Here we use values that are different from - astrobee/config/localization.config + $ASTROBEE_SOURCE_PATH/astrobee/config/localization.config which are used for localization on the robot, since those are optimized for speed and here we want more accuracy. ### Testing localization using a bag -See: - - astrobee/tools/ekf_bag/readme.md - -for how to see how well a BRISK map with a vocabulary database does -when localizing images from a bag. +See the \subpage ekfbag page for how to study how well a BRISK map +with a vocabulary database does when localizing images from a bag. ### Extract sub-maps @@ -238,7 +252,7 @@ need to be rebuilt for the extracted submap using build_map -vocab_db -#### Merge maps +### Merge maps Given a set of SURF maps, they can be merged using the command: @@ -246,8 +260,8 @@ Given a set of SURF maps, they can be merged using the command: -num_image_overlaps_at_endpoints 50 It is very important to note that only maps with SURF features (see -build_map.md) can be merged. If a map has BRISK features, it needs to -be rebuilt with SURF features, as follows: +\subpage map_building) can be merged. If a map has BRISK features, it +needs to be rebuilt with SURF features, as follows: build_map -rebuild -histogram_equalization \ -rebuild_detector SURF -output_map @@ -283,13 +297,10 @@ option until the final map is computed, as this step can be time-consuming. If the first of the two maps to merge is already registered, it may be -desirable to keep that portion fixed during merging. To achieve that, -the merging can be done without bundle adjustment, and then build_map -can be invoked only to do bundle adjustment, while specifying the -range of cameras to optimize (the ones from the second map). See -build_map.md for details. +desirable to keep that portion fixed during merging when bundle +adjustment happens. That is accomplished with the flag -fix_first_map. -#### How to build a map efficiently +### How to build a map efficiently Often times map-building can take a long time, or it can fail. A cautious way of building a map is to build it in portions (perhaps on @@ -326,12 +337,12 @@ maps using the command: images/*jpg -output_map examine them individually, merging them as appropriate, then -performing bundle adjustment and registration as per build_map.md. -Only when a good enough map is obtained, a renamed copy of it -should be rebuilt with BRISK features and a vocabulary database -to be used on the robot. +performing bundle adjustment and registration as per the \subpage +map_building section. Only when a good enough map is obtained, a +renamed copy of it should be rebuilt with BRISK features and a +vocabulary database to be used on the robot. -#### Map strategy for the space station +### Map strategy for the space station For the space station, there exists one large SURF map with many images, and a small BRISK map with fewer images. If new images are @@ -351,7 +362,7 @@ for 80 of them localization failed. So, things will change as follows: The precise details are described in the next section. 
-#### Growing a map when more images are acquired +### Growing a map when more images are acquired Sometimes things in the desired environment change enough, or the lighting changes, and an existing map may no longer do as well in some @@ -412,7 +423,7 @@ batch if they are not strictly necessary. The following Python code implements this: - python ~/astrobee/localization/sparse_mapping/tools/grow_map.py \ + python astrobee/src/localization/sparse_mapping/tools/grow_map.py \ -histogram_equalization -small_map prev_brisk_vocab_hist.map \ -big_map curr_brisk_no_prune_hist.map -work_dir work \ -output_map curr_brisk_vocab_hist.map \ @@ -431,7 +442,7 @@ cohesiveness. Also note that the grow_map.py script takes a lot of other parameters on input that must be the same as in localization.config. -#### Reducing the number of images in a map +### Reducing the number of images in a map Sometimes a map has too many similar images. The tool reduce_map.py attempts to reduce their number without sacrificing the map quality. @@ -442,10 +453,11 @@ need not have a vocab db. Usage: - python reduce_map.py -input_map -min_brisk_threshold \ - -default_brisk_threshold -max_brisk_threshold \ - -localization_error -work_dir \ - -sample_rate -histogram_equalization + python astrobee/src/localization/sparse_mapping/tools/reduce_map.py \ + -input_map -min_brisk_threshold \ + -default_brisk_threshold -max_brisk_threshold \ + -localization_error -work_dir \ + -sample_rate -histogram_equalization The BRISK thresholds here must be as when the map was built (ideally like in localization.config). The -histogram_equalization flag is @@ -480,9 +492,3 @@ Instead of taking images out of the map randomly, one can start with a reduced map with a small list of desired images which can be set with -image_list, and then all images for which localization fails will be added back to it. - - -\subpage map_building -\subpage total_station -\subpage granite_lab_registration -\subpage using_faro \ No newline at end of file diff --git a/localization/sparse_mapping/src/reprojection.cc b/localization/sparse_mapping/src/reprojection.cc index a5b9ea9490..5bf243f717 100644 --- a/localization/sparse_mapping/src/reprojection.cc +++ b/localization/sparse_mapping/src/reprojection.cc @@ -94,22 +94,18 @@ struct ReprojectionError { }; void BundleAdjust(std::vector > const& pid_to_cid_fid, - std::vector const& cid_to_keypoint_map, - double focal_length, - std::vector * cid_to_cam_t_global, - std::vector * pid_to_xyz, + std::vector const& cid_to_keypoint_map, double focal_length, + std::vector* cid_to_cam_t_global, std::vector* pid_to_xyz, std::vector > const& user_pid_to_cid_fid, - std::vector const& user_cid_to_keypoint_map, - std::vector * user_pid_to_xyz, - ceres::LossFunction * loss, - ceres::Solver::Options const& options, - ceres::Solver::Summary* summary, - int first, int last, bool fix_cameras, bool fix_all_xyz) { + std::vector const& user_cid_to_keypoint_map, + std::vector* user_pid_to_xyz, ceres::LossFunction* loss, + ceres::Solver::Options const& options, ceres::Solver::Summary* summary, int first, int last, + bool fix_all_cameras, std::set const& fixed_cameras) { // Perform bundle adjustment. Keep fixed all cameras with cid // not within [first, last] and all xyz points which project only // onto fixed cameras. - // If provided, use user-set info. + // If provided, use user-set registration points in the second pass. 
// Allocate space for the angle axis representation of rotation std::vector camera_aa_storage(3 * cid_to_cam_t_global->size()); @@ -127,7 +123,11 @@ void BundleAdjust(std::vector > const& pid_to_cid_fid, // Ideally the block inside of the loop below must be a function call, // but the compiler does not handle that correctly with ceres. // So do this by changing where things are pointing. - for (int pass = 0; pass < 2; pass++) { + + int num_passes = 1; + if (!user_pid_to_xyz->empty()) num_passes = 2; // A second pass using control points + + for (int pass = 0; pass < num_passes; pass++) { std::vector > const * p_pid_to_cid_fid; std::vector const * p_cid_to_keypoint_map; std::vector * p_pid_to_xyz; @@ -166,12 +166,13 @@ void BundleAdjust(std::vector > const& pid_to_cid_fid, &p_pid_to_xyz->at(pid)[0], &focal_length); - if (fix_cameras || (cid_fid.first < first || cid_fid.first > last)) { + if (fix_all_cameras || (cid_fid.first < first || cid_fid.first > last) || + fixed_cameras.find(cid_fid.first) != fixed_cameras.end()) { problem.SetParameterBlockConstant(&cid_to_cam_t_global->at(cid_fid.first).translation()[0]); problem.SetParameterBlockConstant(&camera_aa_storage[3 * cid_fid.first]); } } - if (fix_pid || pass == 1 || fix_all_xyz) { + if (fix_pid || pass == 1) { // Fix pids which don't project in cameras that are floated. // Also, must not float points given by the user, those are measurements // we are supposed to reference ourselves against, and floating @@ -195,13 +196,14 @@ void BundleAdjust(std::vector > const& pid_to_cid_fid, } } -void BundleAdjust(std::vector const& features_n, - double focal_length, - std::vector * cam_t_global_n, - Eigen::Matrix3Xd * pid_to_xyz, - ceres::LossFunction * loss, - ceres::Solver::Options const& options, - ceres::Solver::Summary * summary) { +// This is a very specialized function +void BundleAdjustSmallSet(std::vector const& features_n, + double focal_length, + std::vector * cam_t_global_n, + Eigen::Matrix3Xd * pid_to_xyz, + ceres::LossFunction * loss, + ceres::Solver::Options const& options, + ceres::Solver::Summary * summary) { CHECK(cam_t_global_n) << "Variable cam_t_global_n needs to be defined"; CHECK(cam_t_global_n->size() == features_n.size()) << "Variables features_n and cam_t_global_n need to agree on the number of cameras"; diff --git a/localization/sparse_mapping/src/sparse_map.cc b/localization/sparse_mapping/src/sparse_map.cc index 6bef6a6c73..d536fb436b 100644 --- a/localization/sparse_mapping/src/sparse_map.cc +++ b/localization/sparse_mapping/src/sparse_map.cc @@ -37,6 +37,9 @@ #include #include +#include +#include + #include #include #include @@ -119,24 +122,80 @@ SparseMap::SparseMap(const std::vector& cid_to_cam_t, } // Form a sparse map by reading a text file from disk. This is for comparing -// the bundler and theia maps with ours. -SparseMap::SparseMap(bool bundler_format, std::string const& filename, - std::vector const& files) : - camera_params_(Eigen::Vector2i(640, 480), Eigen::Vector2d::Constant(300), - Eigen::Vector2d(320, 240)), - num_similar_(FLAGS_num_similar), - num_ransac_iterations_(FLAGS_num_ransac_iterations), - ransac_inlier_tolerance_(FLAGS_ransac_inlier_tolerance), - early_break_landmarks_(FLAGS_early_break_landmarks), - histogram_equalization_(FLAGS_histogram_equalization) { - int num_cams; +// bundler, nvm or theia maps. 
+SparseMap::SparseMap(bool bundler_format, std::string const& filename, std::vector const& all_image_files) + : camera_params_(Eigen::Vector2i(640, 480), Eigen::Vector2d::Constant(300), + Eigen::Vector2d(320, 240)), // these are placeholders and must be changed + num_similar_(FLAGS_num_similar), + num_ransac_iterations_(FLAGS_num_ransac_iterations), + ransac_inlier_tolerance_(FLAGS_ransac_inlier_tolerance), + early_break_landmarks_(FLAGS_early_break_landmarks), + histogram_equalization_(FLAGS_histogram_equalization) { + std::string ext = ff_common::file_extension(filename); + boost::to_lower(ext); + + if (ext == "nvm") { + std::cout << "NVM format detected." << std::endl; + + sparse_mapping::ReadNVM(filename, &cid_to_keypoint_map_, &cid_to_filename_, &pid_to_cid_fid_, &pid_to_xyz_, + &cid_to_cam_t_global_); + + // Descriptors are not saved, so let them be empty + cid_to_descriptor_map_.resize(cid_to_keypoint_map_.size()); + + // When the NVM file is created by Theia, it saves the images + // without a path, in random order, and it may not have used up + // all the images, so need to adjust for that. + + std::map base_to_full_path; + std::map orig_order; + for (size_t it = 0; it < all_image_files.size(); it++) { + std::string image = all_image_files[it]; + std::string base = boost::filesystem::path(image).filename().string(); + if (base_to_full_path.find(base) != base_to_full_path.end()) + LOG(FATAL) << "Duplicate image: " << base << std::endl; + base_to_full_path[base] = image; + orig_order[it] = base; + } + + // Find the permutation which will tell how to reorder the images + // in the nvm file to be in the original order. This must happen + // before we change cid_to_filename_ below. + std::map old_cid_to_new_cid; + std::map base2cid; + for (size_t it = 0; it < cid_to_filename_.size(); it++) base2cid[cid_to_filename_[it]] = it; + int new_cid = 0; + for (auto order_it = orig_order.begin(); order_it != orig_order.end() ; order_it++) { + auto base_it = base2cid.find(order_it->second); + if (base_it == base2cid.end()) continue; // Not all input images may be present in the map + + int old_cid = base_it->second; + old_cid_to_new_cid[old_cid] = new_cid; + new_cid++; + } + + // Map the theia images to the actual image paths + for (size_t it = 0; it < cid_to_filename_.size(); it++) { + std::string base = cid_to_filename_[it]; + auto map_it = base_to_full_path.find(base); + if (map_it == base_to_full_path.end()) + LOG(FATAL) << "The input file list is missing the nvm map image: " << base << std::endl; + cid_to_filename_[it] = map_it->second; + } + + // Apply the permutation + reorderMap(old_cid_to_new_cid); + + } else if (bundler_format) { + std::cout << "Bundler format detected." 
<< std::endl; + + int num_cams = 0; - if (bundler_format) { std::ifstream is(filename.c_str()); std::string line; std::getline(is, line); // empty line is >> num_cams; - cid_to_filename_ = files; + cid_to_filename_ = all_image_files; cid_to_cam_t_global_.resize(num_cams); cid_to_filename_.resize(num_cams); @@ -165,53 +224,13 @@ SparseMap::SparseMap(bool bundler_format, std::string const& filename, cid_to_cam_t_global_[i].translation() = P; } - } else { - // Read the text dump of a theia map - - std::ifstream is(filename.c_str()); - is >> num_cams; - cid_to_cam_t_global_.resize(num_cams); - cid_to_filename_.resize(num_cams); - - for (int i = 0; i < num_cams; i++) { - std::string line; - std::getline(is, line); // empty line - std::getline(is, line); // actual data - cid_to_filename_[i] = line; - - Eigen::Matrix3d T; - for (int row = 0; row < T.rows(); row++) { - for (int col = 0; col < T.cols(); col++) { - is >> T(row, col); - } - } - - Eigen::Vector3d P; - for (int row = 0; row < P.size(); row++) - is >> P[row]; - - cid_to_cam_t_global_[i].linear() = T.transpose(); // not sure - cid_to_cam_t_global_[i].translation() = P; - cid_to_cam_t_global_[i] = cid_to_cam_t_global_[i].inverse(); - } - - int num_pts; - is >> num_pts; - pid_to_xyz_.resize(num_pts); - pid_to_cid_fid_.resize(num_pts); - for (int i = 0; i < num_pts; i++) { - Eigen::Vector3d P; - for (int row = 0; row < P.size(); row++) { - is >> P[row]; - } - pid_to_xyz_[i] = P; - } - is.close(); + // Initialize other data expected in the map + cid_to_keypoint_map_.resize(num_cams); + cid_to_descriptor_map_.resize(num_cams); } - // Initialize other data expected in the map - cid_to_keypoint_map_.resize(num_cams); - cid_to_descriptor_map_.resize(num_cams); + // Initialize this convenient mapping + InitializeCidFidToPid(); } // Detect features in given images @@ -662,7 +681,7 @@ bool Localize(cv::Mat const& test_descriptors, } // To turn on verbose localization for debugging - // FREEFLYER_GFLAGS_NAMESPACE::SetCommandLineOption("verbose_localization", "true"); + // google::SetCommandLineOption("verbose_localization", "true"); // Find matches to each image in map. Do this in two passes. 
First, // find matches to all map images, then keep only num_similar @@ -853,6 +872,79 @@ void SparseMap::PruneMap(void) { #endif } +// Reorder the images in the map and the rest of the data accordingly +void SparseMap::reorderMap(std::map const& old_cid_to_new_cid) { + int num_cid = cid_to_filename_.size(); + + // Sanity checks + if (old_cid_to_new_cid.size() != cid_to_filename_.size()) + LOG(FATAL) << "Wrong size of the permutation in SparseMap::reorderMap()."; + for (auto it = old_cid_to_new_cid.begin(); it != old_cid_to_new_cid.end(); it++) { + int new_cid = it->second; + if (new_cid >= num_cid) LOG(FATAL) << "Out of bounds in the permutation in SparseMap::reorderMap()."; + } + + // Wipe things that we won't reorder + vocab_db_ = sparse_mapping::VocabDB(); + db_to_cid_map_.clear(); + cid_to_cid_.clear(); + user_cid_to_keypoint_map_.clear(); + user_pid_to_cid_fid_.clear(); + user_pid_to_xyz_.clear(); + cid_fid_to_pid_.clear(); // Will recreate this later + + // Must create temporary structures + std::vector new_cid_to_filename(num_cid); + std::vector new_cid_to_keypoint_map(num_cid); + std::vector new_cid_to_cam_t_global(num_cid); + std::vector new_cid_to_descriptor_map(num_cid); + std::vector> new_pid_to_cid_fid(pid_to_cid_fid_.size()); + + // Note that pid_to_xyz_ is not changed by this reordering + + // Copy the data in new order + for (int old_cid = 0; old_cid < num_cid; old_cid++) { + auto it = old_cid_to_new_cid.find(old_cid); + if (it == old_cid_to_new_cid.end()) + LOG(FATAL) << "Cannot find desired index in permutation in SparseMap::reorderMap()."; + + int new_cid = it->second; + + new_cid_to_filename[new_cid] = cid_to_filename_[old_cid]; + new_cid_to_keypoint_map[new_cid] = cid_to_keypoint_map_[old_cid]; + new_cid_to_cam_t_global[new_cid] = cid_to_cam_t_global_[old_cid]; + new_cid_to_descriptor_map[new_cid] = cid_to_descriptor_map_[old_cid]; + } + + // pid_to_cid_fid needs special treatment + for (size_t pid = 0; pid < pid_to_cid_fid_.size(); pid++) { + auto const& cid_fid = pid_to_cid_fid_[pid]; // alias + + std::map new_cid_fid; + for (auto cid_fid_it = cid_fid.begin(); cid_fid_it != cid_fid.end(); cid_fid_it++) { + int old_cid = cid_fid_it->first; + auto cid_it = old_cid_to_new_cid.find(old_cid); + if (cid_it == old_cid_to_new_cid.end()) + LOG(FATAL) << "Bookkeeping error in SparseMap::reorderMap()"; + + int new_cid = cid_it->second; + new_cid_fid[new_cid] = cid_fid_it->second; + } + + new_pid_to_cid_fid[pid] = new_cid_fid; + } + + // Swap in the new values + cid_to_filename_.swap(new_cid_to_filename); + cid_to_keypoint_map_.swap(new_cid_to_keypoint_map); + cid_to_cam_t_global_.swap(new_cid_to_cam_t_global); + cid_to_descriptor_map_.swap(new_cid_to_descriptor_map); + pid_to_cid_fid_.swap(new_pid_to_cid_fid); + + // Recreate cid_fid_to_pid_ from pid_to_cid_fid_. + InitializeCidFidToPid(); +} + bool SparseMap::Localize(const cv::Mat & image, camera::CameraModel* pose, std::vector* inlier_landmarks, std::vector* inlier_observations, diff --git a/localization/sparse_mapping/src/sparse_mapping.cc b/localization/sparse_mapping/src/sparse_mapping.cc index 666c16bc7a..d37297ed5c 100644 --- a/localization/sparse_mapping/src/sparse_mapping.cc +++ b/localization/sparse_mapping/src/sparse_mapping.cc @@ -231,7 +231,7 @@ void sparse_mapping::ReadNVM(std::string const& input_filename, f >> c[0] >> c[1] >> c[2] >> dist1 >> dist2; cid_to_filename->at(cid) = token; - // Solve for t .. 
which is part of the affine transform + // Solve for t, which is part of the affine transform Eigen::Matrix3d r = q.matrix(); cid_to_cam_t_global->at(cid).linear() = r; cid_to_cam_t_global->at(cid).translation() = -r * c; @@ -241,7 +241,7 @@ void sparse_mapping::ReadNVM(std::string const& input_filename, ptrdiff_t number_of_pid; f >> number_of_pid; if (number_of_pid < 1) { - LOG(FATAL) << "NVM file is missing points"; + LOG(FATAL) << "The NVM file has no triangulated points."; } // Read the point @@ -270,7 +270,7 @@ void sparse_mapping::ReadNVM(std::string const& input_filename, } if (!f.good()) - LOG(FATAL) << "Unable to correctly read PID " << pid; + LOG(FATAL) << "Unable to correctly read PID: " << pid; } } diff --git a/localization/sparse_mapping/src/tensor.cc b/localization/sparse_mapping/src/tensor.cc index 8efc3ef245..fec01b48bb 100644 --- a/localization/sparse_mapping/src/tensor.cc +++ b/localization/sparse_mapping/src/tensor.cc @@ -605,8 +605,8 @@ void CloseLoop(sparse_mapping::SparseMap * s) { &(s->cid_fid_to_pid_)); } -void BundleAdjust(bool fix_cameras, - sparse_mapping::SparseMap * map) { +void BundleAdjust(bool fix_all_cameras, sparse_mapping::SparseMap * map, + std::set const& fixed_cameras) { for (int i = 0; i < FLAGS_num_ba_passes; i++) { LOG(INFO) << "Beginning bundle adjustment, pass: " << i << ".\n"; @@ -619,16 +619,17 @@ void BundleAdjust(bool fix_cameras, options.max_num_iterations = FLAGS_max_num_iterations; options.minimizer_progress_to_stdout = true; ceres::Solver::Summary summary; - sparse_mapping::BundleAdjustment(map, - sparse_mapping::GetLossFunction(FLAGS_cost_function, - FLAGS_cost_function_threshold), - options, &summary, + ceres::LossFunction* loss = sparse_mapping::GetLossFunction(FLAGS_cost_function, + FLAGS_cost_function_threshold); + sparse_mapping::BundleAdjustment(map, loss, options, &summary, FLAGS_first_ba_index, FLAGS_last_ba_index, - fix_cameras); + fix_all_cameras, fixed_cameras); LOG(INFO) << summary.FullReport() << "\n"; - LOG(INFO) << "Starting Average Reprojection Error: " << summary.initial_cost / map->GetNumObservations(); - LOG(INFO) << "Final Average Reprojection Error: " << summary.final_cost / map->GetNumObservations(); + LOG(INFO) << "Starting average reprojection error: " + << summary.initial_cost / map->GetNumObservations(); + LOG(INFO) << "Final average reprojection error: " + << summary.final_cost / map->GetNumObservations(); } } @@ -636,13 +637,15 @@ void BundleAdjustment(sparse_mapping::SparseMap * s, ceres::LossFunction* loss, const ceres::Solver::Options & options, ceres::Solver::Summary* summary, - int first, int last, bool fix_cameras) { + int first, int last, bool fix_all_cameras, + std::set const& fixed_cameras) { sparse_mapping::BundleAdjust(s->pid_to_cid_fid_, s->cid_to_keypoint_map_, s->camera_params_.GetFocalLength(), &(s->cid_to_cam_t_global_), &(s->pid_to_xyz_), s->user_pid_to_cid_fid_, s->user_cid_to_keypoint_map_, &(s->user_pid_to_xyz_), - loss, options, summary, first, last, fix_cameras); + loss, options, summary, first, last, fix_all_cameras, + fixed_cameras); // First do BA, and only afterwards remove outliers. 
if (!FLAGS_skip_filtering) { @@ -655,15 +658,34 @@ void BundleAdjustment(sparse_mapping::SparseMap * s, // PrintTrackStats(s->pid_to_cid_fid_, "bundle adjustment and filtering"); } +// Check if the two arrays share elements +bool haveSharedElements(std::vector const& A, std::vector const& B) { + std::set setA; + for (size_t it = 0; it < A.size(); it++) setA.insert(A[it]); + + for (size_t it = 0; it < B.size(); it++) + if (setA.find(B[it]) != setA.end()) + return true; + + return false; +} + // Load two maps, merge the second one onto the first one, and save the result. void AppendMapFile(std::string const& mapOut, std::string const& mapIn, int num_image_overlaps_at_endpoints, - double outlier_factor, bool bundle_adjust) { + double outlier_factor, bool bundle_adjust, + bool fix_first_map) { + if (!bundle_adjust && fix_first_map) LOG(FATAL) << "Cannot fix first map if no bundle adjustment happens."; + LOG(INFO) << "Appending " << mapIn << " to " << mapOut << std::endl; sparse_mapping::SparseMap A(mapOut); sparse_mapping::SparseMap B(mapIn); + // Sanity check before we do a lot of work + if (fix_first_map && haveSharedElements(A.cid_to_filename_, B.cid_to_filename_)) + LOG(FATAL) << "Cannot fix the first map if it shares cameras with the second map."; + // C starts as A, as SparseMap lacks an empty constructor sparse_mapping::SparseMap C(mapOut); @@ -674,10 +696,23 @@ void AppendMapFile(std::string const& mapOut, std::string const& mapIn, mapOut, &C); - // Bundle adjust the merged map. + // Bundle-adjust the merged map if (bundle_adjust) { - bool fix_cameras = false; - sparse_mapping::BundleAdjust(fix_cameras, &C); + bool fix_all_cameras = false; + + std::set fixed_cameras; + + // Find in the list of images of the merged map the ones from the first map + if (fix_first_map) { + std::set setA; + for (size_t it = 0; it < A.cid_to_filename_.size(); it++) setA.insert(A.cid_to_filename_[it]); + + for (size_t it = 0; it < C.cid_to_filename_.size(); it++) { + if (setA.find(C.cid_to_filename_[it]) != setA.end()) fixed_cameras.insert(it); + } + } + + sparse_mapping::BundleAdjust(fix_all_cameras, &C, fixed_cameras); } C.Save(mapOut); @@ -918,7 +953,7 @@ void findMatchingTracks(sparse_mapping::SparseMap * A_in, // Match features. Must set num_subsequent_images to not try to // match images in same map, that was done when each map was built. 
- FREEFLYER_GFLAGS_NAMESPACE::SetCommandLineOption("num_subsequent_images", "0"); + google::SetCommandLineOption("num_subsequent_images", "0"); sparse_mapping::MatchFeatures(sparse_mapping::EssentialFile(output_map), sparse_mapping::MatchesFile(output_map), &C); @@ -2027,9 +2062,9 @@ void BuildMapFindEssentialAndInliers(Eigen::Matrix2Xd const& keypoints1, << " (" << round((100.0*num_pts_behind_camera) / observations2[0].cols()) << "%)"; - sparse_mapping::BundleAdjust(observations2, camera_params.GetFocalLength(), &cameras, - &pid_to_xyz, new ceres::CauchyLoss(0.5), options, - &summary); + sparse_mapping::BundleAdjustSmallSet(observations2, camera_params.GetFocalLength(), &cameras, + &pid_to_xyz, new ceres::CauchyLoss(0.5), options, + &summary); if (!summary.IsSolutionUsable()) { LOG(ERROR) << cam_a_idx << " " << cam_b_idx << " | Failed to refine RT with bundle adjustment"; diff --git a/localization/sparse_mapping/test/test_build_db.cc b/localization/sparse_mapping/test/test_build_db.cc index 08b8d69c75..1ea65875f4 100644 --- a/localization/sparse_mapping/test/test_build_db.cc +++ b/localization/sparse_mapping/test/test_build_db.cc @@ -132,3 +132,9 @@ void RunWithDB(std::string const& detector_name) { TEST(build_db_dbow2_orgbrisk, write_descriptors_build_db) { RunWithDB("ORGBRISK"); } + +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/sparse_mapping/test/test_nvm_fileio.cc b/localization/sparse_mapping/test/test_nvm_fileio.cc index f5d8bae18b..3fbdb05919 100644 --- a/localization/sparse_mapping/test/test_nvm_fileio.cc +++ b/localization/sparse_mapping/test/test_nvm_fileio.cc @@ -154,3 +154,9 @@ TEST(nvm_fileio, read_write_loop_features) { EXPECT_EQ(descriptors1.at(1, i), descriptors2.at(1, i)); } } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/sparse_mapping/test/test_registration.cc b/localization/sparse_mapping/test/test_registration.cc index 4db194287e..386e9fb19d 100644 --- a/localization/sparse_mapping/test/test_registration.cc +++ b/localization/sparse_mapping/test/test_registration.cc @@ -146,3 +146,9 @@ TEST_F(SparseMapTest, Registration) { double scale = pow(surf_map->GetWorldTransform().linear().determinant(), 1.0/3.0); EXPECT_GT(scale, 0); // just a sanity check } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/sparse_mapping/test/test_registration.test b/localization/sparse_mapping/test/test_registration.test index d80baa30df..43e36df82b 100644 --- a/localization/sparse_mapping/test/test_registration.test +++ b/localization/sparse_mapping/test/test_registration.test @@ -17,5 +17,5 @@ - + diff --git a/localization/sparse_mapping/test/test_reprojection.cc b/localization/sparse_mapping/test/test_reprojection.cc index 3795ea12b3..eb9873b335 100644 --- a/localization/sparse_mapping/test/test_reprojection.cc +++ b/localization/sparse_mapping/test/test_reprojection.cc @@ -122,3 +122,9 @@ TEST(reprojection, affine_estimation) { EXPECT_NEAR(A.translation()[1], S[1], 1e-3); EXPECT_NEAR(A.translation()[2], S[2], 1e-3); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git 
a/localization/sparse_mapping/test/test_sparse_map.cc b/localization/sparse_mapping/test/test_sparse_map.cc index a91f1e4fb7..f3089b402b 100644 --- a/localization/sparse_mapping/test/test_sparse_map.cc +++ b/localization/sparse_mapping/test/test_sparse_map.cc @@ -282,9 +282,10 @@ TEST_P(SparseMapTest, MapExtractMerge) { bool skip_bundle_adjustment = false; int num_image_overlaps_at_endpoints = 10; double outlier_factor = 3; + bool fix_first_map = false; sparse_mapping::AppendMapFile(submap1_file, submap2_file, num_image_overlaps_at_endpoints, outlier_factor, - !skip_bundle_adjustment); + !skip_bundle_adjustment, fix_first_map); // Read the merged map, and check if we have 3 frames as expected LOG(INFO) << "Reading: " << submap1_file << std::endl; @@ -301,3 +302,9 @@ const Parameters test_parameters[] = { INSTANTIATE_TEST_CASE_P(SparseMapTest, SparseMapTest, ::testing::ValuesIn(test_parameters)); + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/localization/sparse_mapping/test/test_sparse_map.test b/localization/sparse_mapping/test/test_sparse_map.test index dbf49539d8..c7a4757039 100644 --- a/localization/sparse_mapping/test/test_sparse_map.test +++ b/localization/sparse_mapping/test/test_sparse_map.test @@ -17,5 +17,5 @@ - + diff --git a/localization/sparse_mapping/theia_flags.txt b/localization/sparse_mapping/theia_flags.txt new file mode 100644 index 0000000000..10689077e1 --- /dev/null +++ b/localization/sparse_mapping/theia_flags.txt @@ -0,0 +1,171 @@ +############### Input/Output ############### +# Input/output files. +# Set these if a matches file is not present. Images should be a filepath with a +# wildcard e.g., /home/my_username/my_images/*.jpg +# These should be left empty if specified on the command line. +--images= +--output_matches_file= + +# If a matches file has already been created, set the filepath here. This avoids +# having to recompute all features and matches. +# This should be left empty if specified on the command line. +--matches_file= + +# The calibration file indicates possibly known calibration e.g, from EXIF or +# explicit calibration. Theia attempts to extract EXIF focal lengths if calibration +# is not supplied for a given image. +# These should be left empty if specified on the command line. +--calibration_file= +--output_reconstruction= + +############### Multithreading ############### +# Set to the number of threads you want to use. +--num_threads=16 + +############### Feature Extraction ############### +--descriptor=SIFT +--feature_density=NORMAL + +############### Matching Options ############### +# Perform matching out-of-core. If set to true, the matching_working_directory +# must be set to a valid, writable directory (the directory will be created if +# it does not exits) Set to false to perform all-in-memory matching. +--match_out_of_core=true + +# During feature matching, features are saved to disk so that out-of-core +# matching may be performed. This directory specifies which directory those +# features should be saved to. +# This should be left empty if specified on the command line. +--matching_working_directory= + +# During feature matching we utilize an LRU cache for out-of-core matching. The size +# of that cache (in terms of number of images) is controlled by this parameter. The +# higher this number the more memory is required. 
+--matching_max_num_images_in_cache=128
+
+--matching_strategy=CASCADE_HASHING
+--lowes_ratio=0.75
+--min_num_inliers_for_valid_match=30
+# NOTE: This threshold is relative to an image with a width of 1024 pixels. It
+# will be scaled appropriately based on the image resolutions. This allows a
+# single threshold to be used for images with different resolutions.
+--max_sampson_error_for_verified_match=6.0
+--bundle_adjust_two_view_geometry=true
+--keep_only_symmetric_matches=true
+
+# Global descriptor extractor settings. The global image descriptors are used to
+# speed up matching by selecting the K most similar images for each image, and
+# only performing feature matching with these images.
+--num_nearest_neighbors_for_global_descriptor_matching=100
+--num_gmm_clusters_for_fisher_vector=16
+--max_num_features_for_fisher_vector_training=1000000
+
+############### General SfM Options ###############
+--reconstruction_estimator=GLOBAL
+--min_track_length=2
+--max_track_length=50
+--reconstruct_largest_connected_component=false
+
+# Set to true if all views were captured with the same camera. If true, then a
+# single set of camera intrinsic parameters will be used for all views in the
+# reconstruction.
+--shared_calibration=true
+
+# If set to true, only views with known calibration are reconstructed.
+--only_calibrated_views=true
+
+############### Global SfM Options ###############
+--global_position_estimator=LEAST_UNSQUARED_DEVIATION
+--global_rotation_estimator=ROBUST_L1L2
+# The value below should be bigger to filter less
+--post_rotation_filtering_degrees=10
+
+# This refinement is very unstable for rotation-only motions so
+# it is advised that this is set to false for these motions.
+--refine_relative_translations_after_rotation_estimation=false
+
+# If true, only cameras that are well-conditioned for position estimation will
+# be used for global position estimation.
+--extract_maximal_rigid_subgraph=false
+
+# Filter the relative translations with the 1DSfM filter to remove potential
+# outliers in the relative pose measurements.
+--filter_relative_translations_with_1dsfm=true
+
+# Nonlinear position estimation options
+--position_estimation_min_num_tracks_per_view=0
+--position_estimation_robust_loss_width=0.1
+
+# If true, perform a single iteration of bundle adjustment only on the camera
+# positions and 3D points (rotation and camera intrinsics are held
+# constant). This often helps to constrain inaccurate intrinsics.
+--refine_camera_positions_and_points_after_position_estimation=true
+
+# After estimating camera poses, we perform triangulation, then BA,
+# then filter out bad points. This controls how many times we repeat
+# this process.
+--num_retriangulation_iterations=1
+
+############### Incremental SfM Options ###############
+# NOTE: This threshold is relative to an image with a width of 1024 pixels. It
+# will be scaled appropriately based on the image resolutions. This allows a
+# single threshold to be used for images with different resolutions.
+--absolute_pose_reprojection_error_threshold=4
+--partial_bundle_adjustment_num_views=20
+--full_bundle_adjustment_growth_percent=5
+--min_num_absolute_pose_inliers=30
+
+############### Bundle Adjustment Options ###############
+# Set this parameter to a value other than NONE if you want to utilize a robust
+# cost function during bundle adjustment. This can increase robustness to outliers
+# during the optimization.
+--bundle_adjustment_robust_loss_function=HUBER
+
+# Set this value to determine the reprojection error in pixels at which
+# robustness begins (if a robust cost function is being used).
+--bundle_adjustment_robust_loss_width=2.0
+
+# Set this parameter to change which camera intrinsics should be
+# optimized. Valid options are NONE, ALL, FOCAL_LENGTH, PRINCIPAL_POINTS,
+# RADIAL_DISTORTION, ASPECT_RATIO, and SKEW. This parameter can be set using a
+# bitmask (with no spaces) e.g., FOCAL_LENGTH|RADIAL_DISTORTION
+--intrinsics_to_optimize=NONE
+
+# After BA, remove any points that have a reprojection error greater
+# than this.
+--max_reprojection_error_pixels=10.0
+
+############### Track Subsampling Options ###############
+
+# If true, the estimated tracks are subsampled for bundle adjustment to increase
+# the efficiency of BA. Tracks are chosen in a way which attempts to constrain
+# BA as well as possible. This has been shown to provide a significant speedup
+# without reducing the accuracy much (in fact, it increases the accuracy in some
+# cases).
+--subsample_tracks_for_bundle_adjustment=false
+
+# Subsampled tracks are chosen with a probability related to their track
+# length. We limit the effect of long tracks by capping the track length at this
+# value for the purpose of selecting tracks.
+--track_subset_selection_long_track_length_threshold=10
+
+# Tracks are chosen in a way that ensures each view is spatially
+# constrained. Tracks are first binned in an image grid and the top ranked track
+# is chosen for optimization from each grid cell to ensure good spatial coverage
+# of the image. The grid cells are set to be this size.
+--track_selection_image_grid_cell_size_pixels=100
+
+# Tracks are chosen such that each image observes at least this many tracks
+# which are being optimized. This ensures that each image is well-constrained.
+--min_num_optimized_tracks_per_view=100
+
+############### Triangulation Options ###############
+--min_triangulation_angle_degrees=1.0
+--triangulation_reprojection_error_pixels=10.0
+--bundle_adjust_tracks=true
+
+############### Logging Options ###############
+# Logging verbosity.
+--logtostderr
+# Increase this number to get more verbose logging.
+--v=1
diff --git a/localization/sparse_mapping/theia_map.md b/localization/sparse_mapping/theia_map.md
new file mode 100644
index 0000000000..181b16f308
--- /dev/null
+++ b/localization/sparse_mapping/theia_map.md
@@ -0,0 +1,143 @@
+\page theia_map Building a map with Theia
+
+Theia (https://github.com/sweeneychris/TheiaSfM) is a package for
+global structure-from-motion (SfM). It may be faster and require less
+user effort than the method used in Astrobee, which consists of
+creating submaps using incremental SfM on sequences of images that
+overlap with their immediate neighbors, followed by merging the
+submaps. Theia uses "cascade matching" to handle non-sequential image
+acquisitions.
+
+The Theia approach was tested on about 700 images acquired at
+different times and it performed well. It is currently being studied
+as an alternative to Astrobee's approach.
+
+# Install Theia's prerequisites
+
+Fetch and install ``conda`` from:
+
+    https://docs.conda.io/projects/conda/en/latest/user-guide/install/linux.html
+
+Restart your shell if it is suggested to do so.
+
+Create and activate the environment:
+
+    conda create -n theia
+    conda activate theia
+
+Run the following command to install some packages and GCC 11:
+
+    conda install -c conda-forge cmake \
+      gcc_linux-64==11.1.0 gxx_linux-64==11.1.0 \
+      lapack blas eigen==3.3.7 suitesparse rapidjson \
+      glog gflags rocksdb OpenImageIO \
+      ceres-solver=1.14.0=h0948850_10
+
+The Ceres package can be quite particular about the version of Eigen
+it uses, and some versions of Ceres are not built with suitesparse,
+which is a sparse solver needed for best performance, so some care
+is needed when choosing the versions of the packages.
+
+# Fetch and build Theia
+
+    git clone git@github.com:sweeneychris/TheiaSfM.git
+    cd TheiaSfM
+    git checkout d2112f1 # this version was tested
+
+Edit the file:
+
+    applications/CMakeLists.txt
+
+and comment out all the lines regarding OpenGL, GLEW, GLUT, and
+view_reconstruction. That visualizer logic is not easy to compile
+and is not needed.
+
+Run ``which cmake`` to ensure that the version installed earlier in
+the ``theia`` environment is used. Otherwise, run ``conda activate
+theia`` again. Then do:
+
+    mkdir build
+    cd build
+    cmake ..
+    make -j 20
+
+This will create the executables ``build_reconstruction`` and
+``export_to_nvm_file`` in the ``bin`` subdirectory of ``build``. That
+directory needs to be added to the PATH.
+
+# Run the Astrobee wrapper around the Theia tools
+
+The conda environment set up earlier will confuse the lookup of
+dependencies for the Astrobee libraries. Hence the conda lines added
+to one's ``.bashrc`` should be removed, the bash shell restarted, and
+one should ensure that the ``env`` command has no mention of conda.
+
+Set the environment. The following lines should be adjusted as needed,
+especially the robot name:
+
+    export ASTROBEE_SOURCE_PATH=$HOME/projects/astrobee/src
+    source $ASTROBEE_SOURCE_PATH/../devel/setup.bash
+    export ASTROBEE_RESOURCE_DIR=$ASTROBEE_SOURCE_PATH/astrobee/resources
+    export ASTROBEE_CONFIG_DIR=$ASTROBEE_SOURCE_PATH/astrobee/config
+    export ASTROBEE_WORLD=iss
+    export ASTROBEE_ROBOT=bumble
+
+    python $ASTROBEE_SOURCE_PATH/localization/sparse_mapping/tools/build_theia_map.py \
+      --output_map theia.map --work_dir theia_work --image_list image_list.txt
+
+This will prepare everything Theia needs, run it, and export the
+resulting map to Astrobee's expected format. This map will need to be
+registered and visualized as described in other documentation. The
+work directory can be deleted later.
+
+This tool has the following command-line options:
+
+    --theia_flags: The flags to pass to Theia. If not specified, use
+      localization/sparse_mapping/theia_flags.txt in the Astrobee repo.
+    --image_list: The list of distorted (original) nav cam images to
+      use, one per line.
+    --output_map: The resulting output map.
+    --skip_rebuilding: Do not rebuild the map after importing it from
+      Theia.
+    --work_dir: A temporary work directory to be deleted by the user
+      later.
+    --keep_undistorted_images: Do not replace the undistorted images
+      Theia used with the original distorted ones in the sparse map
+      imported from Theia. This is for testing purposes.
+    --help: Show this help message and exit.
+
+# Auxiliary import_map tool
+
+This tool is used to import a map from the NVM format, which Theia
+exports to. These operations are done automatically by the
+``build_theia_map.py`` tool. This documentation is provided for
+reference only.
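Both the wrapper invocation above and the import tool described below expect
plain text lists of images, with one image path per line. As a purely
illustrative sketch (the directory name is a made-up example, not part of the
Astrobee documentation), such a list can be generated with:

    ls $HOME/data/bay6_nav_cam/*.jpg > image_list.txt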
+
+An NVM map exported by Theia (or some other SfM tool) can be saved as
+an Astrobee sparse map with the command:
+
+    astrobee/devel/lib/sparse_mapping/import_map \
+      -undistorted_camera_params "wid_x wid_y focal_len opt_ctr_x opt_ctr_y" \
+      \
+      -input_map map.nvm -output_map map.map
+
+This assumes that the images were acquired with the nav camera of the
+robot given by $ASTROBEE_ROBOT and undistorted with the Astrobee
+program ``undistort_image``. The undistorted camera parameters to use
+should be as printed on the screen (and saved to disk) by
+``undistort_image``.
+
+If it is desired to replace, on importing, the undistorted images with
+the original distorted ones, as is usually expected of a sparse map,
+the above command should be called instead as:
+
+    astrobee/devel/lib/sparse_mapping/import_map \
+      -undistorted_images_list undist_list.txt \
+      -distorted_images_list dist_list.txt \
+      -input_map map.nvm -output_map map.map
+
+Here, the files undist_list.txt and dist_list.txt must have one image
+per line and be in one-to-one correspondence. It is important that
+both undistorted and distorted images be specified, as the former are
+needed to look up camera poses and other data in the .nvm file before
+being replaced with the distorted ones.
diff --git a/localization/sparse_mapping/tools/build_map.cc b/localization/sparse_mapping/tools/build_map.cc
index f13f05acd1..8dadb16eac 100644
--- a/localization/sparse_mapping/tools/build_map.cc
+++ b/localization/sparse_mapping/tools/build_map.cc
@@ -67,7 +67,7 @@ DEFINE_bool(incremental_ba, false,
 DEFINE_bool(loop_closure, false,
             "Take a map where images start repeating, and close the loop.");
 DEFINE_bool(bundle_adjustment, false,
-            "Perform update output_nvm with bundle adjustment.");
+            "Perform bundle adjustment.");
 DEFINE_bool(rebuild, false,
             "Rebuild the map with BRISK features.");
 DEFINE_bool(rebuild_replace_camera, false,
diff --git a/localization/sparse_mapping/tools/build_theia_map.py b/localization/sparse_mapping/tools/build_theia_map.py
new file mode 100644
index 0000000000..d5c1c81bb4
--- /dev/null
+++ b/localization/sparse_mapping/tools/build_theia_map.py
@@ -0,0 +1,392 @@
+#!/usr/bin/env python
+# Copyright (c) 2017, United States Government, as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+#
+# All rights reserved.
+#
+# The Astrobee platform is licensed under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with the
+# License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Undistort the images, create a Theia map, then convert it to a sparse
+map as understood by the Astrobee software, and rebuild it with distorted
+images and SURF features, while reoptimizing the cameras. The
+Theia build_reconstruction tool must be in the PATH.
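A typical invocation (the file names below are only an illustration) is:

    python build_theia_map.py --image_list image_list.txt \
        --output_map theia.map --work_dir theia_work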
+""" + +import argparse +import glob +import os +import re +import shutil +import subprocess +import sys + + +def which(program): + """Find if a program is in the PATH""" + + def is_exe(fpath): + return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + + fpath, fname = os.path.split(program) + if fpath: + if is_exe(program): + return program + else: + for path in os.environ["PATH"].split(os.pathsep): + path = path.strip('"') + exe_file = os.path.join(path, program) + if is_exe(exe_file): + return exe_file + + return None + + +def run_cmd(cmd): + """ + Run a command. + """ + print(" ".join(cmd) + "\n") + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.wait() + if process.returncode != 0: + print("Failed execution of: " + " ".join(cmd)) + sys.exit(1) + + +def sanity_checks(undistort_image_path, import_map_path, build_map_path, args): + + # Check if the environment was set + for var in [ + "ASTROBEE_RESOURCE_DIR", + "ASTROBEE_CONFIG_DIR", + "ASTROBEE_WORLD", + "ASTROBEE_ROBOT", + ]: + if var not in os.environ: + raise Exception("Must set " + var) + + if not os.path.exists(undistort_image_path): + raise Exception("Executable does not exist: " + undistort_image_path) + + if not os.path.exists(import_map_path): + raise Exception("Executable does not exist: " + import_map_path) + + if not os.path.exists(build_map_path): + raise Exception("Executable does not exist: " + build_map_path) + + if args.image_list == "": + raise Exception("The path to the output map was not specified.") + + if args.output_map == "": + raise Exception("The path to the output map was not specified.") + + if args.work_dir == "": + raise Exception("The path to the work directory was not specified.") + + if not os.path.exists(args.theia_flags): + raise Exception("Cannot find the Theia flags file: " + args.theia_flags) + + if which("build_reconstruction") is None: + raise Exception("Cannot find the 'build_reconstruction' program in PATH.") + + if which("export_to_nvm_file") is None: + raise Exception("Cannot find the 'export_to_nvm_file' program in PATH.") + + if args.keep_undistorted_images and (not args.skip_rebuilding): + raise Exception("Cannot rebuild the map if it has undistorted images.") + + +def process_args(args): + """ + Set up the parser and parse the args. + """ + + # Number of arguments before starting to parse them + num_input_args = len(sys.argv) + + # Extract some paths before the args are parsed + src_path = os.path.dirname(args[0]) + exec_path = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(src_path))) + ) + undistort_image_path = os.path.join(exec_path, "devel/lib/camera/undistort_image") + import_map_path = os.path.join(exec_path, "devel/lib/sparse_mapping/import_map") + build_map_path = os.path.join(exec_path, "devel/lib/sparse_mapping/build_map") + + parser = argparse.ArgumentParser(description="") + parser.add_argument( + "--theia_flags", + dest="theia_flags", + default="", + help="The flags to pass to Theia. If not specified, use " + + "localization/sparse_mapping/theia_flags.txt in the astrobee repo.", + ) + parser.add_argument( + "--image_list", + dest="image_list", + default="", + help="The list of distorted (original) nav cam images to use, " + + "one per line.", + ) + parser.add_argument( + "--output_map", dest="output_map", default="", help="The resulting output map." 
+ ) + parser.add_argument( + "--skip_rebuilding", + dest="skip_rebuilding", + action="store_true", + help="Do not rebuild the map after importing it from Theia.", + ) + parser.add_argument( + "--work_dir", + dest="work_dir", + default="", + help="A temporary work directory to be deleted by the user later.", + ) + + parser.add_argument( + "--keep_undistorted_images", + dest="keep_undistorted_images", + action="store_true", + help="Do not replace the undistorted images Theia used with the original " + + "distorted ones in the sparse map imported from Theia. This is for testing " + + "purposes.", + ) + args = parser.parse_args() + + # Print the help message if called with no arguments + if num_input_args <= 1: + parser.print_help() + sys.exit(1) + + if args.theia_flags == "": + args.theia_flags = os.path.dirname(src_path) + "/theia_flags.txt" + + sanity_checks(undistort_image_path, import_map_path, build_map_path, args) + + return (src_path, undistort_image_path, import_map_path, build_map_path, args) + + +def mkdir_p(path): + if path == "": + return # this can happen when path is os.path.dirname("myfile.txt") + try: + os.makedirs(path) + except OSError: + if os.path.isdir(path): + pass + else: + raise Exception( + "Could not make directory " + path + " as a file with this name exists." + ) + + +# Theia likes its images undistorted and in one directory +def gen_undist_image_list(work_dir, dist_image_list): + undist_dir = work_dir + "/undist" + mkdir_p(undist_dir) + + # Wipe any preexisting images to not confuse theia later + count = 0 + for image in glob.glob(undist_dir + "/*.jpg"): + if count == 0: + print("Wiping old images in: " + undist_dir) + count += 1 + os.remove(image) + + undist_image_list = work_dir + "/undistorted_list.txt" + print("Writing: " + undist_image_list) + + count = 10000 # to have the created images show up nicely + undist_images = [] + with open(dist_image_list, "r") as dfh: + dist_image_files = dfh.readlines() + with open(undist_image_list, "w") as ufh: + for image in dist_image_files: + base_image = str(count) + ".jpg" + undist_images.append(base_image) + undist_image = undist_dir + "/" + base_image + ufh.write(undist_image + "\n") + count += 1 + + return undist_image_list, undist_dir, undist_images + + +def gen_theia_calib_file(work_dir, undist_images, undist_intrinsics_file): + + calib_file = work_dir + "/" + "theia_calibration.json" + intrinsics_str = "" + # Parse the intrinsics + with open(undist_intrinsics_file, "r") as fh: + for line in fh: + if len(line) == 0: + continue + if line[0] == "#": + continue + + intrinsics_str = line.rstrip() # will need this later + intrinsics = line.split() + if len(intrinsics) < 5: + raise Exception( + "Expecting 5 intrinsics numbers in: " + undist_intrinsics_file + ) + + width = intrinsics[0] + height = intrinsics[1] + focal_length = intrinsics[2] + opt_ctr_x = intrinsics[3] + opt_ctr_y = intrinsics[4] + + break + + print("Writing: " + calib_file) + with open(calib_file, "w") as fh: + fh.write("{\n") + fh.write('"priors" : [\n') + + num_images = len(undist_images) + for it in range(num_images): + image = undist_images[it] + fh.write('{"CameraIntrinsicsPrior" : {\n') + fh.write('"image_name" : "' + image + '",\n') + fh.write('"width" : ' + str(width) + ",\n") + fh.write('"height" : ' + str(height) + ",\n") + fh.write('"camera_intrinsics_type" : "PINHOLE",\n') + fh.write('"focal_length" : ' + str(focal_length) + ",\n") + fh.write( + '"principal_point" : [' + str(opt_ctr_x) + ", " + str(opt_ctr_y) + "]\n" + ) + + if it < num_images 
- 1: + fh.write("}},\n") + else: + fh.write("}}\n") + + fh.write("]\n") + fh.write("}\n") + + return calib_file, intrinsics_str + + +if __name__ == "__main__": + + ( + src_path, + undistort_image_path, + import_map_path, + build_map_path, + args, + ) = process_args(sys.argv) + + mkdir_p(args.work_dir) + undist_image_list, undist_dir, undist_images = gen_undist_image_list( + args.work_dir, args.image_list + ) + + # Undistort the images and crop to a central region + undist_intrinsics_file = args.work_dir + "/undistorted_intrinsics.txt" + cmd = [ + undistort_image_path, + "-image_list", + args.image_list, + "--undistorted_crop_win", + "1100 776", + "-output_list", + undist_image_list, + "-undistorted_intrinsics", + undist_intrinsics_file, + ] + run_cmd(cmd) + + (calib_file, intrinsics_str) = gen_theia_calib_file( + args.work_dir, undist_images, undist_intrinsics_file + ) + recon_file = args.work_dir + "/run" + matching_dir = args.work_dir + "/matches" + + # Wipe old data + for old_recon in glob.glob(recon_file + "*"): + print("Deleting old reconstruction: " + old_recon) + os.remove(old_recon) + + count = 0 + for old_matches in glob.glob(matching_dir + "/*"): + if count == 0: + print("Wiping old matches in: " + matching_dir) + count += 1 + os.remove(old_matches) + + cmd = [ + "build_reconstruction", + "--flagfile", + args.theia_flags, + "--images", + undist_dir + "/*jpg", + "--calibration_file", + calib_file, + "--output_reconstruction", + recon_file, + "--matching_working_directory", + matching_dir, + "--intrinsics_to_optimize", + "NONE", + ] + run_cmd(cmd) + + nvm_file = recon_file + ".nvm" + cmd = [ + "export_to_nvm_file", + "-input_reconstruction_file", + recon_file + "-0", + "-output_nvm_file", + nvm_file, + ] + run_cmd(cmd) + + cmd = [ + import_map_path, + "-input_map", + nvm_file, + "-output_map", + args.output_map, + "-undistorted_images_list", + undist_image_list, + ] + + if not args.keep_undistorted_images: + cmd += [ + "-distorted_images_list", + args.image_list, + ] + else: + cmd += [ + "-undistorted_camera_params", + intrinsics_str, + ] + + run_cmd(cmd) + + if not args.skip_rebuilding: + cmd = [ + build_map_path, + "-output_map", + args.output_map, + "-rebuild", + "-rebuild_refloat_cameras", + "-rebuild_detector", + "SURF", + "--min_valid_angle", + "1.0", # to avoid features only seen in close-by images + ] + run_cmd(cmd) diff --git a/localization/sparse_mapping/tools/import_map.cc b/localization/sparse_mapping/tools/import_map.cc index 865a91c775..a70b76babf 100644 --- a/localization/sparse_mapping/tools/import_map.cc +++ b/localization/sparse_mapping/tools/import_map.cc @@ -17,6 +17,7 @@ */ #include #include +#include #include #include #include @@ -31,26 +32,109 @@ #include #include -// outputs DEFINE_string(input_map, "", - "text file."); + "Input map created with undistorted images, in a text file."); DEFINE_string(output_map, "output.map", - "Output file containing the matches and control network."); + "Output sparse map as expected by Astrobee software."); DEFINE_bool(bundler_map, false, - "If true, read the bundler or theia format."); + "If true, read the Bundler format. This will be ignored for input .nvm files."); +DEFINE_string(undistorted_camera_params, "", + "Intrinsics of the undistorted camera. Not needed if --distorted_images_list " + "is specified, as then the camera is set via ASTROBEE_ROBOT. 
Specify as: " + "'wid_x wid_y focal_len opt_ctr_x opt_ctr_y'."); +DEFINE_string(undistorted_images_list, "", + "The full list of undistorted images used to create the sparse map. " + "If not specified, the images should be passed on the command line. " + "Note that not all of them may have been used in the map."); +DEFINE_string(distorted_images_list, "", + "Replace the undistorted images specified on input with distorted images " + "from this list (one file per line). The correct value of ASTROBEE_ROBOT " + "must be set."); + +namespace { + // Keep these utilities in a local namespace + + void readLines(std::string const& list, std::vector & lines) { + lines.clear(); + std::string line; + std::ifstream ifs(list.c_str()); + while (ifs >> line) + lines.push_back(line); + } + + // Replace the undistorted images which Theia used with distorted images. + // The interest points need not be modified as those are always undistorted + // when saved, even if the map has distorted images. + void replaceWithDistortedImages(std::vector const& undist_images, + std::string const& distorted_images_list, + sparse_mapping::SparseMap & map) { + // Must overwrite the camera params for the distorted images + std::cout << "Using distorted camera parameters for nav_cam for robot: " + << getenv("ASTROBEE_ROBOT") << ".\n"; + config_reader::ConfigReader config; + config.AddFile("cameras.config"); + if (!config.ReadFiles()) LOG(FATAL) << "Failed to read config files.\n"; + + camera::CameraParameters cam_params(&config, "nav_cam"); + map.SetCameraParameters(cam_params); + + // Replace the undistorted images with distorted ones + std::vector distorted_images; + readLines(distorted_images_list, distorted_images); + + if (undist_images.size() != distorted_images.size()) + LOG(FATAL) << "The number of distorted images in the list and undistorted ones " + << "passed on the command line must be the same.\n"; + + std::map undist_to_dist; + for (size_t it = 0; it < undist_images.size(); it++) undist_to_dist[undist_images[it]] = distorted_images[it]; + + // Replace the images in the map. Keep in mind that the map + // may have just a subset of the input images. 
+ for (size_t it = 0; it < map.cid_to_filename_.size(); it++) { + auto map_it = undist_to_dist.find(map.cid_to_filename_[it]); + if (map_it == undist_to_dist.end()) + LOG(FATAL) << "This map image was not specified on input: " + << map.cid_to_filename_[it] << ".\n"; + map.cid_to_filename_[it] = map_it->second; + } + } +} // namespace int main(int argc, char** argv) { ff_common::InitFreeFlyerApplication(&argc, &argv); GOOGLE_PROTOBUF_VERIFY_VERSION; - std::vector files; - for (int i = 1; i < argc; i++) { - std::cout << "adding " << argv[i] << std::endl; - files.push_back(argv[i]); + // Read the images from the .list or the command line + std::vector undist_images; + if (FLAGS_undistorted_images_list != "") { + readLines(FLAGS_undistorted_images_list, undist_images); + } else { + for (int i = 1; i < argc; i++) + undist_images.push_back(argv[i]); + } + + std::cout << "Reading map: " << FLAGS_input_map << std::endl; + sparse_mapping::SparseMap map(FLAGS_bundler_map, FLAGS_input_map, undist_images); + + if (FLAGS_distorted_images_list == "") { + // Must overwrite the camera parameters with what is passed on input + std::vector vals; + ff_common::parseStr(FLAGS_undistorted_camera_params, vals); + if (vals.size() < 5) + LOG(FATAL) << "Could not parse --undistorted_camera_params.\n"; + double widx = vals[0], widy = vals[1], f = vals[2], cx = vals[3], cy = vals[4]; + LOG(INFO) << "Using undistorted camera parameters: " + << widx << ' ' << widy << ' ' << f << ' ' << cx << ' ' << cy; + camera::CameraParameters cam_params = camera::CameraParameters(Eigen::Vector2i(widx, widy), + Eigen::Vector2d::Constant(f), + Eigen::Vector2d(cx, cy)); + map.SetCameraParameters(cam_params); + } else { + replaceWithDistortedImages(undist_images, FLAGS_distorted_images_list, map); } - std::cout << "input and output " << FLAGS_input_map << ' ' << FLAGS_output_map << std::endl; - sparse_mapping::SparseMap map(FLAGS_bundler_map, FLAGS_input_map, files); + std::cout << "Writing map: " << FLAGS_output_map << std::endl; map.Save(FLAGS_output_map); google::protobuf::ShutdownProtobufLibrary(); diff --git a/localization/sparse_mapping/tools/merge_maps.cc b/localization/sparse_mapping/tools/merge_maps.cc index ac8768bc4a..2d445029b5 100644 --- a/localization/sparse_mapping/tools/merge_maps.cc +++ b/localization/sparse_mapping/tools/merge_maps.cc @@ -59,6 +59,10 @@ DEFINE_double(outlier_factor, 3.0, DEFINE_bool(skip_bundle_adjustment, false, "If true, do not bundle adjust the merged map."); +DEFINE_bool(fix_first_map, false, + "If true and bundle adjustment is not skipped, keep the first map fixed " + "when bundle adjustment takes place."); + int main(int argc, char** argv) { ff_common::InitFreeFlyerApplication(&argc, &argv); GOOGLE_PROTOBUF_VERIFY_VERSION; @@ -80,6 +84,9 @@ int main(int argc, char** argv) { if (FLAGS_num_image_overlaps_at_endpoints <= 0) LOG(FATAL) << "Must have num_image_overlaps_at_endpoints > 0."; + if (FLAGS_fix_first_map && argc != 3) + LOG(FATAL) << "Keeping the first map fixed works only when there are two input maps."; + // The merged map starts as the first map. 
{ sparse_mapping::SparseMap A(argv[1]); @@ -92,7 +99,8 @@ int main(int argc, char** argv) { sparse_mapping::AppendMapFile(FLAGS_output_map, argv[i], FLAGS_num_image_overlaps_at_endpoints, FLAGS_outlier_factor, - !FLAGS_skip_bundle_adjustment); + !FLAGS_skip_bundle_adjustment, + FLAGS_fix_first_map); } google::protobuf::ShutdownProtobufLibrary(); diff --git a/localization/sparse_mapping/total_station.md b/localization/sparse_mapping/total_station.md index e58f737998..9360a2d617 100644 --- a/localization/sparse_mapping/total_station.md +++ b/localization/sparse_mapping/total_station.md @@ -54,29 +54,29 @@ granite lab. Once you are done you need to export your project to the GSI format so you can read it back in. * Keep hitting ESC until you are back to the home screen. - * Press "4" to convert - * Press "1" to export + * Press "4" to convert. + * Press "1" to export. * Select Job, change to what ever is your job. * Select Format File, change to "gsi16_cartesian.frt". * Select File Name, write value for what you want the output file to be. * Press "F1" to save. - * Press "F4" to close out. + * Press "F4" to exit. Turn the nob on the side of the station where it says "CF Card" and pull the card out. Hook it up to a USB reader and get the measurement file. Here is a sample data file: -*110001+0000000000ORIGIN 81...0+0000000000000000 82...0+0000000000000000 83...0+0000000000000000 -*110002+000000000000YREF 81...0+0000000000000000 82...0+0000000000006417 83...0-0000000000000022 -*110003+0000000000000003 81...0+0000000000000004 82...0+0000000000006535 83...0-0000000000000786 + *110001+0000000000ORIGIN 81...0+0000000000000000 82...0+0000000000000000 83...0+0000000000000000 + *110002+000000000000YREF 81...0+0000000000000000 82...0+0000000000006417 83...0-0000000000000022 + *110003+0000000000000003 81...0+0000000000000004 82...0+0000000000006535 83...0-0000000000000786 What this means is that the points are in Cartesian coordinates with x,y,z measured in millimeters (note that some values are negative). After removing the redundant text and converting to meters, this looks like: -Point 1 (Origin): 0.0 0.0 0.0 -Point 2 (YREF) : 0.0 6.417 -0.022 -Point 3 : 0.004 6.535 -0.786 + Point 1 (Origin): 0.0 0.0 0.0 + Point 2 (YREF) : 0.0 6.417 -0.022 + Point 3 : 0.004 6.535 -0.786 -The program +The program: - localization/sparse_mapping/tools/parse_total_station.py + localization/sparse_mapping/tools/parse_total_station.py can be used to convert from the Total Station format to an xyz format that can be used for sparse map registration. diff --git a/localization/sparse_mapping/using_faro.md b/localization/sparse_mapping/using_faro.md index eec2c98527..989933d6fd 100644 --- a/localization/sparse_mapping/using_faro.md +++ b/localization/sparse_mapping/using_faro.md @@ -1,4 +1,4 @@ -\page using_faro Using Faro +\page faro Faro IRG has an instrument, called FARO, that can create very high-resolution 3D scans with overlaid texture. Such a scan can be diff --git a/localization/vive_localization/CMakeLists.txt b/localization/vive_localization/CMakeLists.txt index b03d1cd328..eb85937dd4 100644 --- a/localization/vive_localization/CMakeLists.txt +++ b/localization/vive_localization/CMakeLists.txt @@ -15,13 +15,30 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(vive_localization) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + rosbag + nodelet + geometry_msgs + ff_hw_msgs + ff_util + msg_conversions + config_reader +) + # Find Eigen3 find_package(Eigen3 REQUIRED) # Find OpenCV3 -find_package(OpenCV 3 REQUIRED) +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV 3.3.1 REQUIRED) # Non-linear optimizer find_package(Ceres REQUIRED) @@ -34,25 +51,68 @@ catkin_package( roscpp rosbag nodelet - pluginlib - sensor_msgs - geometry_msgs - nav_msgs ff_hw_msgs - tf2_ros) + ff_util + msg_conversions + config_reader +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${OpenCV_INCLUDE_DIRS} + ${CERES_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(vive_localization + src/vive.cc + src/vive_solver.cc + src/tool/vive_offline.cc +) +add_dependencies(vive_localization ${catkin_EXPORTED_TARGETS}) +target_link_libraries(vive_localization json-c usb-1.0 z ${catkin_LIBRARIES}) + +## Declare a C++ executable: vive_offline +add_executable(vive_offline src/tool/vive_offline.cc) +add_dependencies(vive_offline ${catkin_EXPORTED_TARGETS}) +target_link_libraries(vive_offline + vive_localization gflags ${OpenCV_LIBRARIES} ${CERES_LIBRARIES} ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) -create_library(TARGET vive_localization - DIR src - LIBS ${catkin_LIBRARIES} ${OpenCV_LIBS} ${CERES_LIBRARIES} ff_common config_reader msg_conversions ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${CERES_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_common geometry_msgs sensor_msgs ff_hw_msgs) +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) -create_tool_targets( - DIR src/tool - LIBS ${catkin_LIBRARIES} ${EIGEN3_LIBRARIES} vive_localization ff_common config_reader - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} -DEPS ff_common geometry_msgs sensor_msgs ff_hw_msgs) +# Install C++ executables +install(TARGETS vive_offline DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/vive_offline share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) install(DIRECTORY config/ DESTINATION config) diff --git a/localization/vive_localization/package.xml b/localization/vive_localization/package.xml index da1a6553e4..76edd7d32c 100644 --- a/localization/vive_localization/package.xml +++ b/localization/vive_localization/package.xml @@ -18,21 +18,23 @@ roscpp rosbag nodelet - pluginlib sensor_msgs geometry_msgs nav_msgs ff_hw_msgs - tf2_ros + ff_util + msg_conversions + config_reader roscpp rosbag nodelet - pluginlib sensor_msgs geometry_msgs nav_msgs ff_hw_msgs - tf2_ros + ff_util + msg_conversions + config_reader diff --git a/management/CMakeLists.txt b/management/CMakeLists.txt 
deleted file mode 100644 index 3ee546050c..0000000000 --- a/management/CMakeLists.txt +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -add_subdirectory(access_control) -add_subdirectory(cpu_mem_monitor) -add_subdirectory(disk_monitor) -add_subdirectory(image_sampler) -add_subdirectory(sys_monitor) -add_subdirectory(executive) -add_subdirectory(data_bagger) -add_subdirectory(log_monitor) diff --git a/management/access_control/CMakeLists.txt b/management/access_control/CMakeLists.txt index f1a9214641..d33004fc50 100644 --- a/management/access_control/CMakeLists.txt +++ b/management/access_control/CMakeLists.txt @@ -15,18 +15,42 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(access_control) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + nodelet + config_reader + ff_util +) + catkin_package( LIBRARIES access_control - DEPENDS roscpp ff_msgs nodelet + DEPENDS roscpp ff_msgs nodelet config_reader ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET access_control - LIBS ${catkin_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs config_reader +# Declare C++ libraries +add_library(access_control + src/access_control.cc ) +add_dependencies(access_control ${catkin_EXPORTED_TARGETS}) +target_link_libraries(access_control ${catkin_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -36,8 +60,27 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_access_control - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/management/access_control/package.xml b/management/access_control/package.xml index 197f319223..f734b403a2 100644 --- a/management/access_control/package.xml +++ b/management/access_control/package.xml @@ -18,9 +18,13 @@ ff_msgs roscpp nodelet + config_reader + ff_util ff_msgs roscpp nodelet + config_reader + ff_util diff --git 
a/management/cpu_mem_monitor/CMakeLists.txt b/management/cpu_mem_monitor/CMakeLists.txt index b27893282c..3e4933c415 100644 --- a/management/cpu_mem_monitor/CMakeLists.txt +++ b/management/cpu_mem_monitor/CMakeLists.txt @@ -15,18 +15,44 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(cpu_mem_monitor) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_hw_msgs + ff_msgs + config_reader + ff_util +) + catkin_package( LIBRARIES cpu_mem_monitor - CATKIN_DEPENDS roscpp roslib nodelet ff_hw_msgs ff_msgs std_msgs + CATKIN_DEPENDS roscpp nodelet ff_hw_msgs ff_msgs config_reader ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET cpu_mem_monitor - LIBS ${catkin_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDES} - DEPS ff_msgs ff_hw_msgs +# Declare C++ libraries +add_library(cpu_mem_monitor + src/cpu.cc + src/cpu_mem_monitor.cc ) +add_dependencies(cpu_mem_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(cpu_mem_monitor ${catkin_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -37,7 +63,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_llp_cpu_mem_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) # mlp CPU monitor initialization fault tester add_rostest_gtest(test_init_mlp_cpu_mem_monitor @@ -46,9 +72,29 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_mlp_cpu_mem_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/management/cpu_mem_monitor/package.xml b/management/cpu_mem_monitor/package.xml index ed677a5f8d..8549acff8c 100644 --- a/management/cpu_mem_monitor/package.xml +++ b/management/cpu_mem_monitor/package.xml @@ -16,18 +16,17 @@ catkin roscpp - roslib nodelet ff_msgs ff_hw_msgs - std_msgs + config_reader + ff_util roscpp - roslib nodelet ff_msgs ff_hw_msgs - std_msgs - message_runtime + config_reader + ff_util diff --git a/management/cpu_mem_monitor/test/test_init_cpu_monitor.test b/management/cpu_mem_monitor/test/test_init_cpu_monitor.test deleted file mode 100644 index 2dc120ff89..0000000000 --- a/management/cpu_mem_monitor/test/test_init_cpu_monitor.test +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/management/data_bagger/CMakeLists.txt b/management/data_bagger/CMakeLists.txt index 1aa20100ca..c51d9bf161 100644 --- a/management/data_bagger/CMakeLists.txt +++ b/management/data_bagger/CMakeLists.txt @@ -15,17 +15,80 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(data_bagger) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + nodelet + ff_util + config_reader + rosbag +) + catkin_package( - LIBRARIES data_bagger astrobee_recorder - DEPENDS roscpp ff_msgs nodelet + LIBRARIES data_bagger astrobee_recorder + CATKIN_DEPENDS + roscpp + ff_msgs + nodelet + ff_util + config_reader + rosbag +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(astrobee_recorder + src/astrobee_recorder.cc ) +add_dependencies(astrobee_recorder ${catkin_EXPORTED_TARGETS}) +target_link_libraries(astrobee_recorder ${catkin_LIBRARIES}) -create_library(TARGET data_bagger - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs config_reader +# Declare C++ libraries +add_library(data_bagger + src/data_bagger.cc ) +add_dependencies(data_bagger ${catkin_EXPORTED_TARGETS}) +target_link_libraries(data_bagger astrobee_recorder ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS astrobee_recorder + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) -install_launch_files() diff --git a/management/data_bagger/package.xml b/management/data_bagger/package.xml index 027837d05d..aa10969571 100644 --- a/management/data_bagger/package.xml +++ b/management/data_bagger/package.xml @@ -19,9 +19,15 @@ roscpp ff_msgs nodelet + ff_util + config_reader + rosbag roscpp ff_msgs nodelet + ff_util + config_reader + rosbag diff --git a/management/disk_monitor/CMakeLists.txt b/management/disk_monitor/CMakeLists.txt index 077795d647..9f82463ff1 100644 --- a/management/disk_monitor/CMakeLists.txt +++ b/management/disk_monitor/CMakeLists.txt @@ -15,18 +15,47 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(disk_monitor) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + nodelet + config_reader + ff_util +) + catkin_package( LIBRARIES disk_monitor - DEPENDS roscpp ff_msgs nodelet + DEPENDS roscpp ff_msgs nodelet config_reader ff_util +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET disk_monitor - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs config_reader +# Declare C++ libraries +add_library(disk_monitor + src/disk_monitor_node.cc ) +add_dependencies(disk_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(disk_monitor ${catkin_LIBRARIES}) +# create_library(TARGET disk_monitor +# LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} config_reader ff_nodelet +# INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} +# DEPS ff_msgs config_reader +# ) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -37,7 +66,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_llp_disk_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) # MLP disk monitor initialization fault tester add_rostest_gtest(test_init_mlp_disk_monitor @@ -46,12 +75,28 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_mlp_disk_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -create_test_targets(DIR test - LIBS disk_monitor) +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/management/disk_monitor/package.xml b/management/disk_monitor/package.xml index bf3b29c0cb..b067e4689d 100644 --- a/management/disk_monitor/package.xml +++ b/management/disk_monitor/package.xml @@ -18,9 +18,13 @@ roscpp ff_msgs nodelet + config_reader + ff_util ff_msgs roscpp nodelet + config_reader + ff_util diff --git a/management/executive/CMakeLists.txt b/management/executive/CMakeLists.txt index 4bf49ccd1e..0901a5d81b 100644 --- a/management/executive/CMakeLists.txt +++ b/management/executive/CMakeLists.txt @@ -15,32 +15,114 @@ # License for the specific language governing permissions and limitations # under the License. 
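# --- Editorial aside (illustrative only, not part of the patch) --------------
# Every package converted in this changeset follows the same recipe: the old
# create_library()/create_tool_targets()/install_launch_files() macros are
# replaced with plain catkin + CMake calls. A minimal sketch of that recipe is
# given below; "example_pkg" and src/example.cc are hypothetical names chosen
# for illustration, not files in this repository.
cmake_minimum_required(VERSION 3.0)
project(example_pkg)

add_compile_options(-std=c++14)

find_package(catkin2 REQUIRED COMPONENTS roscpp nodelet config_reader ff_util)

catkin_package(
  LIBRARIES example_pkg
  CATKIN_DEPENDS roscpp nodelet config_reader ff_util
)

include_directories(include ${catkin_INCLUDE_DIRS})

add_library(example_pkg src/example.cc)
add_dependencies(example_pkg ${catkin_EXPORTED_TARGETS})
target_link_libraries(example_pkg ${catkin_LIBRARIES})

# The install rules then mirror the stanzas in the surrounding hunks: the
# library goes to the catkin lib/bin destinations, and nodelet_plugins.xml
# plus launch/ go to the package share directory.
# ------------------------------------------------------------------------------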
+cmake_minimum_required(VERSION 3.0) project(executive) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + ff_hw_msgs + nodelet + config_reader + ff_util + jsonloader +) + +# System dependencies are found with CMake's conventions +find_package(cmake_modules REQUIRED) +find_package(Boost REQUIRED COMPONENTS system iostreams) + +# Find jsoncpp +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(JsonCpp REQUIRED) + catkin_package( LIBRARIES executive sequencer CATKIN_DEPENDS roscpp - ff_flight ff_msgs - ff_util + ff_hw_msgs nodelet + ff_util + jsonloader ) -create_library(TARGET sequencer - DIR src/utils/sequencer/ - LIBS ${catkin_LIBRARIES} ${Boost_IOSTREAMS_LIBRARY} jsonloader msg_conversions - INC ${catkin_INCLUDE_DIRS} msg_conversions - DEPS ff_msgs jsonloader msg_conversions +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${JSONCPP_INCLUDE_DIR} ) -create_library(TARGET executive - LIBS ${catkin_LIBRARIES} ff_flight ff_nodelet config_reader sequencer config_client - INC ${catkin_INCLUDE_DIRS} - DEPS ff_flight ff_msgs ff_util config_reader sequencer +# Declare C++ libraries +add_library(sequencer + src/utils/sequencer/command_conversion.cc + src/utils/sequencer/plan_io.cc + src/utils/sequencer/sequencer.cc ) +add_dependencies(sequencer ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sequencer ${Boost_IOSTREAMS_LIBRARY} ${catkin_LIBRARIES}) + +# Declare C++ libraries +add_library(executive + src/executive.cc + src/op_state.cc + src/op_state_auto_return.cc + src/op_state_fault.cc + src/op_state_plan_exec.cc + src/op_state_ready.cc + src/op_state_repo.cc + src/op_state_teleop.cc +) +add_dependencies(executive ${catkin_EXPORTED_TARGETS}) +target_link_libraries(executive sequencer ${Boost_SYSTEM_LIBRARY} ${catkin_LIBRARIES}) + + +## Declare a C++ executable: data_to_disk_pub +add_executable(data_to_disk_pub tools/data_to_disk_pub.cc) +add_dependencies(data_to_disk_pub ${catkin_EXPORTED_TARGETS}) +target_link_libraries(data_to_disk_pub + executive gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: ekf_switch_mux +add_executable(ekf_switch_mux tools/ekf_switch_mux.cc) +add_dependencies(ekf_switch_mux ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ekf_switch_mux + executive gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: plan_pub +add_executable(plan_pub tools/plan_pub.cc) +add_dependencies(plan_pub ${catkin_EXPORTED_TARGETS}) +target_link_libraries(plan_pub + executive gflags ${catkin_LIBRARIES} ${Boost_LIBRARIES}) + +## Declare a C++ executable: simple_move +add_executable(simple_move tools/simple_move.cc) +add_dependencies(simple_move ${catkin_EXPORTED_TARGETS}) +target_link_libraries(simple_move + executive gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: teleop_tool +add_executable(teleop_tool tools/teleop_tool.cc) +add_dependencies(teleop_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(teleop_tool + executive gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: zones_pub +add_executable(zones_pub tools/zones_pub.cc) +add_dependencies(zones_pub ${catkin_EXPORTED_TARGETS}) +target_link_libraries(zones_pub + executive gflags ${catkin_LIBRARIES} ${Boost_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -51,15 +133,51 @@ if(CATKIN_ENABLE_TESTING) ) 
target_link_libraries(test_init_executive - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() +############# +## Install ## +############# -create_tool_targets(DIR tools - LIBS executive ${catkin_LIBRARIES} ${Boost_IOSTREAMS_LIBRARY} - INC ${catkin_INCLUDE_DIRS} - DEPS executive +# Mark libraries for installation +install(TARGETS sequencer + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS data_to_disk_pub DESTINATION bin) +install(TARGETS ekf_switch_mux DESTINATION bin) +install(TARGETS plan_pub DESTINATION bin) +install(TARGETS simple_move DESTINATION bin) +install(TARGETS teleop_tool DESTINATION bin) +install(TARGETS zones_pub DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/data_to_disk_pub share/${PROJECT_NAME} + COMMAND ln -s ../../bin/ekf_switch_mux share/${PROJECT_NAME} + COMMAND ln -s ../../bin/plan_pub share/${PROJECT_NAME} + COMMAND ln -s ../../bin/simple_move share/${PROJECT_NAME} + COMMAND ln -s ../../bin/teleop_tool share/${PROJECT_NAME} + COMMAND ln -s ../../bin/zones_pub share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/management/executive/package.xml b/management/executive/package.xml index d31fc96960..4428a2275c 100644 --- a/management/executive/package.xml +++ b/management/executive/package.xml @@ -15,16 +15,20 @@ Astrobee Flight Software catkin - ff_flight - ff_msgs - ff_util roscpp + ff_msgs + ff_hw_msgs nodelet - ff_flight - ff_msgs - ff_util + config_reader + ff_util + jsonloader roscpp + ff_msgs + ff_hw_msgs nodelet + config_reader + ff_util + jsonloader diff --git a/management/executive/tools/data_to_disk_pub.cc b/management/executive/tools/data_to_disk_pub.cc index 74dbfaecde..d1c61e1a3f 100644 --- a/management/executive/tools/data_to_disk_pub.cc +++ b/management/executive/tools/data_to_disk_pub.cc @@ -44,8 +44,6 @@ namespace fs = boost::filesystem; namespace io = boost::iostreams; -namespace flags = FREEFLYER_GFLAGS_NAMESPACE; - DEFINE_string(compression, "none", "Type of compression [none, deflate, gzip]"); @@ -86,11 +84,11 @@ int main(int argc, char** argv) { ff_common::InitFreeFlyerApplication(&argc, &argv); ros::init(argc, argv, "data_to_disk_publisher"); - if (!flags::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { + if (!google::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { std::cerr << "Failed to register compression flag validator." 
<< std::endl; return -1; } - flags::ParseCommandLineFlags(&argc, &argv, true); + google::ParseCommandLineFlags(&argc, &argv, true); if (argc <= 1) { std::cerr << "error: must provide at least one file to send as zones" diff --git a/management/executive/tools/plan_pub.cc b/management/executive/tools/plan_pub.cc index 3836a223c8..23acd5c236 100644 --- a/management/executive/tools/plan_pub.cc +++ b/management/executive/tools/plan_pub.cc @@ -45,7 +45,6 @@ namespace fs = boost::filesystem; namespace io = boost::iostreams; -namespace flags = FREEFLYER_GFLAGS_NAMESPACE; bool ValidateCompression(const char* name, std::string const &value) { if (value == "none" || value == "gzip" || value == "deflate") @@ -109,11 +108,11 @@ int main(int argc, char** argv) { ros::Time::waitForValid(); - if (!flags::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { + if (!google::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { std::cerr << "Failed to register compression flag validator." << std::endl; return -1; } - flags::ParseCommandLineFlags(&argc, &argv, true); + google::ParseCommandLineFlags(&argc, &argv, true); if (argc <= 1) { std::cerr << "error: must provide at least one file to send as a plan" diff --git a/management/executive/tools/zones_pub.cc b/management/executive/tools/zones_pub.cc index 3c72cddc99..3c990e1348 100644 --- a/management/executive/tools/zones_pub.cc +++ b/management/executive/tools/zones_pub.cc @@ -44,8 +44,6 @@ namespace fs = boost::filesystem; namespace io = boost::iostreams; -namespace flags = FREEFLYER_GFLAGS_NAMESPACE; - DEFINE_string(compression, "none", "Type of compression [none, deflate, gzip]"); @@ -86,11 +84,11 @@ int main(int argc, char** argv) { ff_common::InitFreeFlyerApplication(&argc, &argv); ros::init(argc, argv, "zone_publisher"); - if (!flags::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { + if (!google::RegisterFlagValidator(&FLAGS_compression, &ValidateCompression)) { std::cerr << "Failed to register compression flag validator." << std::endl; return -1; } - flags::ParseCommandLineFlags(&argc, &argv, true); + google::ParseCommandLineFlags(&argc, &argv, true); if (argc <= 1) { std::cerr << "error: must provide at least one file to send as zones" diff --git a/management/image_sampler/CMakeLists.txt b/management/image_sampler/CMakeLists.txt index 347a329123..a2088b55fd 100644 --- a/management/image_sampler/CMakeLists.txt +++ b/management/image_sampler/CMakeLists.txt @@ -15,17 +15,72 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(image_sampler) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_msgs + nodelet + config_reader + ff_util + image_transport +) + +# Find OpenCV3 +find_package(OpenCV 3.3.1 REQUIRED) + catkin_package( LIBRARIES image_sampler - DEPENDS roscpp ff_msgs nodelet + DEPENDS roscpp ff_msgs nodelet config_reader ff_util image_transport +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} ) -create_library(TARGET image_sampler - LIBS ${catkin_LIBRARIES} config_reader ff_nodelet ${OpenCV_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS} - DEPS ff_msgs config_reader opencv +# create_library(TARGET image_sampler +# LIBS ${catkin_LIBRARIES} config_reader ff_nodelet ${OpenCV_LIBRARIES} +# INC ${catkin_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS} +# DEPS ff_msgs config_reader opencv +# ) + +# Declare C++ libraries +add_library(image_sampler + src/image_sampler.cc +) +add_dependencies(image_sampler ${catkin_EXPORTED_TARGETS}) +target_link_libraries(image_sampler ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/management/image_sampler/package.xml b/management/image_sampler/package.xml index f9f3ba5536..a13b338397 100644 --- a/management/image_sampler/package.xml +++ b/management/image_sampler/package.xml @@ -18,9 +18,15 @@ ff_msgs roscpp nodelet + config_reader + ff_util + image_transport ff_msgs roscpp nodelet + config_reader + ff_util + image_transport diff --git a/management/log_monitor/CMakeLists.txt b/management/log_monitor/CMakeLists.txt index bccaa47114..fdfd2ed393 100644 --- a/management/log_monitor/CMakeLists.txt +++ b/management/log_monitor/CMakeLists.txt @@ -15,29 +15,48 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(log_monitor) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp +) + catkin_package( LIBRARIES log_monitor CATKIN_DEPENDS roscpp - roslib - nodelet - ff_hw_msgs - ff_msgs - std_msgs ) -create_tool_targets( - DIR - tools - LIBS - ${catkin_LIBRARIES} - ff_common - ff_nodelet - INC - ${catkin_INCLUDE_DIRS} - DEPS - ff_msgs -) \ No newline at end of file +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + ${catkin_INCLUDE_DIRS} +) + +## Declare a C++ executable: log_tool +add_executable(log_tool tools/log_tool.cc) +add_dependencies(log_tool ${catkin_EXPORTED_TARGETS}) +target_link_libraries(log_tool + gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Install C++ executables +install(TARGETS log_tool DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/log_tool share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") \ No newline at end of file diff --git a/management/log_monitor/package.xml b/management/log_monitor/package.xml index be1c8747af..9cce74e472 100644 --- a/management/log_monitor/package.xml +++ b/management/log_monitor/package.xml @@ -15,17 +15,6 @@ catkin roscpp - roslib - nodelet - ff_msgs - ff_hw_msgs - std_msgs roscpp - roslib - nodelet - ff_msgs - ff_hw_msgs - std_msgs - message_runtime diff --git a/management/sys_monitor/CMakeLists.txt b/management/sys_monitor/CMakeLists.txt index 92183c49d8..bc7ec205ef 100644 --- a/management/sys_monitor/CMakeLists.txt +++ b/management/sys_monitor/CMakeLists.txt @@ -15,18 +15,48 @@ # License for the specific language governing permissions and limitations # under the License.
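# --- Editorial aside (illustrative only, not part of the patch) --------------
# The command-line tools in these packages are installed with a recurring
# stanza: the binary goes to bin/ and a relative symlink is dropped into the
# package's share/ directory, apparently so the tool stays discoverable under
# the package path (e.g. for rosrun). A condensed sketch with a hypothetical
# "example_tool" target:
add_executable(example_tool tools/example_tool.cc)
add_dependencies(example_tool ${catkin_EXPORTED_TARGETS})
target_link_libraries(example_tool gflags glog ${catkin_LIBRARIES})

install(TARGETS example_tool DESTINATION bin)
install(CODE "execute_process(
  COMMAND ln -s ../../bin/example_tool share/${PROJECT_NAME}
  WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}
  OUTPUT_QUIET
  ERROR_QUIET
  )")
# ------------------------------------------------------------------------------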
+cmake_minimum_required(VERSION 3.0) project(sys_monitor) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ff_util + ff_msgs + nodelet + config_reader +) + catkin_package( LIBRARIES sys_monitor - DEPENDS roscpp ff_msgs nodelet + DEPENDS roscpp ff_util ff_msgs nodelet config_reader +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET sys_monitor - LIBS ${catkin_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs config_reader +# Declare C++ libraries +add_library(sys_monitor + src/sys_monitor.cc ) +add_dependencies(sys_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sys_monitor ${catkin_LIBRARIES}) + +## Declare a C++ executable: fault_tester +add_executable(fault_tester tools/fault_tester.cc) +add_dependencies(fault_tester ${catkin_EXPORTED_TARGETS}) +target_link_libraries(fault_tester + sys_monitor gflags glog ${catkin_LIBRARIES}) if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -37,7 +67,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_sys_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) # System monitor tester add_rostest_gtest(test_sys_monitor @@ -46,13 +76,36 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_sys_monitor - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -create_tool_targets(DIR tools - LIBS sys_monitor - DEPS sys_monitor +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS fault_tester DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/fault_tester share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/management/sys_monitor/package.xml b/management/sys_monitor/package.xml index 32d48e5013..0a98ba1a9c 100644 --- a/management/sys_monitor/package.xml +++ b/management/sys_monitor/package.xml @@ -15,12 +15,16 @@ Astrobee Flight Software catkin + ff_util ff_msgs roscpp nodelet + config_reader + ff_util ff_msgs roscpp nodelet + config_reader diff --git a/mobility/CMakeLists.txt b/mobility/CMakeLists.txt deleted file mode 100644 index c237dfe2aa..0000000000 --- a/mobility/CMakeLists.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Basic system -add_subdirectory(framestore) -add_subdirectory(choreographer) -add_subdirectory(mapper) - -# Planners -add_subdirectory(planner_trapezoidal) -if (ENABLE_QP) - add_subdirectory(planner_qp) -endif() - -# High-level tools -add_subdirectory(mobility) diff --git a/mobility/choreographer/CMakeLists.txt b/mobility/choreographer/CMakeLists.txt index 412ddbcab2..1d3229056e 100644 --- a/mobility/choreographer/CMakeLists.txt +++ b/mobility/choreographer/CMakeLists.txt @@ -15,8 +15,24 @@ # License for the specific 
language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(choreographer) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_util + mapper + ff_msgs + ff_hw_msgs + tf2_ros + jsonloader +) + catkin_package( INCLUDE_DIRS include @@ -30,17 +46,27 @@ catkin_package( ff_util tf2 tf2_ros - nav_msgs - geometry_msgs - visualization_msgs + jsonloader ) -create_library(TARGET choreographer - LIBS ${catkin_LIBRARIES} ff_common ff_nodelet ff_flight config_server mapper - INC ${catkin_INCLUDE_DIRS} - DEPS ff_common nav_msgs geometry_msgs ff_msgs ff_util mapper +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) +# Declare C++ libraries +add_library(choreographer + src/choreographer_nodelet.cc + src/validator.cc +) +add_dependencies(choreographer ${catkin_EXPORTED_TARGETS}) +target_link_libraries(choreographer ${catkin_LIBRARIES}) + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) # Choreographer initialization fault tester @@ -49,7 +75,7 @@ if(CATKIN_ENABLE_TESTING) test/test_init_choreographer.cc ) target_link_libraries(test_init_choreographer - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) if(ENABLE_INTEGRATION_TESTING) # Choreographer test obstacles @@ -59,7 +85,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_obstacle - ${catkin_LIBRARIES} config_client ff_nodelet + ${catkin_LIBRARIES} ) # Choreographer test zones keepin @@ -69,7 +95,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_zones_keepin - ${catkin_LIBRARIES} config_client ff_nodelet + ${catkin_LIBRARIES} ) # Choreographer test zones keepout @@ -79,7 +105,7 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_zones_keepout - ${catkin_LIBRARIES} config_client ff_nodelet + ${catkin_LIBRARIES} ) # Choreographer test zones nominal @@ -89,9 +115,35 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_zones_nominal - ${catkin_LIBRARIES} config_client ff_nodelet + ${catkin_LIBRARIES} ) endif() endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/mobility/choreographer/package.xml b/mobility/choreographer/package.xml index 9710a2f49a..fca0042978 100644 --- a/mobility/choreographer/package.xml +++ b/mobility/choreographer/package.xml @@ -17,24 +17,28 @@ catkin roscpp nodelet + mapper tf2 tf2_ros ff_msgs ff_util - geometry_msgs - nav_msgs - visualization_msgs + ff_msgs + ff_hw_msgs + tf2_ros + jsonloader roscpp nodelet mapper tf2 tf2_ros ff_msgs + jsonloader + ff_util - geometry_msgs - nav_msgs - visualization_msgs + ff_msgs + ff_hw_msgs + tf2_ros diff 
--git a/mobility/framestore/CMakeLists.txt b/mobility/framestore/CMakeLists.txt index 7795c721ce..a4488d367f 100644 --- a/mobility/framestore/CMakeLists.txt +++ b/mobility/framestore/CMakeLists.txt @@ -15,27 +15,79 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(framestore) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + tf2_ros + geometry_msgs + config_reader +) + # Call catkin catkin_package( CATKIN_DEPENDS - roscpp roscpp tf2 tf2_ros geometry_msgs + roscpp nodelet pluginlib ff_util tf2_ros geometry_msgs config_reader +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -# Create global_transforms -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} ff_common msg_conversions config_reader - INC ${catkin_INCLUDE_DIRS} - DEPS ff_common msg_conversions config_reader tf2 tf2_ros geometry_msgs +# Declare C++ libraries +add_library(framestore + src/framestore.cc ) +add_dependencies(framestore ${catkin_EXPORTED_TARGETS}) +target_link_libraries(framestore ${catkin_LIBRARIES}) -# Create framestore -create_library(TARGET framestore - LIBS ${catkin_LIBRARIES} ff_common ff_nodelet msg_conversions - INC ${catkin_INCLUDE_DIRS} - DEPS ff_common msg_conversions roscpp nodelet pluginlib tf2 tf2_ros geometry_msgs + +## Declare a C++ executable: global_transforms +add_executable(global_transforms tools/global_transforms.cc) +add_dependencies(global_transforms ${catkin_EXPORTED_TARGETS}) +target_link_libraries(global_transforms + framestore gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -# Install our launch file -install_launch_files() +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS global_transforms DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/global_transforms share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/mobility/framestore/package.xml b/mobility/framestore/package.xml index 8d2951a34e..fecc5f5f12 100644 --- a/mobility/framestore/package.xml +++ b/mobility/framestore/package.xml @@ -18,15 +18,17 @@ roscpp nodelet pluginlib - tf2 + ff_util tf2_ros geometry_msgs + config_reader roscpp nodelet pluginlib - tf2 + ff_util tf2_ros geometry_msgs + config_reader diff --git a/mobility/mapper/CMakeLists.txt b/mobility/mapper/CMakeLists.txt index 4b48ef3fa8..5db03a90e8 100644 --- a/mobility/mapper/CMakeLists.txt +++ b/mobility/mapper/CMakeLists.txt @@ -15,36 +15,75 @@ # License for the specific language governing permissions and limitations # under the License. 
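# --- Editorial aside (illustrative only, not part of the patch) --------------
# The converted packages keep their rostest/gtest targets, but the tests now
# link glog directly instead of the config_reader and ff_nodelet helper
# libraries they linked before. A minimal sketch with a hypothetical test name:
if(CATKIN_ENABLE_TESTING)
  find_package(rostest REQUIRED)
  add_rostest_gtest(test_init_example   # hypothetical target, .test and .cc
    test/test_init_example.test
    test/test_init_example.cc
  )
  target_link_libraries(test_init_example
    ${catkin_LIBRARIES} glog
  )
endif()
# ------------------------------------------------------------------------------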
+cmake_minimum_required(VERSION 3.0) project(mapper) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## This fixes the Eigen aligment issue +## http://eigen.tuxfamily.org/dox-devel/group__TopicUnalignedArrayAssert.html +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D EIGEN_MAX_STATIC_ALIGN_BYTES=0") + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + ff_util + ff_msgs + message_runtime + octomap +) + +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(Octomap REQUIRED) + +# Find Eigen3 +find_package(Eigen3 REQUIRED) +# Find PCL +find_package(PCL REQUIRED COMPONENTS common) + catkin_package( - DEPENDS - Eigen - INCLUDE_DIRS - include - ${EIGEN3_INCLUDE_DIRS} - ${PCL_INCLUDE_DIRS} + INCLUDE_DIRS include LIBRARIES mapper CATKIN_DEPENDS roscpp nodelet pluginlib + ff_util ff_msgs message_runtime - cmake_modules ) -set(INCLUDES - ${EIGEN3_INCLUDE_DIRS} +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ${PCL_INCLUDE_DIRS} - src + ${OCTOMAP_INCLUDE_DIRS} ) -create_library(TARGET mapper - LIBS ${catkin_LIBRARIES} ff_nodelet ff_flight config_server ${PCL_LIBRARIES} ${OCTOMAP_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} ${INCLUDES} ${OCTOMAP_INCLUDE_DIRS} +# Declare C++ libraries +add_library(mapper + src/callbacks.cc + src/mapper_nodelet.cc + src/octoclass.cc + src/polynomials.cc + src/sampled_trajectory.cc + src/services.cc + src/threads.cc + src/visualization_functions.cc ) +add_dependencies(mapper ${catkin_EXPORTED_TARGETS}) +target_link_libraries(mapper ${OCTOMAP_LIBRARIES} ${catkin_LIBRARIES}) + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -55,8 +94,35 @@ if(CATKIN_ENABLE_TESTING) ) target_link_libraries(test_init_mapper - ${catkin_LIBRARIES} config_reader ff_nodelet + ${catkin_LIBRARIES} glog ) endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + diff --git a/mobility/mapper/include/mapper/mapper_nodelet.h b/mobility/mapper/include/mapper/mapper_nodelet.h index ca567b9efb..8b2265b5ad 100644 --- a/mobility/mapper/include/mapper/mapper_nodelet.h +++ b/mobility/mapper/include/mapper/mapper_nodelet.h @@ -157,16 +157,11 @@ class MapperNodelet : public ff_util::FreeFlyerNodelet { // Thread for fading memory of the octomap void FadeTask(ros::TimerEvent const& event); - // Threads for constantly updating the tfTree values - void HazTfTask(ros::TimerEvent const& event); - void PerchTfTask(ros::TimerEvent const& event); - void BodyTfTask(ros::TimerEvent const& event); - - // Thread for collision checking + // Collision checking void CollisionCheckTask(); - // Thread for getting pcl data and populating the octomap - void 
OctomappingTask(); + // Timer for getting pcl data and populating the octomap + void OctomappingTask(ros::TimerEvent const& event); // Initialize fault management void InitFault(std::string const& msg); @@ -174,11 +169,11 @@ class MapperNodelet : public ff_util::FreeFlyerNodelet { private: // Declare global variables (structures defined in structs.h) GlobalVariables globals_; - MutexStruct mutexes_; - SemaphoreStruct semaphores_; - // Thread variables - std::thread h_octo_thread_, h_fade_thread_, h_collision_check_thread_; + // Timer variables + ros::Timer timer_o_; // Octomapping Task + ros::Timer timer_d_; // Diagnostics + ros::Timer timer_f_; // Fade Task // Subscriber variables ros::Subscriber haz_sub_, perch_sub_, segment_sub_, reset_sub_; @@ -188,16 +183,17 @@ class MapperNodelet : public ff_util::FreeFlyerNodelet { ros::ServiceServer get_resolution_srv_, get_memory_time_srv_, get_collision_distance_srv_; ros::ServiceServer reset_map_srv_; - // Thread rates (hz) - double tf_update_rate_, fading_memory_update_rate_; + // Timer rates (hz) + double octomap_update_rate_, tf_update_rate_, fading_memory_update_rate_; // Trajectory validation variables ----------------------------- State state_; // State of the mapper (structure defined in struct.h) ff_util::Segment segment_; // Segment + + // Services ros::ServiceServer get_free_map_srv_; // Get free map service ros::ServiceServer get_obstacle_map_srv_; // Set obstacle map service - ros::Timer timer_d_, timer_f_, timer_h_, timer_p_, timer_b_; // Diagnostics - std::atomic killed_; + ff_util::ConfigServer cfg_; // Config server // TF2 diff --git a/mobility/mapper/include/mapper/structs.h b/mobility/mapper/include/mapper/structs.h index b88978f45a..09d65b7d67 100644 --- a/mobility/mapper/include/mapper/structs.h +++ b/mobility/mapper/include/mapper/structs.h @@ -59,18 +59,6 @@ struct GlobalVariables { std::queue pcl_queue; }; -struct MutexStruct { - std::mutex sampled_traj; - std::mutex tf; - std::mutex octomap; - std::mutex point_cloud; -}; - -struct SemaphoreStruct { - std::condition_variable pcl; - std::condition_variable collision_check; -}; - } // namespace mapper #endif // MAPPER_STRUCTS_H_ diff --git a/mobility/mapper/package.xml b/mobility/mapper/package.xml index 63e22acfff..550fa3b848 100644 --- a/mobility/mapper/package.xml +++ b/mobility/mapper/package.xml @@ -17,20 +17,20 @@ catkin roscpp nodelet + ff_util pluginlib - actionlib ff_msgs + message_runtime cmake_modules - visualization_msgs - + octomap roscpp nodelet pluginlib - actionlib + ff_util ff_msgs - visualization_msgs - cmake_modules message_runtime + cmake_modules + octomap diff --git a/mobility/mapper/src/callbacks.cc b/mobility/mapper/src/callbacks.cc index 55cc2fdca6..c3c198528e 100644 --- a/mobility/mapper/src/callbacks.cc +++ b/mobility/mapper/src/callbacks.cc @@ -35,22 +35,15 @@ void MapperNodelet::PclCallback(const sensor_msgs::PointCloud2::ConstPtr &msg) { // Get transform from camera to world const std::string pcl_frame = cloud.header.frame_id; - mutexes_.tf.lock(); if (!pcl_frame.compare("haz_cam")) new_pcl.tf_cam2world = globals_.tf_cam2world; else if (!pcl_frame.compare("perch_cam")) new_pcl.tf_cam2world = globals_.tf_perch2world; - mutexes_.tf.unlock(); // save into global variables - mutexes_.point_cloud.lock(); globals_.pcl_queue.push(new_pcl); if (globals_.pcl_queue.size() > max_queue_size) globals_.pcl_queue.pop(); - mutexes_.point_cloud.unlock(); - - // signal octomap thread to process new pcl data - semaphores_.pcl.notify_one(); } @@ -73,7 +66,6 @@ void 
MapperNodelet::SegmentCallback(const ff_msgs::Segment::ConstPtr &msg) { double ts = 0.1; sampled_traj::SampledTrajectory3D sampled_traj(ts, poly_trajectories); - mutexes_.sampled_traj.lock(); globals_.sampled_traj.pos_ = sampled_traj.pos_; globals_.sampled_traj.time_ = sampled_traj.time_; globals_.sampled_traj.n_points_ = sampled_traj.n_points_; @@ -94,10 +86,9 @@ void MapperNodelet::SegmentCallback(const ff_msgs::Segment::ConstPtr &msg) { // populate kdtree for finding nearest neighbor w.r.t. collisions globals_.sampled_traj.CreateKdTree(); - mutexes_.sampled_traj.unlock(); // Notify the collision checker to check for collision - semaphores_.collision_check.notify_one(); + CollisionCheckTask(); ros::Duration solver_time = ros::Time::now() - t0; ROS_DEBUG("Time to compute octotraj: %f", solver_time.toSec()); @@ -117,9 +108,7 @@ bool MapperNodelet::ReconfigureCallback(dynamic_reconfigure::Config &config) { } void MapperNodelet::ResetCallback(std_msgs::EmptyConstPtr const& msg) { - mutexes_.octomap.lock(); globals_.octomap.ResetMap(); - mutexes_.octomap.unlock(); } diff --git a/mobility/mapper/src/mapper_nodelet.cc b/mobility/mapper/src/mapper_nodelet.cc index 29b451d764..d0b5e6ef28 100644 --- a/mobility/mapper/src/mapper_nodelet.cc +++ b/mobility/mapper/src/mapper_nodelet.cc @@ -25,19 +25,10 @@ namespace mapper { MapperNodelet::MapperNodelet() : - ff_util::FreeFlyerNodelet(NODE_MAPPER), state_(IDLE), killed_(false) { + ff_util::FreeFlyerNodelet(NODE_MAPPER), state_(IDLE) { } -MapperNodelet::~MapperNodelet() { - // Mark as thread as killed - killed_ = true; - // Notify all threads to ensure shutdown starts - semaphores_.collision_check.notify_one(); - semaphores_.pcl.notify_one(); - // Join threads once shutdown - h_octo_thread_.join(); - h_collision_check_thread_.join(); -} +MapperNodelet::~MapperNodelet() {} void MapperNodelet::Initialize(ros::NodeHandle *nh) { listener_.reset(new tf2_ros::TransformListener(buffer_)); @@ -78,7 +69,7 @@ void MapperNodelet::Initialize(ros::NodeHandle *nh) { clamping_threshold_max = cfg_.Get("clamping_threshold_max"); compression_max_dev = cfg_.Get("traj_compression_max_dev"); traj_resolution = cfg_.Get("traj_compression_resolution"); - tf_update_rate_ = cfg_.Get("tf_update_rate"); + octomap_update_rate_ = cfg_.Get("octomap_update_rate"); fading_memory_update_rate_ = cfg_.Get("fading_memory_update_rate"); use_haz_cam = cfg_.Get("use_haz_cam"); use_perch_cam = cfg_.Get("use_perch_cam"); @@ -123,24 +114,13 @@ void MapperNodelet::Initialize(ros::NodeHandle *nh) { hazard_pub_ = nh->advertise( TOPIC_MOBILITY_HAZARD, 1); - // Threads - h_octo_thread_ = std::thread(&MapperNodelet::OctomappingTask, this); - h_collision_check_thread_ = std::thread( - &MapperNodelet::CollisionCheckTask, this); - // Timers + timer_o_ = nh->createTimer( + ros::Duration(ros::Rate(octomap_update_rate_)), + &MapperNodelet::OctomappingTask, this, false, true); timer_f_ = nh->createTimer( ros::Duration(ros::Rate(fading_memory_update_rate_)), &MapperNodelet::FadeTask, this, false, true); - timer_h_ = nh->createTimer( - ros::Duration(ros::Rate(tf_update_rate_)), - &MapperNodelet::HazTfTask, this, false, true); - timer_p_ = nh->createTimer( - ros::Duration(ros::Rate(tf_update_rate_)), - &MapperNodelet::PerchTfTask, this, false, true); - timer_b_ = nh->createTimer( - ros::Duration(ros::Rate(tf_update_rate_)), - &MapperNodelet::BodyTfTask, this, false, true); // Subscribers std::string cam_prefix = TOPIC_HARDWARE_PICOFLEXX_PREFIX; diff --git a/mobility/mapper/src/services.cc 
b/mobility/mapper/src/services.cc index 140275f430..dc9cc1bb20 100644 --- a/mobility/mapper/src/services.cc +++ b/mobility/mapper/src/services.cc @@ -25,9 +25,7 @@ namespace mapper { // Update resolution of the map bool MapperNodelet::SetResolution(ff_msgs::SetFloat::Request &req, ff_msgs::SetFloat::Response &res) { - mutexes_.octomap.lock(); globals_.octomap.SetResolution(req.data); - mutexes_.octomap.unlock(); res.success = true; return true; } @@ -42,9 +40,7 @@ bool MapperNodelet::GetResolution(ff_msgs::GetFloat::Request &req, // Update map memory time bool MapperNodelet::SetMemoryTime(ff_msgs::SetFloat::Request &req, ff_msgs::SetFloat::Response &res) { - mutexes_.octomap.lock(); globals_.octomap.SetMemoryTime(req.data); - mutexes_.octomap.unlock(); res.success = true; return true; } @@ -58,9 +54,7 @@ bool MapperNodelet::GetMemoryTime(ff_msgs::GetFloat::Request &req, bool MapperNodelet::SetCollisionDistance(ff_msgs::SetFloat::Request &req, ff_msgs::SetFloat::Response &res) { - mutexes_.octomap.lock(); globals_.octomap.SetMapInflation(req.data + cfg_.Get("robot_radius")); - mutexes_.octomap.unlock(); res.success = true; return true; } @@ -73,9 +67,7 @@ bool MapperNodelet::GetMapInflation(ff_msgs::GetFloat::Request &req, bool MapperNodelet::ResetMap(std_srvs::Trigger::Request &req, std_srvs::Trigger::Response &res) { - mutexes_.octomap.lock(); globals_.octomap.ResetMap(); - mutexes_.octomap.unlock(); res.success = true; res.message = "Map has been reset!"; return true; @@ -86,9 +78,7 @@ bool MapperNodelet::GetFreeMapCallback(ff_msgs::GetMap::Request &req, visualization_msgs::MarkerArray om, fm; sensor_msgs::PointCloud2 oc, fc; - mutexes_.octomap.lock(); globals_.octomap.InflatedVisMarkers(&om, &fm, &oc, &fc); - mutexes_.octomap.unlock(); res.points = fc; res.resolution = globals_.octomap.GetResolution(); @@ -101,9 +91,7 @@ bool MapperNodelet::GetObstacleMapCallback(ff_msgs::GetMap::Request &req, visualization_msgs::MarkerArray om, fm; sensor_msgs::PointCloud2 oc, fc; - mutexes_.octomap.lock(); globals_.octomap.InflatedVisMarkers(&om, &fm, &oc, &fc); - mutexes_.octomap.unlock(); res.points = oc; res.resolution = globals_.octomap.GetResolution(); diff --git a/mobility/mapper/src/threads.cc b/mobility/mapper/src/threads.cc index ff72c1791e..2986046947 100644 --- a/mobility/mapper/src/threads.cc +++ b/mobility/mapper/src/threads.cc @@ -25,46 +25,12 @@ namespace mapper { // Thread for fading memory of the octomap void MapperNodelet::FadeTask(ros::TimerEvent const& event) { - mutexes_.octomap.lock(); if (globals_.octomap.memory_time_ > 0) globals_.octomap.FadeMemory(fading_memory_update_rate_); - mutexes_.octomap.unlock(); -} - -// Thread for constantly updating the tfTree values -void MapperNodelet::HazTfTask(ros::TimerEvent const& event) { - mutexes_.tf.lock(); - try { - globals_.tf_cam2world = buffer_.lookupTransform(FRAME_NAME_WORLD, - GetTransform(FRAME_NAME_HAZ_CAM), ros::Time(0)); - } catch (tf2::TransformException &ex) {} - mutexes_.tf.unlock(); -} - -// Thread for constantly updating the tfTree values -void MapperNodelet::PerchTfTask(ros::TimerEvent const& event) { - mutexes_.tf.lock(); - try { - globals_.tf_perch2world = buffer_.lookupTransform(FRAME_NAME_WORLD, - GetTransform(FRAME_NAME_PERCH_CAM), ros::Time(0)); - } catch (tf2::TransformException &ex) {} - mutexes_.tf.unlock(); -} - -// Thread for updating the tfTree values -void MapperNodelet::BodyTfTask(ros::TimerEvent const& event) { - mutexes_.tf.lock(); - try { - globals_.tf_body2world = 
buffer_.lookupTransform(FRAME_NAME_WORLD, - GetTransform(FRAME_NAME_BODY), ros::Time(0)); - } catch (tf2::TransformException &ex) {} - mutexes_.tf.unlock(); } // Sentinel void MapperNodelet::CollisionCheckTask() { - ROS_DEBUG("collisionCheck Thread started!"); - // visualization markers visualization_msgs::MarkerArray traj_markers, samples_markers; visualization_msgs::MarkerArray compressed_samples_markers, collision_markers; @@ -72,212 +38,184 @@ void MapperNodelet::CollisionCheckTask() { // pcl variables int cloudsize; - std::mutex mtx; - std::unique_lock lck(mtx); - while (!killed_) { - // Wait until there is a new trajectory or a new update on the map - semaphores_.collision_check.wait(lck); - if (killed_) - return; - - // Get time for when this task started - ros::Time time_now = ros::Time::now(); - - // Copy trajectory into local point cloud - pcl::PointCloud point_cloud_traj; - std::vector colliding_nodes; - traj_markers.markers.clear(); - collision_markers.markers.clear(); - samples_markers.markers.clear(); - compressed_samples_markers.markers.clear(); - - mutexes_.sampled_traj.lock(); - point_cloud_traj = globals_.sampled_traj.point_cloud_traj_; - std::vector time = globals_.sampled_traj.time_; + // Get time for when this task started + ros::Time time_now = ros::Time::now(); + + // Copy trajectory into local point cloud + pcl::PointCloud point_cloud_traj; + std::vector colliding_nodes; + traj_markers.markers.clear(); + collision_markers.markers.clear(); + samples_markers.markers.clear(); + compressed_samples_markers.markers.clear(); + + point_cloud_traj = globals_.sampled_traj.point_cloud_traj_; + std::vector time = globals_.sampled_traj.time_; + + // Send visualization markers + globals_.sampled_traj.TrajVisMarkers(&traj_markers); + globals_.sampled_traj.SamplesVisMarkers(&samples_markers); + globals_.sampled_traj.CompressedVisMarkers(&compressed_samples_markers); + path_marker_pub_.publish(traj_markers); + path_marker_pub_.publish(samples_markers); + path_marker_pub_.publish(compressed_samples_markers); + + // Stop execution if there are no points in the trajectory structure + cloudsize = point_cloud_traj.size(); + if (cloudsize <= 0) { + visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 0.0, &collision_markers); + path_marker_pub_.publish(traj_markers); + path_marker_pub_.publish(collision_markers); + return; + } - // Send visualization markers + // Stop execution if the current time is beyond the final time of the trajectory + if (time_now.toSec() > time.back()) { + globals_.sampled_traj.ClearObject(); globals_.sampled_traj.TrajVisMarkers(&traj_markers); - globals_.sampled_traj.SamplesVisMarkers(&samples_markers); - globals_.sampled_traj.CompressedVisMarkers(&compressed_samples_markers); + visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 0.0, &collision_markers); path_marker_pub_.publish(traj_markers); - path_marker_pub_.publish(samples_markers); - path_marker_pub_.publish(compressed_samples_markers); - mutexes_.sampled_traj.unlock(); - - // Stop execution if there are no points in the trajectory structure - cloudsize = point_cloud_traj.size(); - if (cloudsize <= 0) { - visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 0.0, &collision_markers); - path_marker_pub_.publish(traj_markers); - path_marker_pub_.publish(collision_markers); - continue; - } - - // Stop execution if the current time is beyond the final time of the trajectory - if (time_now.toSec() > time.back()) { - mutexes_.sampled_traj.lock(); - 
globals_.sampled_traj.ClearObject(); - globals_.sampled_traj.TrajVisMarkers(&traj_markers); - mutexes_.sampled_traj.unlock(); - visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 0.0, &collision_markers); - path_marker_pub_.publish(traj_markers); - path_marker_pub_.publish(collision_markers); - continue; - } - - // Check if trajectory collides with points in the point-cloud - mutexes_.octomap.lock(); - double res = globals_.octomap.tree_inflated_.getResolution(); - globals_.octomap.FindCollidingNodesInflated(point_cloud_traj, &colliding_nodes); - mutexes_.octomap.unlock(); - - if (colliding_nodes.size() > 0) { - // Sort collision time (use kdtree for nearest neighbor) - std::vector sorted_collisions; - mutexes_.sampled_traj.lock(); - globals_.sampled_traj.SortCollisions(colliding_nodes, &sorted_collisions); - mutexes_.sampled_traj.unlock(); + path_marker_pub_.publish(collision_markers); + return; + } - double collision_time = (sorted_collisions[0].header.stamp - ros::Time::now()).toSec(); - // uint lastCollisionIdx = sorted_collisions.back().header.seq; - if (collision_time > 0) { - ROS_WARN("Imminent collision within %.3f seconds!", collision_time); - // Publish the message - ff_msgs::Hazard info; - info.header.stamp = ros::Time::now(); - info.header.frame_id = GetPlatform(); - info.type = ff_msgs::Hazard::TYPE_OBSTACLE; - info.hazard = sorted_collisions[0]; - hazard_pub_.publish(info); - // Unlock resources - mutexes_.sampled_traj.lock(); - globals_.sampled_traj.ClearObject(); - mutexes_.sampled_traj.unlock(); - } + // Check if trajectory collides with points in the point-cloud + double res = globals_.octomap.tree_inflated_.getResolution(); + globals_.octomap.FindCollidingNodesInflated(point_cloud_traj, &colliding_nodes); + + if (colliding_nodes.size() > 0) { + // Sort collision time (use kdtree for nearest neighbor) + std::vector sorted_collisions; + globals_.sampled_traj.SortCollisions(colliding_nodes, &sorted_collisions); + + double collision_time = (sorted_collisions[0].header.stamp - ros::Time::now()).toSec(); + // uint lastCollisionIdx = sorted_collisions.back().header.seq; + if (collision_time > 0) { + ROS_WARN("Imminent collision within %.3f seconds!", collision_time); + // Publish the message + ff_msgs::Hazard info; + info.header.stamp = ros::Time::now(); + info.header.frame_id = GetPlatform(); + info.type = ff_msgs::Hazard::TYPE_OBSTACLE; + info.hazard = sorted_collisions[0]; + hazard_pub_.publish(info); + // Unlock resources + globals_.sampled_traj.ClearObject(); } - - // Draw colliding markers (delete if none) - visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 1.01*res, &collision_markers); - path_marker_pub_.publish(traj_markers); - path_marker_pub_.publish(collision_markers); - ros::Duration solver_time = ros::Time::now() - time_now; } - ROS_DEBUG("Exiting collisionCheck Thread..."); + // Draw colliding markers (delete if none) + visualization_functions::DrawCollidingNodes(colliding_nodes, "world", 1.01*res, &collision_markers); + path_marker_pub_.publish(traj_markers); + path_marker_pub_.publish(collision_markers); + ros::Duration solver_time = ros::Time::now() - time_now; } -void MapperNodelet::OctomappingTask() { - ROS_DEBUG("OctomappingTask Thread started!"); - geometry_msgs::TransformStamped tf_cam2world; +void MapperNodelet::OctomappingTask(ros::TimerEvent const& event) { pcl::PointCloud< pcl::PointXYZ > pcl_world; - std::mutex mtx; - std::unique_lock lck(mtx); - while (!killed_) { - // Take care of the special case where the 
notify_one was sent during the - // processing of the previous point cloud. The signal that this happened - // is a non-empty point cloud queue. In this case we pass straight through. - if (globals_.pcl_queue.empty()) - semaphores_.pcl.wait(lck); - if (killed_) - return; - mutexes_.point_cloud.lock(); - - // Get time for when this task started - const ros::Time t0 = ros::Time::now(); - - // Get Point Cloud - pcl::PointCloud point_cloud = - globals_.pcl_queue.front().cloud; - const geometry_msgs::TransformStamped tf_cam2world = - globals_.pcl_queue.front().tf_cam2world; + // If there are no pcl point clounds + if (globals_.pcl_queue.empty()) + return; - // Remove data from queue - globals_.pcl_queue.pop(); - mutexes_.point_cloud.unlock(); - - // Check if a tf message has been received already. If not, return - if (tf_cam2world.header.stamp.toSec() == 0) - continue; - - // Transform pcl into world frame - Eigen::Affine3d transform = Eigen::Affine3d::Identity(); - transform.translation() << tf_cam2world.transform.translation.x, - tf_cam2world.transform.translation.y, - tf_cam2world.transform.translation.z; - transform.rotate(Eigen::Quaterniond( - tf_cam2world.transform.rotation.w, - tf_cam2world.transform.rotation.x, - tf_cam2world.transform.rotation.y, - tf_cam2world.transform.rotation.z)); - pcl::transformPointCloud(point_cloud, pcl_world, transform); - - // Save into octomap - algebra_3d::FrustumPlanes world_frustum; - mutexes_.octomap.lock(); - globals_.octomap.cam_frustum_.TransformFrustum(transform, &world_frustum); - globals_.octomap.PclToRayOctomap(pcl_world, tf_cam2world, world_frustum); - globals_.octomap.tree_.prune(); // prune the tree before visualizing - globals_.octomap.tree_inflated_.prune(); - // globals_.octomap.tree.writeBinary("simple_tree.bt"); - mutexes_.octomap.unlock(); - - // Publish visualization markers iff at least one node is subscribed to it - bool pub_obstacles, pub_free, pub_obstacles_inflated, pub_free_inflated; - pub_obstacles = (obstacle_marker_pub_.getNumSubscribers() > 0) || (obstacle_cloud_pub_.getNumSubscribers() > 0); - pub_free = (free_space_marker_pub_.getNumSubscribers() > 0) || (free_space_cloud_pub_.getNumSubscribers() > 0); - pub_obstacles_inflated = (inflated_obstacle_marker_pub_.getNumSubscribers() > 0) - || (inflated_obstacle_cloud_pub_.getNumSubscribers() > 0); - pub_free_inflated = (inflated_free_space_marker_pub_.getNumSubscribers() > 0) - || (inflated_free_space_cloud_pub_.getNumSubscribers() > 0); +// Update TF values + try { + globals_.tf_cam2world = buffer_.lookupTransform(FRAME_NAME_WORLD, + GetTransform(FRAME_NAME_HAZ_CAM), ros::Time(0)); + } catch (tf2::TransformException &ex) {} + try { + globals_.tf_perch2world = buffer_.lookupTransform(FRAME_NAME_WORLD, + GetTransform(FRAME_NAME_PERCH_CAM), ros::Time(0)); + } catch (tf2::TransformException &ex) {} + try { + globals_.tf_body2world = buffer_.lookupTransform(FRAME_NAME_WORLD, + GetTransform(FRAME_NAME_BODY), ros::Time(0)); + } catch (tf2::TransformException &ex) {} - if (pub_obstacles || pub_free) { - visualization_msgs::MarkerArray obstacle_markers, free_markers; - sensor_msgs::PointCloud2 obstacle_cloud, free_cloud; - mutexes_.octomap.lock(); - globals_.octomap.TreeVisMarkers(&obstacle_markers, &free_markers, - &obstacle_cloud, &free_cloud); - mutexes_.octomap.unlock(); - if (pub_obstacles) { - obstacle_marker_pub_.publish(obstacle_markers); - obstacle_cloud_pub_.publish(obstacle_cloud); - } - if (pub_free) { - free_space_marker_pub_.publish(free_markers); - 
free_space_cloud_pub_.publish(free_cloud); - } + // Get time for when this task started + const ros::Time t0 = ros::Time::now(); + + // Get Point Cloud + pcl::PointCloud point_cloud = + globals_.pcl_queue.front().cloud; + const geometry_msgs::TransformStamped tf_cam2world = + globals_.pcl_queue.front().tf_cam2world; + + // Remove data from queue + globals_.pcl_queue.pop(); + + // Check if a tf message has been received already. If not, return + if (tf_cam2world.header.stamp.toSec() == 0) + return; + + // Transform pcl into world frame + Eigen::Affine3d transform = Eigen::Affine3d::Identity(); + transform.translation() << tf_cam2world.transform.translation.x, + tf_cam2world.transform.translation.y, + tf_cam2world.transform.translation.z; + transform.rotate(Eigen::Quaterniond( + tf_cam2world.transform.rotation.w, + tf_cam2world.transform.rotation.x, + tf_cam2world.transform.rotation.y, + tf_cam2world.transform.rotation.z)); + pcl::transformPointCloud(point_cloud, pcl_world, transform); + + // Save into octomap + algebra_3d::FrustumPlanes world_frustum; + globals_.octomap.cam_frustum_.TransformFrustum(transform, &world_frustum); + globals_.octomap.PclToRayOctomap(pcl_world, tf_cam2world, world_frustum); + globals_.octomap.tree_.prune(); // prune the tree before visualizing + globals_.octomap.tree_inflated_.prune(); + // globals_.octomap.tree.writeBinary("simple_tree.bt"); + + // Publish visualization markers iff at least one node is subscribed to it + bool pub_obstacles, pub_free, pub_obstacles_inflated, pub_free_inflated; + pub_obstacles = (obstacle_marker_pub_.getNumSubscribers() > 0) || (obstacle_cloud_pub_.getNumSubscribers() > 0); + pub_free = (free_space_marker_pub_.getNumSubscribers() > 0) || (free_space_cloud_pub_.getNumSubscribers() > 0); + pub_obstacles_inflated = (inflated_obstacle_marker_pub_.getNumSubscribers() > 0) + || (inflated_obstacle_cloud_pub_.getNumSubscribers() > 0); + pub_free_inflated = (inflated_free_space_marker_pub_.getNumSubscribers() > 0) + || (inflated_free_space_cloud_pub_.getNumSubscribers() > 0); + + if (pub_obstacles || pub_free) { + visualization_msgs::MarkerArray obstacle_markers, free_markers; + sensor_msgs::PointCloud2 obstacle_cloud, free_cloud; + globals_.octomap.TreeVisMarkers(&obstacle_markers, &free_markers, + &obstacle_cloud, &free_cloud); + if (pub_obstacles) { + obstacle_marker_pub_.publish(obstacle_markers); + obstacle_cloud_pub_.publish(obstacle_cloud); } - - if (pub_obstacles_inflated || pub_free_inflated) { - visualization_msgs::MarkerArray inflated_obstacle_markers, inflated_free_markers; - sensor_msgs::PointCloud2 inflated_obstacle_cloud, inflated_free_cloud; - mutexes_.octomap.lock(); - globals_.octomap.InflatedVisMarkers(&inflated_obstacle_markers, &inflated_free_markers, - &inflated_obstacle_cloud, &inflated_free_cloud); - mutexes_.octomap.unlock(); - if (pub_obstacles_inflated) { - inflated_obstacle_marker_pub_.publish(inflated_obstacle_markers); - inflated_obstacle_cloud_pub_.publish(inflated_obstacle_cloud); - } - if (pub_free_inflated) { - inflated_free_space_marker_pub_.publish(inflated_free_markers); - inflated_free_space_cloud_pub_.publish(inflated_free_cloud); - } + if (pub_free) { + free_space_marker_pub_.publish(free_markers); + free_space_cloud_pub_.publish(free_cloud); } + } - if (cam_frustum_pub_.getNumSubscribers() > 0) { - visualization_msgs::Marker frustum_markers; - mutexes_.octomap.lock(); - globals_.octomap.cam_frustum_.VisualizeFrustum(point_cloud.header.frame_id, &frustum_markers); - mutexes_.octomap.unlock(); - 
cam_frustum_pub_.publish(frustum_markers); + if (pub_obstacles_inflated || pub_free_inflated) { + visualization_msgs::MarkerArray inflated_obstacle_markers, inflated_free_markers; + sensor_msgs::PointCloud2 inflated_obstacle_cloud, inflated_free_cloud; + globals_.octomap.InflatedVisMarkers(&inflated_obstacle_markers, &inflated_free_markers, + &inflated_obstacle_cloud, &inflated_free_cloud); + if (pub_obstacles_inflated) { + inflated_obstacle_marker_pub_.publish(inflated_obstacle_markers); + inflated_obstacle_cloud_pub_.publish(inflated_obstacle_cloud); } + if (pub_free_inflated) { + inflated_free_space_marker_pub_.publish(inflated_free_markers); + inflated_free_space_cloud_pub_.publish(inflated_free_cloud); + } + } - // Notify the collision checker to check for collision - semaphores_.collision_check.notify_one(); - ros::Duration map_time = ros::Time::now() - t0; + if (cam_frustum_pub_.getNumSubscribers() > 0) { + visualization_msgs::Marker frustum_markers; + globals_.octomap.cam_frustum_.VisualizeFrustum(point_cloud.header.frame_id, &frustum_markers); + cam_frustum_pub_.publish(frustum_markers); } - ROS_DEBUG("Exiting OctomappingTask Thread..."); + + // Notify the collision checker to check for collision + CollisionCheckTask(); + ros::Duration map_time = ros::Time::now() - t0; } } // namespace mapper diff --git a/mobility/mobility/CMakeLists.txt b/mobility/mobility/CMakeLists.txt index 072ee98b3b..5a54389cac 100644 --- a/mobility/mobility/CMakeLists.txt +++ b/mobility/mobility/CMakeLists.txt @@ -15,25 +15,66 @@ # License for the specific language governing permissions and limitations # under the License. -# Subsystem matapackage +cmake_minimum_required(VERSION 3.0) project(mobility) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + choreographer + mapper + planner_trapezoidal + planner_qp + ff_msgs + ff_util + jsonloader +) catkin_package( CATKIN_DEPENDS choreographer mapper - sentinel planner_trapezoidal planner_qp - docker ff_msgs ff_util + jsonloader ) -create_tool_targets( - DIR tools - LIBS ${catkin_LIBRARIES} ${EIGEN3_LIBRARIES} config_client ff_common ff_flight planner_trapezoidal - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs ff_common +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) +## Declare a C++ executable: plangen +add_executable(plangen tools/plangen.cc) +add_dependencies(plangen ${catkin_EXPORTED_TARGETS}) +target_link_libraries(plangen + gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: teleop +add_executable(teleop tools/teleop.cc) +add_dependencies(teleop ${catkin_EXPORTED_TARGETS}) +target_link_libraries(teleop + gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Install C++ executables +install(TARGETS plangen DESTINATION bin) +install(TARGETS teleop DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/plangen share/${PROJECT_NAME} + COMMAND ln -s ../../bin/teleop share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") diff --git a/mobility/mobility/package.xml b/mobility/mobility/package.xml index 71233c68db..a9f0f1b3c4 100644 --- a/mobility/mobility/package.xml +++ b/mobility/mobility/package.xml @@ -17,12 +17,16 @@ catkin ff_msgs ff_util + choreographer + mapper + planner_trapezoidal + planner_qp + jsonloader 
ff_msgs ff_util choreographer mapper - sentinel planner_trapezoidal planner_qp - docker - \ No newline at end of file + jsonloader + diff --git a/mobility/planner_qp/planner_qp/CMakeLists.txt b/mobility/planner_qp/planner_qp/CMakeLists.txt index ae6a315885..065d2a6acf 100644 --- a/mobility/planner_qp/planner_qp/CMakeLists.txt +++ b/mobility/planner_qp/planner_qp/CMakeLists.txt @@ -15,27 +15,34 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(planner_qp) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -Wall -Wno-deprecated-declarations -Werror=return-type") + +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../../cmake") + + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../../cmake") +find_package(OpenCV331 REQUIRED) + find_package(JPS3D REQUIRED) find_package(DecompUtil REQUIRED) -find_package(catkin2 REQUIRED COMPONENTS roscpp nodelet pluginlib actionlib tf) - -set(LIBS - ${TRAJ_OPT_PRO_LIBRARIES} - ${TRAJ_OPT_ROS_LIBRARIES} - ${JPS3D_LIBRARIES} - ${DECOMP_UTIL_LIBRARIES} - ${SPARSE_MAPPING_LIBRARIES} -) -# message(DECOMP_UTIL_LIBRARIES ${DECOMP_UTIL_LIBRARIES}) -# message(LIBS ${LIBS}) -set(INCLUDES - ${TRAJ_OPT_PRO_INCLUDE_DIRS} - ${TRAJ_OPT_ROS_INCLUDE_DIRS} - ${JPS3D_INCLUDE_DIRS} - ${DECOMP_UTIL_INCLUDE_DIRS} - ${SPARSE_MAPPING_INCLUDE_DIRS} +find_package(PCL REQUIRED COMPONENTS common) + + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + actionlib + tf + ff_util + choreographer + traj_opt_basic + traj_opt_pro + traj_opt_ros ) catkin_package( @@ -48,15 +55,65 @@ catkin_package( pluginlib actionlib tf + ff_util + choreographer + traj_opt_basic + traj_opt_pro + traj_opt_ros ) -create_library(TARGET planner_qp - LIBS ${LIBS} ${catkin_LIBRARIES} ff_nodelet msg_conversions config_server config_client jsonloader ff_flight mapper - INC ${catkin_INCLUDE_DIRS} ${INCLUDES} - DEPS choreographer + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} + ${JPS3D_INCLUDE_DIR} + ${DECOMP_UTIL_INCLUDE_DIR} + ${PCL_INCLUDE_DIRS} ) -create_tool_targets(DIR test - LIBS ${LIBS} - INC ${INCLUDES} + +add_library(planner_qp + src/planner_qp.cc ) +add_dependencies(planner_qp ${catkin_EXPORTED_TARGETS}) +target_link_libraries(planner_qp ${JPS3D_LIBRARIES} ${DECOMP_UTIL_LIBRARIES} ${PCL_LIBRARIES} ${catkin_LIBRARIES}) + +## Declare a C++ executable: sfc_server +add_executable(sfc_server test/sfc_server.cc) +add_dependencies(sfc_server ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sfc_server + planner_qp gflags ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Install C++ executables +install(TARGETS sfc_server DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/sfc_server share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + 
PATTERN ".svn" EXCLUDE) -install_launch_files() diff --git a/mobility/planner_qp/planner_qp/package.xml b/mobility/planner_qp/planner_qp/package.xml index 7afb2db4da..7256ee995a 100644 --- a/mobility/planner_qp/planner_qp/package.xml +++ b/mobility/planner_qp/planner_qp/package.xml @@ -20,6 +20,11 @@ pluginlib actionlib tf + ff_util + choreographer + traj_opt_basic + traj_opt_pro + traj_opt_ros diff --git a/mobility/planner_qp/traj_opt_basic/CMakeLists.catkin b/mobility/planner_qp/traj_opt_basic/CMakeLists.catkin deleted file mode 100644 index 0329d86e77..0000000000 --- a/mobility/planner_qp/traj_opt_basic/CMakeLists.catkin +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -cmake_minimum_required(VERSION 2.8.3) -project(traj_opt_basic) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type") - -find_package(catkin_simple REQUIRED) - -find_package(Eigen3 REQUIRED) -find_package(Boost REQUIRED) -include_directories(${EIGEN3_INCLUDE_DIR} ${Boost_INCLUDE_DIRS}) - - -catkin_simple() - -cs_add_library(traj_opt_basic src/msg_traj.cpp src/polynomial_basis.cpp src/trajectory.cpp) - -cs_install() - -cs_export(DEPENDS Eigen3 Boost) \ No newline at end of file diff --git a/mobility/planner_qp/traj_opt_basic/CMakeLists.nasa b/mobility/planner_qp/traj_opt_basic/CMakeLists.nasa deleted file mode 100644 index 5763c8026a..0000000000 --- a/mobility/planner_qp/traj_opt_basic/CMakeLists.nasa +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -project(traj_opt_basic) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type") - -create_library(TARGET traj_opt_basic - INC ${EIGEN3_INCLUDE_DIR} ${Boost_INCLUDE_DIRS} - ADD_SRCS src/msg_traj.cpp src/polynomial_basis.cpp src/trajectory.cpp -) - -set(TRAJ_OPT_BASIC_INCLUDE_DIRS - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${EIGEN3_INCLUDE_DIRS} - ${Boost_INCLUDE_DIRS} - PARENT_SCOPE) -set(TRAJ_OPT_BASIC_LIBRARIES traj_opt_basic PARENT_SCOPE) - - diff --git a/mobility/planner_qp/traj_opt_basic/CMakeLists.txt b/mobility/planner_qp/traj_opt_basic/CMakeLists.txt index db5401dc12..c0092c14c9 100644 --- a/mobility/planner_qp/traj_opt_basic/CMakeLists.txt +++ b/mobility/planner_qp/traj_opt_basic/CMakeLists.txt @@ -1,24 +1,68 @@ # Copyright (c) 2017, United States Government, as represented by the # Administrator of the National Aeronautics and Space Administration. -# +# # All rights reserved. -# +# # The Astrobee platform is licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -cmake_minimum_required(VERSION 2.8.3) -option(NASA "Use NASA's build system" OFF) -if(NASA) - include(CMakeLists.nasa) -else() - include(CMakeLists.catkin) -endif() +cmake_minimum_required(VERSION 3.0) +project(traj_opt_basic) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -Wall -Wno-deprecated-declarations -Werror=return-type") + +find_package(catkin REQUIRED COMPONENTS +) + +find_package(Eigen3 REQUIRED) +find_package(Boost REQUIRED) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES + traj_opt_basic +) + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${Boost_INCLUDE_DIRS} +) + +add_library(traj_opt_basic + src/msg_traj.cpp + src/polynomial_basis.cpp + src/trajectory.cpp) +add_dependencies(traj_opt_basic ${catkin_EXPORTED_TARGETS}) +target_link_libraries(traj_opt_basic ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/mobility/planner_qp/traj_opt_msgs/CMakeLists.catkin b/mobility/planner_qp/traj_opt_msgs/CMakeLists.catkin deleted file mode 100644 index 0d4a79af6f..0000000000 --- a/mobility/planner_qp/traj_opt_msgs/CMakeLists.catkin +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. 
-# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -cmake_minimum_required(VERSION 2.8.3) - -project(traj_opt_msgs) - -find_package(catkin_simple REQUIRED) - -catkin_simple() - -cs_install() - -cs_export() diff --git a/mobility/planner_qp/traj_opt_msgs/CMakeLists.nasa b/mobility/planner_qp/traj_opt_msgs/CMakeLists.nasa deleted file mode 100644 index d86e4043ba..0000000000 --- a/mobility/planner_qp/traj_opt_msgs/CMakeLists.nasa +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(traj_opt_msgs) - -find_package(catkin2 REQUIRED COMPONENTS - message_generation - std_msgs -) - -create_msg_targets(DIR msg - DEPS std_msgs) diff --git a/mobility/planner_qp/traj_opt_msgs/CMakeLists.txt b/mobility/planner_qp/traj_opt_msgs/CMakeLists.txt index 9d0a5a98fb..f192b5bd51 100644 --- a/mobility/planner_qp/traj_opt_msgs/CMakeLists.txt +++ b/mobility/planner_qp/traj_opt_msgs/CMakeLists.txt @@ -1,25 +1,43 @@ # Copyright (c) 2017, United States Government, as represented by the # Administrator of the National Aeronautics and Space Administration. -# +# # All rights reserved. -# +# # The Astrobee platform is licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-cmake_minimum_required(VERSION 2.8.3) +cmake_minimum_required(VERSION 3.0) -option(NASA "Use NASA's build system" OFF) -if(NASA) - include(CMakeLists.nasa) -else() - include(CMakeLists.catkin) -endif() +project(traj_opt_msgs) + +find_package(catkin REQUIRED COMPONENTS + message_generation + std_msgs +) + + +# Generate messages in the 'msg' folder +add_message_files( + FILES + Polynomial.msg + SolverInfo.msg + Spline.msg + Trajectory.msg +) + +generate_messages( + DEPENDENCIES std_msgs +) + +catkin_package( + LIBRARIES +) diff --git a/mobility/planner_qp/traj_opt_pro/CMakeLists.catkin b/mobility/planner_qp/traj_opt_pro/CMakeLists.catkin deleted file mode 100644 index fe18bbe44b..0000000000 --- a/mobility/planner_qp/traj_opt_pro/CMakeLists.catkin +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -cmake_minimum_required(VERSION 2.8.3) - -project(traj_opt_pro) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type") -set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${PROJECT_SOURCE_DIR}/cmake") - - -############################ Optional Build Params ############################# -set(BUILD_GUROBI TRUE) # Commercial backend -set(INTEL_MKL FALSE) # Use Intel's Math Kernel Library (Not faster on an i7) -############################ End Optional Params ############################### - -# Find optional packages -if(BUILD_GUROBI) - find_package(GUROBI REQUIRED) - include_directories(${GUROBI_INCLUDE_DIRS}) -endif() -if(INTEL_MKL) - add_definitions(-DEIGEN_USE_MKL_ALL) - find_package(IntelMKL REQUIRED) - set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-as-needed -lpthread -lm -ldl" ) - set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-as-needed -lpthread -lm -ldl" ) - include_directories(${INTEL_MKL_INCLUDES}) -endif() - -# Find required packages -find_package(catkin_simple REQUIRED) - -find_package(Eigen3 REQUIRED) -include_directories(${EIGEN3_INCLUDE_DIR}) -find_package(OpenCV REQUIRED) -include_directories(${OpenCV_INCLUDE_DIR}) - - -catkin_simple() - -# Add libraries -cs_add_library(ff_common src/polynomial_basis.cpp src/trajectory_solver.cpp) - -if(BUILD_GUROBI) - cs_add_library(gurobi_backend src/gurobi_solver.cpp src/gurobi_trajectory.cpp ) - target_link_libraries(gurobi_backend ff_common ${GUROBI_LIBRARIES}) -endif() - -cs_add_library(fancy_custom_backend src/nonlinear_polynomial.cpp src/nonlinear_solver.cpp src/nonlinear_trajectory.cpp) -target_link_libraries(fancy_custom_backend ff_common ${OpenCV_LIBRARIES}) - -# Catkin simple stuff -cs_install() - -cs_export(DEPENDS OpenCV) diff --git a/mobility/planner_qp/traj_opt_pro/CMakeLists.nasa b/mobility/planner_qp/traj_opt_pro/CMakeLists.nasa deleted file mode 100644 index de2685d876..0000000000 --- 
a/mobility/planner_qp/traj_opt_pro/CMakeLists.nasa +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(traj_opt_pro) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type") - -set(LIBS - ${OpenCV_LIBRARIES} - ${TRAJ_OPT_BASIC_LIBRARIES} -) -set(INCLUDES - ${TRAJ_OPT_BASIC_INCLUDE_DIRS} - ${OpenCV_INCLUDE_DIRS} -) - -create_library(TARGET traj_opt_pro - INC ${INCLUDES} - LIBS ${LIBS} - DEPS traj_opt_basic - ADD_SRCS src/polynomial_basis.cpp src/trajectory_solver.cpp src/nonlinear_polynomial.cpp src/nonlinear_solver.cpp src/nonlinear_trajectory.cpp -) - -set(TRAJ_OPT_PRO_INCLUDE_DIRS - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${INCLUDES} - PARENT_SCOPE) -set(TRAJ_OPT_PRO_LIBRARIES traj_opt_pro ${LIBS} PARENT_SCOPE) - - diff --git a/mobility/planner_qp/traj_opt_pro/CMakeLists.txt b/mobility/planner_qp/traj_opt_pro/CMakeLists.txt index db5401dc12..44b0306832 100644 --- a/mobility/planner_qp/traj_opt_pro/CMakeLists.txt +++ b/mobility/planner_qp/traj_opt_pro/CMakeLists.txt @@ -1,24 +1,104 @@ # Copyright (c) 2017, United States Government, as represented by the # Administrator of the National Aeronautics and Space Administration. -# +# # All rights reserved. -# +# # The Astrobee platform is licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-cmake_minimum_required(VERSION 2.8.3) -option(NASA "Use NASA's build system" OFF) -if(NASA) - include(CMakeLists.nasa) -else() - include(CMakeLists.catkin) +cmake_minimum_required(VERSION 3.0) + +project(traj_opt_pro) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -Wall -Wno-deprecated-declarations -Werror=return-type") + +############################ Optional Build Params ############################# +set(BUILD_GUROBI FALSE) # Commercial backend +set(INTEL_MKL FALSE) # Use Intel's Math Kernel Library (Not faster on an i7) +############################ End Optional Params ############################### + +# Find optional packages +if(BUILD_GUROBI) + find_package(GUROBI REQUIRED) + include_directories(${GUROBI_INCLUDE_DIRS}) endif() +if(INTEL_MKL) + add_definitions(-DEIGEN_USE_MKL_ALL) + find_package(IntelMKL REQUIRED) + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-as-needed -lpthread -lm -ldl" ) + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-as-needed -lpthread -lm -ldl" ) + include_directories(${INTEL_MKL_INCLUDES}) +endif() + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + traj_opt_basic +) + +find_package(Eigen3 REQUIRED) + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../../cmake") +find_package(OpenCV331 REQUIRED) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES + traj_opt_pro fancy_custom_backend ${OpenCV_LIBRARIES} +) + + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${OpenCV_INCLUDE_DIR} +) + +# Add libraries +add_library(traj_opt_pro + src/polynomial_basis.cpp + src/trajectory_solver.cpp) + +if(BUILD_GUROBI) + add_library(gurobi_backend + src/gurobi_solver.cpp + src/gurobi_trajectory.cpp ) + target_link_libraries(gurobi_backend traj_opt_pro ${GUROBI_LIBRARIES}) +endif() + +add_library(fancy_custom_backend + src/nonlinear_polynomial.cpp + src/nonlinear_solver.cpp + src/nonlinear_trajectory.cpp) +target_link_libraries(fancy_custom_backend traj_opt_pro ${OpenCV_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} fancy_custom_backend + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) \ No newline at end of file diff --git a/mobility/planner_qp/traj_opt_ros/CMakeLists.catkin b/mobility/planner_qp/traj_opt_ros/CMakeLists.catkin deleted file mode 100644 index 84e2a4cc93..0000000000 --- a/mobility/planner_qp/traj_opt_ros/CMakeLists.catkin +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -cmake_minimum_required(VERSION 2.8.3) -project(traj_opt_ros) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type -DQT_NO_KEYWORDS") - -# Qt Stuff, assume Qt5 -set(CMAKE_AUTOMOC ON) - -message(STATUS "Using Qt5 based on the rviz_QT_VERSION: ${rviz_QT_VERSION}") -find_package(Qt5 REQUIRED Core Widgets) -set(QT_LIBRARIES Qt5::Widgets) - -include_directories(${QT_QTCORE_INCLUDE_DIR} ${QT_QTGUI_INCLUDE_DIR}) - -find_package(catkin_simple REQUIRED) - -catkin_simple() - -cs_add_library(traj_ros_bridge src/ros_bridge.cpp) -cs_add_library(${PROJECT_NAME} src/trajectory_visual.cpp src/trajectory_display.cpp ${MOC_FILES}) -target_link_libraries(${PROJECT_NAME} ${QT_LIBRARIES} ${catkin_LIBRARIES} ${rviz_DEFAULT_PLUGIN_LIBRARIES} traj_ros_bridge) - - -cs_install() - -cs_export() diff --git a/mobility/planner_qp/traj_opt_ros/CMakeLists.nasa b/mobility/planner_qp/traj_opt_ros/CMakeLists.nasa deleted file mode 100644 index b139a05cf7..0000000000 --- a/mobility/planner_qp/traj_opt_ros/CMakeLists.nasa +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -project(traj_opt_ros) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wno-deprecated-declarations -Werror=return-type") - - -set(LIBS - ${TRAJ_OPT_BASIC_LIBRARIES} -) -set(INCLUDES - ${TRAJ_OPT_BASIC_INCLUDE_DIRS} -) -set(LIBRARIES traj_ros_bridge) -set(CAT_DEP roscpp) - -set(TRAJ_OPT_ROS_INCLUDE_DIRS - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${INCLUDES} - PARENT_SCOPE) -set(TRAJ_OPT_ROS_LIBRARIES ${LIBRARIES} ${LIBS} ${catkin_LIBRARIES} PARENT_SCOPE) - -# build rviz plugin if not cross compiling -if(NOT USE_CTC) - set(CMAKE_AUTOMOC ON) - find_package(catkin COMPONENTS rviz QUIET) - if(rviz_QT_VERSION) - message(STATUS "Using Qt5 based on the rviz_QT_VERSION: ${rviz_QT_VERSION}") - find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Core Widgets) - set(QT_LIBRARIES Qt5::Widgets) - - set(LIBRARIES ${LIBRARIES} traj_opt_ros) - set(CAT_DEP ${CAT_DEP} rviz) - set(LIBS ${LIBS} ${QT_LIBRARIES} ${rviz_DEFAULT_PLUGIN_LIBRARIES}) - set(INCLUDES ${INCLUDES} ${Qt5Core_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS}) - endif() -endif() -find_package(catkin REQUIRED COMPONENTS ${CAT_DEP}) - - -catkin_package( - LIBRARIES ${LIBRARIES} - DEPENDS traj_opt_basic traj_opt_msgs - CATKIN_DEPENDS ${CAT_DEP} -) - - -create_library(TARGET traj_ros_bridge - INC ${INCLUDES} ${catkin_INCLUDE_DIRS} - LIBS ${LIBS} ${catkin_LIBRARIES} - DEPS traj_opt_basic traj_opt_msgs - ADD_SRCS src/ros_bridge.cpp -) - -if(NOT USE_CTC) - if(rviz_QT_VERSION) - create_library(TARGET traj_opt_ros - INC ${INCLUDES} ${catkin_INCLUDE_DIRS} - LIBS ${LIBS} ${catkin_LIBRARIES} traj_ros_bridge - DEPS traj_opt_basic traj_ros_bridge - ADD_SRCS src/trajectory_visual.cpp src/trajectory_display.cpp ${MOC_FILES} - ) -endif() -endif() - - - - diff --git a/mobility/planner_qp/traj_opt_ros/CMakeLists.txt b/mobility/planner_qp/traj_opt_ros/CMakeLists.txt index db5401dc12..8b8bd427c9 100644 --- a/mobility/planner_qp/traj_opt_ros/CMakeLists.txt +++ b/mobility/planner_qp/traj_opt_ros/CMakeLists.txt @@ -1,24 +1,105 @@ # Copyright (c) 2017, United States Government, as represented by the # Administrator of the National Aeronautics and Space Administration. -# +# # All rights reserved. -# +# # The Astrobee platform is licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-cmake_minimum_required(VERSION 2.8.3) -option(NASA "Use NASA's build system" OFF) -if(NASA) - include(CMakeLists.nasa) -else() - include(CMakeLists.catkin) +cmake_minimum_required(VERSION 3.0) +project(traj_opt_ros) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -Wall -Wno-deprecated-declarations -Werror=return-type -DQT_NO_KEYWORDS") + +# Qt Stuff, assume Qt5 +set(CMAKE_AUTOMOC ON) + +## Find catkin macros and libraries +set(LIBRARIES traj_ros_bridge) + +# build rviz plugin if not cross compiling +if(NOT USE_CTC) + set(CMAKE_AUTOMOC ON) + find_package(catkin2 COMPONENTS rviz QUIET) + if(rviz_QT_VERSION) + message(STATUS "Using Qt5 based on the rviz_QT_VERSION: ${rviz_QT_VERSION}") + find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Core Widgets) + set(QT_LIBRARIES Qt5::Widgets) + set(LIBRARIES ${LIBRARIES} traj_opt_ros) + set(INCLUDES ${INCLUDES} ${Qt5Core_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} ${catkin_INCLUDE_DIRS}) + endif() endif() + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + traj_opt_basic + traj_opt_msgs +) + + +catkin_package( + INCLUDE_DIRS include + LIBRARIES + ${LIBRARIES} + CATKIN_DEPENDS + roscpp + traj_opt_basic + traj_opt_msgs +) + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${INCLUDES} +) + +add_library(traj_ros_bridge + src/ros_bridge.cpp) +target_link_libraries(traj_ros_bridge ${catkin_LIBRARIES}) + +if (NOT USE_CTC) + if(rviz_QT_VERSION) + add_library(${PROJECT_NAME} + src/trajectory_visual.cpp + src/trajectory_display.cpp + ${MOC_FILES}) + target_link_libraries(${PROJECT_NAME} ${QT_LIBRARIES} ${catkin_LIBRARIES} ${rviz_DEFAULT_PLUGIN_LIBRARIES} traj_ros_bridge) + endif() +endif (NOT USE_CTC) + + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS traj_ros_bridge + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/mobility/planner_trapezoidal/CMakeLists.txt b/mobility/planner_trapezoidal/CMakeLists.txt index cca87ff9ec..5e3b7ad10e 100644 --- a/mobility/planner_trapezoidal/CMakeLists.txt +++ b/mobility/planner_trapezoidal/CMakeLists.txt @@ -15,11 +15,30 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(planner_trapezoidal) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + pluginlib + actionlib + choreographer + tf2_geometry_msgs + ff_util + ff_msgs + mapper +) + +## System dependencies are found with CMake's conventions +find_package(PCL REQUIRED COMPONENTS common) + catkin_package( - INCLUDE_DIRS - include + INCLUDE_DIRS include LIBRARIES planner_trapezoidal CATKIN_DEPENDS @@ -29,13 +48,54 @@ catkin_package( actionlib choreographer tf2_geometry_msgs + ff_util ff_msgs + mapper +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${PCL_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(planner_trapezoidal + src/planner_trapezoidal.cc + src/planner_trapezoidal_nodelet.cc +) +add_dependencies(planner_trapezoidal ${catkin_EXPORTED_TARGETS}) +target_link_libraries(planner_trapezoidal ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark nodelet_plugin for installation +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) -create_library(TARGET planner_trapezoidal - LIBS ${catkin_LIBRARIES} ff_nodelet msg_conversions ff_flight config_server config_client jsonloader choreographer - INC ${catkin_INCLUDE_DIRS} - DEPS choreographer +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/mobility/planner_trapezoidal/package.xml b/mobility/planner_trapezoidal/package.xml index b17457336a..0f9ad6faa5 100644 --- a/mobility/planner_trapezoidal/package.xml +++ b/mobility/planner_trapezoidal/package.xml @@ -21,14 +21,18 @@ actionlib choreographer tf2_geometry_msgs + ff_util ff_msgs + mapper roscpp nodelet pluginlib actionlib choreographer tf2_geometry_msgs + ff_util ff_msgs + mapper diff --git a/scripts/build/build_debian.sh b/scripts/build/build_debian.sh index e4c3d5e9f4..bee79db433 100755 --- a/scripts/build/build_debian.sh +++ b/scripts/build/build_debian.sh @@ -14,5 +14,6 @@ if [[ $* == *--config* ]]; then fi pushd $DIR/../.. -DEB_BUILD_OPTIONS="parallel=20" debuild -e ARMHF_CHROOT_DIR -e ARMHF_TOOLCHAIN -us -uc $EXTRA_FLAGS +export CMAKE_TOOLCHAIN_FILE=${DIR}/ubuntu_cross.cmake +DEB_BUILD_OPTIONS="parallel=20" debuild -e ARMHF_CHROOT_DIR -e ARMHF_TOOLCHAIN -e CMAKE_TOOLCHAIN_FILE -e CMAKE_PREFIX_PATH -us -uc $EXTRA_FLAGS popd > /dev/null diff --git a/scripts/build/ubuntu_cross.cmake b/scripts/build/ubuntu_cross.cmake index 5fbd5e54c6..75ed774e28 100644 --- a/scripts/build/ubuntu_cross.cmake +++ b/scripts/build/ubuntu_cross.cmake @@ -51,8 +51,10 @@ SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) # If this is not done, the resulting rpath will not look for them in the chroot # environment. # Also, RTI DDS is included once for the libraries and once for the headers. 
+execute_process(COMMAND catkin locate -i OUTPUT_VARIABLE CATKIN_INSTALL_PATH OUTPUT_STRIP_TRAILING_WHITESPACE) +execute_process(COMMAND catkin locate -d OUTPUT_VARIABLE CATKIN_DEVEL_PATH OUTPUT_STRIP_TRAILING_WHITESPACE) SET(CMAKE_FIND_ROOT_PATH - ${ARM_CHROOT_DIR}) + ${ARM_CHROOT_DIR} ${CATKIN_INSTALL_PATH} ${CATKIN_DEVEL_PATH}) SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH) SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH) @@ -67,7 +69,8 @@ IF( DEFINED EXTRA_ROOT_PATH ) SET(CMAKE_FIND_ROOT_PATH ${EXTRA_ROOT_PATH} ${CMAKE_FIND_ROOT_PATH}) ENDIF( DEFINED EXTRA_ROOT_PATH ) -SET(catkin2_DIR ${CMAKE_SOURCE_DIR}/cmake) +execute_process(COMMAND catkin locate -s OUTPUT_VARIABLE CATKIN_SRC_PATH OUTPUT_STRIP_TRAILING_WHITESPACE) +SET(catkin2_DIR ${CATKIN_SRC_PATH}/cmake) # needed for gflag to compile... SET( THREADS_PTHREAD_ARG diff --git a/scripts/calibrate/readme.md b/scripts/calibrate/readme.md index 2acab51651..2fdf240abd 100644 --- a/scripts/calibrate/readme.md +++ b/scripts/calibrate/readme.md @@ -6,11 +6,11 @@ This folder contains various scripts for calibration. - Build and install the Astrobee code on the robot. - Install Kalibr on your computer. -## Installation instructions for Kalibr for Ubuntu 16.04 +## Installation instructions for Kalibr for Ubuntu 18.04 sudo apt install python-rosinstall ipython python-software-properties \ python-git ipython python-catkin-tools -sudo apt install libopencv-dev ros-kinetic-vision-opencv +sudo apt install libopencv-dev ros-melodic-vision-opencv # Install pip and use it to install python packages @@ -33,9 +33,9 @@ so some effort may be needed to install the Python 2 dependencies. export KALIBR_WS=$HOME/source/kalibr_workspace mkdir -p $KALIBR_WS/src cd $KALIBR_WS -source /opt/ros/kinetic/setup.bash +source /opt/ros/melodic/setup.bash catkin init -catkin config --extend /opt/ros/kinetic +catkin config --extend /opt/ros/melodic catkin config --cmake-args -DCMAKE_BUILD_TYPE=Release cd $KALIBR_WS/src git clone git@github.com:oleg-alexandrov/Kalibr.git @@ -276,7 +276,7 @@ done by the algorithm. One can also attempt to use just a portion of the bag, using the --from and --to options. The --verbose flag is useful to see if the calibration target corners -are detected properly. +are detected properly. This may result in a crash on Ubuntu 18. The depth cameras can be tricky to calibrate, since they have lower resolution and it is hard to do corner detection. One should try to diff --git a/scripts/configure.sh b/scripts/configure.sh index 9040e4c0bb..7c648e1ac5 100755 --- a/scripts/configure.sh +++ b/scripts/configure.sh @@ -40,20 +40,30 @@ gcc_major=`gcc -dumpversion | cut -f-2 -d .` archname="${cpu_type}_${os_kernel}_gcc${gcc_major}" # prefix for installation path -prefix="" - -# build name (if used, build under astrobee/build/$buildname) -buildname="" +install_path="" # configure nothing by default native_build=0 armhf_build=0 -# by default DDS is activated -dds_opt="-DUSE_DDS=on" - -# No extra options by default -extra_opts="" +# Define our options +use_ccache=" -DUSE_CCACHE=on" # Use ccache to speed up compiling, at the cost of optimization +use_ctc=" -DUSE_CTC=off" # Use cross compile toolchain for making ARM binaries +use_ros=" -DUSE_ROS=on" # Build the ROS-dependent functionality. +use_dds=" -DUSE_DDS=on" # Build the DDS-dependent functionality. +use_static_libs="" # Build using static libraries. Will use lots of drive space. +test_coverage="" # Build the code with code coverage options. Not compatible with USE_CTC. 
+use_drivers=" -DUSE_DRIVERS=on" # Build the tools in the drivers directory. +is_bamboo_build="" # The code is being built under the bamboo CI system" +enable_gprof="" # Enable compling with support for profiling wih gprof (the GNU Profiler)." +enable_google_prof="" # Enable support for profiling wih pprof (the Google Profiler). +enable_qp=" -DENABLE_QP=on" # Enable support for the QP planner. +enable_picoflexx=" -DENABLE_PICOFLEXX=on" # Enable support for building the PicoFlexx driver +enable_gazebo="" # Enable support for building the Gazebo simulator +enable_vive=" -DENABLE_VIVE=on" # Enable support for building the Vive drivers +enable_vive_solver=" -DENABLE_VIVE_SOLVER=on" # Enable support for building the Vive offline solver +enable_integration_testing=" -DENABLE_INTEGRATION_TESTING=on" # Build the integration tests if tests are active. +build_loc_rviz_plugins=" -DBUILD_LOC_RVIZ_PLUGINS=on" # Build the localization rviz plugins. # Force cache cleaning by default clean_cache=1 @@ -66,7 +76,7 @@ target="install" # short help usage_string="$scriptname [-h] [-l ] [-a ]\ - [-p install_path] [-b build_path] [-B build_type] [-c ]\ + [-p install_path] [-w workspace_path] [-B build_type] [-c ]\ [-C ] [-d ] [-D ]\ [-r ] [-R ]\ [-f ] [-F ]\ @@ -77,7 +87,7 @@ usage_string="$scriptname [-h] [-l ] [-a ]\ #[-t make_target] # options to parse -optstring="hlap:b:B:cCdDrRfFkKvVtTg" +optstring="hlap:w:B:cCdDrRfFkKvVtTg" # Print the script usage print_usage() @@ -94,8 +104,8 @@ print_help() echo -e "\t-a Generate a cross-compiled ARM build" echo -e "\t-p install_path specify the installation directory" echo -e "\t default=${HOME}/cmake_install_platform" - echo -e "\t-b build_path specify the build directory to use" - echo -e "\t default=${HOME}/cmake_build_platform" + echo -e "\t-w workspace_path specify the workspace directory" + echo -e "\t default=${ASTROBEE_WS}" echo -e "\t-B build_type specify build type (Debug|Release|RelWithDebInfo)" echo -e "\t-c delete the cmake cache before for every modules: default on" echo -e "\t (necessary when re-running buildall with diffent options)" @@ -132,57 +142,57 @@ parse_args() { while getopts $optstring opt $@ do - case $opt in - "h") print_help - exit 0 - ;; - "?") print_usage - exit 1 - ;; - "l") native_build=1 - ;; - "a") armhf_build=1 - ;; - "p") prefix=$OPTARG - ;; - "b") build_path=$OPTARG - ;; - "B") build_type=$OPTARG - ;; - "c") clean_cache=1 - ;; - "C") clean_cache=0 - ;; - "d") dds_opt="-DUSE_DDS=on" - ;; - "D") dds_opt="-DUSE_DDS=off" - ;; - "r") extra_opts+=" -DENABLE_QP=on" - ;; - "R") extra_opts+=" -DENABLE_QP=off" - ;; - "f") extra_opts+=" -DENABLE_PICOFLEXX=on" - ;; - "F") extra_opts+=" -DENABLE_PICOFLEXX=off" - ;; - "k") extra_opts+=" -DUSE_CCACHE=on" - ;; - "K") extra_opts+=" -DUSE_CCACHE=off" - ;; - "v") extra_opts+=" -DENABLE_VIVE=on" - ;; - "V") extra_opts+=" -DENABLE_VIVE=off" - ;; - "t") extra_opts+=" -DENABLE_INTEGRATION_TESTING=on" - ;; - "T") extra_opts+=" -DENABLE_INTEGRATION_TESTING=off" - ;; - "g") debug_mode=1 - ;; - *) print_usage - exit 1 - ;; - esac + case $opt in + "h") print_help + exit 0 + ;; + "?") print_usage + exit 1 + ;; + "l") native_build=1 + ;; + "a") armhf_build=1 + ;; + "p") install_path=$OPTARG + ;; + "w") workspace_path=$OPTARG/ + ;; + "B") build_type=$OPTARG + ;; + "c") clean_cache=1 + ;; + "C") clean_cache=0 + ;; + "d") use_dds=" -DUSE_DDS=on" + ;; + "D") use_dds=" -DUSE_DDS=off" + ;; + "r") enable_qp=" -DENABLE_QP=on" + ;; + "R") enable_qp=" -DENABLE_QP=off" + ;; + "f") enable_picoflexx=" -DENABLE_PICOFLEXX=on" 
+ ;; + "F") enable_picoflexx=" -DENABLE_PICOFLEXX=off" + ;; + "k") use_ccache=" -DUSE_CCACHE=on" + ;; + "K") use_ccache=" -DUSE_CCACHE=off" + ;; + "v") enable_vive=" -DENABLE_VIVE=on" + ;; + "V") enable_vive=" -DENABLE_VIVE=off" + ;; + "t") enable_integration_testing=" -DENABLE_INTEGRATION_TESTING=on" + ;; + "T") enable_integration_testing=" -DENABLE_INTEGRATION_TESTING=off" + ;; + "g") debug_mode=1 + ;; + *) print_usage + exit 1 + ;; + esac done } @@ -197,104 +207,51 @@ canonicalize() freepath=$1 os_name=`uname -s` case $os_name in - Linux) - # just use readlink :-) - canonical_path=`readlink -f $freepath` - readl_ret=$? - echo $canonical_path - if [ $readl_ret == 1 ] ; then - return 1 - else - return 0 - fi - ;; - Darwin | SunOS) - # BSD systems do not support readlink :-( - if [ -d $freepath ] ; then - # the argument is a directory - canonical_path=`cd $freepath && pwd -P` - else - if [ -f $freepath ] ; then - # the argument is a file - freedir=`dirname $freepath` - freefile=`basename $freepath` - if [ -L $freepath ] ; then - canfile=`cd $freedir && stat -f "%Y" $freefile` - else - canfile=$freefile - fi - candir=`cd $freedir && pwd -P` - canonical_path="${candir}/${canfile}" - else - # given path does not exsit - # since readlink does not return any string for this - # scenario, just lets do the same and return an error - canonical_path="" - return 1 - fi - fi - echo $canonical_path - return 0 - ;; - *) - # echo platform not supported yet - echo "/${os_name}/is/not/yet/supported/by/canonicalize" - return 1 - ;; - esac -} - -# function to use the cmake configure with the right arguments -# arguments: -# 1: build_path -# 2: build_type -# 3: install_path -# 4: freeflyer_path -# 5: clean_cache -# 6-*: other options -configure() -{ - local build_path=$1 - shift - local build_type=$1 - shift - local install_path=$1 - shift - local ff_path=$1 - shift - local clean_cache=$1 - shift - local cmake_opts=$@ - - if [ $debug_mode == 1 ]; then - echo "build type: ${build_type}" - echo "build path: ${build_path}" - echo "install directory: ${install_path}" - echo - else - if [ "$install_path" != "none" ] ; then - if [ ! -d ${install_path} ] ; then - echo "Creating install directory: ${install_path}" - mkdir -p $install_path + Linux) + # just use readlink :-) + canonical_path=`readlink -f $freepath` + readl_ret=$? + echo $canonical_path + if [ $readl_ret == 1 ] ; then + return 1 + else + return 0 + fi + ;; + Darwin | SunOS) + # BSD systems do not support readlink :-( + if [ -d $freepath ] ; then + # the argument is a directory + canonical_path=`cd $freepath && pwd -P` + else + if [ -f $freepath ] ; then + # the argument is a file + freedir=`dirname $freepath` + freefile=`basename $freepath` + if [ -L $freepath ] ; then + canfile=`cd $freedir && stat -f "%Y" $freefile` + else + canfile=$freefile fi - full_install_path=`canonicalize $install_path` - install_opt="-DCMAKE_INSTALL_PREFIX=${full_install_path}" - fi - - if [ ! 
-d ${build_path} ] ; then - echo "Creating build directory: ${build_path}" - mkdir -p ${build_path} - fi - cd ${build_path} - - if [ ${clean_cache} -eq 1 ] ; then - echo "Remove the CMake Cache for ${build_path}" - rm -f CMakeCache.txt - fi - cmd="cmake -DCMAKE_BUILD_TYPE=${build_type} ${install_opt} ${cmake_opts} ${ff_path}" - echo $cmd # to se what we are geting - $cmd - fi + candir=`cd $freedir && pwd -P` + canonical_path="${candir}/${canfile}" + else + # given path does not exsit + # since readlink does not return any string for this + # scenario, just lets do the same and return an error + canonical_path="" + return 1 + fi + fi + echo $canonical_path + return 0 + ;; + *) + # echo platform not supported yet + echo "/${os_name}/is/not/yet/supported/by/canonicalize" + return 1 + ;; + esac } # Start the real work here... @@ -303,6 +260,15 @@ parse_args $@ # Define the paths to use ff_path=`canonicalize ${rootpath}` +DIST=`cat /etc/os-release | grep -oP "(?<=VERSION_CODENAME=).*"` +if [ "$DIST" = "xenial" ]; then + ros_version=kinetic +elif [ "$DIST" = "bionic" ]; then + ros_version=melodic +elif [ "$DIST" = "focal" ]; then + ros_version=noetic +fi + if [ $debug_mode == 1 ]; then echo "script is called from: $workdir" echo "script dir is: $confdir" @@ -316,6 +282,12 @@ if [ $debug_mode == 1 ]; then echo fi +extra_opts+=${use_ccache}${use_ros}${use_dds}${use_static_libs}${test_coverage}${use_drivers} +extra_opts+=${is_bamboo_build}${enable_gprof}${enable_google_prof}${enable_qp}${enable_picoflexx} +extra_opts+=${enable_vive}${enable_vive_solver}${enable_integration_testing} + + + if [[ $native_build == 0 && $armhf_build == 0 ]] ; then echo "Nothing to configure (use -l or -a)..." echo "Use $scriptname -h for the full list of options" @@ -325,29 +297,53 @@ fi if [[ $native_build == 1 && $armhf_build == 1 ]] ; then echo -n "Linux and ArmHF invoked simultanously:" echo " dropping any option -p and -b!" - prefix="" - build_path="" + workspace_path="" + install_path="" fi if [ $native_build == 1 ] ; then echo "configuring for native linux..." - # Performance of the shared disk system is horrendous on Vagrant. - # So by default we build in the home directory that is Vagrant native. - # In addition, we do not create an install directory by default. - # Update: we are currently forced to provide an install prefix! 
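A rough, non-normative sketch of the catkin-tools flow that the configure.sh changes below appear to target. The -l -F -D -T flags and the catkin build step are taken from the astrobee.Dockerfile hunk later in this patch, and -w/-p are the workspace and install options handled in this script; the concrete paths are illustrative assumptions only.

    # assumption: this repository is checked out under ~/astrobee_ws/src
    cd ~/astrobee_ws
    ./src/scripts/configure.sh -l -F -D -T -w ~/astrobee_ws -p ~/astrobee_ws/install
    catkin build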
- native_build_path=${build_path:-${HOME}/astrobee_build/native} - native_install_path=${prefix:-${HOME}/astrobee_install/native} - configure ${native_build_path} ${build_type:-RelWithDebInfo} \ - ${native_install_path} ${ff_path} ${clean_cache} \ - ${dds_opt} ${extra_opts} + catkin init + enable_gazebo=" -DENABLE_GAZEBO=on" + + # Add our cmake to paths and bashrc + grep -qF 'source /opt/ros/'$ros_version'/setup.bash' ~/.bashrc || echo 'source /opt/ros/'$ros_version'/setup.bash' >> ~/.bashrc + cmake_astrobee_path=`catkin locate -s`/cmake + grep -qF ${cmake_astrobee_path} ~/.bashrc || { + echo -e '\n' >> ~/.bashrc \ + echo 'if [[ ":$CMAKE_PREFIX_PATH:" != *":'${cmake_astrobee_path}':"* ]]; then CMAKE_PREFIX_PATH="${CMAKE_PREFIX_PATH:+"$CMAKE_PREFIX_PATH:"}'${cmake_astrobee_path}'"; fi' >> ~/.bashrc + } + source ~/.bashrc + + catkin profile add native + catkin profile set native + catkin config --no-extend \ + --build-space ${workspace_path}build \ + --install-space ${install_path:-${workspace_path}install} \ + --devel-space ${workspace_path}devel \ + --log-space ${workspace_path}log \ + --no-install \ + --cmake-args ${enable_gazebo} ${build_loc_rviz_plugins} ${extra_opts} -DCMAKE_BUILD_TYPE=RelWithDebInfo + + fi if [ $armhf_build == 1 ] ; then echo "configuring for armhf..." - armhf_opts="-DCMAKE_TOOLCHAIN_FILE=${ff_path}/scripts/build/ubuntu_cross.cmake -DUSE_CTC=true" - armhf_build_path=${build_path:-${HOME}/astrobee_build/armhf} - armhf_install_path=${prefix:-${HOME}/astrobee_install/armhf} - configure ${armhf_build_path} ${build_type:-Release} \ - ${armhf_install_path} ${ff_path} ${clean_cache} \ - ${dds_opt} ${armhf_opts} ${extra_opts} + catkin init + armhf_opts="-DCMAKE_TOOLCHAIN_FILE=${ff_path}/scripts/build/ubuntu_cross.cmake" + use_ctc=" -DUSE_CTC=on" + enable_gazebo="" + build_loc_rviz_plugins="" + catkin profile add armhf + catkin profile set armhf + catkin config --extend $ARMHF_CHROOT_DIR/opt/ros/kinetic \ + --build-space ${workspace_path:-armhf/}build \ + --install-space ${install_path:-${workspace_path:-armhf/}}opt/astrobee \ + --devel-space ${workspace_path:-armhf/}devel \ + --log-space ${workspace_path:-armhf/}logs \ + --install \ + --blacklist astrobee_handrail_8_5 astrobee_handrail_21_5 astrobee_handrail_30 astrobee_handrail_41_5 astrobee_iss astrobee_granite \ + astrobee_dock astrobee_freeflyer astrobee_gazebo localization_rviz_plugins \ + --cmake-args -DARMHF_CHROOT_DIR=$ARMHF_CHROOT_DIR ${armhf_opts} ${use_ctc} ${enable_gazebo} ${build_loc_rviz_plugins} ${extra_opts} -DCMAKE_BUILD_TYPE=Release fi diff --git a/scripts/docker/astrobee.Dockerfile b/scripts/docker/astrobee.Dockerfile index 1039a98530..0ef9ee3980 100755 --- a/scripts/docker/astrobee.Dockerfile +++ b/scripts/docker/astrobee.Dockerfile @@ -3,12 +3,16 @@ # You must set the docker context to be the repository root directory ARG UBUNTU_VERSION=16.04 -FROM astrobee/astrobee:base-latest-ubuntu${UBUNTU_VERSION} +ARG REMOTE=astrobee +FROM ${REMOTE}/astrobee:base-latest-ubuntu${UBUNTU_VERSION} -ENV USERNAME astrobee +ARG ROS_VERSION=kinetic -COPY . /src/astrobee -RUN /src/astrobee/scripts/configure.sh -l -F -D -T -p /opt/astrobee -b /build/astrobee -RUN cd /build/astrobee && make -j`nproc` +COPY . /src/astrobee/src/ +RUN . 
/opt/ros/${ROS_VERSION}/setup.sh \ + && cd /src/astrobee \ + && ./src/scripts/configure.sh -l -F -D -T \ + && CMAKE_PREFIX_PATH=$CMAKE_PREFIX_PATH:/src/astrobee/src/cmake \ + && catkin build COPY ./astrobee/resources /opt/astrobee/share/astrobee/resources diff --git a/scripts/docker/astrobee_base.Dockerfile b/scripts/docker/astrobee_base.Dockerfile index 227b5aecb7..152ca194a5 100755 --- a/scripts/docker/astrobee_base.Dockerfile +++ b/scripts/docker/astrobee_base.Dockerfile @@ -55,20 +55,8 @@ COPY ./scripts/setup/packages_*.lst /setup/astrobee/ RUN /setup/astrobee/install_desktop_packages.sh \ && rm -rf /var/lib/apt/lists/* -#Add new sudo user -ENV USERNAME astrobee -RUN useradd -m $USERNAME && \ - echo "$USERNAME:$USERNAME" | chpasswd && \ - usermod --shell /bin/bash $USERNAME && \ - usermod -aG sudo $USERNAME && \ - echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers.d/$USERNAME && \ - chmod 0440 /etc/sudoers.d/$USERNAME && \ - # Replace 1000 with your user/group id - usermod --uid 1000 $USERNAME && \ - groupmod --gid 1000 $USERNAME - #Add the entrypoint for docker -RUN echo "#!/bin/bash\nset -e\n\nsource \"/opt/ros/noetic/setup.bash\"\nsource \"/build/astrobee/devel/setup.bash\"\nexport ASTROBEE_CONFIG_DIR=\"/src/astrobee/astrobee/config\"\nexec \"\$@\"" > /astrobee_init.sh && \ +RUN echo "#!/bin/bash\nset -e\n\nsource \"/opt/ros/${ROS_VERSION}/setup.bash\"\nsource \"/src/astrobee/devel/setup.bash\"\nexport ASTROBEE_CONFIG_DIR=\"/src/astrobee/src/astrobee/config\"\nexec \"\$@\"" > /astrobee_init.sh && \ chmod +x /astrobee_init.sh && \ rosdep init && \ rosdep update 2>&1 | egrep -v 'as root|fix-permissions' diff --git a/scripts/docker/build.sh b/scripts/docker/build.sh index b776e5afc6..5a93aa1d64 100755 --- a/scripts/docker/build.sh +++ b/scripts/docker/build.sh @@ -76,4 +76,5 @@ docker build ${rootdir}/ \ docker build ${rootdir}/ \ -f ${rootdir}/scripts/docker/astrobee.Dockerfile \ --build-arg UBUNTU_VERSION=${UBUNTU_VERSION} \ + --build-arg ROS_VERSION=${ROS_VERSION} \ -t astrobee/astrobee:latest-ubuntu${UBUNTU_VERSION} \ No newline at end of file diff --git a/scripts/docker/readme.md b/scripts/docker/readme.md index 0bea4c6d5b..63f1a9b326 100644 --- a/scripts/docker/readme.md +++ b/scripts/docker/readme.md @@ -31,6 +31,7 @@ If `UBUNTU_VERSION` is `"20.04"`, `ROS_VERSION` and `PYTHON` must be `"neotic"` To build the docker images, run: ./build.sh + The build script will automatically detect the current Ubuntu OS version and define the docker files variables `UBUNTU_VERSION`, `ROS_VERSION`, and `PYTHON`. If a specific version is desired, the option --xenial, --bionic, and --focal is used for ubuntu 16.04, 18.04, and 20.04 docker images, respectively. @@ -40,8 +41,14 @@ and --focal is used for ubuntu 16.04, 18.04, and 20.04 docker images, respective To run the docker container: ./run.sh + It will automatically detect the current Ubuntu OS version. If a specific version is desired, the option --xenial, --bionic, and --focal is used for ubuntu 16.04, 18.04, and 20.04 docker images, respectively. +To add arguments to the launch file in addition to `dds:=false robot:=sim_pub` you can do instead: + + ./run.sh --args "rviz:=true sviz:=true" + +*Note: You have to install the nvidia-container-toolkit for the gazebo simulation to run properly* To open another terminal inside the docker container: diff --git a/scripts/docker/run.sh b/scripts/docker/run.sh index 501bac6007..47d811caa1 100755 --- a/scripts/docker/run.sh +++ b/scripts/docker/run.sh @@ -18,15 +18,20 @@ # under the License. 
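For context, a minimal usage sketch of run.sh as modified by the hunk below. The --focal, --remote, and --args flags, the default launch arguments dds:=false robot:=sim_pub, and the image tags come from this patch; the extra rviz:=true sviz:=true arguments simply mirror the readme example above.

    # run the locally built image (requires astrobee/astrobee:latest-ubuntu20.04 from build.sh)
    ./run.sh --focal

    # pull the prebuilt ghcr.io/nasa image instead and pass extra roslaunch arguments
    ./run.sh --focal --remote --args "rviz:=true sviz:=true"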
# short help -usage_string="$scriptname [-h] [-x ]\ - [-b ] [-f ]" +usage_string="$0 usage: [-h] [-x ]\ + [-b ] [-f ]\ + [-r ]" #[-t make_target] +docs_url="https://nasa.github.io/astrobee/html/install-docker.html" usage() { - echo "usage: sysinfo_page [[[-a file ] [-i]] | [-h]]" + echo "$usage_string" + echo "see: $docs_url" } os=`cat /etc/os-release | grep -oP "(?<=VERSION_CODENAME=).*"` +args="dds:=false robot:=sim_pub" +tagrepo=astrobee while [ "$1" != "" ]; do case $1 in @@ -36,50 +41,49 @@ while [ "$1" != "" ]; do ;; -f | --focal ) os="focal" ;; - -h | --help ) usage - exit - ;; - * ) usage - exit 1 + -r | --remote ) tagrepo=ghcr.io/nasa + ;; + --args ) args+=" $2" + shift + ;; + -h | --help ) usage + exit + ;; + * ) usage + exit 1 esac shift done +if [ "$os" = "xenial" ]; then +tag=astrobee:latest-ubuntu16.04 +elif [ "$os" = "bionic" ]; then +tag=astrobee:latest-ubuntu18.04 +elif [ "$os" = "focal" ]; then +tag=astrobee:latest-ubuntu20.04 +fi + +# check if local docker tag exists +if [ "$tagrepo" = "astrobee" ] && [[ "$(docker images -q $tagrepo/$tag 2> /dev/null)" == "" ]]; then + echo "Tag $tagrepo/$tag not found locally, either build astrobee locally or use the --remote flag." + usage + exit 1 +fi rootdir=$(dirname "$(readlink -f "$0")") cd $rootdir +# setup XServer for Docker XSOCK=/tmp/.X11-unix XAUTH=/tmp/.docker.xauth touch $XAUTH xauth nlist $DISPLAY | sed -e 's/^..../ffff/' | xauth -f $XAUTH nmerge - -if [ "$os" = "xenial" ]; then docker run -it --rm --name astrobee \ --volume=$XSOCK:$XSOCK:rw \ --volume=$XAUTH:$XAUTH:rw \ --env="XAUTHORITY=${XAUTH}" \ --env="DISPLAY" \ - --user="astrobee" \ --gpus all \ - astrobee/astrobee:latest-ubuntu16.04 \ - /astrobee_init.sh roslaunch astrobee sim.launch dds:=false -elif [ "$os" = "bionic" ]; then -docker run -it --rm --name astrobee \ - --volume=$XSOCK:$XSOCK:rw \ - --volume=$XAUTH:$XAUTH:rw \ - --env="XAUTHORITY=${XAUTH}" \ - --env="DISPLAY" \ - --user="astrobee" \ - astrobee/astrobee:latest-ubuntu18.04 \ - /astrobee_init.sh roslaunch astrobee sim.launch dds:=false -elif [ "$os" = "focal" ]; then -docker run -it --rm --name astrobee \ - --volume=$XSOCK:$XSOCK:rw \ - --volume=$XAUTH:$XAUTH:rw \ - --env="XAUTHORITY=${XAUTH}" \ - --env="DISPLAY" \ - --user="astrobee" \ - astrobee/astrobee:latest-ubuntu20.04 \ - /astrobee_init.sh roslaunch astrobee sim.launch dds:=false -fi + $tagrepo/$tag \ + /astrobee_init.sh roslaunch astrobee sim.launch $args diff --git a/scripts/docker/test_astrobee.Dockerfile b/scripts/docker/test_astrobee.Dockerfile index 04c5b3949e..0ce4c93c44 100644 --- a/scripts/docker/test_astrobee.Dockerfile +++ b/scripts/docker/test_astrobee.Dockerfile @@ -2,7 +2,11 @@ # You must set the docker context to be the repository root directory ARG UBUNTU_VERSION=16.04 -FROM astrobee/astrobee:latest-ubuntu${UBUNTU_VERSION} +ARG REMOTE=astrobee +FROM ${REMOTE}/astrobee:latest-ubuntu${UBUNTU_VERSION} # Run tests -RUN cd /build/astrobee && make -j`nproc` tests && make -j`nproc` test +RUN cd /src/astrobee && catkin build --make-args tests \ + && catkin build --make-args test \ + && { . devel/setup.sh || true; } \ + && catkin_test_results build diff --git a/scripts/git/cpplint.py b/scripts/git/cpplint.py index 8abae00375..300489ed0c 100755 --- a/scripts/git/cpplint.py +++ b/scripts/git/cpplint.py @@ -1,5 +1,5 @@ #!/usr/bin/python -# + # Copyright (c) 2009 Google Inc. All rights reserved. 
# # Redistribution and use in source and binary forms, with or without diff --git a/scripts/git/cpplint_repo.py b/scripts/git/cpplint_repo.py index 7ab54f59c0..c60ed9f999 100755 --- a/scripts/git/cpplint_repo.py +++ b/scripts/git/cpplint_repo.py @@ -1,5 +1,22 @@ #!/usr/bin/env python3 +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + import fnmatch import importlib import os @@ -33,12 +50,8 @@ def run_cpplint(filename, cpplint_path): def print_objection(): - print(" ____ __ _ __ _ __") - print(" / __ \/ /_ (_)__ _____/ /_(_)___ ____ / /") - print(" / / / / __ \ / / _ \/ ___/ __/ / __ \/ __ \/ / ") - print("/ /_/ / /_/ / / / __/ /__/ /_/ / /_/ / / / /_/ ") - print("\____/_.___/_/ /\___/\___/\__/_/\____/_/ /_(_) ") - print(" /___/ ") + print("Code formatting errors were found.") + print("==================================") def main(): @@ -98,9 +111,9 @@ def main(): print(("=" * 50)) if num_errors > 0: - print((" You have %d lint errors" % num_errors)) + print((" You have %d lint errors." % num_errors)) elif num_errors == 0: - print(" Code adheres to style guide lines") + print(" Code adheres to style guidelines.") exit(num_errors) diff --git a/scripts/git/pre-commit.linter_cpp b/scripts/git/pre-commit.linter_cpp index 836ddbb117..59911711b5 100755 --- a/scripts/git/pre-commit.linter_cpp +++ b/scripts/git/pre-commit.linter_cpp @@ -58,13 +58,8 @@ def run_cpplint(filename, cpplint_path): def print_objection(): - print(" ____ __ _ __ _ __") - print(" / __ \/ /_ (_)__ _____/ /_(_)___ ____ / /") - print(" / / / / __ \ / / _ \/ ___/ __/ / __ \/ __ \/ / ") - print("/ /_/ / /_/ / / / __/ /__/ /_/ / /_/ / / / /_/ ") - print("\____/_.___/_/ /\___/\___/\__/_/\____/_/ /_(_) ") - print(" /___/ ") - + print("Code formatting errors were found.") + print("==================================") def main(): num_errors = 0 @@ -144,10 +139,11 @@ def main(): print("=" * 50) if num_errors > 0: - print(" You have %d lint errors, commit failed" % num_errors) - print(" The errors were automatically piped through clang-format-8") + print(" You have %d lint errors, commit failed." % num_errors) + print(" Your code was modified with clang-format-8. " + \ + "Try to add and commit your files again to get an updated error count.") elif num_errors == 0: - print(" Code adheres to style guide lines, committing ...") + print(" Code adheres to style guide lines. Committing.") exit(num_errors) diff --git a/scripts/git/pre-commit.linter_python b/scripts/git/pre-commit.linter_python index 80ea0a186b..a3d2123778 100755 --- a/scripts/git/pre-commit.linter_python +++ b/scripts/git/pre-commit.linter_python @@ -28,36 +28,38 @@ fi failed_lint=false -if ! 
command -v black &> /dev/null -then - echo "Black was not found, to install check https://github.com/psf/black" - echo "Unfortunately black requires Python 3.6.2+ to run" +command -v black > /dev/null 2>&1 +ans=$? +if [ "$ans" -ne "0" ]; then + echo "'black' was not found. To install, check https://github.com/psf/black." + echo "Note that black requires Python 3.6.2+ to run." exit 0 fi -if ! command -v isort &> /dev/null -then - echo "isort was not found, to install check https://github.com/PyCQA/isort" +command -v isort > /dev/null 2>&1 +ans=$? +if [ "$ans" -ne "0" ]; then + echo "'isort' was not found. To install, check https://github.com/PyCQA/isort." exit 0 fi echo "==================================================" -echo " Analysing with black" +echo " Analysing python code style with 'black'." # This check the files but they will not be commited if `black . --include ${files} --check --quiet`; then - echo "Linter black checks passed" + echo "Linter checks using 'black' passed." else - echo "Errors detected, fixing ..." - black . --include ${files} - failed_lint=true + echo "Errors detected with 'black'. Fixing them. Try to add and commit your files again." + black . --include ${files} + failed_lint=true fi echo "==================================================" -echo " Analysing with isort" +echo " Analysing python code style with 'isort'." if $(isort ${files} --extend-skip cmake --profile black --diff --check-only --quiet >/dev/null); then - echo "Linter isort checks passed" + echo "Linter checks using 'isort' passed." else - echo "Errors detected, fixing ..." + echo "Errors detected with 'isort'. Fixing them. Try to add and commit your files again." isort ${files} --extend-skip cmake --profile black >/dev/null failed_lint=true fi diff --git a/scripts/postprocessing/coverage_analysis/activity_db_generator.py b/scripts/postprocessing/coverage_analysis/activity_db_generator.py new file mode 100644 index 0000000000..d6b7f4d15f --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/activity_db_generator.py @@ -0,0 +1,143 @@ +#!/usr/bin/python + +""" Creates a CSV file containing the Mapped Landmark features per robot pose published by + the /loc/ml/features topic +""" + +import sys + +import numpy as np +import rospy +from ff_msgs.msg import VisualLandmarks +from tf.transformations import * + + +class Activity_DBGenerator: + def __init__(self): + print("Activity_DBGenerator started...") + + self.activity_name = "" + self.activity_date = "" + self.output_filename = "" + self.fileOut = None + + def callback(self, data): + count = len(data.landmarks) + ts = data.header.stamp + + if count == 0: + robot_pose_str = "nan, nan, nan" + else: + robot_pose = self.body_frame_from_navcam( + data.pose.position.x, + data.pose.position.y, + data.pose.position.z, + data.pose.orientation.x, + data.pose.orientation.y, + data.pose.orientation.z, + data.pose.orientation.w, + ) + + robot_pose_str = "{:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f}".format( + robot_pose[0], + robot_pose[1], + robot_pose[2], + robot_pose[3], + robot_pose[4], + robot_pose[5], + robot_pose[6], + ) + + self.fileOut.write("{:d} {:s}\n".format(count, robot_pose_str)) + + for i in range(count): + ml_pose_str = "{:.3f} {:.3f} {:.3f}".format( + data.landmarks[i].x, data.landmarks[i].y, data.landmarks[i].z + ) + self.fileOut.write("{:s}\n".format(ml_pose_str)) + + def body_frame_from_navcam(self, px, py, pz, qi, qj, qk, qw): + # As defined in Astrobee repo: ${HOME}/astrobee/astrobee/config/robots/bumble.config + pose_const = 
np.array([0.1157 + 0.002, -0.0422, -0.0826, 1.0]) + quat_const = [0.500, 0.500, 0.500, 0.500] + + rot_const = quaternion_matrix(quat_const) + rot_const[:, 3] = pose_const.T + + rot_in = quaternion_matrix([qi, qj, qk, qw]) + rot_in[:, 3] = np.array([px, py, pz, 1.0]).T + + t_body = rot_in.dot(rot_const) + + quat_out = np.zeros((4, 4)) + quat_out[0:3, 0:3] = t_body[0:3, 0:3] + quat_out[3, 3] = 1.0 + + quat_out = quaternion_from_matrix(quat_out) + pose_out = t_body[0:3:, 3] + + return [ + pose_out[0], + pose_out[1], + pose_out[2], + quat_out[0], + quat_out[1], + quat_out[2], + quat_out[3], + ] + + def open_file(self, file, activity, map, date): + self.fileOut = open(file, "w") + self.fileOut.write( + "### Date-Map-Activity: " + date + "-" + map + "-" + activity + "\n" + ) + self.fileOut.write( + "### ML Number Robot Pose (x,y,z,r,p,y,w)\n### ML Coordinate(x,y,z)\n" + ) + + def close_file(self): + self.fileOut.close() + + def listener(self): + rospy.init_node("features_database_update", anonymous=True) + rospy.Subscriber("/loc/ml/features", VisualLandmarks, self.callback) + rospy.spin() + + +if __name__ == "__main__": + + try: + + if len(sys.argv) < 5: + print( + "Usage: activity_db_generator.py " + ) + print(" = 'YYYYMMDD'") + print(" = 'mapName'") + print(" = 'activityName'") + print(" = '/location/to/save/'") + sys.exit(1) + + else: + activity_date = sys.argv[1] + map_name = sys.argv[2] + activity_name = sys.argv[3] + location_name = sys.argv[4] + output_filename = ( + location_name + + activity_date + + "_" + + map_name + + "_" + + activity_name + + "_db.csv" + ) + + obj = Activity_DBGenerator() + obj.open_file(output_filename, activity_name, map_name, activity_date) + obj.listener() + + except KeyboardInterrupt: + print("\n <-CTRL-C EXIT: USER manually exited!->") + obj.close_file() + sys.exit(0) diff --git a/scripts/postprocessing/coverage_analysis/constants.py b/scripts/postprocessing/coverage_analysis/constants.py new file mode 100644 index 0000000000..dcf87e8eb7 --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/constants.py @@ -0,0 +1,45 @@ +""" Constants in meters based on JEM keepin dimensions as defined in + https://babelfish.arc.nasa.gov/bitbucket/projects/ASTROBEE/repos/astrobee_ops/browse/gds/ControlStationConfig/IssWorld-Default/keepins/keepin.json +""" + +# Lower FWD corner x coordinate at the entry node, keepin zone from bay 1 to 6 +XMIN_BAY16 = 11.99 +YMIN_BAY16 = -2.75 +ZMIN_BAY16 = 5.92 +# Higher AFT corner x coordinate at bay 6/7, keepin zone from bay 1 to 6 +XMAX_BAY16 = 9.86 +YMAX_BAY16 = -9.24 +ZMAX_BAY16 = 3.78 +# Lower FWD corner x coordinate at bay 6/7. keepin zone from bay 6/7 to 8 +XMIN_BAY67 = 12.34 +YMIN_BAY67 = -9.24 +ZMIN_BAY67 = 5.95 +# Higher AFT corner x coordinate at bay 6/7. 
keepin zone from bay 6/7 to 8 +XMAX_BAY67 = 9.54 +YMAX_BAY67 = -11.64 +ZMAX_BAY67 = 3.75 +# Lower right side corner, airlock +XMIN_AIRLK = 11.75 +YMIN_AIRLK = -10.60 +ZMIN_AIRLK = 6.00 +# Higher left side corner, airlock +XMAX_AIRLK = 10.25 +YMAX_AIRLK = -10.70 +ZMAX_AIRLK = 4.55 + +# Number of features and robot coordinates saved in database CSV file +NUM_FEAT_AND_ROBOT_COORDS_COLUMNS = 8 +# Number of columns representing ML Features X-Y-Z coordinates saved in database CSV file +FEAT_COORDS_COLUMNS = 3 + +# Start and End points along each bay's y-axis +BAYS_Y_LIMITS = { + 0: [-2.750, -4.300], + 1: [-4.300, -5.220], + 2: [-5.220, -6.500], + 3: [-6.430, -7.550], + 4: [-7.430, -8.660], + 5: [-8.550, -9.500], + 6: [-9.480, -10.800], + 7: [-10.712, -12.050], +} diff --git a/scripts/postprocessing/coverage_analysis/coverage_analyzer.py b/scripts/postprocessing/coverage_analysis/coverage_analyzer.py new file mode 100644 index 0000000000..8eff09abb0 --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/coverage_analyzer.py @@ -0,0 +1,543 @@ +#!/usr/bin/env python + +""" Determines if a given pose or list of poses in a file is within an Astrobee volume + based on a database of known poses and their corresponding ML features +""" + +import math +import os +import re +import subprocess +import sys +import timeit + +import constants +from tf.transformations import * + + +class Coverage_Analyzer: + def __init__(self): + + self.grid_size = 0.30 # meters + self.max_pose_error = ((self.grid_size / 2.0) ** 2) * 3 # Max squared error + self.matchCounter = 0 + print( + "Grid size: {:.3f}, Max Pose Error: {:.3f}".format( + self.grid_size, self.max_pose_error + ) + ) + + self.grid_params_z16 = [ + 2 + ] # Bays 1-6: Grid params for Z-axis of FWD and AFT walls + self.grid_params_x16 = [ + 2 + ] # Bays 1-6: Grid params for X-axis of OVHD and DECK walls + self.grid_params_z67 = [ + 2 + ] # Bays 6-8: Grid params for Z-axis of FWD and AFT walls + self.grid_params_x67 = [ + 2 + ] # Bays 6-8: Grid params for X-axis of FWD and AFT walls + self.x_init_bay16 = 0.0 + self.x_fin_bay16 = 0.0 + self.x_init_bay67 = 0.0 + self.x_fin_bay67 = 0.0 + + # Returns the squared distance between two Points (x,y,z) + def pose_inside_cube(self, robot_pose, cube_pose, grid_size): + cube_center = grid_size / 2.0 + + cube_squared = ((cube_center) ** 2) * 3 + + error_squared = ( + (robot_pose[0] - cube_pose[0]) ** 2 + + (robot_pose[1] - cube_pose[1]) ** 2 + + (robot_pose[2] - cube_pose[2]) ** 2 + ) + + # True if inside cube + return error_squared < cube_squared + + # Calculates the relative rotation between 2 quaternions, returns angle between them + def quat_error(self, robot_quat, test_quat): + + q1_inverse = self.quat_inverse(robot_quat) + + q_relative = quaternion_multiply(test_quat, q1_inverse) + + ang_error = 2 * math.atan2( + math.sqrt(q_relative[0] ** 2 + q_relative[1] ** 2 + q_relative[2] ** 2), + q_relative[3], + ) + + # To compensate for a singularity where atan2 = Pi + tolerance = 0.008 # 0.00 rad = 0.46 deg + if abs((math.pi * 2) - ang_error) < tolerance: + ang_error = 0.0 + + return ang_error + + # Returns the inverse (conjugate) of the quaternion given + def quat_inverse(self, robot_quat): + q_inv = [robot_quat[0], robot_quat[1], robot_quat[2], -robot_quat[3]] + + return q_inv + + def find_nearby_pose(self, testPt, fileIn, fileOut): + write_features = False + + print( + "Finding nearby poses around: {:3f} {:3f} {:3f} {:3f} {:3f} {:3f} {:3f}".format( + testPt[1], + testPt[2], + testPt[3], + testPt[4], + testPt[5], + 
testPt[6], + testPt[7], + ) + ) # skips ML num + + with open(fileIn) as f: + f_out = open(fileOut, "a") + lines = f.readlines() + + for line in lines: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == constants.NUM_FEAT_AND_ROBOT_COORDS_COLUMNS: + ml_num = int(vals[0]) + robot_pose = [float(vals[1]), float(vals[2]), float(vals[3])] + robot_quat = [ + float(vals[4]), + float(vals[5]), + float(vals[6]), + float(vals[7]), + ] + + test_pose = [testPt[1], testPt[2], testPt[3]] + test_quat = [testPt[4], testPt[5], testPt[6], testPt[7]] + + if self.pose_inside_cube(robot_pose, test_pose, grid_size) == True: + f_out.write( + "{:d} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f}\n".format( + ml_num, + robot_pose[0], + robot_pose[1], + robot_pose[2], + robot_quat[0], + robot_quat[1], + robot_quat[2], + robot_quat[3], + ) + ) + + write_features = True + self.matchCounter += 1 + + else: + write_features = False + ml_num = 0 + + if ( + len(vals) == constants.FEAT_COORDS_COLUMNS + and write_features + and ml_num > 0 + ): + f_out.write("{:s}".format(line)) + ml_num -= 1 + + # From the file containing each trajectory pose and its corresponding landmark poses + # save only the landmark poses in another file + def feature_extraction(self, databaseBodyFrame_fileIn, database_fileOut): + + print( + "Extracting features from: {:s} to {:s}".format( + databaseBodyFrame_fileIn, database_fileOut + ) + ) + with open(database_fileOut, "w") as f_out: + with open(databaseBodyFrame_fileIn, "r") as f_in: + + for line in f_in: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == 3: + f_out.write("{:s}".format(line)) + + # From the landmark poses remove all repeated landmarks + def remove_repeated_poses(self, database_fileOut, feat_only_fileOut): + + lines_seen = set() # holds lines seen + print( + "Removing duplicated feature poses from {:s} and creating {:s}".format( + database_fileOut, feat_only_fileOut + ) + ) + + match = 0 + numlines = 0 + with open(feat_only_fileOut, "w") as f_out: + with open(database_fileOut, "r+") as f_in: + + for line in f_in: + if line not in lines_seen: + f_out.write(line) + lines_seen.add(line) + match += 1 + numlines += 1 + print("Found {:d} unique lines in {:d} lines".format(match, numlines)) + + # Find any landmark pose within a cube of grid_size from the landmark poses-only file + # and generate a coverage file with the tested pose (center of the cube) and + # the number of features found within that cube + def find_nearby_cube_pose( + self, px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ): + + num_ml = 0 + + with open(coverage_fileOut, "a") as f_out: + with open(feat_only_fileOut, "r") as f_in: + + for line in f_in: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == 3: + robot_pose = [float(vals[0]), float(vals[1]), float(vals[2])] + cube_pose = [px, py, pz] + + if ( + 
self.pose_inside_cube(robot_pose, cube_pose, grid_size) + == True + ): + num_ml += 1 + f_out.write("{:d} {:.3f} {:.3f} {:.3f}\n".format(num_ml, px, py, pz)) + + # Equivalent to using math.isclose() since this is Python 2.7x + def is_close(self, a, b, rel_tol=1e-09, abs_tol=0.0): + return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) + + def calculate_grid_params(self, initial_grid_size, start_pos, end_pos): + dist = abs(end_pos - start_pos) + num_cubes = math.floor(dist / initial_grid_size) # float + + dist_delta = 100.0 # arbitrary initial value + result_grid_size = 0.0 + counter = 1 + max_num_counter = 10000.0 + + # Minimizing dist_delta by changing the grid_size + while self.is_close(dist_delta, 0.0, abs_tol=0.01) == False: + result_grid_size = initial_grid_size + ( + initial_grid_size * (counter / max_num_counter) + ) + dist_delta = dist - num_cubes * result_grid_size + counter += 1 + if counter == max_num_counter: + print("dist_delta did not converge") + break + + return [int(num_cubes), result_grid_size] + + def set_grid_params(self): + + # Bays 1-6: Grid params for Z-axis of FWD and AFT walls + self.grid_params_z16 = self.calculate_grid_params( + self.grid_size, constants.ZMAX_BAY16, constants.ZMIN_BAY16 + ) + + # Bays 1-6: Grid params for X-axis of OVHD and DECK walls + self.x_init_bay16 = constants.XMIN_BAY16 - self.grid_params_z16[1] + self.x_fin_bay16 = constants.XMAX_BAY16 + self.grid_params_z16[1] + + self.grid_params_x16 = self.calculate_grid_params( + self.grid_size, self.x_init_bay16, self.x_fin_bay16 + ) + + # Bays 6-8: Grid params for Z-axis of FWD and AFT walls + self.grid_params_z67 = self.calculate_grid_params( + self.grid_size, constants.ZMAX_BAY67, constants.ZMIN_BAY67 + ) + + # Bays 6-8: Grid params for X-axis of OVHD and DECK walls + self.x_init_bay67 = constants.XMIN_BAY67 - self.grid_params_z67[1] + self.x_fin_bay67 = constants.XMAX_BAY67 + self.grid_params_z67[1] + + self.grid_params_x67 = self.calculate_grid_params( + self.grid_size, self.x_init_bay67, self.x_fin_bay67 + ) + + def calculate_coverage_overhead( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Overhead Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x16[0] + start_pos_x = self.x_init_bay16 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMAX_BAY16 + p_center + + else: + grid_size = self.grid_params_x67[1] + xlen = self.grid_params_x67[0] + start_pos_x = self.x_init_bay67 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMAX_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Overhead_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(xlen): + for j in range(ylen): + px = (start_pos_x - grid_size * i) - p_center + py = (start_pos_y - grid_size * j) - p_center + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_forward( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Forward Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_z16[1] + zlen = self.grid_params_z16[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMIN_BAY16 - p_center + + else: + grid_size = 
self.grid_params_z67[1] + zlen = self.grid_params_z67[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMIN_BAY67 - p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Forward_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(ylen): + for j in range(zlen): + py = (start_pos_y - grid_size * i) - p_center + pz = ( + constants.ZMAX_BAY16 + grid_size * j + ) + p_center # Using z_max to have better coverage on the overhead. + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_aft( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Aft Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_z16[1] + zlen = self.grid_params_z16[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMAX_BAY16 + p_center + + else: + grid_size = self.grid_params_z67[1] + zlen = self.grid_params_z67[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMAX_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Aft_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(ylen): + for j in range(zlen): + py = (start_pos_y - grid_size * i) - p_center + pz = ( + constants.ZMAX_BAY16 + grid_size * j + ) + p_center # Using z_max to have better coverage on the overhead. + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_deck( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + current_bay = bay_num + 1 + print("Finding matches Deck Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x16[0] + start_pos_x = self.x_init_bay16 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMIN_BAY16 - p_center + + else: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x67[0] + start_pos_x = self.x_init_bay67 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMIN_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Deck_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(xlen): + for j in range(ylen): + px = (start_pos_x - grid_size * i) - p_center + py = (start_pos_y - grid_size * j) - p_center + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_airlock(self, feat_only_fileOut, coverage_fileOut): + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Airlock" + " " + str(self.grid_size) + "\n") + f_out.close() + + print("Finding matches Airlock") + p_center = self.grid_size / 2.0 # Center of the cube + + xlen = int( + math.floor( + abs(constants.XMAX_AIRLK - constants.XMIN_AIRLK) / self.grid_size + ) + ) + zlen = int( + math.floor( + abs(constants.ZMAX_AIRLK - constants.ZMIN_AIRLK) / self.grid_size + ) + ) + + py = constants.YMAX_AIRLK + p_center + for i in range(xlen): + for j in range(zlen): + px = (constants.XMAX_AIRLK + self.grid_size * i) + p_center + pz = (constants.ZMAX_AIRLK + self.grid_size * j) + p_center + self.find_nearby_cube_pose( + px, py, pz, self.grid_size, 
feat_only_fileOut, coverage_fileOut + ) + + def find_nearby_features_from_file(self, feat_only_fileOut, coverage_fileOut): + + print( + "Finding matches from pose file {:s}, creating coverage file: {:s}".format( + feat_only_fileOut, coverage_fileOut + ) + ) + + f_out = open(coverage_fileOut, "w") + f_out.write("#Num_ML Cube_Center_Pose (X Y Z)\n") + f_out.close() + + for bay, positions in constants.BAYS_Y_LIMITS.items(): + start_pos_y = positions[0] + stop_pos_y = positions[1] + + self.calculate_coverage_overhead( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_forward( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_aft( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_deck( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + + self.calculate_coverage_airlock(feat_only_fileOut, coverage_fileOut) + + def remove_tmp_file(self, fileToRemove): + os.remove(fileToRemove) + print("Removing temporary file: {:s}".format(fileToRemove)) + + +if __name__ == "__main__": + + try: + + if len(sys.argv) < 1: + print("Usage: " + sys.argv[0] + "") + print( + " : '/dir/to/activity_database_file.csv' containing robot poses and ML features to compare with\n" + ) + sys.exit(1) + + activity_database_fileIn = sys.argv[1] + + f_in = activity_database_fileIn.find("_db.csv") + database_fileOut = ( + activity_database_fileIn[:f_in] + "_features_db.csv" + ) # Name of file with all features' pose + feat_only_fileOut = ( + activity_database_fileIn[:f_in] + "_features_db_nonrepeat.csv" + ) # Name of file with non-repeated features' pose + coverage_fileOut = ( + activity_database_fileIn[:f_in] + "_coverage_db.csv" + ) # Name of output file with coverage database + + obj = Coverage_Analyzer() + + obj.set_grid_params() + obj.feature_extraction(activity_database_fileIn, database_fileOut) + obj.remove_repeated_poses(database_fileOut, feat_only_fileOut) + + tic = timeit.default_timer() + obj.find_nearby_features_from_file(feat_only_fileOut, coverage_fileOut) + toc = timeit.default_timer() + elapsedTime = toc - tic + print("Time to create coverage file: {:.2f}s".format(elapsedTime)) + obj.remove_tmp_file(database_fileOut) + obj.remove_tmp_file(feat_only_fileOut) + + except KeyboardInterrupt: + print("\n <-CTRL-C EXIT: USER manually exited!->") + sys.exit(0) diff --git a/scripts/postprocessing/coverage_analysis/coverage_painter.py b/scripts/postprocessing/coverage_analysis/coverage_painter.py new file mode 100644 index 0000000000..8eff09abb0 --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/coverage_painter.py @@ -0,0 +1,543 @@ +#!/usr/bin/env python + +""" Determines if a given pose or list of poses in a file is within an Astrobee volume + based on a database of known poses and their corresponding ML features +""" + +import math +import os +import re +import subprocess +import sys +import timeit + +import constants +from tf.transformations import * + + +class Coverage_Analyzer: + def __init__(self): + + self.grid_size = 0.30 # meters + self.max_pose_error = ((self.grid_size / 2.0) ** 2) * 3 # Max squared error + self.matchCounter = 0 + print( + "Grid size: {:.3f}, Max Pose Error: {:.3f}".format( + self.grid_size, self.max_pose_error + ) + ) + + self.grid_params_z16 = [ + 2 + ] # Bays 1-6: Grid params for Z-axis of FWD and AFT walls + self.grid_params_x16 = [ + 2 + ] # Bays 1-6: Grid params for X-axis of OVHD and DECK walls + self.grid_params_z67 = [ 
+ 2 + ] # Bays 6-8: Grid params for Z-axis of FWD and AFT walls + self.grid_params_x67 = [ + 2 + ] # Bays 6-8: Grid params for X-axis of FWD and AFT walls + self.x_init_bay16 = 0.0 + self.x_fin_bay16 = 0.0 + self.x_init_bay67 = 0.0 + self.x_fin_bay67 = 0.0 + + # Returns the squared distance between two Points (x,y,z) + def pose_inside_cube(self, robot_pose, cube_pose, grid_size): + cube_center = grid_size / 2.0 + + cube_squared = ((cube_center) ** 2) * 3 + + error_squared = ( + (robot_pose[0] - cube_pose[0]) ** 2 + + (robot_pose[1] - cube_pose[1]) ** 2 + + (robot_pose[2] - cube_pose[2]) ** 2 + ) + + # True if inside cube + return error_squared < cube_squared + + # Calculates the relative rotation between 2 quaternions, returns angle between them + def quat_error(self, robot_quat, test_quat): + + q1_inverse = self.quat_inverse(robot_quat) + + q_relative = quaternion_multiply(test_quat, q1_inverse) + + ang_error = 2 * math.atan2( + math.sqrt(q_relative[0] ** 2 + q_relative[1] ** 2 + q_relative[2] ** 2), + q_relative[3], + ) + + # To compensate for a singularity where atan2 = Pi + tolerance = 0.008 # 0.00 rad = 0.46 deg + if abs((math.pi * 2) - ang_error) < tolerance: + ang_error = 0.0 + + return ang_error + + # Returns the inverse (conjugate) of the quaternion given + def quat_inverse(self, robot_quat): + q_inv = [robot_quat[0], robot_quat[1], robot_quat[2], -robot_quat[3]] + + return q_inv + + def find_nearby_pose(self, testPt, fileIn, fileOut): + write_features = False + + print( + "Finding nearby poses around: {:3f} {:3f} {:3f} {:3f} {:3f} {:3f} {:3f}".format( + testPt[1], + testPt[2], + testPt[3], + testPt[4], + testPt[5], + testPt[6], + testPt[7], + ) + ) # skips ML num + + with open(fileIn) as f: + f_out = open(fileOut, "a") + lines = f.readlines() + + for line in lines: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == constants.NUM_FEAT_AND_ROBOT_COORDS_COLUMNS: + ml_num = int(vals[0]) + robot_pose = [float(vals[1]), float(vals[2]), float(vals[3])] + robot_quat = [ + float(vals[4]), + float(vals[5]), + float(vals[6]), + float(vals[7]), + ] + + test_pose = [testPt[1], testPt[2], testPt[3]] + test_quat = [testPt[4], testPt[5], testPt[6], testPt[7]] + + if self.pose_inside_cube(robot_pose, test_pose, grid_size) == True: + f_out.write( + "{:d} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f}\n".format( + ml_num, + robot_pose[0], + robot_pose[1], + robot_pose[2], + robot_quat[0], + robot_quat[1], + robot_quat[2], + robot_quat[3], + ) + ) + + write_features = True + self.matchCounter += 1 + + else: + write_features = False + ml_num = 0 + + if ( + len(vals) == constants.FEAT_COORDS_COLUMNS + and write_features + and ml_num > 0 + ): + f_out.write("{:s}".format(line)) + ml_num -= 1 + + # From the file containing each trajectory pose and its corresponding landmark poses + # save only the landmark poses in another file + def feature_extraction(self, databaseBodyFrame_fileIn, database_fileOut): + + print( + "Extracting features from: {:s} to {:s}".format( + databaseBodyFrame_fileIn, database_fileOut + ) + ) + with open(database_fileOut, "w") as f_out: + with open(databaseBodyFrame_fileIn, "r") as f_in: + + for line in f_in: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the 
values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == 3: + f_out.write("{:s}".format(line)) + + # From the landmark poses remove all repeated landmarks + def remove_repeated_poses(self, database_fileOut, feat_only_fileOut): + + lines_seen = set() # holds lines seen + print( + "Removing duplicated feature poses from {:s} and creating {:s}".format( + database_fileOut, feat_only_fileOut + ) + ) + + match = 0 + numlines = 0 + with open(feat_only_fileOut, "w") as f_out: + with open(database_fileOut, "r+") as f_in: + + for line in f_in: + if line not in lines_seen: + f_out.write(line) + lines_seen.add(line) + match += 1 + numlines += 1 + print("Found {:d} unique lines in {:d} lines".format(match, numlines)) + + # Find any landmark pose within a cube of grid_size from the landmark poses-only file + # and generate a coverage file with the tested pose (center of the cube) and + # the number of features found within that cube + def find_nearby_cube_pose( + self, px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ): + + num_ml = 0 + + with open(coverage_fileOut, "a") as f_out: + with open(feat_only_fileOut, "r") as f_in: + + for line in f_in: + # Skip empty lines and those starting with comments + if re.match("^\s*\n", line) or re.match("^\s*\#", line): + continue + + # Extract all the values separated by spaces + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == 3: + robot_pose = [float(vals[0]), float(vals[1]), float(vals[2])] + cube_pose = [px, py, pz] + + if ( + self.pose_inside_cube(robot_pose, cube_pose, grid_size) + == True + ): + num_ml += 1 + f_out.write("{:d} {:.3f} {:.3f} {:.3f}\n".format(num_ml, px, py, pz)) + + # Equivalent to using math.isclose() since this is Python 2.7x + def is_close(self, a, b, rel_tol=1e-09, abs_tol=0.0): + return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) + + def calculate_grid_params(self, initial_grid_size, start_pos, end_pos): + dist = abs(end_pos - start_pos) + num_cubes = math.floor(dist / initial_grid_size) # float + + dist_delta = 100.0 # arbitrary initial value + result_grid_size = 0.0 + counter = 1 + max_num_counter = 10000.0 + + # Minimizing dist_delta by changing the grid_size + while self.is_close(dist_delta, 0.0, abs_tol=0.01) == False: + result_grid_size = initial_grid_size + ( + initial_grid_size * (counter / max_num_counter) + ) + dist_delta = dist - num_cubes * result_grid_size + counter += 1 + if counter == max_num_counter: + print("dist_delta did not converge") + break + + return [int(num_cubes), result_grid_size] + + def set_grid_params(self): + + # Bays 1-6: Grid params for Z-axis of FWD and AFT walls + self.grid_params_z16 = self.calculate_grid_params( + self.grid_size, constants.ZMAX_BAY16, constants.ZMIN_BAY16 + ) + + # Bays 1-6: Grid params for X-axis of OVHD and DECK walls + self.x_init_bay16 = constants.XMIN_BAY16 - self.grid_params_z16[1] + self.x_fin_bay16 = constants.XMAX_BAY16 + self.grid_params_z16[1] + + self.grid_params_x16 = self.calculate_grid_params( + self.grid_size, self.x_init_bay16, self.x_fin_bay16 + ) + + # Bays 6-8: Grid params for Z-axis of FWD and AFT walls + self.grid_params_z67 = self.calculate_grid_params( + self.grid_size, constants.ZMAX_BAY67, constants.ZMIN_BAY67 + ) + + # Bays 6-8: Grid params for X-axis of OVHD and DECK walls + self.x_init_bay67 = constants.XMIN_BAY67 - self.grid_params_z67[1] + self.x_fin_bay67 = constants.XMAX_BAY67 + 
self.grid_params_z67[1] + + self.grid_params_x67 = self.calculate_grid_params( + self.grid_size, self.x_init_bay67, self.x_fin_bay67 + ) + + def calculate_coverage_overhead( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Overhead Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x16[0] + start_pos_x = self.x_init_bay16 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMAX_BAY16 + p_center + + else: + grid_size = self.grid_params_x67[1] + xlen = self.grid_params_x67[0] + start_pos_x = self.x_init_bay67 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMAX_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Overhead_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(xlen): + for j in range(ylen): + px = (start_pos_x - grid_size * i) - p_center + py = (start_pos_y - grid_size * j) - p_center + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_forward( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Forward Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_z16[1] + zlen = self.grid_params_z16[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMIN_BAY16 - p_center + + else: + grid_size = self.grid_params_z67[1] + zlen = self.grid_params_z67[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMIN_BAY67 - p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Forward_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(ylen): + for j in range(zlen): + py = (start_pos_y - grid_size * i) - p_center + pz = ( + constants.ZMAX_BAY16 + grid_size * j + ) + p_center # Using z_max to have better coverage on the overhead. + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_aft( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + + current_bay = bay_num + 1 + print("Finding matches Aft Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_z16[1] + zlen = self.grid_params_z16[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMAX_BAY16 + p_center + + else: + grid_size = self.grid_params_z67[1] + zlen = self.grid_params_z67[0] + p_center = grid_size / 2.0 # Center of the cube + px = constants.XMAX_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Aft_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(ylen): + for j in range(zlen): + py = (start_pos_y - grid_size * i) - p_center + pz = ( + constants.ZMAX_BAY16 + grid_size * j + ) + p_center # Using z_max to have better coverage on the overhead. 
+ self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_deck( + self, bay_num, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ): + current_bay = bay_num + 1 + print("Finding matches Deck Bay" + str(current_bay)) + + if current_bay < 6: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x16[0] + start_pos_x = self.x_init_bay16 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMIN_BAY16 - p_center + + else: + grid_size = self.grid_params_x16[1] + xlen = self.grid_params_x67[0] + start_pos_x = self.x_init_bay67 + p_center = grid_size / 2.0 # Center of the cube + pz = constants.ZMIN_BAY67 + p_center + + ylen = int(abs(stop_pos_y - start_pos_y) / grid_size) + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Deck_" + str(current_bay) + " " + str(grid_size) + "\n") + f_out.close() + + for i in range(xlen): + for j in range(ylen): + px = (start_pos_x - grid_size * i) - p_center + py = (start_pos_y - grid_size * j) - p_center + self.find_nearby_cube_pose( + px, py, pz, grid_size, feat_only_fileOut, coverage_fileOut + ) + + def calculate_coverage_airlock(self, feat_only_fileOut, coverage_fileOut): + + f_out = open(coverage_fileOut, "a") + f_out.write("# Wall Airlock" + " " + str(self.grid_size) + "\n") + f_out.close() + + print("Finding matches Airlock") + p_center = self.grid_size / 2.0 # Center of the cube + + xlen = int( + math.floor( + abs(constants.XMAX_AIRLK - constants.XMIN_AIRLK) / self.grid_size + ) + ) + zlen = int( + math.floor( + abs(constants.ZMAX_AIRLK - constants.ZMIN_AIRLK) / self.grid_size + ) + ) + + py = constants.YMAX_AIRLK + p_center + for i in range(xlen): + for j in range(zlen): + px = (constants.XMAX_AIRLK + self.grid_size * i) + p_center + pz = (constants.ZMAX_AIRLK + self.grid_size * j) + p_center + self.find_nearby_cube_pose( + px, py, pz, self.grid_size, feat_only_fileOut, coverage_fileOut + ) + + def find_nearby_features_from_file(self, feat_only_fileOut, coverage_fileOut): + + print( + "Finding matches from pose file {:s}, creating coverage file: {:s}".format( + feat_only_fileOut, coverage_fileOut + ) + ) + + f_out = open(coverage_fileOut, "w") + f_out.write("#Num_ML Cube_Center_Pose (X Y Z)\n") + f_out.close() + + for bay, positions in constants.BAYS_Y_LIMITS.items(): + start_pos_y = positions[0] + stop_pos_y = positions[1] + + self.calculate_coverage_overhead( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_forward( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_aft( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + self.calculate_coverage_deck( + bay, start_pos_y, stop_pos_y, feat_only_fileOut, coverage_fileOut + ) + + self.calculate_coverage_airlock(feat_only_fileOut, coverage_fileOut) + + def remove_tmp_file(self, fileToRemove): + os.remove(fileToRemove) + print("Removing temporary file: {:s}".format(fileToRemove)) + + +if __name__ == "__main__": + + try: + + if len(sys.argv) < 1: + print("Usage: " + sys.argv[0] + "") + print( + " : '/dir/to/activity_database_file.csv' containing robot poses and ML features to compare with\n" + ) + sys.exit(1) + + activity_database_fileIn = sys.argv[1] + + f_in = activity_database_fileIn.find("_db.csv") + database_fileOut = ( + activity_database_fileIn[:f_in] + "_features_db.csv" + ) # Name of file with all features' pose + feat_only_fileOut = ( + activity_database_fileIn[:f_in] + 
"_features_db_nonrepeat.csv" + ) # Name of file with non-repeated features' pose + coverage_fileOut = ( + activity_database_fileIn[:f_in] + "_coverage_db.csv" + ) # Name of output file with coverage database + + obj = Coverage_Analyzer() + + obj.set_grid_params() + obj.feature_extraction(activity_database_fileIn, database_fileOut) + obj.remove_repeated_poses(database_fileOut, feat_only_fileOut) + + tic = timeit.default_timer() + obj.find_nearby_features_from_file(feat_only_fileOut, coverage_fileOut) + toc = timeit.default_timer() + elapsedTime = toc - tic + print("Time to create coverage file: {:.2f}s".format(elapsedTime)) + obj.remove_tmp_file(database_fileOut) + obj.remove_tmp_file(feat_only_fileOut) + + except KeyboardInterrupt: + print("\n <-CTRL-C EXIT: USER manually exited!->") + sys.exit(0) diff --git a/scripts/postprocessing/coverage_analysis/coverage_stats_reporter.py b/scripts/postprocessing/coverage_analysis/coverage_stats_reporter.py new file mode 100644 index 0000000000..322d90259f --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/coverage_stats_reporter.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python + +""" Create a file summarizing the coverage percentage across the different + walls of the JEM at the ISS +""" + +import math +import re +import sys + +import matplotlib.pyplot as plt +import numpy as np +from matplotlib.backends.backend_pdf import PdfPages + + +class Coverage_StatsReporter: + def __init__(self): + print("Coverage_StatsReporter started") + self.max_num_features = 0 + self.first_bin = 0 + self.second_bin = 0 + self.third_bin = 0 + self.fourth_bin = 0 + self.fifth_bin = 0 + + # Wall, 0%,1-10%,11-20%,21-40%,40+%, total_wall + self.coverage_per_wall = ["", 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + self.final_stats = [] + + def reset_values(self): + self.total_distribution = 0 + self.first_bin = 0 + self.second_bin = 0 + self.third_bin = 0 + self.fourth_bin = 0 + self.fifth_bin = 0 + self.coverage_per_wall = ["", 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + + # Categorize the number of cubes in each category + # 0, 25, 50, 75, 100 percent of map coverage defined as + # number of registered ML features per cube. + def analyze_wall_coverage(self, num_features_per_cube): + """# Normalized visualization desired (x' = (x -xmin)/ (xmax-xmin))""" + + # If ML number based visualization is desired + if num_features_per_cube == 0: + self.first_bin += 1.0 + elif num_features_per_cube >= 1 and num_features_per_cube <= 10: + self.second_bin += 1.0 + elif num_features_per_cube >= 11 and num_features_per_cube <= 20: + self.third_bin += 1.0 + elif num_features_per_cube >= 21 and num_features_per_cube <= 40: + self.fourth_bin += 1.0 + elif num_features_per_cube >= 40: + self.fifth_bin += 1.0 + + self.total_distribution = ( + self.first_bin + + self.second_bin + + self.third_bin + + self.fourth_bin + + self.fifth_bin + ) + + self.coverage_per_wall[1] = (self.first_bin) * 100.0 / self.total_distribution + self.coverage_per_wall[2] = (self.second_bin) * 100.0 / self.total_distribution + self.coverage_per_wall[3] = (self.third_bin) * 100.0 / self.total_distribution + self.coverage_per_wall[4] = (self.fourth_bin) * 100.0 / self.total_distribution + self.coverage_per_wall[5] = (self.fifth_bin) * 100.0 / self.total_distribution + + self.coverage_per_wall[6] = self.coverage_per_wall[6] + num_features_per_cube + + # Generate a report file containing the wall, percentage in a five-bin + # distribution and the total number of registered ML features in a given + # wall of the ISS' JEM. 
+ def generate_coverage_stats(self, fileIn): + + print("Reading {:s}".format(fileIn)) + + wall_counter = -1 + + with open(fileIn) as f_in: + for line in f_in: + # Skip empty lines + if re.match("^\s*\n", line): + # print("Skipped empty line\n") + continue + + # Check if comments has string "Wall" + if re.match("^\s*\#", line): + + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if vals[1] == "Wall": + if wall_counter == -1: + self.coverage_per_wall[0] = vals[ + 2 + ] # Save the name of the wall + wall_counter += 1 + + else: + # Save the coverage of this wall in final array + self.final_stats.insert( + wall_counter, self.coverage_per_wall + ) + + # Reset values for each wall. + self.reset_values() + + self.coverage_per_wall[0] = vals[ + 2 + ] # Save the name of the wall + wall_counter += 1 + + # Extract all the values separated by spaces + else: + vals = [] + for v in re.split("\s+", line): + if v == "": + continue + vals.append(v) + + if len(vals) == 4: + self.analyze_wall_coverage(int(vals[0])) + + # EOF so write the last values + self.final_stats.insert(wall_counter, self.coverage_per_wall) + self.write_report(fileIn) + + def write_report(self, fileIn): + max_num_features = 0.0 + + last_column = len(self.final_stats[0]) - 1 + + f_stats_index = fileIn.find(".csv") + stats_filename = fileIn[:f_stats_index] + "_stats.csv" + print("Creating {:s}".format(stats_filename)) + + with open(stats_filename, "w") as f_stats: + f_stats.write( + "# Coverage Distribution along the different walls of the JEM\n" + ) + f_stats.write( + "# Columns represent percentage of registered ML Features per wall (0, 1-10, 11-20, 21-40, 40+)\n" + ) + f_stats.write("# Analyisis corresponding to: {:s}\n".format(fileIn)) + f_stats.write( + "JEM Wall, 0ML, 1-10ML, 11-20ML, 21-40ML, 40+ML, Total Reg. 
ML Features per wall, Percentage of Total Num ML Features Analyzed\n" + ) + + for i in range(len(self.final_stats)): + max_num_features = max_num_features + self.final_stats[i][last_column] + + for i in range(len(self.final_stats)): + self.final_stats[i].extend( + [self.final_stats[i][last_column] * 100.0 / max_num_features] + ) + for i in range(len(self.final_stats)): + f_stats.write( + "{:s}, {:.2f}, {:.2f}, {:.2f}, {:.2f}, {:.2f}, {:.2f}, {:.2f}\n".format( + self.final_stats[i][0], # JEM Wall + self.final_stats[i][1], # 0ML percentage + self.final_stats[i][2], # 1-10ML % + self.final_stats[i][3], # 11-20ML % + self.final_stats[i][4], # 21-40ML % + self.final_stats[i][5], # 40+ML % + self.final_stats[i][6], # Total Reg ML/wall + self.final_stats[i][7], + ) + ) # Total Num ML Features Analyzed % + f_stats.write( + "Total Number of Registered Features, {:d}\n".format( + int(max_num_features) + ) + ) + + def print_pdf_report(self, fileIn): + + f_in = fileIn.find(".csv") + report_filename = fileIn[:f_in] + "_stats_report.pdf" + print("Creating {:s}".format(report_filename)) + + # Organize data + num_walls = len(self.final_stats) # All the walls analyzed + + x_data = np.arange(num_walls) + first_bin = np.zeros(num_walls) + second_bin = np.zeros(num_walls) + third_bin = np.zeros(num_walls) + fourth_bin = np.zeros(num_walls) + fifth_bin = np.zeros(num_walls) + x_ticks = np.empty(num_walls, dtype=object) + total_prcnt = np.zeros(num_walls) + + for i in range(len(self.final_stats)): + x_ticks[i] = str(self.final_stats[i][0]) + first_bin[i] = self.final_stats[i][1] + second_bin[i] = self.final_stats[i][2] + third_bin[i] = self.final_stats[i][3] + fourth_bin[i] = self.final_stats[i][4] + fifth_bin[i] = self.final_stats[i][5] + total_prcnt[i] = self.final_stats[i][7] + + # Create the PdfPages object to which we will save the pages: + # The with statement makes sure that the PdfPages object is closed properly at + # the end of the block, even if an Exception occurs. + with PdfPages(report_filename) as pdf: + + # Plot General Coverage + plt.title("General Coverage Distribution per Wall\n" + fileIn, fontsize=8) + + # plot data in grouped manner of bar type + plt.bar( + x_data, + total_prcnt, + width=0.8, + label="Coverage Percentage", + color="blue", + ) + + plt.xticks(x_data, x_ticks, fontsize=4, rotation=45, ha="right") + plt.yticks(fontsize=8) + + plt.xlabel(" ", fontsize=5) + plt.ylabel("Coverage %", fontsize=8) + plt.tight_layout() # adjusts the bottom margin of the plot so that the labels don't go off the bottom edge. 
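+            # This first pdf page is a bar chart of the overall coverage percentage per
+            # wall; a second page with the stacked per-bin (0, 1-10, 11-20, 21-40, 40+ ML)
+            # distribution per wall is added further below.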
+ + pdf.savefig() # saves the current figure into a pdf page + plt.close() + + # Plot Detailed Coverage + plt.title("Detailed Coverage Distribution per Wall\n" + fileIn, fontsize=8) + + # plot data in grouped manner of bar type + plt.bar(x_data, first_bin, width=0.8, label="0ML", color="red") + plt.bar( + x_data, + second_bin, + width=0.8, + label="1-10ML", + color="orange", + bottom=first_bin, + ) + plt.bar( + x_data, + third_bin, + width=0.8, + label="11-20ML", + color="yellow", + bottom=second_bin + first_bin, + ) + plt.bar( + x_data, + fourth_bin, + width=0.8, + label="21-40ML", + color="green", + bottom=third_bin + second_bin + first_bin, + ) + plt.bar( + x_data, + fifth_bin, + width=0.8, + label="40+ML", + color="blue", + bottom=fourth_bin + third_bin + second_bin + first_bin, + ) + + plt.xticks(x_data, x_ticks, fontsize=4, rotation=45, ha="right") + plt.yticks(fontsize=8) + plt.xlabel(" ", fontsize=5) + plt.ylabel("Coverage %", fontsize=8) + plt.tight_layout( + rect=[0, 0, 0.95, 1.0] + ) # adjusts the bottom margin of the plot so that the labels don't go off the bottom edge. + plt.legend( + fontsize=5, bbox_to_anchor=(0.5, -0.1), loc="upper center", ncol=5 + ) # , mode="expand", borderaxespad=0.0) + + pdf.savefig() # saves the current figure into a pdf page + plt.close() + + +# The main program +if __name__ == "__main__": + + try: + if len(sys.argv) < 2: + print("Usage: " + sys.argv[0] + " ") + print( + " : 'dir/to/activity-coverage-database-file.csv' " + ) + sys.exit(1) + + fileIn = sys.argv[1] + + obj = Coverage_StatsReporter() + + obj.generate_coverage_stats(fileIn) + obj.print_pdf_report(fileIn) + + except KeyboardInterrupt: + print("\n <-CTRL-C EXIT: USER manually exited!->") + sys.exit(0) diff --git a/scripts/postprocessing/coverage_analysis/img_to_bag.py b/scripts/postprocessing/coverage_analysis/img_to_bag.py new file mode 100755 index 0000000000..17a741f925 --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/img_to_bag.py @@ -0,0 +1,78 @@ +#!/usr/bin/python + +""" Generates an image bag containing only the images in a given folder. + Modified from https://answers.ros.org/question/11537/creating-a-bag-file-out-of-a-image-sequence/. 
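+    Usage: img_to_bag.py <image_directory> <output_bag_filename>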
+""" + +import os +import sys +import time + +import roslib +import rospy +from ros import rosbag + +roslib.load_manifest("sensor_msgs") +import cv2 +from cv_bridge import CvBridge +from PIL import ImageFile +from sensor_msgs.msg import Image + + +def get_files_from_dir(dir): + """Generates a list of files from the directory""" + print("Searching directory %s" % dir) + all = [] + left_files = [] + right_files = [] + if os.path.exists(dir): + for path, names, files in os.walk(dir): + for f in sorted(files): + if os.path.splitext(f)[1] in [".bmp", ".png", ".jpg", ".ppm"]: + if "left" in f or "left" in path: + left_files.append(os.path.join(path, f)) + elif "right" in f or "right" in path: + right_files.append(os.path.join(path, f)) + all.append(os.path.join(path, f)) + return all, left_files, right_files + + +def create_mono_bag(imgs, bagname): + """Creates a bag file with camera images""" + bag = rosbag.Bag(bagname, "w") + + try: + for i in range(len(imgs)): + print("Adding %s" % imgs[i]) + img = cv2.imread(imgs[i]) + bridge = CvBridge() + + Stamp = rospy.rostime.Time.from_sec(time.time()) + img_msg = Image() + img_msg = bridge.cv2_to_imgmsg(img, "bgr8") + img_msg.header.seq = i + img_msg.header.stamp = Stamp + img_msg.header.frame_id = "camera" + + bag.write("mgt/img_sampler/nav_cam/image_record", img_msg, Stamp) + finally: + bag.close() + + +def create_bag(args): + """Creates the actual bag file by successively adding images""" + all_imgs, left_imgs, right_imgs = get_files_from_dir(args[0]) + + if len(all_imgs) <= 0: + print("No images found in %s" % args[0]) + exit() + + # create bagfile with mono camera image stream + create_mono_bag(all_imgs, args[1]) + + +if __name__ == "__main__": + if len(sys.argv) == 3: + create_bag(sys.argv[1:]) + else: + print("Usage: img2bag imagedir bagfilename") diff --git a/scripts/postprocessing/coverage_analysis/readme.txt b/scripts/postprocessing/coverage_analysis/readme.txt new file mode 100644 index 0000000000..1c9908ba1b --- /dev/null +++ b/scripts/postprocessing/coverage_analysis/readme.txt @@ -0,0 +1,128 @@ +Tools to generate a heatmap visualization in RViz (Gazebo) of the features distribution (or map coverage) for a given map or trajectory contained in a bag. +If this process is used to observe the coverage in a given bag at a given activity, Step 1 can be skipped. + +Steps: +1. Generate a bag from all the images in the map of interest. +Inputs: dir/where/jpg are +Tool: ~/coverage_analysis/img_to_bag.py +Outputs: bagOfTheMap.bag + +Move all the image files used to generate the map to a single directory +Run "python img_to_bag.py /dir/where/jpgs/are /dir/where/bag/should/go/bagOfTheMap.bag", bagOfTheMap.bag = 20210518_cabanel.bag +If this gives an error, run rosdep update. + +2. Run the localization node to find ML landmarks +Inputs: activity_name, activity_date, bagOfInterest.bag +Tools: roslaunch, rosservice, ~/coverage_analysis/activity_db_generator.py, rosbag play +Outputs: activity_database_file with recommended naming convention "YYYYMMDD_map_activity_db.csv" (The coordinates in the CSV output file are in the Astrobee's body inertial frame already). + +Follow the instructions in https://github.com/nasa/astrobee/blob/master/localization/sparse_mapping/build_map.md for "Verify localization against +a sparse map on a local machine" + +Make sure your environment variables are correct, and that the symbolic link in ~/astrobee/astrobee/resources/maps/iss.map points to the map of interest. 
+To do this, first check that iss.map points to your map of interest by running in a terminal "ls -lah ~/astrobee/astrobee/resources/maps/iss.map".
+If it is not pointing to the map of interest, move the current map aside and make the link point to the correct one by running in a terminal
+ "mv ~/astrobee/astrobee/resources/maps/iss.map ~/astrobee/astrobee/resources/maps/iss_backup.map"
+ "ln -s ~/map/location/<map_of_interest>.map ~/astrobee/astrobee/resources/maps/iss.map"
+Run in a terminal
+ "roslaunch astrobee astrobee.launch mlp:=local llp:=disabled nodes:=framestore,localization_node world:=iss".
+In another terminal enable localization:
+ "rosservice call /loc/ml/enable true" and wait for it to return "success: True".
+In this same terminal, run
+"python ~/coverage_analysis/activity_db_generator.py <activity date> <map name> <activity name> <location to save>", where
+activity date = 20210726
+map name = dali
+activity name = KiboEventRehRun1
+location to save = /dir/where/database/file/will/be/
+The final file will be /dir/where/database/file/will/be/20210726_dali_KiboEventRehRun1_db.csv
+
+After starting activity_db_generator.py, play the bag of interest in another terminal:
+"rosbag play bagOfInterest.bag /mgt/img_sampler/nav_cam/image_record:=/hw/cam_nav"
+If necessary, once the database of ML features and robot poses has been generated, restore the original map by running in a terminal
+"rm ~/astrobee/astrobee/resources/maps/iss.map; mv ~/astrobee/astrobee/resources/maps/iss_backup.map ~/astrobee/astrobee/resources/maps/iss.map"
+
+After the process has finished, Ctrl-C all the terminals.
+
+3. Generate a coverage database
+Inputs: activity_database_file
+Tools: ~/coverage_analysis/coverage_analyzer.py
+Outputs: activity_coverage_database_file. Temporary files: output_features_database.csv, output_features_database_nonrepeat.csv
+
+This step first extracts only the ML feature poses from activity_database.csv into a temporary file called, for example, 20210518_cabanel_features_db.csv or 20210726_dali_KiboEvRhRun1_features_db.csv.
+It then identifies repeated ML features and copies only one instance of each repeated ML feature into a temporary file called, for example, 20210518_cabanel_features_db_nonrepeat.csv or 20210726_dali_KiboEvRhRun1_features_db_nonrepeat.csv.
+The resulting features are unique; keep in mind, however, that multiple camera poses may see the same feature. For visualization purposes it is faster to work with unique features.
+The next step generates the list of cube centers of size defined by grid_size along the different walls in the JEM (Overhead, Aft, Forward, Deck).
+These walls are divided into the 8 bays of the JEM, where 1 corresponds to the bay closest to the entry node and 8 to the bay closest to the airlock.
+If a feature lies within the volume of the cube centered at the pose being searched, a counter is incremented, and the final number of matches together with the center of the cube are saved in the activity_coverage_database file called, for example,
+20210518_cabanel_coverage_db.csv or 20210726_dali_KiboEvRhRun1_coverage_db.csv.
+Each line of this file consists of the number of ML features found within the cube, followed by the X-Y-Z pose of the center of the cube checked.
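+For reference, the cube-counting step described above can be sketched in a few lines of Python. This is an illustrative sketch only; the names (count_features_per_cube, cube_centers, features, grid_size) are assumptions and not the actual coverage_analyzer.py code:
+
+    def count_features_per_cube(cube_centers, features, grid_size=0.30):
+        """Count how many (x, y, z) ML features fall inside the cube of side
+        grid_size centered at each cube center (illustrative sketch only)."""
+        half = grid_size / 2.0
+        counts = []
+        for cx, cy, cz in cube_centers:
+            n = sum(
+                1
+                for fx, fy, fz in features
+                if abs(fx - cx) <= half and abs(fy - cy) <= half and abs(fz - cz) <= half
+            )
+            # One output row per cube: match count followed by the cube center
+            counts.append((n, cx, cy, cz))
+        return counts
+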
+
+Run
+"python ~/coverage_analysis/coverage_analyzer.py <activity_database_file> <database_fileOut> <feat_only_fileOut> <activity_coverage_database_file>" where
+activity_database_file = /dir/where/database/file/is/20210726_dali_KiboEvRhRun1_db.csv,
+database_fileOut = /dir/where/results/are/going/to/be/placed/20210726_dali_KiboEvRhRun1_features_db.csv,
+feat_only_fileOut = /dir/where/results/are/going/to/be/placed/20210726_dali_KiboEvRhRun1_features_db_nonrepeat.csv, and
+activity_coverage_database_file = /dir/where/results/are/going/to/be/placed/20210518_cabanel_coverage_db.csv or /dir/where/results/are/going/to/be/placed/20210726_dali_KiboEvRhRun1_coverage_db.csv
+
+At this point, the user has a few options:
+1. Generate a 3D heatmap visualization in Gazebo of the studied map's general coverage of the JEM.
+   This is accomplished following Step 4.
+2. Generate a 3D animation and visualization in Gazebo of the studied robot trajectory's coverage at each pose of the trajectory.
+   This is accomplished following Step 5.
+3. Generate a statistics report in PDF of the general and detailed coverage of the studied map or robot trajectory.
+   This is accomplished following Step 6.
+
+4. Generate Walls Heatmap visualization in Gazebo
+Inputs: activity_coverage_database_file
+Tools: ~/coverage_painter.py, roslaunch
+Outputs: grid of 30 cm cubes painted in RViz (Gazebo)
+
+In a terminal, run
+ "roslaunch astrobee sim.launch dds:=false speed:=0.75 rviz:=true" to bring RViz up.
+If the RViz display looks too cluttered, most topics can be turned off except "Registration".
+In another terminal run
+ "python ~/coverage_analysis/coverage_painter.py <activity_coverage_database_file> map_coverage" where
+
+activity_coverage_database_file = /dir/where/results/are/going/to/be/placed/20210518_cabanel_coverage_db.csv
+
+This publishes cubes representing the map coverage in 20210518_cabanel_coverage_db.csv according to the following color scheme:
+red if the cube contains 0 registered ML features
+orange if the cube contains 1-10 registered ML features
+yellow if the cube contains 11-20 registered ML features
+green if the cube contains 21-40 registered ML features
+blue if the cube contains 40+ registered ML features
+
+5. Generate Robot's Trajectory Heatmap visualization in RViz
+Inputs: activity_database_file
+Tools: ~/coverage_painter.py, roslaunch
+Outputs: Animated trajectory consisting of each of the trajectory poses, color-coded according to their coverage level
+
+In a terminal, run "roslaunch astrobee sim.launch dds:=false speed:=0.75 rviz:=true" to bring RViz up.
+If the RViz display looks too cluttered, most topics can be turned off except "Registration".
+In another terminal run
+ "python ~/coverage_analysis/coverage_painter.py <activity_database_file> robot_coverage" where
+
+activity_database_file = /dir/where/database/file/is/20210726_dali_KiboEvRhRun1_db.csv
+
+This will publish each pose contained in the trajectory and color-code it according to the following color scheme (a minimal Python sketch of this mapping is given after the list):
+red if the cube contains 0 registered ML features
+orange if the cube contains 1-10 registered ML features
+yellow if the cube contains 11-20 registered ML features
+green if the cube contains 21-40 registered ML features
+blue if the cube contains 40+ registered ML features
+
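+For reference, the color scheme used in Steps 4 and 5 can be written as a small Python helper. This is an illustrative sketch only; ml_count_to_color is not part of coverage_painter.py:
+
+    def ml_count_to_color(num_ml_features):
+        """Map a cube's registered ML feature count to the heatmap color listed above."""
+        if num_ml_features == 0:
+            return "red"
+        elif num_ml_features <= 10:
+            return "orange"
+        elif num_ml_features <= 20:
+            return "yellow"
+        elif num_ml_features <= 40:
+            return "green"
+        else:
+            return "blue"
+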
+6. Generate stats report
+Inputs: activity_coverage_database_file = /dir/where/map-coverage/file/is/20210518_cabanel_coverage_db.csv or /dir/where/robot-trajectory-coverage/file/is/20210726_dali_KiboEvRhRun1_coverage_db.csv
+Tools: ~/coverage_stats_reporter.py
+Outputs: CSV and PDF stats reports, saved in the same location as the input file
+(CSV: /dir/where/map-coverage/file/is/20210707_dali_coverage_db_stats.csv or /dir/where/robot-trajectory-coverage/file/is/20210726_dali_KiboEvRhRun1_coverage_db_stats.csv)
+(PDF: /dir/where/map-coverage/file/is/20210707_dali_coverage_db_stats_report.pdf or /dir/where/robot-trajectory-coverage/file/is/20210726_dali_KiboEvRhRun1_coverage_db_stats_report.pdf)
+
+Generates a statistics report in CSV and PDF of the coverage on each wall (Overhead, Aft, Forward, Deck) and the airlock, based on the number of registered ML features found in every cube analyzed.
+The report consists of two images: 1) the general map or bag coverage distribution and 2) the detailed map or bag coverage distribution.
+It provides the coverage percentage based on the number of registered ML features at each wall, distributed across five ranges: 0ML, 1-10ML, 11-20ML, 21-40ML, and 40+ML, as well as the total number of registered ML features in
+the analyzed wall, and the percentage of the total number of ML features analyzed within the JEM walls and airlock.
+In a terminal run
+ "python ~/coverage_analysis/coverage_stats_reporter.py <activity_coverage_database_file>" where
+activity_coverage_database_file = /dir/where/map-coverage/file/is/20210518_cabanel_coverage_db.csv or /dir/where/robot-trajectory-coverage/file/is/20210726_dali_KiboEvRhRun1_coverage_db.csv
+
diff --git a/scripts/setup/debians/update_release.sh b/scripts/setup/debians/update_release.sh
index 088a80ceac..303e2232ba 100755
--- a/scripts/setup/debians/update_release.sh
+++ b/scripts/setup/debians/update_release.sh
@@ -12,7 +12,7 @@ cd $DIR/../../..
dch -c debian/changelog -v $1 -sed -i -e "s/set(ASTROBEE_VERSION .*)/set(ASTROBEE_VERSION $1)/g" CMakeLists.txt +sed -i -e "s/set(ASTROBEE_VERSION .*)/set(ASTROBEE_VERSION $1)/g" astrobee/CMakeLists.txt sed -i -e "s/\# Astrobee Robot Software v1/\# Astrobee Robot Software v1\n\n\#\# Release $1\n\nINSERT DESCRIPTION HERE/g" RELEASE.md diff --git a/scripts/setup/packages_base_bionic.lst b/scripts/setup/packages_base_bionic.lst index 5b242bd0a2..982b2c902d 100644 --- a/scripts/setup/packages_base_bionic.lst +++ b/scripts/setup/packages_base_bionic.lst @@ -75,6 +75,7 @@ ros-melodic-xmlrpcpp ros-melodic-mavlink ros-melodic-xacro ros-melodic-octomap +ros-melodic-catkin libalvar2 libdbow21 libgtsam @@ -125,3 +126,4 @@ libdecomputil-dev libjps3d-dev libopencv3.3.1 libopencv-dev3.3.1 +python-catkin-tools diff --git a/scripts/setup/packages_base_focal.lst b/scripts/setup/packages_base_focal.lst index 2158eec519..0a016a8397 100644 --- a/scripts/setup/packages_base_focal.lst +++ b/scripts/setup/packages_base_focal.lst @@ -73,6 +73,7 @@ ros-noetic-xmlrpcpp ros-noetic-mavlink ros-noetic-xacro ros-noetic-octomap +ros-noetic-catkin libalvar2 libdbow21 libgtsam @@ -122,3 +123,4 @@ libdecomputil-dev libjps3d-dev libopencv3.3.1 libopencv-dev3.3.1 +python3-catkin-tools diff --git a/scripts/setup/packages_base_xenial.lst b/scripts/setup/packages_base_xenial.lst index aae3d2f495..478b33c25e 100644 --- a/scripts/setup/packages_base_xenial.lst +++ b/scripts/setup/packages_base_xenial.lst @@ -73,6 +73,7 @@ ros-kinetic-opencv3 ros-kinetic-mavlink ros-kinetic-xacro ros-kinetic-octomap +ros-kinetic-catkin libalvar2 libdbow21 libgtsam @@ -121,3 +122,4 @@ libsoracore-dev libmiro-dev libdecomputil-dev libjps3d-dev +python-catkin-tools diff --git a/shared/CMakeLists.txt b/shared/CMakeLists.txt deleted file mode 100644 index 5b256d3f05..0000000000 --- a/shared/CMakeLists.txt +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -add_subdirectory(ff_common) -add_subdirectory(config_reader) -add_subdirectory(jsonloader) -if(USE_ROS) - add_subdirectory(ff_util) - add_subdirectory(msg_conversions) -endif(USE_ROS) diff --git a/shared/config_reader/CMakeLists.txt b/shared/config_reader/CMakeLists.txt index cc2f9d9fee..f23ac4fcc1 100644 --- a/shared/config_reader/CMakeLists.txt +++ b/shared/config_reader/CMakeLists.txt @@ -15,21 +15,63 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(config_reader) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_common +) + +## System dependencies are found with CMake's conventions +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(Luajit20 REQUIRED) + catkin_package( - INCLUDE_DIRS include ${GFLAGS_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} ${LUAJIT_INCLUDE_DIR} - LIBRARIES config_reader - CATKIN_DEPENDS ff_common + INCLUDE_DIRS include ${LUAJIT_INCLUDE_DIR} + LIBRARIES config_reader ${LUAJIT_LIBRARY} ${LUAJIT_MATH_LIBRARY} + CATKIN_DEPENDS roscpp nodelet ff_common ) +########### +## Build ## +########### + +# Specify additional locations of header files +# Your package locations should be listed before other locations include_directories( include ${catkin_INCLUDE_DIRS} + ${LUAJIT_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(config_reader + src/config_reader.cc + src/watch_files.cc +) +add_dependencies(config_reader ${catkin_EXPORTED_TARGETS}) +target_link_libraries(config_reader gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) -create_library(TARGET config_reader - INC ${GFLAGS_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} ${LUAJIT_INCLUDE_DIR} ff_common - ${CMAKE_CURRENT_BINARY_DIR}/include - LIBS ${GFLAGS_LIBRARIES} ${GLOG_LIBRARIES} ${LUAJIT_LIBRARIES} ff_common +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE ) diff --git a/shared/ff_common/CMakeLists.txt b/shared/ff_common/CMakeLists.txt index 84865de2e6..32a5fa9ea2 100644 --- a/shared/ff_common/CMakeLists.txt +++ b/shared/ff_common/CMakeLists.txt @@ -15,18 +15,49 @@ # License for the specific language governing permissions and limitations # under the License. 
+cmake_minimum_required(VERSION 3.0) project(ff_common) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + std_msgs + nodelet +) + +find_package(Eigen3 REQUIRED) + catkin_package( INCLUDE_DIRS include LIBRARIES ff_common - CATKIN_DEPENDS + CATKIN_DEPENDS roscpp std_msgs nodelet +) + +########### +## Build ## +########### + +# Specify additional locations of header files +# Your package locations should be listed before other locations +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ) -create_library(TARGET ff_common - INC ${GFLAGS_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - ${CMAKE_CURRENT_BINARY_DIR}/include - LIBS ${GFLAGS_LIBRARIES} ${GLOG_LIBRARIES}) +# Declare C++ libraries +add_library(ff_common + src/init.cc + src/thread.cc + src/utils.cc +) +add_dependencies(ff_common ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ff_common gflags glog ${catkin_LIBRARIES} ) + if(CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) @@ -41,8 +72,20 @@ if(CATKIN_ENABLE_TESTING) endif() -include_directories( - include - ${catkin_INCLUDE_DIRS} +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/shared/ff_common/include/ff_common/eigen_vectors.h b/shared/ff_common/include/ff_common/eigen_vectors.h index dd0f312626..7e0ce3b0b4 100644 --- a/shared/ff_common/include/ff_common/eigen_vectors.h +++ b/shared/ff_common/include/ff_common/eigen_vectors.h @@ -30,6 +30,7 @@ EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Vector2d) EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Vector3d) EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Affine3d) +EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Isometry3d) EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Matrix) EIGEN_DEFINE_STL_VECTOR_SPECIALIZATION(Eigen::Matrix2Xd) #endif // FF_COMMON_EIGEN_VECTORS_H_ diff --git a/shared/ff_common/package.xml b/shared/ff_common/package.xml index c14b84ddeb..388d39df21 100644 --- a/shared/ff_common/package.xml +++ b/shared/ff_common/package.xml @@ -15,10 +15,10 @@ catkin roscpp - roslib + std_msgs nodelet roscpp - roslib + std_msgs nodelet diff --git a/shared/ff_common/src/init.cc b/shared/ff_common/src/init.cc index f74dec2b13..5a839a2b05 100644 --- a/shared/ff_common/src/init.cc +++ b/shared/ff_common/src/init.cc @@ -40,7 +40,7 @@ namespace ff_common { google::InitGoogleLogging((*argv)[0]); logging_initialized = true; } - FREEFLYER_GFLAGS_NAMESPACE::ParseCommandLineFlags(argc, argv, true); + google::ParseCommandLineFlags(argc, argv, true); } // ros nodelets get commandline arguments in this form... 
@@ -57,7 +57,7 @@ namespace ff_common { if (main_thread) { InitFreeFlyerApplication(&argc, &casted_argv); } else { - FREEFLYER_GFLAGS_NAMESPACE::ParseCommandLineFlags(&argc, &casted_argv, true); + google::ParseCommandLineFlags(&argc, &casted_argv, true); } free(argv); } diff --git a/shared/ff_common/test/test_thread.cc b/shared/ff_common/test/test_thread.cc index 7a1ed3c2fe..bd6ed85ae6 100644 --- a/shared/ff_common/test/test_thread.cc +++ b/shared/ff_common/test/test_thread.cc @@ -74,3 +74,9 @@ TEST(thread, thread_pool) { pool.Join(); EXPECT_EQ(4u, vec.size()); } + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/shared/ff_util/CMakeLists.txt b/shared/ff_util/CMakeLists.txt index 6d2942752e..cc1a72d969 100644 --- a/shared/ff_util/CMakeLists.txt +++ b/shared/ff_util/CMakeLists.txt @@ -15,46 +15,80 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(ff_util) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + config_reader + roscpp + nodelet + dynamic_reconfigure + ff_msgs diagnostic_msgs + tf2_geometry_msgs + actionlib + msg_conversions +) + catkin_package( INCLUDE_DIRS include - LIBRARIES ff_nodelet config_server config_client perf_timer - CATKIN_DEPENDS config_reader roscpp nodelet dynamic_reconfigure ff_msgs diagnostic_msgs tf2_geometry_msgs actionlib + LIBRARIES ff_nodelet ff_flight config_server config_client perf_timer + CATKIN_DEPENDS config_reader roscpp nodelet dynamic_reconfigure ff_msgs diagnostic_msgs tf2_geometry_msgs actionlib msg_conversions +) + +########### +## Build ## +########### + +# Specify additional locations of header files +# Your package locations should be listed before other locations +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${JSONCPP_INCLUDE_DIRS} ) -create_library(TARGET ff_nodelet - DIR src/ff_nodelet - LIBS ${roscpp_LIBRARIES} ${nodelet_LIBRARIES} config_reader - INC ${catkin_INCLUDE_DIRS} - DEPS ff_msgs +# Declare C++ libraries +add_library(ff_nodelet + src/ff_nodelet/ff_nodelet.cc ) +add_dependencies(ff_nodelet ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ff_nodelet ${catkin_LIBRARIES}) -create_library(TARGET ff_flight - DIR src/ff_flight - LIBS ${catkin_LIBRARIES} ${EIGEN3_LIBRARIES} config_reader msg_conversions - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} - DEPS ff_msgs +# Declare C++ libraries +add_library(ff_flight + src/ff_flight/ff_flight.cc ) +add_dependencies(ff_flight ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ff_flight ${catkin_LIBRARIES}) -create_library(TARGET config_server - DIR src/config_server - LIBS ${catkin_LIBRARIES} config_reader - INC ${catkin_INCLUDE_DIRS} - DEPS config_reader diagnostic_msgs +# Declare C++ libraries +add_library(config_server + src/config_server/config_server.cc ) +add_dependencies(config_server ${catkin_EXPORTED_TARGETS}) +target_link_libraries(config_server ${catkin_LIBRARIES}) -create_library(TARGET config_client - DIR src/config_client - LIBS ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} +# Declare C++ libraries +add_library(config_client + src/config_client/config_client.cc ) +add_dependencies(config_client ${catkin_EXPORTED_TARGETS}) +target_link_libraries(config_client ${catkin_LIBRARIES}) -create_library(TARGET perf_timer - DIR 
src/perf_timer - LIBS ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} +# Declare C++ libraries +add_library(perf_timer + src/perf_timer/perf_timer.cc ) +add_dependencies(perf_timer ${catkin_EXPORTED_TARGETS}) +target_link_libraries(perf_timer ${catkin_LIBRARIES}) + +########## +## Test ## +########## # Only test if it is enabled if (CATKIN_ENABLE_TESTING) @@ -64,7 +98,6 @@ if (CATKIN_ENABLE_TESTING) include_directories(${catkin_INCLUDE_DIRS} include) # ff_action - add_rostest_gtest(ff_action_nominal_behaviour test/ff_action_nominal_behaviour.test test/ff_action_nominal_behaviour.cc) @@ -108,4 +141,25 @@ if (CATKIN_ENABLE_TESTING) endif() -install_launch_files() +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ff_nodelet ff_flight config_server config_client perf_timer + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) diff --git a/shared/ff_util/include/ff_util/ff_action.h b/shared/ff_util/include/ff_util/ff_action.h index fcf91001a8..8556c3eb48 100644 --- a/shared/ff_util/include/ff_util/ff_action.h +++ b/shared/ff_util/include/ff_util/ff_action.h @@ -26,7 +26,7 @@ #include #include -// C++11 includes +// C++ includes #include #include #include diff --git a/shared/ff_util/include/ff_util/ff_fsm.h b/shared/ff_util/include/ff_util/ff_fsm.h index 74a2a1f2b4..1294abfa00 100644 --- a/shared/ff_util/include/ff_util/ff_fsm.h +++ b/shared/ff_util/include/ff_util/ff_fsm.h @@ -19,7 +19,7 @@ #ifndef FF_UTIL_FF_FSM_H_ #define FF_UTIL_FF_FSM_H_ -// C++11 includes +// C++ includes #include #include #include diff --git a/shared/ff_util/include/ff_util/ff_service.h b/shared/ff_util/include/ff_util/ff_service.h index 24e41fbee1..d3b5b84e11 100644 --- a/shared/ff_util/include/ff_util/ff_service.h +++ b/shared/ff_util/include/ff_util/ff_service.h @@ -22,7 +22,7 @@ // ROS includes #include -// C++11 includes +// C++ includes #include #include #include diff --git a/shared/ff_util/package.xml b/shared/ff_util/package.xml index 4e42c11d7e..f31dcce699 100644 --- a/shared/ff_util/package.xml +++ b/shared/ff_util/package.xml @@ -24,6 +24,7 @@ ff_msgs tf2_geometry_msgs config_reader + msg_conversions roscpp roslib nodelet @@ -34,5 +35,6 @@ ff_msgs tf2_geometry_msgs config_reader + msg_conversions diff --git a/shared/ff_util/test/ff_action_active_timeout.cc b/shared/ff_util/test/ff_action_active_timeout.cc index a606552d80..3e0c2183b7 100644 --- a/shared/ff_util/test/ff_action_active_timeout.cc +++ b/shared/ff_util/test/ff_action_active_timeout.cc @@ -34,7 +34,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_cancel_own_goal.cc b/shared/ff_util/test/ff_action_cancel_own_goal.cc index 84639b4439..833fa444b7 100644 --- a/shared/ff_util/test/ff_action_cancel_own_goal.cc +++ b/shared/ff_util/test/ff_action_cancel_own_goal.cc @@ -35,7 +35,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_connect_timeout.cc 
b/shared/ff_util/test/ff_action_connect_timeout.cc index bf795afc66..1ebd79e9fe 100644 --- a/shared/ff_util/test/ff_action_connect_timeout.cc +++ b/shared/ff_util/test/ff_action_connect_timeout.cc @@ -33,7 +33,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_deadline_timeout.cc b/shared/ff_util/test/ff_action_deadline_timeout.cc index 2e2a78577b..74f19d9aba 100644 --- a/shared/ff_util/test/ff_action_deadline_timeout.cc +++ b/shared/ff_util/test/ff_action_deadline_timeout.cc @@ -34,7 +34,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_nominal_behaviour.cc b/shared/ff_util/test/ff_action_nominal_behaviour.cc index 8ff77372fd..9dbdea5121 100644 --- a/shared/ff_util/test/ff_action_nominal_behaviour.cc +++ b/shared/ff_util/test/ff_action_nominal_behaviour.cc @@ -33,7 +33,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_preempt_others_goal.cc b/shared/ff_util/test/ff_action_preempt_others_goal.cc index f56e4a9ca6..243a0303df 100644 --- a/shared/ff_util/test/ff_action_preempt_others_goal.cc +++ b/shared/ff_util/test/ff_action_preempt_others_goal.cc @@ -34,7 +34,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_preempt_own_goal.cc b/shared/ff_util/test/ff_action_preempt_own_goal.cc index d567cded15..97333782cb 100644 --- a/shared/ff_util/test/ff_action_preempt_own_goal.cc +++ b/shared/ff_util/test/ff_action_preempt_own_goal.cc @@ -36,7 +36,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/ff_util/test/ff_action_response_timeout.cc b/shared/ff_util/test/ff_action_response_timeout.cc index fc196fcf7a..4eb7c5713f 100644 --- a/shared/ff_util/test/ff_action_response_timeout.cc +++ b/shared/ff_util/test/ff_action_response_timeout.cc @@ -29,7 +29,7 @@ // Borrow the example from actionlib #include -// C++11 incliudes +// C++ includes #include #include diff --git a/shared/jsonloader/CMakeLists.txt b/shared/jsonloader/CMakeLists.txt index 5ee73a9285..2e4e496228 100644 --- a/shared/jsonloader/CMakeLists.txt +++ b/shared/jsonloader/CMakeLists.txt @@ -15,31 +15,116 @@ # License for the specific language governing permissions and limitations # under the License. 
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-variable") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-unused-variable") - -set(LIBS - ff_common - ${GLOG_LIBRARIES} - ${JSONCPP_LIBRARIES} - ${Boost_SYSTEM_LIBRARY} - ${Boost_FILESYSTEM_LIBRARY} - ) -set(INCLUDES - ff_common - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${EIGEN3_INCLUDE_DIRS} - ${GLOG_INCLUDE_DIRS} +cmake_minimum_required(VERSION 3.0) +project(jsonloader) + + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + std_msgs + nodelet +) + +find_package(Eigen3 REQUIRED) +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(JsonCpp REQUIRED) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES jsonloader + CATKIN_DEPENDS roscpp std_msgs nodelet +) + +########### +## Build ## +########### + +# Specify additional locations of header files +# Your package locations should be listed before other locations +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} ${JSONCPP_INCLUDE_DIRS} - ${Boost_INCLUDE_DIR} +) + +# Declare C++ libraries +add_library(jsonloader + src/command.cc + src/command_repo.cc + src/keepout.cc + src/keepoutio.cc + src/plan.cc + src/planio.cc + src/validation.cc +) +add_dependencies(jsonloader ${catkin_EXPORTED_TARGETS}) +target_link_libraries(jsonloader gflags glog ${JSONCPP_LIBRARIES} ${catkin_LIBRARIES} ) + + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + + add_rostest_gtest(test_command + test/test_command.test + test/test_command.cxx + ) + target_link_libraries(test_command + jsonloader + ) + + add_rostest_gtest(test_keepout + test/test_keepout.test + test/test_keepout.cxx + ) + target_link_libraries(test_keepout + jsonloader + ) + + add_rostest_gtest(test_keepoutio + test/test_keepoutio.test + test/test_keepoutio.cxx + ) + target_link_libraries(test_keepoutio + jsonloader + ) + + add_rostest_gtest(test_plan + test/test_plan.test + test/test_plan.cxx ) + target_link_libraries(test_plan + jsonloader + ) + + add_rostest_gtest(test_validation + test/test_validation.test + test/test_validation.cxx + ) + target_link_libraries(test_validation + jsonloader + ) + +endif() + +############# +## Install ## +############# -create_library(TARGET jsonloader - LIBS ${LIBS} - INC ${INCLUDES} - DEPS ${DEPS}) -create_test_targets(DIR test - LIBS jsonloader - INC ${INCLUDES} - DEPS ${DEPS}) +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/tools/simulator/arm_sim/package.xml b/shared/jsonloader/package.xml similarity index 64% rename from tools/simulator/arm_sim/package.xml rename to shared/jsonloader/package.xml index dfacc7ced2..62ad5593f4 100644 --- a/tools/simulator/arm_sim/package.xml +++ b/shared/jsonloader/package.xml @@ -1,9 +1,8 @@ - - arm_sim - 0.0.0 + jsonloader + 1.0.0 - The arm simulator simulates the astrobee arm. 
+ Package for loading json from fplans Apache License, Version 2.0 @@ -16,12 +15,10 @@ catkin roscpp - ff_msgs + std_msgs nodelet - ff_msgs roscpp + std_msgs nodelet - - - + diff --git a/shared/jsonloader/test/test_command.cxx b/shared/jsonloader/test/test_command.cxx index 8348df0d89..3d38557e76 100644 --- a/shared/jsonloader/test/test_command.cxx +++ b/shared/jsonloader/test/test_command.cxx @@ -414,3 +414,9 @@ TEST(Command, ValidStartRecording) { ASSERT_STREQ(s_cmd->description().data(), "mom"); } + +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/shared/jsonloader/test/test_command.test b/shared/jsonloader/test/test_command.test new file mode 100644 index 0000000000..f083b37a9d --- /dev/null +++ b/shared/jsonloader/test/test_command.test @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/shared/jsonloader/test/test_keepout.cxx b/shared/jsonloader/test/test_keepout.cxx index f8f62ca641..65847da773 100644 --- a/shared/jsonloader/test/test_keepout.cxx +++ b/shared/jsonloader/test/test_keepout.cxx @@ -63,3 +63,9 @@ TEST(Keepout, MergeCorrect) { a.Merge(b); EXPECT_EQ(a.GetBoxes().size(), 2); } + +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/shared/jsonloader/test/test_keepout.test b/shared/jsonloader/test/test_keepout.test new file mode 100644 index 0000000000..d2a1607f83 --- /dev/null +++ b/shared/jsonloader/test/test_keepout.test @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/shared/jsonloader/test/test_keepoutio.cxx b/shared/jsonloader/test/test_keepoutio.cxx index 8d0522aab5..2141500276 100644 --- a/shared/jsonloader/test/test_keepoutio.cxx +++ b/shared/jsonloader/test/test_keepoutio.cxx @@ -27,7 +27,7 @@ #include // TEST_DIR is defined by cmake as the test directory of our module -const std::string kDataDir = std::string(TEST_DIR) + "/data/"; +const std::string kDataDir = std::string(std::getenv("DATA_DIR")); TEST(KeepoutIO, LoadGoodKeepout) { jsonloader::Keepout k(true); // opposite of what it should be @@ -63,3 +63,9 @@ TEST(KeepoutIO, LoadDir) { EXPECT_EQ(safeZone.GetBoxes().size(), 1); EXPECT_EQ(dangerZone.GetBoxes().size(), 2); } + +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/shared/jsonloader/test/test_keepoutio.test b/shared/jsonloader/test/test_keepoutio.test new file mode 100644 index 0000000000..559baa9702 --- /dev/null +++ b/shared/jsonloader/test/test_keepoutio.test @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/shared/jsonloader/test/test_plan.cxx b/shared/jsonloader/test/test_plan.cxx index 2f47a60b8f..3d30ede81a 100644 --- a/shared/jsonloader/test/test_plan.cxx +++ b/shared/jsonloader/test/test_plan.cxx @@ -37,7 +37,7 @@ namespace fs = boost::filesystem; -const std::string kDataDir = std::string(TEST_DIR) + "/data/"; +const std::string kDataDir = std::string(std::getenv("DATA_DIR")); std::string SlurpFile(std::string const& f_name) { std::ifstream file(f_name, std::ios::in | std::ios::binary); @@ -315,3 +315,9 @@ TEST(PlanIO, PlanEvolution) { LOG(INFO) << "Operating Limits: " << ol.valid(); } } + +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return 
RUN_ALL_TESTS(); +} diff --git a/shared/jsonloader/test/test_plan.test b/shared/jsonloader/test/test_plan.test new file mode 100644 index 0000000000..6b19ad8f80 --- /dev/null +++ b/shared/jsonloader/test/test_plan.test @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/shared/jsonloader/test/test_validation.cxx b/shared/jsonloader/test/test_validation.cxx index 23ec6a3e4c..d5fd191cf1 100644 --- a/shared/jsonloader/test/test_validation.cxx +++ b/shared/jsonloader/test/test_validation.cxx @@ -179,3 +179,8 @@ TEST(Validation, EnumField) { EXPECT_TRUE(optional.Validate(v)); } +// Run all the tests that were declared with TEST() +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/shared/jsonloader/test/test_validation.test b/shared/jsonloader/test/test_validation.test new file mode 100644 index 0000000000..6cada9502d --- /dev/null +++ b/shared/jsonloader/test/test_validation.test @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/shared/msg_conversions/CMakeLists.txt b/shared/msg_conversions/CMakeLists.txt index 47064ef145..4a66b1750e 100644 --- a/shared/msg_conversions/CMakeLists.txt +++ b/shared/msg_conversions/CMakeLists.txt @@ -15,27 +15,87 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(msg_conversions) -if (USE_ROS) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + message_runtime + std_msgs + geometry_msgs + config_reader + ff_msgs + tf2 + tf2_ros +) + +find_package(Eigen3 REQUIRED) catkin_package( LIBRARIES ${PROJECT_NAME} INCLUDE_DIRS include - CATKIN_DEPENDS roscpp message_runtime std_msgs geometry_msgs sensor_msgs - DEPENDS ff_msgs + CATKIN_DEPENDS roscpp message_runtime std_msgs geometry_msgs config_reader tf2 tf2_ros ) -create_library(TARGET msg_conversions - LIBS ${catkin_LIBRARIES} config_reader - DEPS ff_msgs eigen - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} +########### +## Build ## +########### + +# Specify additional locations of header files +# Your package locations should be listed before other locations +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} ${GFLAGS_LIBRARIES} ff_common - INC ${catkin_INCLUDE_DIRS} ${GFLAGS_INCLUDE_DIRS} - DEPS ff_msgs +# Declare C++ libraries +add_library(msg_conversions + src/msg_conversions.cc ) +add_dependencies(msg_conversions ${catkin_EXPORTED_TARGETS}) +target_link_libraries(msg_conversions ${EIGEN_LIBRARIES} ${catkin_LIBRARIES}) + +## Declare a C++ executable: inspection_tool +add_executable(landmark_msg_cnv tools/landmark_msg_cnv.cc) +add_dependencies(landmark_msg_cnv ${catkin_EXPORTED_TARGETS}) +target_link_libraries(landmark_msg_cnv + msg_conversions gflags ${catkin_LIBRARIES}) + +## Declare a C++ executable: inspection_tool +add_executable(pose_stamped_msg_cnv tools/pose_stamped_msg_cnv.cc) +add_dependencies(pose_stamped_msg_cnv ${catkin_EXPORTED_TARGETS}) +target_link_libraries(pose_stamped_msg_cnv + msg_conversions gflags ${catkin_LIBRARIES}) -endif (USE_ROS) +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION 
${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Install C++ executables +install(TARGETS landmark_msg_cnv DESTINATION bin) +install(TARGETS pose_stamped_msg_cnv DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/landmark_msg_cnv share/${PROJECT_NAME} + COMMAND ln -s ../../bin/pose_stamped_msg_cnv share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) diff --git a/shared/msg_conversions/include/msg_conversions/msg_conversions.h b/shared/msg_conversions/include/msg_conversions/msg_conversions.h index b2012975ca..b40ffc4025 100644 --- a/shared/msg_conversions/include/msg_conversions/msg_conversions.h +++ b/shared/msg_conversions/include/msg_conversions/msg_conversions.h @@ -21,8 +21,8 @@ #include -#include -#include +#include +#include #include #include diff --git a/shared/msg_conversions/package.xml b/shared/msg_conversions/package.xml index d926f2347c..3969db8423 100644 --- a/shared/msg_conversions/package.xml +++ b/shared/msg_conversions/package.xml @@ -15,14 +15,22 @@ catkin roscpp + message_runtime std_msgs sensor_msgs geometry_msgs ff_msgs + config_reader + tf2 + tf2_ros roscpp message_runtime + message_runtime std_msgs sensor_msgs geometry_msgs ff_msgs + config_reader + tf2 + tf2_ros diff --git a/simulation/CMakeLists.txt b/simulation/CMakeLists.txt index 84e2957837..09f919d6d5 100644 --- a/simulation/CMakeLists.txt +++ b/simulation/CMakeLists.txt @@ -15,8 +15,30 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(astrobee_gazebo) +if (ENABLE_GAZEBO) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + visualization_msgs + tf2 + tf2_ros + tf2_eigen + tf2_msgs + gazebo_ros + sensor_msgs + ff_msgs + ff_hw_msgs + tf2_geometry_msgs + ff_util + gnc_autocode + camera +) + # Find packages find_package(gazebo REQUIRED) @@ -62,477 +84,278 @@ catkin_package( sensor_msgs ff_msgs ff_hw_msgs + tf2_geometry_msgs ff_util + gnc_autocode + camera ) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${GAZEBO_INCLUDE_DIRS} +) + +link_directories(${GAZEBO_LIBRARY_DIRS}) +list(APPEND CMAKE_CXX_FLAGS "${GAZEBO_CXX_FLAGS}") + # Create reusable classes for models and plugins -create_library( - DIR - src - TARGET - astrobee_gazebo - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - ff_nodelet - INC - ${GAZEBO_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(astrobee_gazebo + src/astrobee_gazebo.cc +) +add_dependencies(astrobee_gazebo ${catkin_EXPORTED_TARGETS}) +target_link_libraries(astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the EPS system -create_library( - DIR - src/gazebo_model_plugin_eps - TARGET - gazebo_model_plugin_eps - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_eps + src/gazebo_model_plugin_eps/gazebo_model_plugin_eps.cc +) +add_dependencies(gazebo_model_plugin_eps ${catkin_EXPORTED_TARGETS}) 
+target_link_libraries(gazebo_model_plugin_eps astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for aerodynamic drag -create_library( - DIR - src/gazebo_model_plugin_drag - TARGET - gazebo_model_plugin_drag - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_model_plugin_drag + src/gazebo_model_plugin_drag/gazebo_model_plugin_drag.cc +) +add_dependencies(gazebo_model_plugin_drag ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_drag astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the ground truth -create_library( - DIR - src/gazebo_model_plugin_truth - TARGET - gazebo_model_plugin_truth - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_model_plugin_truth + src/gazebo_model_plugin_truth/gazebo_model_plugin_truth.cc +) +add_dependencies(gazebo_model_plugin_truth ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_truth astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the propulsion system -create_library( - DIR - src/gazebo_model_plugin_pmc - TARGET - gazebo_model_plugin_pmc - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - gnc_autocode - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_model_plugin_pmc + src/gazebo_model_plugin_pmc/gazebo_model_plugin_pmc.cc +) +add_dependencies(gazebo_model_plugin_pmc ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_pmc astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the perching arm -create_library( - DIR - src/gazebo_model_plugin_perching_arm - TARGET - gazebo_model_plugin_perching_arm - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_model_plugin_perching_arm + src/gazebo_model_plugin_perching_arm/gazebo_model_plugin_perching_arm.cc +) +add_dependencies(gazebo_model_plugin_perching_arm ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_perching_arm astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) -# Create a model plugin for the flashlight -create_library( - DIR - src/gazebo_model_plugin_flashlight - TARGET - gazebo_model_plugin_flashlight - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +# Create a model plugin for the front flashlight +add_library(gazebo_model_plugin_flashlight + src/gazebo_model_plugin_flashlight/gazebo_model_plugin_flashlight.cc +) +add_dependencies(gazebo_model_plugin_flashlight ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_flashlight astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the laser -create_library( - DIR - src/gazebo_model_plugin_laser - TARGET - gazebo_model_plugin_laser - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_laser + src/gazebo_model_plugin_laser/gazebo_model_plugin_laser.cc +) +add_dependencies(gazebo_model_plugin_laser ${catkin_EXPORTED_TARGETS}) 
+target_link_libraries(gazebo_model_plugin_laser astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the LLP CPU mem monitor -create_library( - DIR - src/gazebo_model_plugin_llp_cpu_mem_monitor - TARGET - gazebo_model_plugin_llp_cpu_mem_monitor - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_llp_cpu_mem_monitor + src/gazebo_model_plugin_llp_cpu_mem_monitor/gazebo_model_plugin_llp_cpu_mem_monitor.cc +) +add_dependencies(gazebo_model_plugin_llp_cpu_mem_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_llp_cpu_mem_monitor astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the LLP disk monitor -create_library( - DIR - src/gazebo_model_plugin_llp_disk_monitor - TARGET - gazebo_model_plugin_llp_disk_monitor - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_llp_disk_monitor + src/gazebo_model_plugin_llp_disk_monitor/gazebo_model_plugin_llp_disk_monitor.cc +) +add_dependencies(gazebo_model_plugin_llp_disk_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_llp_disk_monitor astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the MLP CPU mem monitor -create_library( - DIR - src/gazebo_model_plugin_mlp_cpu_mem_monitor - TARGET - gazebo_model_plugin_mlp_cpu_mem_monitor - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_mlp_cpu_mem_monitor + src/gazebo_model_plugin_mlp_cpu_mem_monitor/gazebo_model_plugin_mlp_cpu_mem_monitor.cc +) +add_dependencies(gazebo_model_plugin_mlp_cpu_mem_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_mlp_cpu_mem_monitor astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the MLP disk monitor -create_library( - DIR - src/gazebo_model_plugin_mlp_disk_monitor - TARGET - gazebo_model_plugin_mlp_disk_monitor - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_mlp_disk_monitor + src/gazebo_model_plugin_mlp_disk_monitor/gazebo_model_plugin_mlp_disk_monitor.cc +) +add_dependencies(gazebo_model_plugin_mlp_disk_monitor ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_mlp_disk_monitor astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the speed camera -create_library( - DIR - src/gazebo_model_plugin_speed_cam - TARGET - gazebo_model_plugin_speed_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_speed_cam + src/gazebo_model_plugin_speed_cam/gazebo_model_plugin_speed_cam.cc +) +add_dependencies(gazebo_model_plugin_speed_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_speed_cam astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for the signal lights -create_library( - DIR - src/gazebo_model_plugin_signal_lights - TARGET - gazebo_model_plugin_signal_lights - LIBS - 
${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_hw_msgs +add_library(gazebo_model_plugin_signal_lights + src/gazebo_model_plugin_signal_lights/gazebo_model_plugin_signal_lights.cc +) +add_dependencies(gazebo_model_plugin_signal_lights ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_model_plugin_signal_lights astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the IMU -create_library( - DIR - src/gazebo_sensor_plugin_imu - TARGET - gazebo_sensor_plugin_imu - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_sensor_plugin_imu + src/gazebo_sensor_plugin_imu/gazebo_sensor_plugin_imu.cc +) +add_dependencies(gazebo_sensor_plugin_imu ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_imu astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the perch camera -create_library( - DIR - src/gazebo_sensor_plugin_perch_cam - TARGET - gazebo_sensor_plugin_perch_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_sensor_plugin_perch_cam + src/gazebo_sensor_plugin_perch_cam/gazebo_sensor_plugin_perch_cam.cc +) +add_dependencies(gazebo_sensor_plugin_perch_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_perch_cam astrobee_gazebo + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the handrail detection algorithm -#create_library( -# DIR -# src/gazebo_sensor_plugin_handrail_detect -# TARGET -# gazebo_sensor_plugin_handrail_detect -# LIBS -# ${GAZEBO_LIBRARIES} -# ${catkin_LIBRARIES} -# astrobee_gazebo -# camera -# INC -# ${GAZEBO_INCLUDE_DIRS} -# ${catkin_INCLUDE_DIRS} -# ${EIGEN3_INCLUDE_DIRS} -# DEPS -# ff_util -# ff_msgs -#) +# add_library(gazebo_sensor_plugin_handrail_detect +# src/gazebo_sensor_plugin_handrail_detect/gazebo_sensor_plugin_handrail_detect.cc +# ) +# add_dependencies(gazebo_sensor_plugin_handrail_detect ${catkin_EXPORTED_TARGETS}) +# target_link_libraries(gazebo_sensor_plugin_handrail_detect +# ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} +# ) # Create a sensor plugin for the hazard camera -create_library( - DIR - src/gazebo_sensor_plugin_haz_cam - TARGET - gazebo_sensor_plugin_haz_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_sensor_plugin_haz_cam + src/gazebo_sensor_plugin_haz_cam/gazebo_sensor_plugin_haz_cam.cc +) +add_dependencies(gazebo_sensor_plugin_haz_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_haz_cam + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the nav cam -create_library( - DIR - src/gazebo_sensor_plugin_nav_cam - TARGET - gazebo_sensor_plugin_nav_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_msgs +add_library(gazebo_sensor_plugin_nav_cam + src/gazebo_sensor_plugin_nav_cam/gazebo_sensor_plugin_nav_cam.cc +) +add_dependencies(gazebo_sensor_plugin_nav_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_nav_cam + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the dock cam -create_library( - DIR - 
src/gazebo_sensor_plugin_dock_cam - TARGET - gazebo_sensor_plugin_dock_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util +add_library(gazebo_sensor_plugin_dock_cam + src/gazebo_sensor_plugin_dock_cam/gazebo_sensor_plugin_dock_cam.cc +) +add_dependencies(gazebo_sensor_plugin_dock_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_dock_cam + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a sensor plugin for the sci cam -create_library( - DIR - src/gazebo_sensor_plugin_sci_cam - TARGET - gazebo_sensor_plugin_sci_cam - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_msgs +add_library(gazebo_sensor_plugin_sci_cam + src/gazebo_sensor_plugin_sci_cam/gazebo_sensor_plugin_sci_cam.cc +) +add_dependencies(gazebo_sensor_plugin_sci_cam ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_sci_cam + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for sparse mapping -create_library( - DIR - src/gazebo_sensor_plugin_sparse_map - TARGET - gazebo_sensor_plugin_sparse_map - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - camera - INC - ${GAZEBO_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_msgs +add_library(gazebo_sensor_plugin_sparse_map + src/gazebo_sensor_plugin_sparse_map/gazebo_sensor_plugin_sparse_map.cc +) +add_dependencies(gazebo_sensor_plugin_sparse_map ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_sparse_map + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for AR localization -create_library( - DIR - src/gazebo_sensor_plugin_ar_tags - TARGET - gazebo_sensor_plugin_ar_tags - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - camera - msg_conversions - INC - ${GAZEBO_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_msgs +add_library(gazebo_sensor_plugin_ar_tags + src/gazebo_sensor_plugin_ar_tags/gazebo_sensor_plugin_ar_tags.cc +) +add_dependencies(gazebo_sensor_plugin_ar_tags ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_ar_tags + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) # Create a model plugin for optical flow -create_library( - DIR - src/gazebo_sensor_plugin_optical_flow - TARGET - gazebo_sensor_plugin_optical_flow - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - astrobee_gazebo - camera - INC - ${GAZEBO_INCLUDE_DIRS} - ${EIGEN3_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} - DEPS - ff_util - ff_msgs +add_library(gazebo_sensor_plugin_optical_flow + src/gazebo_sensor_plugin_optical_flow/gazebo_sensor_plugin_optical_flow.cc ) - -# Create a model plugin for sparse mapping -create_library( - DIR - src/gazebo_world_plugin_speed - TARGET - gazebo_world_plugin_speed - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} +add_dependencies(gazebo_sensor_plugin_optical_flow ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_sensor_plugin_optical_flow + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) +# Create a world plugin for speed +add_library(gazebo_world_plugin_speed + src/gazebo_world_plugin_speed/gazebo_world_plugin_speed.cc +) +add_dependencies(gazebo_world_plugin_speed ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_world_plugin_speed + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} +) -# Create a model plugin 
for the client gui -create_library( - DIR - src/gazebo_system_plugin_client - TARGET - gazebo_system_plugin_client - LIBS - ${GAZEBO_LIBRARIES} - ${catkin_LIBRARIES} - INC - ${GAZEBO_INCLUDE_DIRS} - ${catkin_INCLUDE_DIRS} +# Create a system plugin for the client gui +add_library(gazebo_system_plugin_client + src/gazebo_system_plugin_client/gazebo_system_plugin_client.cc +) +add_dependencies(gazebo_system_plugin_client ${catkin_EXPORTED_TARGETS}) +target_link_libraries(gazebo_system_plugin_client + ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES} ) +############# +## Install ## +############# -# Install our launch files -install_launch_files() +# Mark launch files for installation +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) # Also install some other resources install(DIRECTORY worlds DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) -catkin_install_python(PROGRAMS scripts/spawn_model scripts/perf_wrapper scripts/start_server scripts/common +install(PROGRAMS scripts/perf_wrapper scripts/start_server scripts/common + DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) +catkin_install_python(PROGRAMS scripts/spawn_model DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) + +else (ENABLE_GAZEBO) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() +endif (ENABLE_GAZEBO) diff --git a/simulation/package.xml b/simulation/package.xml index a0e6fa6be0..f1dbeaa26c 100644 --- a/simulation/package.xml +++ b/simulation/package.xml @@ -25,7 +25,10 @@ sensor_msgs ff_msgs ff_hw_msgs + tf2_geometry_msgs ff_util + gnc_autocode + camera visualization_msgs tf2 tf2_ros @@ -35,7 +38,10 @@ sensor_msgs ff_msgs ff_hw_msgs + tf2_geometry_msgs ff_util + gnc_autocode + camera diff --git a/submodules/platform b/submodules/platform index a7a92c4c4f..8e29458611 160000 --- a/submodules/platform +++ b/submodules/platform @@ -1 +1 @@ -Subproject commit a7a92c4c4ff15bd501d83243827a3eb379aea90e +Subproject commit 8e29458611ed62c1b554a74b5796f3c67ed068b1 diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt deleted file mode 100644 index f64c1178c2..0000000000 --- a/tools/CMakeLists.txt +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -add_subdirectory(ekf_bag) -add_subdirectory(graph_bag) -add_subdirectory(imu_bias_tester) -add_subdirectory(localization_rviz_plugins) -add_subdirectory(rviz_visualizer) -add_subdirectory(visualeyez) -add_subdirectory(simulator) - -if (FFMPEG_FOUND) - add_subdirectory(ekf_video) -endif (FFMPEG_FOUND) diff --git a/tools/calibration/CMakeLists.txt b/tools/calibration/CMakeLists.txt new file mode 100644 index 0000000000..dd48d3a5e9 --- /dev/null +++ b/tools/calibration/CMakeLists.txt @@ -0,0 +1,150 @@ +#Copyright(c) 2017, United States Government, as represented by the +#Administrator of the National Aeronautics and Space Administration. +# +#All rights reserved. +# +#The Astrobee platform is licensed under the Apache License, Version 2.0 +#(the "License"); you may not use this file except in compliance with the +#License.You may obtain a copy of the License at +# +#http: // www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +#WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.See the +#License for the specific language governing permissions and limitations +#under the License. + +cmake_minimum_required(VERSION 3.0) +project(calibration) + +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + camera + localization_common + msg_conversions + optimization_common +) + +# Find OpenCV3 +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV 3.3.1 REQUIRED) + +find_package(Eigen3 REQUIRED) + +find_package(Ceres REQUIRED) + +catkin_package( + INCLUDE_DIRS include + LIBRARIES ${PROJECT_NAME} ${CERES_LIBRARIES} ${OpenCV_LIBRARIES} + CATKIN_DEPENDS camera localization_common msg_conversions optimization_common +) + +########### +## Build ## +########### +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${CERES_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(${PROJECT_NAME} + src/camera_utilities.cc + src/parameter_reader.cc + src/utilities.cc +) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES} ${OpenCV_LIBRARIES} ${CERES_LIBRARIES}) + +## Declare a C++ executable: create_undistorted_images +add_executable(create_undistorted_images tools/create_undistorted_images.cc) +add_dependencies(create_undistorted_images ${catkin_EXPORTED_TARGETS}) +target_link_libraries(create_undistorted_images + ${PROJECT_NAME} ${catkin_LIBRARIES} ${CERES_LIBRARIES} ) + +## Declare a C++ executable: create_undistorted_images +add_executable(run_camera_target_based_intrinsics_calibrator tools/run_camera_target_based_intrinsics_calibrator.cc) +add_dependencies(run_camera_target_based_intrinsics_calibrator ${catkin_EXPORTED_TARGETS}) +target_link_libraries(run_camera_target_based_intrinsics_calibrator + ${PROJECT_NAME} ${catkin_LIBRARIES} ${CERES_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION 
${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Install C++ executables +install(TARGETS create_undistorted_images DESTINATION bin) +install(TARGETS run_camera_target_based_intrinsics_calibrator DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/create_undistorted_images share/${PROJECT_NAME} + COMMAND ln -s ../../bin/run_camera_target_based_intrinsics_calibrator share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + +if(CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + add_rostest_gtest(test_ransac_pnp + test/test_ransac_pnp.test + test/test_ransac_pnp.cc + test/test_utilities.cc + ) + target_link_libraries(test_ransac_pnp + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) + add_rostest_gtest(test_reprojection_pose_estimate + test/test_reprojection_pose_estimate.test + test/test_reprojection_pose_estimate.cc + test/test_utilities.cc + ) + target_link_libraries(test_reprojection_pose_estimate + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) + add_rostest_gtest(test_pnp + test/test_pnp.test + test/test_pnp.cc + test/test_utilities.cc + ) + target_link_libraries(test_pnp + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) + add_rostest_gtest(test_camera_utilities + test/test_camera_utilities.test + test/test_camera_utilities.cc + test/test_utilities.cc + ) + target_link_libraries(test_camera_utilities + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) + add_rostest_gtest(test_camera_target_based_intrinsics_calibrator + test/test_camera_target_based_intrinsics_calibrator.test + test/test_camera_target_based_intrinsics_calibrator.cc + test/test_utilities.cc + ) + target_link_libraries(test_camera_target_based_intrinsics_calibrator + ${PROJECT_NAME} ${catkin_LIBRARIES} + ) +endif() diff --git a/tools/calibration/doc/images/calibrated_reprojection_from_all_targets_absolute_image.png b/tools/calibration/doc/images/calibrated_reprojection_from_all_targets_absolute_image.png new file mode 100644 index 0000000000..364b504187 Binary files /dev/null and b/tools/calibration/doc/images/calibrated_reprojection_from_all_targets_absolute_image.png differ diff --git a/tools/calibration/doc/images/detection_image.jpg b/tools/calibration/doc/images/detection_image.jpg new file mode 100644 index 0000000000..8a4e30d856 Binary files /dev/null and b/tools/calibration/doc/images/detection_image.jpg differ diff --git a/tools/calibration/doc/images/distorted.png b/tools/calibration/doc/images/distorted.png new file mode 100644 index 0000000000..1486f294f8 Binary files /dev/null and b/tools/calibration/doc/images/distorted.png differ diff --git a/tools/calibration/doc/images/fov_undistorted.png b/tools/calibration/doc/images/fov_undistorted.png new file mode 100644 index 0000000000..d93d72d803 Binary files /dev/null and b/tools/calibration/doc/images/fov_undistorted.png differ diff --git a/tools/calibration/doc/images/norm_errors.png b/tools/calibration/doc/images/norm_errors.png new file mode 100644 index 0000000000..2a889bc4e5 Binary files /dev/null and b/tools/calibration/doc/images/norm_errors.png differ diff --git a/tools/calibration/doc/images/x_errors.png b/tools/calibration/doc/images/x_errors.png new file mode 100644 index 0000000000..c9d5701356 Binary files /dev/null and b/tools/calibration/doc/images/x_errors.png differ diff --git a/tools/calibration/doc/images/y_errors.png b/tools/calibration/doc/images/y_errors.png new file mode 100644 index 0000000000..08457e4bca Binary files /dev/null and 
b/tools/calibration/doc/images/y_errors.png differ diff --git a/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator.h b/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator.h new file mode 100644 index 0000000000..505683dc4b --- /dev/null +++ b/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator.h @@ -0,0 +1,249 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_H_ +#define CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include +#include + +namespace calibration { +template +class CameraTargetBasedIntrinsicsCalibrator { + public: + explicit CameraTargetBasedIntrinsicsCalibrator(const CameraTargetBasedIntrinsicsCalibratorParams& params) + : params_(params) {} + + bool EstimateInitialTargetPosesAndCalibrate( + const std::vector& correspondences_set, + const StateParameters& initial_state_parameters, StateParameters& calibrated_state_parameters, + StateParametersCovariances& covariances); + + bool Calibrate(const std::vector& match_sets, const StateParameters& initial_state_parameters, + StateParameters& calibrated_state_parameters, StateParametersCovariances& covariances); + + const CameraTargetBasedIntrinsicsCalibratorParams& params() { return params_; } + + private: + void Initialize(const StateParameters& initial_state_parameters); + boost::optional Covariances(); + void AddCameraTTargetParameter(const Eigen::Isometry3d& camera_T_target); + double RadialScaleFactor(const Eigen::Vector2d& image_point, const Eigen::Vector2i& image_size) const; + void SaveResults(const StateParameters& calibrated_state_parameters, const std::vector& match_sets) const; + localization_common::ImageCorrespondences InlierMatches(const localization_common::ImageCorrespondences& match_set, + const std::vector& inliers) const; + + CameraTargetBasedIntrinsicsCalibratorParams params_; + OptimizationStateParameters state_parameters_; + ceres::Problem problem_; + // Keep track of correspondences used to create final reprojection image + std::vector valid_correspondences_set_; +}; + +template +bool CameraTargetBasedIntrinsicsCalibrator::EstimateInitialTargetPosesAndCalibrate( + const std::vector& correspondences_set, + const StateParameters& initial_state_parameters, StateParameters& calibrated_state_parameters, + StateParametersCovariances& covariances) { + std::vector match_sets; + const Eigen::Matrix3d initial_intrinsics = + optimization_common::Intrinsics(initial_state_parameters.focal_lengths, initial_state_parameters.principal_points); + for (const auto& correspondences : correspondences_set) { + const auto 
camera_T_target = ReprojectionPoseEstimate( + correspondences.image_points, correspondences.points_3d, initial_state_parameters.focal_lengths, + initial_state_parameters.principal_points, initial_state_parameters.distortion, + params_.reprojection_pose_estimate); + + if (!camera_T_target) { + LogError("Failed to get camera_T_target with " << correspondences.size() << " matches."); + continue; + } + + match_sets.emplace_back(correspondences, camera_T_target->pose, camera_T_target->inliers); + if (params_.save_individual_initial_reprojection_images) { + static int image_count = 0; + const auto& match_set = match_sets.back(); + SaveReprojectionImage(match_set.correspondences.image_points, match_set.correspondences.points_3d, + match_set.inliers, initial_intrinsics, initial_state_parameters.distortion, + match_set.pose_estimate, params_.individual_max_visualization_error_norm, + "reprojection_image_" + std::to_string(image_count++) + ".png"); + } + } + + return Calibrate(match_sets, initial_state_parameters, calibrated_state_parameters, covariances); +} + +template +bool CameraTargetBasedIntrinsicsCalibrator::Calibrate(const std::vector& match_sets, + const StateParameters& initial_state_parameters, + StateParameters& calibrated_state_parameters, + StateParametersCovariances& covariances) { + Initialize(initial_state_parameters); + // Reserve so that pointers aren't modified while new target poses are added. + // This can mess up ceres::Problem since it relies on pointers of add parameters. + state_parameters_.camera_T_targets.reserve(match_sets.size()); + // Add residuals to problem + for (const auto& match_set : match_sets) { + AddCameraTTargetParameter(match_set.pose_estimate); + localization_common::ImageCorrespondences valid_correspondences = + params_.only_use_inliers ? 
InlierMatches(match_set.correspondences, match_set.inliers) + : match_set.correspondences; + valid_correspondences_set_.emplace_back(valid_correspondences); + for (int i = 0; i < static_cast(valid_correspondences.size()) && i < params_.max_num_match_sets; ++i) { + const double radial_scale_factor = RadialScaleFactor(valid_correspondences.image_points[i], params_.image_size); + optimization_common::ReprojectionError::AddCostFunction( + valid_correspondences.image_points[i], valid_correspondences.points_3d[i], + state_parameters_.camera_T_targets.back(), state_parameters_.focal_lengths, state_parameters_.principal_points, + state_parameters_.distortion, problem_, radial_scale_factor, params_.optimization.huber_loss); + } + } + + ceres::Solver::Summary summary; + ceres::Solve(params_.optimization.solver_options, &problem_, &summary); + if (params_.optimization.verbose) std::cout << summary.FullReport() << std::endl; + if (!summary.IsSolutionUsable()) { + LogError("Calibrate: Calibration failed."); + return false; + } + + calibrated_state_parameters = state_parameters_.OptimizedStateParameters(); + SaveResults(calibrated_state_parameters, match_sets); + + const auto state_covariances = Covariances(); + if (!state_covariances) { + LogError("Calibrate: Failed to get covariances"); + return false; + } + covariances = *state_covariances; + + return true; +} + +template +void CameraTargetBasedIntrinsicsCalibrator::Initialize(const StateParameters& initial_state_parameters) { + state_parameters_.SetInitialStateParameters(initial_state_parameters); + optimization_common::AddParameterBlock(2, state_parameters_.focal_lengths.data(), problem_, + !params_.calibrate_focal_lengths); + optimization_common::AddParameterBlock(2, state_parameters_.principal_points.data(), problem_, + !params_.calibrate_principal_points); + optimization_common::AddParameterBlock(DISTORTER::kNumParams, state_parameters_.distortion.data(), problem_, + !params_.calibrate_distortion); + if (params_.optimization.verbose) { + if (params_.calibrate_focal_lengths) LogInfo("Calibrating focal lengths."); + if (params_.calibrate_principal_points) LogInfo("Calibrating principal points."); + if (params_.calibrate_distortion) LogInfo("Calibrating distortion."); + if (params_.calibrate_target_poses) LogInfo("Calibrating target poses."); + } +} + +template +boost::optional CameraTargetBasedIntrinsicsCalibrator::Covariances() { + ceres::Covariance::Options options; + ceres::Covariance covariance(options); + + std::vector > covariance_blocks; + covariance_blocks.push_back( + std::make_pair(state_parameters_.focal_lengths.data(), state_parameters_.focal_lengths.data())); + covariance_blocks.push_back( + std::make_pair(state_parameters_.principal_points.data(), state_parameters_.principal_points.data())); + covariance_blocks.push_back(std::make_pair(state_parameters_.distortion.data(), state_parameters_.distortion.data())); + + if (!covariance.Compute(covariance_blocks, &problem_)) { + LogError("Covariances: Failed to compute covariances."); + return boost::none; + } + + StateParametersCovariances covariances; + covariance.GetCovarianceBlock(state_parameters_.focal_lengths.data(), state_parameters_.focal_lengths.data(), + covariances.focal_lengths.data()); + covariance.GetCovarianceBlock(state_parameters_.principal_points.data(), state_parameters_.principal_points.data(), + covariances.principal_points.data()); + covariances.distortion = Eigen::MatrixXd(state_parameters_.distortion.size(), state_parameters_.distortion.size()); + 
covariance.GetCovarianceBlock(state_parameters_.distortion.data(), state_parameters_.distortion.data(), + covariances.distortion.data()); + return covariances; +} + +template +void CameraTargetBasedIntrinsicsCalibrator::AddCameraTTargetParameter( + const Eigen::Isometry3d& camera_T_target) { + state_parameters_.AddCameraTTarget(camera_T_target); + optimization_common::AddParameterBlock(6, state_parameters_.camera_T_targets.back().data(), problem_, + !params_.calibrate_target_poses); + ceres::LocalParameterization* se3_local_parameterization = new optimization_common::SE3LocalParameterization; + problem_.SetParameterization(state_parameters_.camera_T_targets.back().data(), se3_local_parameterization); +} + +template +double CameraTargetBasedIntrinsicsCalibrator::RadialScaleFactor(const Eigen::Vector2d& image_point, + const Eigen::Vector2i& image_size) const { + if (!params_.scale_loss_radially) return 1.0; + const Eigen::Vector2d centered_image_point = image_point - image_size.cast() / 2.0; + const double radius = centered_image_point.norm(); + return std::pow(radius, params_.radial_scale_power); +} + +template +localization_common::ImageCorrespondences CameraTargetBasedIntrinsicsCalibrator::InlierMatches( + const localization_common::ImageCorrespondences& match_set, const std::vector& inliers) const { + std::vector inlier_image_points; + std::vector inlier_points_3d; + for (const int inlier_index : inliers) { + inlier_image_points.emplace_back(match_set.image_points[inlier_index]); + inlier_points_3d.emplace_back(match_set.points_3d[inlier_index]); + } + return localization_common::ImageCorrespondences(inlier_image_points, inlier_points_3d); +} + +template +void CameraTargetBasedIntrinsicsCalibrator::SaveResults(const StateParameters& calibrated_state_parameters, + const std::vector& match_sets) const { + if (!(params_.calibrate_target_poses && params_.optimization.verbose) && !params_.save_final_reprojection_image) + return; + + const auto calibrated_camera_T_targets = state_parameters_.OptimizedCameraTTargets(); + if (params_.calibrate_target_poses && params_.optimization.verbose) + PrintCameraTTargetsStats(match_sets, calibrated_camera_T_targets); + + if (params_.save_final_reprojection_image) { + const Eigen::Matrix3d calibrated_intrinsics = optimization_common::Intrinsics( + calibrated_state_parameters.focal_lengths, calibrated_state_parameters.principal_points); + SaveReprojectionFromAllTargetsImage(calibrated_camera_T_targets, valid_correspondences_set_, + calibrated_intrinsics, calibrated_state_parameters.distortion, + params_.image_size, params_.max_visualization_error_norm); + } +} +} // namespace calibration + +#endif // CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_H_ diff --git a/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator_params.h b/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator_params.h new file mode 100644 index 0000000000..72fda66941 --- /dev/null +++ b/tools/calibration/include/calibration/camera_target_based_intrinsics_calibrator_params.h @@ -0,0 +1,53 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_PARAMS_H_ +#define CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_PARAMS_H_ + +#include +#include +#include + +#include + +#include +#include + +namespace calibration { +struct CameraTargetBasedIntrinsicsCalibratorParams { + OptimizationParams optimization; + ReprojectionPoseEstimateParams reprojection_pose_estimate; + bool calibrate_focal_lengths; + bool calibrate_principal_points; + bool calibrate_distortion; + bool calibrate_target_poses; + // Optimization Options + bool scale_loss_radially; + double radial_scale_power; + // Other + bool only_use_inliers; + int max_num_match_sets; + int min_num_target_inliers; + double max_visualization_error_norm; + bool save_individual_initial_reprojection_images; + bool save_final_reprojection_image; + double individual_max_visualization_error_norm; + Eigen::Vector2i image_size; +}; +} // namespace calibration + +#endif // CALIBRATION_CAMERA_TARGET_BASED_INTRINSICS_CALIBRATOR_PARAMS_H_ diff --git a/tools/calibration/include/calibration/camera_utilities.h b/tools/calibration/include/calibration/camera_utilities.h new file mode 100644 index 0000000000..777f5657e8 --- /dev/null +++ b/tools/calibration/include/calibration/camera_utilities.h @@ -0,0 +1,340 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef CALIBRATION_CAMERA_UTILITIES_H_ +#define CALIBRATION_CAMERA_UTILITIES_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include + +namespace calibration { +Eigen::Vector2d Project3dPointToImageSpace(const Eigen::Vector3d& cam_t_point, const Eigen::Matrix3d& intrinsics); + +template +Eigen::Vector2d Project3dPointToImageSpaceWithDistortion(const Eigen::Vector3d& cam_t_point, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion_params); + +Eigen::Isometry3d Isometry3d(const cv::Mat& rodrigues_rotation_cv, const cv::Mat& translation_cv); + +void UndistortedPnP(const std::vector& undistorted_image_points, const std::vector& points_3d, + const cv::Mat& intrinsics, const int pnp_method, cv::Mat& rotation, cv::Mat& translation); + +std::vector RandomNIndices(const int num_possible_indices, const int num_sampled_indices); + +template +int Inliers(const std::vector& image_points, const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Isometry3d& pose_estimate, const double max_inlier_threshold, + boost::optional&> inliers = boost::none); + +template +std::vector SampledValues(const std::vector& values, const std::vector& indices); + +template +boost::optional RansacPnP(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion, + const RansacPnPParams& params); + +template +boost::optional ReprojectionPoseEstimate(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Vector2d& focal_lengths, + const Eigen::Vector2d& principal_points, + const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params); + +template +boost::optional ReprojectionPoseEstimate(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params); + +template +boost::optional ReprojectionPoseEstimateWithInitialEstimate( + const std::vector& image_points, const std::vector& points_3d, + const Eigen::Vector2d& focal_lengths, const Eigen::Vector2d& principal_points, const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params, const Eigen::Isometry3d& initial_estimate, + const std::vector& initial_inliers); + +template +boost::optional ReprojectionPoseEstimateWithInitialEstimate( + const std::vector& image_points, const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, const ReprojectionPoseEstimateParams& params, + const Eigen::Isometry3d& initial_estimate, const std::vector& initial_inliers); + +template +Eigen::Vector2d Project3dPointToImageSpaceWithDistortion(const Eigen::Vector3d& cam_t_point, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion_params) { + const Eigen::Vector2d undistorted_image_point = Project3dPointToImageSpace(cam_t_point, intrinsics); + const DISTORTER distorter; + return distorter.Distort(distortion_params, intrinsics, undistorted_image_point); +} + +template +int Inliers(const std::vector& image_points, const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Isometry3d& pose_estimate, const double max_inlier_threshold, + boost::optional&> 
inliers) { + int num_inliers = 0; + for (int i = 0; i < static_cast(image_points.size()); ++i) { + const Eigen::Vector2d& image_point = image_points[i]; + const Eigen::Vector3d& point_3d = points_3d[i]; + const Eigen::Vector3d transformed_point_3d = pose_estimate * point_3d; + const Eigen::Vector2d projected_image_point = + Project3dPointToImageSpaceWithDistortion(transformed_point_3d, intrinsics, distortion); + const Eigen::Vector2d error = (image_point - projected_image_point); + const double error_norm = error.norm(); + if (error_norm <= max_inlier_threshold) { + ++num_inliers; + if (inliers) inliers->emplace_back(i); + } + } + return num_inliers; +} + +template +std::vector SampledValues(const std::vector& values, const std::vector& indices) { + std::vector sampled_values; + for (const int i : indices) { + sampled_values.emplace_back(values[i]); + } + return sampled_values; +} + +template +boost::optional RansacPnP(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion, + const RansacPnPParams& params) { + if (image_points.size() < 4) { + LogError("RansacPnP: Too few matched points given."); + return boost::none; + } + + const DISTORTER distorter; + const std::vector undistorted_image_points = + distorter.Undistort(image_points, intrinsics, distortion); + + std::vector undistorted_image_points_cv; + for (const auto& undistorted_image_point : undistorted_image_points) { + undistorted_image_points_cv.emplace_back(cv::Point2d(undistorted_image_point.x(), undistorted_image_point.y())); + } + std::vector points_3d_cv; + for (const auto& point_3d : points_3d) { + points_3d_cv.emplace_back(point_3d.x(), point_3d.y(), point_3d.z()); + } + + cv::Mat intrinsics_cv; + cv::eigen2cv(intrinsics, intrinsics_cv); + cv::Mat rodrigues_rotation_cv(3, 1, cv::DataType::type, cv::Scalar(0)); + cv::Mat translation_cv(3, 1, cv::DataType::type, cv::Scalar(0)); + int max_num_inliers = 0; + Eigen::Isometry3d best_pose_estimate; + // Use 4 values for P3P + // TODO(rsoussan): make this a param + constexpr int kNumSampledValues = 4; + for (int i = 0; i < params.num_iterations; ++i) { + const auto sampled_indices = RandomNIndices(image_points.size(), kNumSampledValues); + const auto sampled_undistorted_image_points = SampledValues(undistorted_image_points_cv, sampled_indices); + const auto sampled_points_3d = SampledValues(points_3d_cv, sampled_indices); + UndistortedPnP(sampled_undistorted_image_points, sampled_points_3d, intrinsics_cv, params.pnp_method, + rodrigues_rotation_cv, translation_cv); + const Eigen::Isometry3d pose_estimate = Isometry3d(rodrigues_rotation_cv, translation_cv); + const int num_inliers = + Inliers(image_points, points_3d, intrinsics, distortion, pose_estimate, params.max_inlier_threshold); + if (num_inliers > max_num_inliers) { + best_pose_estimate = pose_estimate; + max_num_inliers = num_inliers; + } + } + + if (max_num_inliers < params.min_num_inliers) { + LogError("RansacPnP: Failed to find pose with enough inliers."); + return boost::none; + } + + std::vector inliers; + Inliers(image_points, points_3d, intrinsics, distortion, best_pose_estimate, params.max_inlier_threshold, + inliers); + // TODO(rsoussan): Get covariance for ransac pnp + return PoseWithCovarianceAndInliers(best_pose_estimate, Eigen::Matrix::Identity(), inliers); +} + +template +boost::optional ReprojectionPoseEstimate(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Vector2d& focal_lengths, + const 
Eigen::Vector2d& principal_points, + const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params) { + if (image_points.size() < 4) { + LogError("ReprojectionPoseEstimate: Too few matched points given."); + return boost::none; + } + + const Eigen::Matrix3d intrinsics = optimization_common::Intrinsics(focal_lengths, principal_points); + // Use RansacPnP for initial estimate since using identity transform can lead to image projection issues + // if any points_3d z values are 0. + const auto ransac_pnp_estimate = + RansacPnP(image_points, points_3d, intrinsics, distortion, params.ransac_pnp); + if (!ransac_pnp_estimate) { + LogError("ReprojectionPoseEstimate: Failed to get ransac pnp initial estimate."); + return boost::none; + } + + const int num_inliers = ransac_pnp_estimate->inliers.size(); + if (num_inliers < params.ransac_pnp.min_num_inliers) { + LogError("ReprojectionPoseEstimate: Too few inliers found. Need " << params.ransac_pnp.min_num_inliers << ", got " + << num_inliers << "."); + return boost::none; + } + + return ReprojectionPoseEstimateWithInitialEstimate( + image_points, points_3d, focal_lengths, principal_points, distortion, params, ransac_pnp_estimate->pose, + ransac_pnp_estimate->inliers); +} + +template +boost::optional ReprojectionPoseEstimateWithInitialEstimate( + const std::vector& image_points, const std::vector& points_3d, + const Eigen::Vector2d& focal_lengths, const Eigen::Vector2d& principal_points, const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params, const Eigen::Isometry3d& initial_estimate, + const std::vector& initial_inliers) { + if (image_points.size() < 4) { + LogError("ReprojectionPoseEstimateWithInitialEstimate: Too few matched points given."); + return boost::none; + } + + const int num_initial_inliers = initial_inliers.size(); + if (num_initial_inliers < params.ransac_pnp.min_num_inliers) { + LogError("ReprojectionPoseEstimateWithInitialEstimate: Too few inliers provided. 
Need " + << params.ransac_pnp.min_num_inliers << ", got " << num_initial_inliers << "."); + return boost::none; + } + + // TODO(rsoussan): Get covariance from ransac pnp + if (!params.optimize_estimate) + return PoseWithCovarianceAndInliers(initial_estimate, Eigen::Matrix::Identity(), initial_inliers); + + ceres::Problem problem; + optimization_common::AddConstantParameterBlock(2, focal_lengths.data(), problem); + optimization_common::AddConstantParameterBlock(2, principal_points.data(), problem); + optimization_common::AddConstantParameterBlock(DISTORTER::kNumParams, distortion.data(), problem); + + Eigen::Matrix pose_estimate_vector = optimization_common::VectorFromIsometry3d(initial_estimate); + problem.AddParameterBlock(pose_estimate_vector.data(), 6); + + for (int i = 0; i < num_initial_inliers; ++i) { + const int inlier_index = initial_inliers[i]; + optimization_common::ReprojectionError::AddCostFunction( + image_points[inlier_index], points_3d[inlier_index], pose_estimate_vector, + const_cast(focal_lengths), const_cast(principal_points), + const_cast(distortion), problem, 1, params.optimization.huber_loss); + } + + ceres::Solver::Summary summary; + ceres::Solve(params.optimization.solver_options, &problem, &summary); + const Eigen::Isometry3d optimized_estimate = optimization_common::Isometry3d(pose_estimate_vector); + if (params.optimization.verbose) { + LogError("ReprojectionPoseEstimateWithInitialEstimate: Initial estimate: " << std::endl + << initial_estimate.matrix() << std::endl + << "Optimized estimate: " << std::endl + << optimized_estimate.matrix() + << std::endl + << "Summary: " << std::endl + << summary.FullReport()); + } + if (!summary.IsSolutionUsable()) { + LogError("ReprojectionPoseEstimateWithInitialEstimate: Failed to find solution."); + return boost::none; + } + + ceres::Covariance::Options options; + ceres::Covariance covariance(options); + std::vector> covariance_blocks; + covariance_blocks.push_back(std::make_pair(pose_estimate_vector.data(), pose_estimate_vector.data())); + if (!covariance.Compute(covariance_blocks, &problem)) { + LogError("ReprojectionPoseEstimateWithInitialEstimate: Failed to compute covariances."); + return boost::none; + } + + localization_common::PoseCovariance pose_covariance; + covariance.GetCovarianceBlock(pose_estimate_vector.data(), pose_estimate_vector.data(), pose_covariance.data()); + + std::vector inliers; + const Eigen::Matrix3d intrinsics = optimization_common::Intrinsics(focal_lengths, principal_points); + Inliers(image_points, points_3d, intrinsics, distortion, optimized_estimate, params.max_inlier_threshold, + inliers); + return PoseWithCovarianceAndInliers(optimized_estimate, pose_covariance, inliers); +} + +template +boost::optional ReprojectionPoseEstimate(const std::vector& image_points, + const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion, + const ReprojectionPoseEstimateParams& params) { + const auto focal_lengths = localization_common::FocalLengths(intrinsics); + const auto principal_points = localization_common::PrincipalPoints(intrinsics); + return ReprojectionPoseEstimate(image_points, points_3d, focal_lengths, principal_points, distortion, + params); +} + +template +boost::optional ReprojectionPoseEstimateWithInitialEstimate( + const std::vector& image_points, const std::vector& points_3d, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, const ReprojectionPoseEstimateParams& params, + const Eigen::Isometry3d& initial_estimate, const 
std::vector& initial_inliers) { + const auto focal_lengths = localization_common::FocalLengths(intrinsics); + const auto principal_points = localization_common::PrincipalPoints(intrinsics); + return ReprojectionPoseEstimateWithInitialEstimate( + image_points, points_3d, focal_lengths, principal_points, distortion, params, initial_estimate, initial_inliers); +} +} // namespace calibration +#endif // CALIBRATION_CAMERA_UTILITIES_H_ diff --git a/tools/calibration/include/calibration/match_set.h b/tools/calibration/include/calibration/match_set.h new file mode 100644 index 0000000000..6daf91452b --- /dev/null +++ b/tools/calibration/include/calibration/match_set.h @@ -0,0 +1,38 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_MATCH_SET_H_ +#define CALIBRATION_MATCH_SET_H_ + +#include + +#include + +#include + +namespace calibration { +struct MatchSet { + MatchSet(const localization_common::ImageCorrespondences& correspondences, const Eigen::Isometry3d& pose_estimate, + const std::vector& inliers) + : correspondences(correspondences), pose_estimate(pose_estimate), inliers(inliers) {} + localization_common::ImageCorrespondences correspondences; + Eigen::Isometry3d pose_estimate; + std::vector inliers; +}; +} // namespace calibration + +#endif // CALIBRATION_MATCH_SET_H_ diff --git a/tools/calibration/include/calibration/optimization_params.h b/tools/calibration/include/calibration/optimization_params.h new file mode 100644 index 0000000000..4a9d5c5cd1 --- /dev/null +++ b/tools/calibration/include/calibration/optimization_params.h @@ -0,0 +1,31 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef CALIBRATION_OPTIMIZATION_PARAMS_H_ +#define CALIBRATION_OPTIMIZATION_PARAMS_H_ + +#include + +namespace calibration { +struct OptimizationParams { + ceres::Solver::Options solver_options; + bool verbose; + double huber_loss; +}; +} // namespace calibration + +#endif // CALIBRATION_OPTIMIZATION_PARAMS_H_ diff --git a/tools/calibration/include/calibration/parameter_reader.h b/tools/calibration/include/calibration/parameter_reader.h new file mode 100644 index 0000000000..4011afc578 --- /dev/null +++ b/tools/calibration/include/calibration/parameter_reader.h @@ -0,0 +1,48 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_PARAMETER_READER_H_ +#define CALIBRATION_PARAMETER_READER_H_ + +#include +#include +#include +#include +#include + +#include + +#include + +namespace calibration { +void LoadRunCalibratorParams(config_reader::ConfigReader& config, RunCalibratorParams& params); + +void LoadCameraTargetBasedIntrinsicsCalibratorParams(config_reader::ConfigReader& config, + CameraTargetBasedIntrinsicsCalibratorParams& params); + +void LoadReprojectionPoseEstimateParams(config_reader::ConfigReader& config, ReprojectionPoseEstimateParams& params); + +void LoadOptimizationParams(config_reader::ConfigReader& config, OptimizationParams& params, + const std::string& prefix = ""); + +void LoadSolverOptions(config_reader::ConfigReader& config, ceres::Solver::Options& solver_options, + const std::string& prefix = ""); + +void LoadRansacPnPParams(config_reader::ConfigReader& config, RansacPnPParams& params); +} // namespace calibration + +#endif // CALIBRATION_PARAMETER_READER_H_ diff --git a/tools/calibration/include/calibration/pose_with_covariance_and_inliers.h b/tools/calibration/include/calibration/pose_with_covariance_and_inliers.h new file mode 100644 index 0000000000..3a510b7fe2 --- /dev/null +++ b/tools/calibration/include/calibration/pose_with_covariance_and_inliers.h @@ -0,0 +1,36 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef CALIBRATION_POSE_WITH_COVARIANCE_AND_INLIERS_H_ +#define CALIBRATION_POSE_WITH_COVARIANCE_AND_INLIERS_H_ + +#include + +#include + +#include + +namespace calibration { +struct PoseWithCovarianceAndInliers : public localization_common::PoseWithCovariance { + PoseWithCovarianceAndInliers(const Eigen::Isometry3d& pose, const localization_common::PoseCovariance& covariance, + const std::vector& inliers) + : PoseWithCovariance(pose, covariance), inliers(inliers) {} + std::vector inliers; +}; +} // namespace calibration + +#endif // CALIBRATION_POSE_WITH_COVARIANCE_AND_INLIERS_H_ diff --git a/tools/calibration/include/calibration/ransac_pnp_params.h b/tools/calibration/include/calibration/ransac_pnp_params.h new file mode 100644 index 0000000000..515738cc89 --- /dev/null +++ b/tools/calibration/include/calibration/ransac_pnp_params.h @@ -0,0 +1,33 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_RANSAC_PNP_PARAMS_H_ +#define CALIBRATION_RANSAC_PNP_PARAMS_H_ + +#include + +namespace calibration { +struct RansacPnPParams { + double max_inlier_threshold; + int num_iterations; + int min_num_inliers; + // TODO(rsoussan): Change this to cv::SolvePnPMethod when opencv version updated + int pnp_method; +}; +} // namespace calibration + +#endif // CALIBRATION_RANSAC_PNP_PARAMS_H_ diff --git a/tools/calibration/include/calibration/reprojection_pose_estimate_params.h b/tools/calibration/include/calibration/reprojection_pose_estimate_params.h new file mode 100644 index 0000000000..f7ebe47175 --- /dev/null +++ b/tools/calibration/include/calibration/reprojection_pose_estimate_params.h @@ -0,0 +1,33 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef CALIBRATION_REPROJECTION_POSE_ESTIMATE_PARAMS_H_ +#define CALIBRATION_REPROJECTION_POSE_ESTIMATE_PARAMS_H_ + +#include +#include + +namespace calibration { +struct ReprojectionPoseEstimateParams { + OptimizationParams optimization; + RansacPnPParams ransac_pnp; + bool optimize_estimate; + double max_inlier_threshold; +}; +} // namespace calibration + +#endif // CALIBRATION_REPROJECTION_POSE_ESTIMATE_PARAMS_H_ diff --git a/tools/simulator/dock_sim/include/dock_sim/dock_sim.h b/tools/calibration/include/calibration/run_calibrator_params.h similarity index 54% rename from tools/simulator/dock_sim/include/dock_sim/dock_sim.h rename to tools/calibration/include/calibration/run_calibrator_params.h index 8be3fb7292..a114391c53 100644 --- a/tools/simulator/dock_sim/include/dock_sim/dock_sim.h +++ b/tools/calibration/include/calibration/run_calibrator_params.h @@ -1,54 +1,37 @@ /* Copyright (c) 2017, United States Government, as represented by the * Administrator of the National Aeronautics and Space Administration. - * + * * All rights reserved. - * + * * The Astrobee platform is licensed under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ +#ifndef CALIBRATION_RUN_CALIBRATOR_PARAMS_H_ +#define CALIBRATION_RUN_CALIBRATOR_PARAMS_H_ +#include +#include -#ifndef DOCK_SIM_DOCK_SIM_H_ -#define DOCK_SIM_DOCK_SIM_H_ - -#include - -#include -#include - -#include - -#include -#include - -#include - +#include #include -namespace dock_sim { - -class DockSim : public nodelet::Nodelet { - public: - DockSim(); - ~DockSim(); - - void DockGoalCallback(ff_msgs::DockGoalConstPtr const& goal); - - virtual void onInit(); - - private: - std::shared_ptr> sas_dock_; +namespace calibration { +struct RunCalibratorParams { + CameraTargetBasedIntrinsicsCalibratorParams camera_target_based_intrinsics_calibrator; + std::shared_ptr camera_params; + std::string camera_name; + // fov, rad, or radtan + std::string distortion_type; }; +} // namespace calibration -} // namespace dock_sim - -#endif // DOCK_SIM_DOCK_SIM_H_ +#endif // CALIBRATION_RUN_CALIBRATOR_PARAMS_H_ diff --git a/tools/calibration/include/calibration/state_parameters.h b/tools/calibration/include/calibration/state_parameters.h new file mode 100644 index 0000000000..bca575b8f1 --- /dev/null +++ b/tools/calibration/include/calibration/state_parameters.h @@ -0,0 +1,89 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ +#ifndef CALIBRATION_STATE_PARAMETERS_H_ +#define CALIBRATION_STATE_PARAMETERS_H_ + +#include +#include +#include + +#include +#include + +#include + +namespace calibration { +struct StateParameters { + void Print() { + LogInfo("Focal lengths: " << std::endl << focal_lengths.matrix()); + LogInfo("Principal points: " << std::endl << principal_points.matrix()); + LogInfo("Distortion: " << std::endl << distortion.matrix()); + } + + Eigen::Vector2d focal_lengths; + Eigen::Vector2d principal_points; + Eigen::VectorXd distortion; +}; + +// Container for state parameters in vector form used during optimization +struct OptimizationStateParameters { + void SetInitialStateParameters(const StateParameters& initial_state_parameters) { + focal_lengths = initial_state_parameters.focal_lengths; + principal_points = initial_state_parameters.principal_points; + distortion = initial_state_parameters.distortion; + } + + void AddCameraTTarget(const Eigen::Isometry3d& camera_T_target) { + camera_T_targets.emplace_back(optimization_common::VectorFromIsometry3d(camera_T_target)); + } + + StateParameters OptimizedStateParameters() const { + StateParameters optimized_state_parameters; + optimized_state_parameters.focal_lengths = focal_lengths; + optimized_state_parameters.principal_points = principal_points; + optimized_state_parameters.distortion = distortion; + return optimized_state_parameters; + } + + std::vector OptimizedCameraTTargets() const { + std::vector optimized_camera_T_targets; + for (const auto& camera_T_target : camera_T_targets) + optimized_camera_T_targets.emplace_back(optimization_common::Isometry3d(camera_T_target)); + return optimized_camera_T_targets; + } + + Eigen::Vector2d focal_lengths; + Eigen::Vector2d principal_points; + Eigen::VectorXd distortion; + std::vector> camera_T_targets; +}; + +struct StateParametersCovariances { + void Print() { + LogInfo("Focal length covariances: " << std::endl << focal_lengths.matrix()); + LogInfo("Principal point covariances: " << std::endl << principal_points.matrix()); + LogInfo("Distortion covariances: " << std::endl << distortion.matrix()); + } + + Eigen::Matrix2d focal_lengths; + Eigen::Matrix2d principal_points; + Eigen::MatrixXd distortion; +}; +} // namespace calibration + +#endif // CALIBRATION_STATE_PARAMETERS_H_ diff --git a/tools/calibration/include/calibration/utilities.h b/tools/calibration/include/calibration/utilities.h new file mode 100644 index 0000000000..bd3f99cb75 --- /dev/null +++ b/tools/calibration/include/calibration/utilities.h @@ -0,0 +1,169 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +#ifndef CALIBRATION_UTILITIES_H_ +#define CALIBRATION_UTILITIES_H_ + +#include +#include +#include +#include +#include + +#include + +#include +#include +#include + +#include + +#include +#include +#include +#include +#include + +namespace calibration { +void PrintCameraTTargetsStats(const std::vector& match_sets, + const std::vector& optimized_camera_T_targets); + +template +void SaveReprojectionImage(const std::vector& image_points, + const std::vector& points_3d, const std::vector& indices, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Isometry3d& pose, const double max_visualization_error_norm, + const std::string& name); + +template +void SaveReprojectionFromAllTargetsImage(const std::vector& camera_T_targets, + const std::vector& valid_match_sets, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Vector2i& image_size, + const double max_visualization_error_norm = 100); + +int ErrorColor(const double error, const double max_error, const double max_color_value); + +cv::Mat MapImageColors(const cv::Mat& gray_image); + +template +void SaveReprojectionImage(const std::vector& image_points, + const std::vector& points_3d, const std::vector& indices, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Isometry3d& pose, const double max_visualization_error_norm, + const std::string& name) { + cv::Mat reprojection_image_grayscale(960, 1280, CV_8UC1, cv::Scalar(0)); + std::unordered_set inlier_indices(indices.begin(), indices.end()); + for (int i = 0; i < static_cast(image_points.size()); ++i) { + const Eigen::Vector2d& image_point = image_points[i]; + const Eigen::Vector3d& point_3d = points_3d[i]; + const Eigen::Vector3d camera_t_target_point = pose * point_3d; + const Eigen::Vector2d projected_image_point = + Project3dPointToImageSpaceWithDistortion(camera_t_target_point, intrinsics, distortion); + const Eigen::Vector2d error = (image_point - projected_image_point); + const double error_norm = error.norm(); + const cv::Point2i rounded_image_point(std::round(image_point.x()), std::round(image_point.y())); + // Add 1 to each value so background pixels stay white and we can map these back to white + // after applying colormap. + // Only map up to 235 since darker reds that occur from 235-255 are hard to differentiate from + // darker blues from 0 to 20 or so. 
+ constexpr double max_color_value = 235.0; + const int error_color = ErrorColor(error_norm, max_visualization_error_norm, max_color_value) + 1; + if (inlier_indices.count(i) > 0) { + cv::circle(reprojection_image_grayscale, rounded_image_point, 4, cv::Scalar(235), -1); + cv::circle(reprojection_image_grayscale, + cv::Point2i(std::round(projected_image_point.x()), std::round(projected_image_point.y())), 4, + cv::Scalar(error_color), -1); + } else { // Draw outlier with a triangle + cv::drawMarker(reprojection_image_grayscale, rounded_image_point, cv::Scalar(235), cv::MARKER_TRIANGLE_DOWN, 6, + 2); + cv::drawMarker(reprojection_image_grayscale, + cv::Point2i(std::round(projected_image_point.x()), std::round(projected_image_point.y())), + cv::Scalar(error_color), cv::MARKER_TRIANGLE_DOWN, 6, 2); + } + } + + const cv::Mat reprojection_image_color = MapImageColors(reprojection_image_grayscale); + cv::imwrite(name, reprojection_image_color); +} + +template +void SaveReprojectionFromAllTargetsImage(const std::vector& camera_T_targets, + const std::vector& valid_match_sets, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion, + const Eigen::Vector2i& image_size, const double max_visualization_error_norm) { + double max_error_norm = 0; + // Absolute Image + cv::Mat absolute_reprojection_image_grayscale(image_size.y(), image_size.x(), CV_8UC1, cv::Scalar(0)); + std::ofstream errors_file; + errors_file.open("errors_file.txt"); + for (int i = 0; i < static_cast(valid_match_sets.size()); ++i) { + const auto& match_set = valid_match_sets[i]; + const Eigen::Isometry3d camera_T_target = camera_T_targets[i]; + for (int j = 0; j < static_cast(match_set.size()); ++j) { + const auto& image_point = match_set.image_points[j]; + const auto& target_point = match_set.points_3d[j]; + const Eigen::Vector3d camera_t_target_point = camera_T_target * target_point; + const Eigen::Vector2d projected_image_point = + Project3dPointToImageSpaceWithDistortion(camera_t_target_point, intrinsics, distortion); + const Eigen::Vector2d error = (image_point - projected_image_point); + const double error_norm = error.norm(); + if (error_norm > max_error_norm) max_error_norm = error_norm; + errors_file << error.x() << " " << error.y() << std::endl; + const cv::Point2i rounded_image_point(std::round(image_point.x()), std::round(image_point.y())); + // Add 1 to each value so background pixels stay white and we can map these back to white + // after applying colormap. + // Only map up to 235 since darker reds that occur from 235-255 are hard to differentiate from + // darker blues from 0 to 20 or so. 
+ constexpr double max_color_value = 235.0; + const int absolute_error_color = ErrorColor(error_norm, max_visualization_error_norm, max_color_value) + 1; + cv::circle(absolute_reprojection_image_grayscale, rounded_image_point, 4, cv::Scalar(absolute_error_color), -1); + } + } + const cv::Mat absolute_reprojection_image_color = MapImageColors(absolute_reprojection_image_grayscale); + cv::imwrite("calibrated_reprojection_from_all_targets_absolute_image.png", absolute_reprojection_image_color); + errors_file.close(); + + // Relative Image + cv::Mat relative_reprojection_image_grayscale(image_size.y(), image_size.x(), CV_8UC1, cv::Scalar(0)); + for (int i = 0; i < static_cast(valid_match_sets.size()); ++i) { + const auto& match_set = valid_match_sets[i]; + const Eigen::Isometry3d camera_T_target = camera_T_targets[i]; + for (int j = 0; j < static_cast(match_set.size()); ++j) { + const auto& image_point = match_set.image_points[j]; + const auto& target_point = match_set.points_3d[j]; + const Eigen::Vector3d camera_t_target_point = camera_T_target * target_point; + const Eigen::Vector2d projected_image_point = + Project3dPointToImageSpaceWithDistortion(camera_t_target_point, intrinsics, distortion); + const Eigen::Vector2d error = (image_point - projected_image_point); + const double error_norm = error.norm(); + const cv::Point2i rounded_image_point(std::round(image_point.x()), std::round(image_point.y())); + // Add 1 to each value so background pixels stay white and we can map these back to white + // after applying colormap. + // Only map up to 235 since darker reds that occur from 235-255 are hard to differentiate from + // darker blues from 0 to 20 or so. + constexpr double max_color_value = 235.0; + const int relative_error_color = ErrorColor(error_norm, max_error_norm, max_color_value) + 1; + cv::circle(relative_reprojection_image_grayscale, rounded_image_point, 4, cv::Scalar(relative_error_color), -1); + } + } + const cv::Mat relative_reprojection_image_color = MapImageColors(relative_reprojection_image_grayscale); + cv::imwrite("calibrated_reprojection_from_all_targets_relative_image.png", relative_reprojection_image_color); +} +} // namespace calibration + +#endif // CALIBRATION_UTILITIES_H_ diff --git a/tools/calibration/package.xml b/tools/calibration/package.xml new file mode 100644 index 0000000000..ff430597e2 --- /dev/null +++ b/tools/calibration/package.xml @@ -0,0 +1,27 @@ + + calibration + 1.0.0 + + The calibration package + + + Apache License, Version 2.0 + + + Astrobee Flight Software + + + Astrobee Flight Software + + catkin + eigen_conversions + camera + localization_common + msg_conversions + optimization_common + eigen_conversions + camera + localization_common + msg_conversions + optimization_common + diff --git a/tools/calibration/readme.md b/tools/calibration/readme.md new file mode 100644 index 0000000000..542ff632eb --- /dev/null +++ b/tools/calibration/readme.md @@ -0,0 +1,121 @@ +\page calibration Calibration + +# Package Overview +# Camera Target Based Intrinsics Calibration +Calibrating camera intrinsics involves generating target detection files +from a set of bagfiles, generating or providing an initial estimate for the camera intrinsics and distortion values, +and finally running the calibrator. +Several camera distortion models (fov, rad, radtan) are supported for calibration and various scripts are provided to view the quality of the input target detections and the calibration results. 
+ +## Example Usage +### Generate target detections from bagfiles +```rosrun calibration save\_images\_with\_target\_detections.py -d ~/bag\_files -o target\_detections -t ~/target.yaml```
+Here ```~/bag\_files``` is a directory of bagfiles containing images of the calibration target. +### View target detection coverage in image space +```rosrun calibration view\_all\_detections.py -d target\_detections```
+```feh detection\_image.jpg```
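Each detection file written by save\_images\_with\_target\_detections.py stores one detected corner per line as space separated values: `id target_x target_y image_x image_y`. A minimal sketch for loading one of these files (the file name below is hypothetical):

```python
import numpy as np

# Hypothetical detection file produced by save_images_with_target_detections.py.
# Columns: corner id, target-frame x/y, image-space x/y.
detections = np.loadtxt("target_detections/bumble_bag_1234.567.txt", ndmin=2)
corner_ids = detections[:, 0].astype(int)
target_points = detections[:, 1:3]
image_points = detections[:, 3:5]
print("Loaded %d corners; image points span x: %.0f-%.0f, y: %.0f-%.0f"
      % (len(corner_ids), image_points[:, 0].min(), image_points[:, 0].max(),
         image_points[:, 1].min(), image_points[:, 1].max()))
```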
+ +Ideally the target detections span the entire image. If only the middle of the image contains detections or if parts of the image have no detections, this may lead to an underconstrained calibration problem. Ensure good target coverage before attempting to calibrate a camera.
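To make the coverage check more quantitative than eyeballing detection\_image.jpg, the image-space detections can be binned into a coarse grid and the fraction of occupied cells reported. This is not part of the calibration tools, just a minimal sketch that assumes a directory of detection .txt files with at least one .jpg alongside them (as produced by save\_images\_with\_target\_detections.py):

```python
import glob
import os

import cv2
import numpy as np

detection_dir = "target_detections"  # hypothetical detections directory
# Use any image in the directory for the dimensions, as view_all_detections.py does.
height, width = cv2.imread(glob.glob(os.path.join(detection_dir, "*.jpg"))[0]).shape[:2]

grid_rows, grid_cols = 8, 8
occupied = np.zeros((grid_rows, grid_cols), dtype=bool)
for detection_file in glob.glob(os.path.join(detection_dir, "*.txt")):
    corners = np.loadtxt(detection_file, ndmin=2)
    for x, y in corners[:, 3:5]:  # image-space corner coordinates
        occupied[min(int(y / height * grid_rows), grid_rows - 1),
                 min(int(x / width * grid_cols), grid_cols - 1)] = True
print("%.0f%% of %dx%d image grid cells contain detections"
      % (100.0 * occupied.mean(), grid_rows, grid_cols))
```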
+### Calibrate +#### Calibration Parameters +In addition to the script parameters, the camera\_target\_based\_intrinsics\_calibrator.config file contains most of the configuration options for camera target based intrinsics calibration. It selects the distortion model and camera name, configures the reprojection pose estimator used for target pose estimation, and sets visualization and other general parameters.
+The parameters to calibrate can also be selected. Depending on the distortion model used, only certain parameters should be calibrated concurrently in order to avoid degeneracies and an underconstrained calibration; for example, avoid calibrating the camera principal points and the target poses at the same time. Calibration can be repeated, updating the solved parameters after each run and toggling which parameter sets are solved for concurrently, until all parameters are adequately calibrated. +#### Run Calibration +```rosrun calibration calibrate\_intrinsics\_and\_save\_results.py target\_detections ~\/astrobee\/src\/astrobee\/config config\/robots\/bumble.config -u -p -i target\_detections -d fov```
+For more details on the calibration script, see the Scripts section below. +#### Calibration Output +The calibrated results are saved in the calibrated\_params.txt file, while more verbose output is saved to the calibration\_output.txt file. +#### Judging Calibration Results +To judge calibration quality, rely primarily on the covariances reported for each calibrated parameter at the end of the calibration\_output.txt file.
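calibrated\_params.txt is a small space separated file: the first row holds the focal lengths, the second the principal points, and the third the distortion coefficients (the same layout copy\_calibration\_params\_to\_config.py reads). A minimal sketch for loading it back into an intrinsics matrix:

```python
import numpy as np

# Row order: focal lengths, principal points, distortion coefficients.
with open("calibrated_params.txt") as f:
    rows = [line.split() for line in f if line.strip()]
fx, fy = (float(v) for v in rows[0])
px, py = (float(v) for v in rows[1])
distortion = [float(v) for v in rows[2]]
intrinsics = np.array([[fx, 0.0, px],
                       [0.0, fy, py],
                       [0.0, 0.0, 1.0]])
print("Intrinsics:\n%s\nDistortion: %s" % (intrinsics, distortion))
```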
+##### Reprojection Image +The output calibrated\_reprojection\_from\_all\_targets\_absolute\_image.png displays reprojection errors after the calibration procedure has completed and can give a general sense of the calibration accuracy. It does not, however, distinguish between an accurate calibration and overfitting, which is why the covariances mentioned above should remain the primary indicator of calibration quality.
+Here reprojection errors are colored by the norm of the error on a continuous color spectrum, with dark blue indicating small/zero error and red indicating large error. +
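The coloring follows what the calibrator's utilities do internally: each error norm is clamped to max\_visualization\_error\_norm, scaled into the 1-236 range so that 0 stays reserved for the background, run through OpenCV's JET colormap, and the background is mapped back to white. A rough sketch of the same mapping with made-up points and errors:

```python
import cv2
import numpy as np


def error_color(error_norm, max_error_norm, max_color_value=235.0):
    # Clamp to the max visualization error, scale, and add 1 so 0 stays the background value.
    return int(round(min(error_norm, max_error_norm) / max_error_norm * max_color_value)) + 1


height, width = 960, 1280  # hypothetical image size
gray = np.zeros((height, width), dtype=np.uint8)
# Hypothetical detections: (x, y, reprojection error norm in pixels).
for x, y, err in [(100, 200, 0.2), (640, 480, 2.5), (1200, 900, 7.0)]:
    cv2.circle(gray, (x, y), 4, error_color(err, max_error_norm=5.0), -1)

color = cv2.applyColorMap(gray, cv2.COLORMAP_JET)
color[gray == 0] = (255, 255, 255)  # map the untouched background back to white
cv2.imwrite("reprojection_colors_example.png", color)
```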

+ +##### Error Histogram +The generated error histogram (which displays the overall norm, x, and y errors on separate plots) can be helpful for viewing the overall error during calibration and for checking whether any bias exists in the errors. For example, if the x or y error histograms are not zero centered, this can indicate an error in the calibrated principal points.
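Since errors\_file.txt stores one `x y` residual pair per line, a quick bias check is to look at the mean of each component; a mean far from zero suggests a principal point offset. A minimal sketch, assuming the file is in the current directory and using a made-up 0.5 pixel threshold:

```python
import numpy as np

errors = np.loadtxt("errors_file.txt", ndmin=2)  # one "x y" residual pair per line
mean_x, mean_y = errors.mean(axis=0)
mean_norm = np.linalg.norm(errors, axis=1).mean()
print("mean x error: %.3f px, mean y error: %.3f px, mean error norm: %.3f px"
      % (mean_x, mean_y, mean_norm))
if abs(mean_x) > 0.5 or abs(mean_y) > 0.5:  # made-up threshold
    print("Residuals are not zero centered; the calibrated principal points may be off.")
```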
+

+ +

+

+ +

+

+ +

+ +This is generated by passing the -p option to the calibration script or by running the make\_error\_histograms.py script on the output errors\_file.txt file after calibration has completed. + +##### Undistorted Images +The undistorted images, generated by passing -u to the calibration script or by separately running the create\_undistorted\_images script, give an additional indicator of calibration accuracy. Undistorted calibration targets should have straight boundary edges and straight lines within the target. Curved lines can indicate an error in the calibrated distortion parameters.
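The "straight lines" criterion can also be checked numerically by fitting a line to the undistorted image points of a single target row and inspecting the residuals. This is only a sketch with made-up coordinates; in practice the points would come from the undistorted detections of one target row:

```python
import numpy as np

# Hypothetical undistorted image points for one row of target corners.
row_points = np.array([[102.1, 330.4], [201.8, 331.0], [301.2, 331.5],
                       [400.9, 332.1], [500.3, 332.8]])
# Least-squares line fit y = a*x + b, then the worst deviation from that line.
a, b = np.polyfit(row_points[:, 0], row_points[:, 1], 1)
residuals = np.abs(row_points[:, 1] - (a * row_points[:, 0] + b))
print("max deviation from fitted line: %.2f px" % residuals.max())
# Deviations of more than a pixel or two hint at remaining distortion error.
```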
+ +Distorted Image: + +

+ +

+ +Undistorted Image (FOV distortion): + +

+ +

+ +The black dots in the undistorted image are a result of the FOV undistortion procedure, while the Rad and RadTan uses interpolation to fill these. + + +# Tools + +## create\_undistorted\_images +Generates undistorted images from a set of distorted images and provided camera calibration parameters. +See 'rosrun calibration create\_undistorted\_images -h' +for further details and usage instructions. + +## run\_camera\_target\_based\_intrinsics\_calibrator +Runs the intrinsics calibrator using a set of target detection files and initial estimates +for the camera intrinsics and distortion values. Support various distortion models. +See 'rosrun calibration run\_camera\_target\_based\_intrinsics\_calibrator -h' +for further details and usage instructions. + +# Scripts + +## calibrate\_intrinsics\_and\_save\_results.py +Runs camera intrinsic calibration using provided target detections and a config file with camera +parameters (including initial estimate for camera intrinsics and distortion values). +See 'rosrun calibration calibrate\_intrinsics\_and\_save\_results.py -h' +for further details and usage instructions. + +## copy\_calibration\_params\_to\_config.py +Helper script that copies calibration parameters from the output of the +calibration pipeline and writes these to the camera config file. +See 'rosrun calibration copy\_calibration\_params\_to\_config.py -h' +for further details and usage instructions. + +## get\_bags\_with\_topic.py +Helper script that generates a list of bag files in a directory +with the provided topic. +See 'rosrun calibration get\_bags\_with\_topic.py -h' +for further details and usage instructions. + +## make\_error\_histograms.py +Generates a histogram of errors using the output errors from +the calibration pipeline. +See 'rosrun calibration make\_error\_histograms.py -h' +for further details and usage instructions. + +## save\_images\_with\_target\_detections.py +Generates target detection files for use with the calibration +pipeline from a set of bagfiles containing images of target detections. +See 'rosrun calibration save\_images\_with\_target\_detections.py -h' +for further details and usage instructions. + +## view\_all\_detections.py +Generates an image containing all images space detections of a target +for a set of target detection files. +See 'rosrun calibration view\_all\_detections.py -h' +for further details and usage instructions. diff --git a/tools/calibration/scripts/calibrate_intrinsics_and_save_results.py b/tools/calibration/scripts/calibrate_intrinsics_and_save_results.py new file mode 100755 index 0000000000..ef22c63786 --- /dev/null +++ b/tools/calibration/scripts/calibrate_intrinsics_and_save_results.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+""" +Calibrates camera intrinsics using provided config file and target detections +and updates the provided config file with the calibration results. +Optionally undistorts images for provided directory with calibration results. +""" + +import argparse +import os +import subprocess +import sys + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "corners_directory", help="Path to target detections to use for calibration." + ) + parser.add_argument("config_path") + parser.add_argument("robot_config_file") + parser.add_argument("-w", "--world", default="iss") + parser.add_argument( + "-o", "--output-directory", default="intrinsics_calibration_output" + ) + parser.add_argument("-u", "--undistort-images", action="store_true") + parser.add_argument("-p", "--plot-histogram-errors", action="store_true") + parser.add_argument( + "-i", + "--image-directory-to-undistort", + default=None, + help="Optional seperate directory of images to undistort with calibration results. Ensure that the --undistort-images flag is passed.", + ) + # TODO(rsoussan): Get these from calibration config file + parser.add_argument( + "-c", "--camera-name", default="nav_cam", help="Used for optional undistortion." + ) + parser.add_argument( + "-d", "--distortion-type", default="fov", help="Used for optional undistortion." + ) + args = parser.parse_args() + if not os.path.isdir(args.corners_directory): + print("Corners directory " + args.corners_directory + " does not exist.") + sys.exit() + if os.path.isdir(args.output_directory): + print("Output directory " + args.output_directory + " already exists.") + sys.exit() + if args.image_directory_to_undistort and not os.path.isdir( + args.image_directory_to_undistort + ): + print( + "Image directory to undistort " + + args.image_directory_to_undistort + + " does not exist." 
+ ) + sys.exit() + + # Get absolute paths to directories so we can change to the output directory without breaking other directory paths + corners_directory = os.path.abspath(args.corners_directory) + images_directory_to_undistort = ( + os.path.abspath(args.image_directory_to_undistort) + if args.image_directory_to_undistort + else None + ) + + os.mkdir(args.output_directory) + os.chdir(args.output_directory) + + calibration_command = ( + "rosrun calibration run_camera_target_based_intrinsics_calibrator " + + corners_directory + + " -c " + + args.config_path + + " -r " + + args.robot_config_file + + " -w " + + args.world + ) + with open("calibration_output.txt", "w") as output_file: + subprocess.call( + calibration_command, shell=True, stdout=output_file, stderr=output_file + ) + + if args.plot_histogram_errors: + histogram_plotter_command = "rosrun calibration make_error_histograms.py" + os.system(histogram_plotter_command) + + if not args.undistort_images: + sys.exit() + + robot_config_file_full_path = os.path.join(args.config_path, args.robot_config_file) + copy_calibration_params_command = ( + "rosrun calibration copy_calibration_params_to_config.py --config " + + robot_config_file_full_path + + " --camera-name " + + args.camera_name + ) + os.system(copy_calibration_params_command) + + undistort_directory = ( + images_directory_to_undistort + if images_directory_to_undistort + else corners_directory + ) + undistort_images_command = ( + "rosrun calibration create_undistorted_images " + + undistort_directory + + " " + + args.distortion_type + + " " + + args.camera_name + + " " + + args.config_path + + " --robot-config-file " + + args.robot_config_file + + " -w " + + args.world + ) + os.system(undistort_images_command) diff --git a/tools/calibration/scripts/copy_calibration_params_to_config.py b/tools/calibration/scripts/copy_calibration_params_to_config.py new file mode 100755 index 0000000000..4c72e46856 --- /dev/null +++ b/tools/calibration/scripts/copy_calibration_params_to_config.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Updates robot config file with calibration output generated +by intrinsics calibrator. 
+""" + +from __future__ import print_function + +import argparse +import csv +import fileinput +import os +import sys + +import cv2 +import numpy as np + + +def make_calibration_params_string(calibration_filename): + fx = 0 + fy = 0 + px = 0 + py = 0 + distortion = [] + with open(calibration_filename) as calibration_file: + reader = csv.reader(calibration_file, delimiter=" ") + rows = list(reader) + # Row order is focal lengths, principal points, distortion params + fx = rows[0][0] + fy = rows[0][1] + px = rows[1][0] + py = rows[1][1] + for val in rows[2]: + distortion.append(val) + params_string = " distortion_coeff = " + if len(distortion) > 1: + params_string += "{" + for i in range(len(distortion)): + params_string += distortion[i] + if i < (len(distortion) - 1): + params_string += ", " + params_string += "}," + else: + params_string += distortion[0] + "," + params_string += "\n" + params_string += " intrinsic_matrix = {\n" + params_string += " " + str(fx) + ", " + "0.0" + ", " + str(px) + ",\n" + params_string += " " + "0.0" + ", " + str(fy) + ", " + str(py) + ",\n" + params_string += " " + "0.0, 0.0, 1.0\n" + params_string += " }," + return params_string + + +def copy_calibration_params_to_config(config, camera_name, calibration_file): + camera_name_line = camera_name + " = {" + new_calibration_string = make_calibration_params_string(calibration_file) + config_file = fileinput.input(config, inplace=1) + for line in config_file: + print(line, end="") + if line.strip() == camera_name_line: + print(new_calibration_string) + [config_file.next() for x in range(6)] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("--config", help="Full path to robot config file.") + parser.add_argument( + "--camera-name", help="Camera name in robot config file to update." + ) + parser.add_argument( + "-c", + "--calibration_file", + default="calibrated_params.txt", + help="Calibration file with new calibration params to use for update.", + ) + args = parser.parse_args() + if not os.path.isfile(args.config): + print("Config file " + args.config + " does not exist.") + sys.exit() + if not os.path.isfile(args.calibration_file): + print("Calibration file " + args.calibration_file + " does not exist.") + sys.exit() + copy_calibration_params_to_config( + args.config, args.camera_name, args.calibration_file + ) diff --git a/tools/calibration/scripts/get_bags_with_topic.py b/tools/calibration/scripts/get_bags_with_topic.py new file mode 100755 index 0000000000..1bd4a71dbb --- /dev/null +++ b/tools/calibration/scripts/get_bags_with_topic.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Finds all bags in a directory with a given topic. 
+""" + +import argparse +import os +import sys + +import rosbag + + +def get_bags_with_topic(bag_names, topic): + bag_names_with_topic = [] + for bag_name in bag_names: + with rosbag.Bag(bag_name, "r") as bag: + if bag.get_message_count(topic) > 0: + bag_names_with_topic.append(bag_name) + return bag_names_with_topic + + +# TODO(rsoussan): combine this with same fcn in graph_bag/merge_bags.py +def find_bags_in_directory(directory): + # Find bagfiles with bag prefix in current directory, fail if none found + # TODO(rsoussan): make function for getting filenames in a dir with extenstion, use for view_all_detections.py as well + bag_names = [ + os.path.join(directory, bag) + for bag in os.listdir(directory) + if os.path.isfile(os.path.join(directory, bag)) and bag.endswith(".bag") + ] + if len(bag_names) == 0: + print("No bag files found") + sys.exit() + else: + print(("Found " + str(len(bag_names)) + " bag files.")) + for bag_name in bag_names: + print(bag_name) + return bag_names + + +def find_bags_with_topic(directory, topic): + bag_names = find_bags_in_directory(directory) + return get_bags_with_topic(bag_names, topic) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "--cam-topic", + default="/hw/cam_nav", + help="Camera topic that bag files must contain.", + ) + parser.add_argument( + "-d", "--directory", default="./", help="Directory to search in." + ) + args = parser.parse_args() + bag_names_with_topic = find_bags_with_topic(args.directory, args.cam_topic) + if len(bag_names_with_topic) == 0: + print("No bag files with topic " + args.cam_topic + " found.") + sys.exit() + else: + print( + ( + "Found " + + str(len(bag_names_with_topic)) + + " bag files with " + + args.cam_topic + + " topic." + ) + ) + for bag_name in bag_names_with_topic: + print(bag_name) diff --git a/tools/calibration/scripts/make_error_histograms.py b/tools/calibration/scripts/make_error_histograms.py new file mode 100755 index 0000000000..c2b7f31b8c --- /dev/null +++ b/tools/calibration/scripts/make_error_histograms.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates a histogram of errors using the output errors file from +a calibration run. 
+""" + + +import argparse +import csv +import os +import sys + +import matplotlib + +matplotlib.use("pdf") +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages + + +def make_histograms(errors_file): + x_errors = [] + y_errors = [] + error_norms = [] + with open(errors_file) as errors_csvfile: + reader = csv.reader(errors_csvfile, delimiter=" ") + for row in reader: + x_error = float(row[0]) + y_error = float(row[1]) + error_norm = x_error * x_error + y_error * y_error + x_errors.append(x_error) + y_errors.append(y_error) + error_norms.append(error_norm) + + with PdfPages("errors_histograms.pdf") as pdf: + plt.hist(x_errors, bins=100) + plt.ylabel("Count") + plt.xlabel("X Errors") + pdf.savefig() + plt.close() + + plt.hist(y_errors, bins=100) + plt.ylabel("Count") + plt.xlabel("Y Errors") + pdf.savefig() + plt.close() + + plt.hist(error_norms, bins=100) + plt.ylabel("Count") + plt.xlabel("Error Norms") + pdf.savefig() + plt.close() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "-e", + "--errors-file", + default="errors_file.txt", + help="Errors file used to generate histogram. Errors are output from the intrinsics calibration pipeline.", + ) + args = parser.parse_args() + if not os.path.isfile(args.errors_file): + print("Errors file not found.") + sys.exit() + make_histograms(args.errors_file) diff --git a/tools/calibration/scripts/save_images_with_target_detections.py b/tools/calibration/scripts/save_images_with_target_detections.py new file mode 100755 index 0000000000..ef964b17ca --- /dev/null +++ b/tools/calibration/scripts/save_images_with_target_detections.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates target corner detection files from a set of bagfiles in +a directory containing images. Optionally prunes overlapping detections +to help prevent having too many detections in one area and potentially +biasing the calibrator. 
+""" + +import argparse +import os +import sys + +import aslam_cv_backend as acvb +import cv2 +import get_bags_with_topic +import kalibr_camera_calibration as kcc +import kalibr_common as kc +import numpy as np + + +class Corner: + def __init__(self, corner_id, target_corner, image_corner): + self.id = corner_id + self.target_corner = np.array([target_corner[0], target_corner[1]]) + self.image_corner = np.array([image_corner[0], image_corner[1]]) + + +class Corners: + def __init__(self, observation): + self.id_corner_map = {} + target_corners = observation.getCornersTargetFrame() + image_corners = observation.getCornersImageFrame() + ids = observation.getCornersIdx() + for i in range(len(target_corners)): + corner = Corner(ids[i], target_corners[i], image_corners[i]) + self.id_corner_map[corner.id] = corner + + def similar(self, other_corners, threshold): + # Make sure keys are the same + if not set(self.id_corner_map.keys()) == set( + other_corners.id_corner_map.keys() + ): + return False + + norm_sums = 0 + for corner_id in self.id_corner_map.keys(): + a = ( + self.id_corner_map[corner_id].image_corner + - other_corners.id_corner_map[corner_id].image_corner + ) + image_diff_norm = np.linalg.norm( + self.id_corner_map[corner_id].image_corner + - other_corners.id_corner_map[corner_id].image_corner + ) + norm_sums += image_diff_norm + mean_norm = norm_sums / float(len(self.id_corner_map.keys())) + if mean_norm < threshold: + print( + "Ignoring image, mean " + + str(mean_norm) + + " below threshold " + + str(threshold) + ) + return True + return False + + +class AddedCorners: + def __init__(self, threshold): + self.corners = [] + self.threshold = threshold + + def add_corners(self, corners): + self.corners.append(corners) + + def redundant(self, new_corners): + if len(self.corners) == 0: + return False + for corners in self.corners: + if corners.similar(new_corners, self.threshold): + return True + return False + + +def save_corners(observation, filename): + target_corners = observation.getCornersTargetFrame() + image_corners = observation.getCornersImageFrame() + ids = observation.getCornersIdx() + with open(filename, "w") as corners_file: + for i in range(len(target_corners)): + corners_file.write( + "%0.17g %0.17g %0.17g %0.17g %0.17g\n" + % ( + ids[i], + target_corners[i][0], + target_corners[i][1], + image_corners[i][0], + image_corners[i][1], + ) + ) + + +def save_images_from_dataset_with_target_detections( + dataset, detector, output_directory, added_corners +): + for timestamp, image in dataset.readDataset(): + success, observation = detector.findTargetNoTransformation( + timestamp, np.array(image) + ) + if success: + # TODO(rsoussan): Why do no corners show up as success sometimes? 
+ if len(observation.getCornersIdx()) == 0: + print("No Corners!") + continue + corners = Corners(observation) + if added_corners.redundant(corners): + continue + else: + added_corners.add_corners(corners) + filepath = os.path.join( + output_directory, + os.path.splitext(os.path.basename(dataset.bagfile))[0] + + "_" + + str(timestamp.toSec()), + ) + image_name = filepath + ".jpg" + print("Saving " + filepath) + cv2.imwrite(image_name, image) + corners_name = filepath + ".txt" + save_corners(observation, corners_name) + + +def save_images_from_bags_directory_with_target_detections( + bags_directory, target_yaml, cam_topic, output_directory, threshold +): + bag_names = get_bags_with_topic.find_bags_with_topic(bags_directory, cam_topic) + if len(bag_names) == 0: + print("No bag files with topic " + cam_topic + " found.") + sys.exit() + else: + print( + ( + "Found " + + str(len(bag_names)) + + " bag files with " + + cam_topic + + " topic." + ) + ) + for bag_name in bag_names: + print(bag_name) + + if not os.path.isdir(output_directory): + os.mkdir(output_directory) + + # Camera model isn't used for target detection so any model should suffice + camera_model = acvb.DistortedPinhole + target_config = kc.CalibrationTargetParameters(target_yaml) + target_detector = kcc.TargetDetector(target_config, camera_model.geometry()) + added_corners = AddedCorners(threshold) + for bag_name in bag_names: + dataset = kc.BagImageDatasetReader(bag_name, cam_topic) + camera_geometry = kcc.CameraGeometry(camera_model, target_config, dataset) + print("Saving images from " + bag_name) + save_images_from_dataset_with_target_detections( + dataset, target_detector.detector, args.output_directory, added_corners + ) + print("Finished savings images for " + bag_name) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + + parser.add_argument( + "-d", + "--directory", + default="./", + help="Directory of bagfiles containing images used for target detection.", + ) + parser.add_argument( + "-o", "--output-directory", default="./images_with_target_detections" + ) + parser.add_argument( + "--cam-topic", default="/hw/cam_nav", help="Camera topic in bagfiles." + ) + parser.add_argument( + "-t", + "--target-yaml", + help="Target yamf file specifying target parameters for target detection.", + ) + parser.add_argument( + "--threshold", + type=float, + default=100, + help="Threshold to prevent overlapping images. If the mean difference norm in image space for a new image's target detection points compared with any already added target detections image are below the provided value, the image is ignored. The two images must detect the exact same point set.", + ) + args = parser.parse_args() + save_images_from_bags_directory_with_target_detections( + args.directory, + args.target_yaml, + args.cam_topic, + args.output_directory, + args.threshold, + ) diff --git a/tools/calibration/scripts/view_all_detections.py b/tools/calibration/scripts/view_all_detections.py new file mode 100755 index 0000000000..e954522999 --- /dev/null +++ b/tools/calibration/scripts/view_all_detections.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. 
+# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates an image showing all image space target detections for target +detection files in a directory. +""" + +import argparse +import csv +import os +import sys + +import cv2 +import numpy as np + + +def view_detections(directory): + detection_files = [ + os.path.join(directory, filename) + for filename in os.listdir(directory) + if os.path.isfile(os.path.join(directory, filename)) + and filename.endswith(".txt") + ] + image_files = [ + os.path.join(directory, filename) + for filename in os.listdir(directory) + if os.path.isfile(os.path.join(directory, filename)) + and filename.endswith(".jpg") + ] + if len(detection_files) == 0 or len(image_files) == 0: + print("No detection files or no images found in directory.") + sys.exit() + + image = cv2.imread(image_files[0]) + height, width, channels = image.shape + detection_image = np.zeros((height, width, 3), np.uint8) + for detection_file in detection_files: + with open(detection_file) as detection_csvfile: + reader = csv.reader(detection_csvfile, delimiter=" ") + for row in reader: + cv2.circle( + detection_image, + (int(round(float(row[3]))), int(round(float(row[4])))), + 1, + (0, 255, 0), + -1, + ) + cv2.imwrite("detection_image.jpg", detection_image) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + + parser.add_argument( + "-d", + "--directory", + default="./", + help="Directory containing target detection files. Assumes at least one image is in same directory from which the image dimensions can be used for the generated image.", + ) + args = parser.parse_args() + if not os.path.isdir(args.directory): + print("Directory " + args.directory + " does not exist.") + sys.exit() + view_detections(args.directory) diff --git a/tools/calibration/src/camera_utilities.cc b/tools/calibration/src/camera_utilities.cc new file mode 100644 index 0000000000..108301f9d5 --- /dev/null +++ b/tools/calibration/src/camera_utilities.cc @@ -0,0 +1,66 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include +#include + +#include + +#include +#include +#include + +namespace calibration { +namespace lc = localization_common; + +Eigen::Vector2d Project3dPointToImageSpace(const Eigen::Vector3d& cam_t_point, const Eigen::Matrix3d& intrinsics) { + return (intrinsics * cam_t_point).hnormalized(); +} + +Eigen::Isometry3d Isometry3d(const cv::Mat& rodrigues_rotation_cv, const cv::Mat& translation_cv) { + Eigen::Vector3d translation; + cv::cv2eigen(translation_cv, translation); + Eigen::Matrix3d rotation; + cv::Mat rotation_cv; + cv::Rodrigues(rodrigues_rotation_cv, rotation_cv); + cv::cv2eigen(rotation_cv, rotation); + return lc::Isometry3d(translation, rotation); +} + +void UndistortedPnP(const std::vector& undistorted_image_points, const std::vector& points_3d, + const cv::Mat& intrinsics, const int pnp_method, cv::Mat& rotation, cv::Mat& translation) { + cv::Mat zero_distortion(4, 1, cv::DataType::type, cv::Scalar(0)); + cv::solvePnP(points_3d, undistorted_image_points, intrinsics, zero_distortion, rotation, translation, false, + pnp_method); +} + +std::vector RandomNIndices(const int num_possible_indices, const int num_sampled_indices) { + static std::random_device rd; + static std::mt19937 gen(rd()); + std::uniform_int_distribution<> distribution(0, num_possible_indices - 1); + std::unordered_set sampled_indices_set; + std::vector sampled_indices; + while (static_cast(sampled_indices.size()) < num_sampled_indices) { + const int random_index = distribution(gen); + if (sampled_indices_set.count(random_index) > 0) continue; + sampled_indices_set.emplace(random_index); + sampled_indices.emplace_back(random_index); + } + return sampled_indices; +} +} // namespace calibration diff --git a/tools/calibration/src/parameter_reader.cc b/tools/calibration/src/parameter_reader.cc new file mode 100644 index 0000000000..4cf4070924 --- /dev/null +++ b/tools/calibration/src/parameter_reader.cc @@ -0,0 +1,113 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include +#include +#include + +#include + +namespace calibration { +namespace mc = msg_conversions; + +void LoadRunCalibratorParams(config_reader::ConfigReader& config, RunCalibratorParams& params) { + LoadCameraTargetBasedIntrinsicsCalibratorParams(config, params.camera_target_based_intrinsics_calibrator); + params.camera_name = mc::LoadString(config, "camera"); + params.camera_params.reset(new camera::CameraParameters(&config, params.camera_name.c_str())); + params.distortion_type = mc::LoadString(config, "distortion_type"); +} + +void LoadCameraTargetBasedIntrinsicsCalibratorParams(config_reader::ConfigReader& config, + CameraTargetBasedIntrinsicsCalibratorParams& params) { + LoadOptimizationParams(config, params.optimization, "calibrator_"); + LoadReprojectionPoseEstimateParams(config, params.reprojection_pose_estimate); + params.calibrate_focal_lengths = mc::LoadBool(config, "calibrate_focal_lengths"); + params.calibrate_principal_points = mc::LoadBool(config, "calibrate_principal_points"); + params.calibrate_distortion = mc::LoadBool(config, "calibrate_distortion"); + params.calibrate_target_poses = mc::LoadBool(config, "calibrate_target_poses"); + params.scale_loss_radially = mc::LoadBool(config, "scale_loss_radially"); + params.radial_scale_power = mc::LoadDouble(config, "radial_scale_power"); + params.max_num_match_sets = mc::LoadInt(config, "max_num_match_sets"); + params.min_num_target_inliers = mc::LoadInt(config, "min_num_target_inliers"); + params.only_use_inliers = mc::LoadBool(config, "only_use_inliers"); + params.max_visualization_error_norm = mc::LoadDouble(config, "max_visualization_error_norm"); + params.save_individual_initial_reprojection_images = + mc::LoadBool(config, "save_individual_initial_reprojection_images"); + params.save_individual_initial_reprojection_images = + mc::LoadBool(config, "save_individual_initial_reprojection_images"); + params.save_final_reprojection_image = mc::LoadBool(config, "save_final_reprojection_image"); + const int image_width = mc::LoadInt(config, "image_width"); + const int image_height = mc::LoadInt(config, "image_height"); + params.image_size = Eigen::Vector2i(image_width, image_height); +} + +void LoadReprojectionPoseEstimateParams(config_reader::ConfigReader& config, ReprojectionPoseEstimateParams& params) { + LoadOptimizationParams(config, params.optimization, "reprojection_"); + LoadRansacPnPParams(config, params.ransac_pnp); + params.optimize_estimate = mc::LoadBool(config, "reprojection_optimize_estimate"); + params.max_inlier_threshold = mc::LoadDouble(config, "reprojection_max_inlier_threshold"); +} + +void LoadSolverOptions(config_reader::ConfigReader& config, ceres::Solver::Options& solver_options, + const std::string& prefix) { + const std::string linear_solver = mc::LoadString(config, prefix + "linear_solver"); + if (linear_solver == "dense_qr") { + solver_options.linear_solver_type = ceres::DENSE_QR; + } else if (linear_solver == "dense_schur") { + solver_options.linear_solver_type = ceres::DENSE_SCHUR; + } else if (linear_solver == "sparse_normal_cholesky") { + solver_options.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY; + } else if (linear_solver == "sparse_schur") { + solver_options.linear_solver_type = ceres::SPARSE_SCHUR; + } else if (linear_solver == "iterative_schur") { + solver_options.linear_solver_type = ceres::ITERATIVE_SCHUR; + } else { + LogFatal("LoadSolverOptions: Invalid linear solver provided."); + } + solver_options.use_explicit_schur_complement = mc::LoadBool(config, prefix + 
"use_explicit_schur_complement"); + solver_options.max_num_iterations = mc::LoadInt(config, prefix + "max_num_iterations"); + solver_options.function_tolerance = mc::LoadDouble(config, prefix + "function_tolerance"); + solver_options.parameter_tolerance = mc::LoadDouble(config, prefix + "parameter_tolerance"); +} + +void LoadOptimizationParams(config_reader::ConfigReader& config, OptimizationParams& params, + const std::string& prefix) { + LoadSolverOptions(config, params.solver_options, prefix); + params.verbose = mc::LoadBool(config, prefix + "verbose_optimization"); + params.huber_loss = mc::LoadDouble(config, prefix + "huber_loss"); +} + +void LoadRansacPnPParams(config_reader::ConfigReader& config, RansacPnPParams& params) { + params.max_inlier_threshold = mc::LoadDouble(config, "ransac_max_inlier_threshold"); + params.num_iterations = mc::LoadInt(config, "ransac_num_iterations"); + params.min_num_inliers = mc::LoadInt(config, "ransac_min_num_inliers"); + const std::string pnp_method = mc::LoadString(config, "ransac_pnp_method"); + // TODO(rsoussan): Add ippe once opencv version is updated + if (pnp_method == "p3p") { + params.pnp_method = cv::SOLVEPNP_P3P; + } else if (pnp_method == "epnp") { + params.pnp_method = cv::SOLVEPNP_EPNP; + } else if (pnp_method == "ap3p") { + params.pnp_method = cv::SOLVEPNP_AP3P; + } else if (pnp_method == "it") { + params.pnp_method = cv::SOLVEPNP_ITERATIVE; + } else { + LogFatal("LoadRansacPnPParams: Invalid pnp type provided."); + } +} +} // namespace calibration diff --git a/tools/calibration/src/utilities.cc b/tools/calibration/src/utilities.cc new file mode 100644 index 0000000000..53c1d30a8a --- /dev/null +++ b/tools/calibration/src/utilities.cc @@ -0,0 +1,62 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include +#include +#include +#include + +#include + +namespace calibration { +namespace lc = localization_common; +namespace oc = optimization_common; + +void PrintCameraTTargetsStats(const std::vector& match_sets, + const std::vector& optimized_camera_T_targets) { + lc::Averager position_diff_norm_averager("Initial vs. optimized camera_T_target position diff norm"); + lc::Averager rotation_diff_averager("Initial vs. 
optimized camera_T_target rotation diff"); + for (int i = 0; i < static_cast(match_sets.size()); ++i) { + const Eigen::Isometry3d& initial_camera_T_target = match_sets[i].pose_estimate; + const Eigen::Isometry3d& optimized_camera_T_target = optimized_camera_T_targets[i]; + const double position_diff_norm = + (initial_camera_T_target.translation() - optimized_camera_T_target.translation()).norm(); + position_diff_norm_averager.Update(position_diff_norm); + const Eigen::Matrix3d optimized_camera_R_initial_camera = + (optimized_camera_T_target * initial_camera_T_target.inverse()).linear(); + const double rotation_diff = std::abs(Eigen::AngleAxisd(optimized_camera_R_initial_camera).angle()); + rotation_diff_averager.Update(rotation_diff); + } + position_diff_norm_averager.Log(); + rotation_diff_averager.Log(); +} + +int ErrorColor(const double error, const double max_error, const double max_color_value) { + return std::round(std::min(error, max_error) / max_error * max_color_value); +} + +cv::Mat MapImageColors(const cv::Mat& gray_image) { + cv::Mat color_image; + cv::applyColorMap(gray_image, color_image, cv::COLORMAP_JET); + // Map white pixels back from lowest JET value (128, 0, 0) to white + cv::Mat base_mask; + cv::inRange(color_image, cv::Scalar(128, 0, 0), cv::Scalar(128, 0, 0), base_mask); + color_image.setTo(cv::Scalar(255, 255, 255), base_mask); + return color_image; +} +} // namespace calibration diff --git a/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.cc b/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.cc new file mode 100644 index 0000000000..606b6dbe48 --- /dev/null +++ b/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.cc @@ -0,0 +1,178 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; + +TEST(CameraTargetBasedIntrinsicsCalibratorTester, EvenlySpacedTargetsIdentityDistortionWithNoise) { + auto params = ca::DefaultCameraTargetBasedIntrinsicsCalibratorParams(); + params.calibrate_distortion = false; + params.calibrate_target_poses = false; + const int num_target_points_per_row_and_col = 10; + const int num_pose_rows = 3; + const int num_pose_cols = 5; + const int num_pose_y_levels = 2; + const double focal_lengths_stddev = 1.0; + const double principal_points_stddev = 1.0; + const double distortion_stddev = 0; + for (int i = 0; i < 10; ++i) { + const Eigen::Matrix3d intrinsics = lc::RandomIntrinsics(); + ca::StateParameters true_state_parameters; + true_state_parameters.focal_lengths = lc::FocalLengths(intrinsics); + true_state_parameters.principal_points = lc::PrincipalPoints(intrinsics); + true_state_parameters.distortion = Eigen::VectorXd(1); + true_state_parameters.distortion[0] = 0.0; + const auto noisy_state_parameters = ca::AddNoiseToStateParameters(true_state_parameters, focal_lengths_stddev, + principal_points_stddev, distortion_stddev); + ca::StateParameters calibrated_state_parameters; + const auto match_sets = ca::EvenlySpacedTargetMatchSets( + num_pose_rows, num_pose_cols, num_pose_y_levels, num_target_points_per_row_and_col, intrinsics); + ca::CameraTargetBasedIntrinsicsCalibrator calibrator(params); + ca::StateParametersCovariances covariances; + ASSERT_TRUE(calibrator.Calibrate(match_sets, noisy_state_parameters, calibrated_state_parameters, covariances)); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.focal_lengths.matrix(), + calibrated_state_parameters.focal_lengths.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.principal_points.matrix(), + calibrated_state_parameters.principal_points.matrix()); + } +} + +TEST(CameraTargetBasedIntrinsicsCalibratorTester, EvenlySpacedTargetsFovDistortionWithNoise) { + auto params = ca::DefaultCameraTargetBasedIntrinsicsCalibratorParams(); + params.calibrate_target_poses = false; + const int num_target_points_per_row_and_col = 10; + const int num_pose_rows = 3; + const int num_pose_cols = 5; + const int num_pose_y_levels = 2; + const double focal_lengths_stddev = 1.0; + const double principal_points_stddev = 1.0; + const double distortion_stddev = 0.05; + for (int i = 0; i < 10; ++i) { + const Eigen::Matrix3d intrinsics = lc::RandomIntrinsics(); + ca::StateParameters true_state_parameters; + true_state_parameters.focal_lengths = lc::FocalLengths(intrinsics); + true_state_parameters.principal_points = lc::PrincipalPoints(intrinsics); + true_state_parameters.distortion = ca::RandomFovDistortion(); + const auto noisy_state_parameters = ca::AddNoiseToStateParameters(true_state_parameters, focal_lengths_stddev, + principal_points_stddev, distortion_stddev); + ca::StateParameters calibrated_state_parameters; + const auto match_sets = ca::EvenlySpacedTargetMatchSets( + num_pose_rows, num_pose_cols, num_pose_y_levels, num_target_points_per_row_and_col, intrinsics, + true_state_parameters.distortion); + ca::CameraTargetBasedIntrinsicsCalibrator calibrator(params); + ca::StateParametersCovariances covariances; + ASSERT_TRUE(calibrator.Calibrate(match_sets, noisy_state_parameters, calibrated_state_parameters, covariances)); + 
ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.focal_lengths.matrix(), + calibrated_state_parameters.focal_lengths.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.principal_points.matrix(), + calibrated_state_parameters.principal_points.matrix()); + // Use absolute value for Fov distortion comparison since positive and negative values have same meaning + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.distortion.cwiseAbs().matrix(), + calibrated_state_parameters.distortion.cwiseAbs().matrix()); + } +} + +TEST(CameraTargetBasedIntrinsicsCalibratorTester, EvenlySpacedTargetsRadDistortionWithNoise) { + auto params = ca::DefaultCameraTargetBasedIntrinsicsCalibratorParams(); + params.calibrate_target_poses = false; + const int num_target_points_per_row_and_col = 10; + const int num_pose_rows = 3; + const int num_pose_cols = 5; + const int num_pose_y_levels = 2; + const double focal_lengths_stddev = 1.0; + const double principal_points_stddev = 1.0; + const double distortion_stddev = 0.05; + for (int i = 0; i < 10; ++i) { + const Eigen::Matrix3d intrinsics = lc::RandomIntrinsics(); + ca::StateParameters true_state_parameters; + true_state_parameters.focal_lengths = lc::FocalLengths(intrinsics); + true_state_parameters.principal_points = lc::PrincipalPoints(intrinsics); + true_state_parameters.distortion = ca::RandomRadDistortion(); + const auto noisy_state_parameters = ca::AddNoiseToStateParameters(true_state_parameters, focal_lengths_stddev, + principal_points_stddev, distortion_stddev); + ca::StateParameters calibrated_state_parameters; + const auto match_sets = ca::EvenlySpacedTargetMatchSets( + num_pose_rows, num_pose_cols, num_pose_y_levels, num_target_points_per_row_and_col, intrinsics, + true_state_parameters.distortion); + ca::CameraTargetBasedIntrinsicsCalibrator calibrator(params); + ca::StateParametersCovariances covariances; + ASSERT_TRUE(calibrator.Calibrate(match_sets, noisy_state_parameters, calibrated_state_parameters, covariances)); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.focal_lengths.matrix(), + calibrated_state_parameters.focal_lengths.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.principal_points.matrix(), + calibrated_state_parameters.principal_points.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.distortion.matrix(), + calibrated_state_parameters.distortion.matrix()); + } +} + +TEST(CameraTargetBasedIntrinsicsCalibratorTester, EvenlySpacedTargetsRadTanDistortionWithNoise) { + auto params = ca::DefaultCameraTargetBasedIntrinsicsCalibratorParams(); + params.calibrate_target_poses = false; + const int num_target_points_per_row_and_col = 10; + const int num_pose_rows = 3; + const int num_pose_cols = 5; + const int num_pose_y_levels = 2; + const double focal_lengths_stddev = 1.0; + const double principal_points_stddev = 1.0; + const double distortion_stddev = 0.05; + for (int i = 0; i < 10; ++i) { + const Eigen::Matrix3d intrinsics = lc::RandomIntrinsics(); + ca::StateParameters true_state_parameters; + true_state_parameters.focal_lengths = lc::FocalLengths(intrinsics); + true_state_parameters.principal_points = lc::PrincipalPoints(intrinsics); + true_state_parameters.distortion = ca::RandomRadTanDistortion(); + const auto noisy_state_parameters = ca::AddNoiseToStateParameters(true_state_parameters, focal_lengths_stddev, + principal_points_stddev, distortion_stddev); + ca::StateParameters calibrated_state_parameters; + const auto match_sets = 
ca::EvenlySpacedTargetMatchSets( + num_pose_rows, num_pose_cols, num_pose_y_levels, num_target_points_per_row_and_col, intrinsics, + true_state_parameters.distortion); + ca::CameraTargetBasedIntrinsicsCalibrator calibrator(params); + ca::StateParametersCovariances covariances; + ASSERT_TRUE(calibrator.Calibrate(match_sets, noisy_state_parameters, calibrated_state_parameters, covariances)); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.focal_lengths.matrix(), + calibrated_state_parameters.focal_lengths.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.principal_points.matrix(), + calibrated_state_parameters.principal_points.matrix()); + ASSERT_PRED2(lc::MatrixEquality<2>, true_state_parameters.distortion.matrix(), + calibrated_state_parameters.distortion.matrix()); + } +} +// TODO(rsoussan): Add test with EstimateTargetPoseAndCalibrateIntrinsics once pnp issues are resolved + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.test b/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.test new file mode 100644 index 0000000000..4ed12db913 --- /dev/null +++ b/tools/calibration/test/test_camera_target_based_intrinsics_calibrator.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/tools/calibration/test/test_camera_utilities.cc b/tools/calibration/test/test_camera_utilities.cc new file mode 100644 index 0000000000..11b7030dcc --- /dev/null +++ b/tools/calibration/test/test_camera_utilities.cc @@ -0,0 +1,74 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT + +#include +#include +#include +#include +#include + +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; +TEST(CameraUtilitiesTester, Inliers) { + const auto params = ca::DefaultReprojectionPoseEstimateParams(); + const double inlier_threshold = 3.0; + const int num_desired_points = 20; + for (int i = 0; i < 500; ++i) { + const auto correspondences = ca::RegistrationCorrespondences( + ca::RandomFrontFacingPose(), lc::RandomIntrinsics(), ca::RandomFrontFacingPoints(num_desired_points)); + const int num_points = static_cast(correspondences.correspondences().size()); + std::vector noisy_image_points; + std::unordered_set noisy_point_indices; + std::unordered_set inlier_point_indices; + for (int j = 0; j < num_points; ++j) { + const auto& image_point = correspondences.correspondences().image_points[j]; + const bool add_noise = lc::RandomBool(); + if (add_noise) { + const double noise = inlier_threshold + lc::RandomPositiveDouble(); + const Eigen::Vector2d noisy_image_point = + lc::RandomBool() ? image_point + Eigen::Vector2d(noise, 0) : image_point + Eigen::Vector2d(0, noise); + noisy_image_points.emplace_back(noisy_image_point); + noisy_point_indices.emplace(j); + } else { + noisy_image_points.emplace_back(image_point); + inlier_point_indices.emplace(j); + } + } + std::vector inliers; + const int num_inliers = ca::Inliers( + noisy_image_points, correspondences.correspondences().points_3d, correspondences.intrinsics(), Eigen::VectorXd(1), + correspondences.camera_T_target(), inlier_threshold, inliers); + ASSERT_EQ(num_points, inlier_point_indices.size() + noisy_point_indices.size()); + ASSERT_EQ(num_inliers, num_points - noisy_point_indices.size()); + for (const auto inlier_index : inliers) { + ASSERT_GT(inlier_point_indices.count(inlier_index), 0); + ASSERT_EQ(noisy_point_indices.count(inlier_index), 0); + } + } +} + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/calibration/test/test_camera_utilities.test b/tools/calibration/test/test_camera_utilities.test new file mode 100644 index 0000000000..70780045b8 --- /dev/null +++ b/tools/calibration/test/test_camera_utilities.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/tools/calibration/test/test_pnp.cc b/tools/calibration/test/test_pnp.cc new file mode 100644 index 0000000000..7dd51ba588 --- /dev/null +++ b/tools/calibration/test/test_pnp.cc @@ -0,0 +1,74 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT + +#include +#include +#include +#include +#include + +#include + +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; +// TODO(rsoussan): Put back ransacpnp tests once pnp issues are resolved +/*TEST(RansacPnPTester, RansacPnP) { + Eigen::Isometry3d pose = Eigen::Isometry3d::Identity(); + pose.translation().z() = 3; + Eigen::Matrix3d intrinsics = Eigen::Matrix3d::Identity(); + intrinsics(0, 0) = 500; + intrinsics(1, 1) = 500; + intrinsics(0, 2) = 500; + intrinsics(1, 2) = 500; + for (int i = 0; i < 500; ++i) { + const auto correspondences = ca::RegistrationCorrespondences(pose, intrinsics, ca::TargetPoints(2, 2)); + std::vector image_points_cv; + for (const auto& image_point : correspondences.correspondences().image_points) { + image_points_cv.emplace_back(cv::Point2d(image_point.x(), image_point.y())); + } + + std::vector points_3d_cv; + for (const auto& point_3d : correspondences.correspondences().points_3d) { + points_3d_cv.emplace_back(point_3d.x(), point_3d.y(), point_3d.z()); + } + + cv::Mat intrinsics_cv; + cv::eigen2cv(correspondences.intrinsics(), intrinsics_cv); + cv::Mat rodrigues_rotation_cv(3, 1, cv::DataType::type, cv::Scalar(0)); + cv::Mat translation_cv(3, 1, cv::DataType::type, cv::Scalar(0)); + ca::UndistortedPnP(image_points_cv, points_3d_cv, intrinsics_cv, cv::SOLVEPNP_ITERATIVE, rodrigues_rotation_cv, + translation_cv); + const Eigen::Isometry3d pose_estimate = ca::Isometry3d(rodrigues_rotation_cv, translation_cv); + LogError("true pose: " << std::endl << correspondences.camera_T_target().matrix()); + LogError("pnp pose: " << std::endl << pose_estimate.matrix()); + // TODO(rsoussan): Decrease tolerance once cv::solvePnP issues are resolved + constexpr double tolerance = 1e-5; + ASSERT_TRUE(pose_estimate.matrix().isApprox(correspondences.camera_T_target().matrix(), tolerance)); + } +}*/ + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/calibration/test/test_pnp.test b/tools/calibration/test/test_pnp.test new file mode 100644 index 0000000000..cc82d9b80d --- /dev/null +++ b/tools/calibration/test/test_pnp.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/tools/calibration/test/test_ransac_pnp.cc b/tools/calibration/test/test_ransac_pnp.cc new file mode 100644 index 0000000000..cd8e3638c7 --- /dev/null +++ b/tools/calibration/test/test_ransac_pnp.cc @@ -0,0 +1,62 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT + +#include +#include +#include +#include +#include + +#include + +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; + +// TODO(rsoussan): Put back RansacPnP tests once pnp issues are resolved +/*TEST(RansacPnPTester, EvenlySpacedTargetsIdentityDistortionWithNoise) { + const auto params = ca::DefaultRansacPnPParams(); + const int num_rows = 5; + const int num_cols = 5; + const int num_y_levels = 5; + const auto target_poses = ca::EvenlySpacedTargetPoses(num_rows, num_cols, num_y_levels); + const auto target_points = ca::TargetPoints(10, 10); + std::vector initial_inliers(target_points.size()); + // Fill inliers with all indices + std::iota(initial_inliers.begin(), initial_inliers.end(), 0); + for (int i = 0; i < target_poses.size(); ++i) { + const auto correspondences = + ca::RegistrationCorrespondences(target_poses[i], lc::RandomIntrinsics(), target_points); + const auto pose_estimate = ca::RansacPnP( + correspondences.correspondences().image_points, correspondences.correspondences().points_3d, + correspondences.intrinsics(), Eigen::VectorXd(), params); + ASSERT_TRUE(pose_estimate != boost::none); + ASSERT_PRED2(lc::MatrixEquality<2>, pose_estimate->pose.matrix(), correspondences.camera_T_target().matrix()); + ASSERT_TRUE(pose_estimate->inliers.size() == correspondences.correspondences().size()); + } +}*/ + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/calibration/test/test_ransac_pnp.test b/tools/calibration/test/test_ransac_pnp.test new file mode 100644 index 0000000000..859d4e2f11 --- /dev/null +++ b/tools/calibration/test/test_ransac_pnp.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/tools/calibration/test/test_reprojection_pose_estimate.cc b/tools/calibration/test/test_reprojection_pose_estimate.cc new file mode 100644 index 0000000000..424b11e307 --- /dev/null +++ b/tools/calibration/test/test_reprojection_pose_estimate.cc @@ -0,0 +1,147 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include "test_utilities.h" // NOLINT + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; +TEST(ReprojectionPoseEstimateTester, EvenlySpacedTargetsIdentityDistortionWithNoise) { + const auto params = ca::DefaultReprojectionPoseEstimateParams(); + const int num_rows = 5; + const int num_cols = 5; + const int num_y_levels = 5; + const auto target_poses = ca::EvenlySpacedTargetPoses(num_rows, num_cols, num_y_levels); + const auto target_points = ca::TargetPoints(10, 10); + std::vector initial_inliers(target_points.size()); + // Fill inliers with all indices + std::iota(initial_inliers.begin(), initial_inliers.end(), 0); + const double initial_estimate_translation_noise = 0.1; + const double initial_estimate_rotation_noise = 0.1; + for (int i = 0; i < static_cast(target_poses.size()); ++i) { + const auto correspondences = + ca::RegistrationCorrespondences(target_poses[i], lc::RandomIntrinsics(), target_points); + const Eigen::Isometry3d noisy_initial_estimate = lc::AddNoiseToIsometry3d( + correspondences.camera_T_target(), initial_estimate_translation_noise, initial_estimate_rotation_noise); + const auto pose_estimate = ca::ReprojectionPoseEstimateWithInitialEstimate( + correspondences.correspondences().image_points, correspondences.correspondences().points_3d, + correspondences.intrinsics(), Eigen::VectorXd(1), params, noisy_initial_estimate, initial_inliers); + ASSERT_TRUE(pose_estimate != boost::none); + ASSERT_PRED2(lc::MatrixEquality<4>, pose_estimate->pose.matrix(), correspondences.camera_T_target().matrix()); + ASSERT_TRUE(pose_estimate->inliers.size() == target_points.size()); + } +} + +TEST(ReprojectionPoseEstimateTester, EvenlySpacedTargetsFovDistortionWithNoise) { + const auto params = ca::DefaultReprojectionPoseEstimateParams(); + const int num_rows = 5; + const int num_cols = 5; + const int num_y_levels = 5; + const auto target_poses = ca::EvenlySpacedTargetPoses(num_rows, num_cols, num_y_levels); + const auto target_points = ca::TargetPoints(10, 10); + std::vector initial_inliers(target_points.size()); + // Fill inliers with all indices + std::iota(initial_inliers.begin(), initial_inliers.end(), 0); + const double initial_estimate_translation_noise = 0.1; + const double initial_estimate_rotation_noise = 0.1; + for (int i = 0; i < static_cast(target_poses.size()); ++i) { + const auto distortion = ca::RandomFovDistortion(); + const auto correspondences = ca::RegistrationCorrespondences( + target_poses[i], lc::RandomIntrinsics(), target_points, distortion); + const Eigen::Isometry3d noisy_initial_estimate = lc::AddNoiseToIsometry3d( + correspondences.camera_T_target(), initial_estimate_translation_noise, initial_estimate_rotation_noise); + const auto pose_estimate = ca::ReprojectionPoseEstimateWithInitialEstimate( + correspondences.correspondences().image_points, correspondences.correspondences().points_3d, + correspondences.intrinsics(), distortion, params, noisy_initial_estimate, initial_inliers); + ASSERT_TRUE(pose_estimate != boost::none); + ASSERT_PRED2(lc::MatrixEquality<4>, pose_estimate->pose.matrix(), correspondences.camera_T_target().matrix()); + ASSERT_TRUE(pose_estimate->inliers.size() == target_points.size()); + } +} + +TEST(ReprojectionPoseEstimateTester, EvenlySpacedTargetsRadDistortionWithNoise) { + const auto params = ca::DefaultReprojectionPoseEstimateParams(); + const int num_rows = 5; + const int num_cols = 5; 
+ const int num_y_levels = 5; + const auto target_poses = ca::EvenlySpacedTargetPoses(num_rows, num_cols, num_y_levels); + const auto target_points = ca::TargetPoints(10, 10); + std::vector initial_inliers(target_points.size()); + // Fill inliers with all indices + std::iota(initial_inliers.begin(), initial_inliers.end(), 0); + const double initial_estimate_translation_noise = 0.1; + const double initial_estimate_rotation_noise = 0.1; + for (int i = 0; i < static_cast(target_poses.size()); ++i) { + const auto distortion = ca::RandomRadDistortion(); + const auto correspondences = ca::RegistrationCorrespondences( + target_poses[i], lc::RandomIntrinsics(), target_points, distortion); + const Eigen::Isometry3d noisy_initial_estimate = lc::AddNoiseToIsometry3d( + correspondences.camera_T_target(), initial_estimate_translation_noise, initial_estimate_rotation_noise); + const auto pose_estimate = ca::ReprojectionPoseEstimateWithInitialEstimate( + correspondences.correspondences().image_points, correspondences.correspondences().points_3d, + correspondences.intrinsics(), distortion, params, noisy_initial_estimate, initial_inliers); + ASSERT_TRUE(pose_estimate != boost::none); + ASSERT_PRED2(lc::MatrixEquality<4>, pose_estimate->pose.matrix(), correspondences.camera_T_target().matrix()); + ASSERT_TRUE(pose_estimate->inliers.size() == target_points.size()); + } +} + +TEST(ReprojectionPoseEstimateTester, EvenlySpacedTargetsRadTanDistortionWithNoise) { + const auto params = ca::DefaultReprojectionPoseEstimateParams(); + const int num_rows = 5; + const int num_cols = 5; + const int num_y_levels = 5; + const auto target_poses = ca::EvenlySpacedTargetPoses(num_rows, num_cols, num_y_levels); + const auto target_points = ca::TargetPoints(10, 10); + std::vector initial_inliers(target_points.size()); + // Fill inliers with all indices + std::iota(initial_inliers.begin(), initial_inliers.end(), 0); + const double initial_estimate_translation_noise = 0.1; + const double initial_estimate_rotation_noise = 0.1; + for (int i = 0; i < static_cast(target_poses.size()); ++i) { + const auto distortion = ca::RandomRadTanDistortion(); + const auto correspondences = ca::RegistrationCorrespondences( + target_poses[i], lc::RandomIntrinsics(), target_points, distortion); + const Eigen::Isometry3d noisy_initial_estimate = lc::AddNoiseToIsometry3d( + correspondences.camera_T_target(), initial_estimate_translation_noise, initial_estimate_rotation_noise); + const auto pose_estimate = ca::ReprojectionPoseEstimateWithInitialEstimate( + correspondences.correspondences().image_points, correspondences.correspondences().points_3d, + correspondences.intrinsics(), distortion, params, noisy_initial_estimate, initial_inliers); + ASSERT_TRUE(pose_estimate != boost::none); + ASSERT_PRED2(lc::MatrixEquality<4>, pose_estimate->pose.matrix(), correspondences.camera_T_target().matrix()); + ASSERT_TRUE(pose_estimate->inliers.size() == target_points.size()); + } +} +// TODO(rsoussan): Add test with ReprojectionPoseEstimate without initial estimate once pnp issues are resolved + +// Run all the tests that were declared with TEST() +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tools/calibration/test/test_reprojection_pose_estimate.test b/tools/calibration/test/test_reprojection_pose_estimate.test new file mode 100644 index 0000000000..a20fc646e9 --- /dev/null +++ b/tools/calibration/test/test_reprojection_pose_estimate.test @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + 
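The four reprojection tests above all end in the same ASSERT_PRED2 check against lc::MatrixEquality. As a reading aid, the following is a minimal, self-contained sketch of what such a digit-tolerance matrix predicate can look like; the helper name ApproxMatrixEquality and the 10^-N tolerance convention are illustrative assumptions, not the localization_common API.

// Minimal sketch (not the localization_common implementation): a GoogleTest-style
// binary predicate that checks two Eigen matrices agree element-wise to within
// 10^-TolerancePower, mirroring how lc::MatrixEquality<N> is used with
// ASSERT_PRED2 in the tests above.
#include <cmath>
#include <Eigen/Core>

template <int TolerancePower>
bool ApproxMatrixEquality(const Eigen::MatrixXd& lhs, const Eigen::MatrixXd& rhs) {
  // Shape mismatch means the matrices cannot be equal.
  if (lhs.rows() != rhs.rows() || lhs.cols() != rhs.cols()) return false;
  const double tolerance = std::pow(10.0, -TolerancePower);
  // Element-wise absolute difference must stay below the tolerance everywhere.
  return ((lhs - rhs).array().abs() < tolerance).all();
}

// Example usage in a test body:
//   ASSERT_PRED2(ApproxMatrixEquality<4>, pose_estimate->pose.matrix(),
//                correspondences.camera_T_target().matrix());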
+ diff --git a/tools/calibration/test/test_utilities.cc b/tools/calibration/test/test_utilities.cc new file mode 100644 index 0000000000..49e98fa90f --- /dev/null +++ b/tools/calibration/test/test_utilities.cc @@ -0,0 +1,228 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +#include "test_utilities.h" // NOLINT + +#include +#include + +#include + +namespace calibration { +namespace lc = localization_common; + +OptimizationParams DefaultOptimizationParams() { + OptimizationParams params; + params.solver_options.linear_solver_type = ceres::ITERATIVE_SCHUR; + params.solver_options.use_explicit_schur_complement = false; + params.solver_options.max_num_iterations = 100; + params.solver_options.function_tolerance = 1e-8; + params.solver_options.parameter_tolerance = 1e-8; + params.verbose = false; + params.huber_loss = 1.345; + return params; +} + +RansacPnPParams DefaultRansacPnPParams() { + RansacPnPParams params; + params.max_inlier_threshold = 3; + params.num_iterations = 100; + params.min_num_inliers = 4; + // TODO(rsoussan): Change this to cv::SOLVEPNP_IPPE when available. 
+  // Currently cv::SOLVEPNP_P3P and cv::SOLVEPNP_ITERATIVE lead to significant errors even with perfect data
+  params.pnp_method = cv::SOLVEPNP_EPNP;
+  return params;
+}
+
+ReprojectionPoseEstimateParams DefaultReprojectionPoseEstimateParams() {
+  ReprojectionPoseEstimateParams params;
+  params.optimization = DefaultOptimizationParams();
+  params.ransac_pnp = DefaultRansacPnPParams();
+  params.optimize_estimate = true;
+  params.max_inlier_threshold = 3.0;
+  return params;
+}
+
+CameraTargetBasedIntrinsicsCalibratorParams DefaultCameraTargetBasedIntrinsicsCalibratorParams() {
+  CameraTargetBasedIntrinsicsCalibratorParams params;
+  params.optimization = DefaultOptimizationParams();
+  params.reprojection_pose_estimate = DefaultReprojectionPoseEstimateParams();
+  params.calibrate_focal_lengths = true;
+  params.calibrate_principal_points = true;
+  params.calibrate_distortion = true;
+  params.calibrate_target_poses = true;
+  params.scale_loss_radially = false;
+  params.radial_scale_power = 1.0;
+  params.only_use_inliers = true;
+  params.max_num_match_sets = 10000000;
+  params.min_num_target_inliers = 4;
+  params.save_individual_initial_reprojection_images = false;
+  params.save_final_reprojection_image = false;
+  params.max_visualization_error_norm = 50;
+  params.individual_max_visualization_error_norm = 50;
+  params.image_size = Eigen::Vector2i(1280, 960);
+  return params;
+}
+
+Eigen::VectorXd RandomFovDistortion() {
+  Eigen::VectorXd distortion(1);
+  distortion[0] = lc::RandomDouble(0.1, 0.3);
+  return distortion;
+}
+
+Eigen::VectorXd RandomRadDistortion() {
+  Eigen::VectorXd distortion(2);
+  distortion[0] = lc::RandomDouble(-0.1, 0.1);
+  distortion[1] = lc::RandomDouble(-0.1, 0.1);
+  return distortion;
+}
+
+Eigen::VectorXd RandomRadTanDistortion() {
+  Eigen::VectorXd distortion(4);
+  distortion[0] = lc::RandomDouble(-0.1, 0.1);
+  distortion[1] = lc::RandomDouble(-0.1, 0.1);
+  distortion[2] = lc::RandomDouble(0, 0.1);
+  distortion[3] = lc::RandomDouble(0, 0.1);
+  return distortion;
+}
+
+std::vector<Eigen::Vector3d> TargetPoints(const int points_per_row, const int points_per_col, const double row_spacing,
+                                          const double col_spacing) {
+  Eigen::Vector3d target_center(points_per_col * col_spacing / 2.0, points_per_row * row_spacing / 2.0, 0.0);
+  std::vector<Eigen::Vector3d> target_points;
+  for (int i = 0; i < points_per_col; ++i) {
+    for (int j = 0; j < points_per_row; ++j) {
+      // Center target points about (0,0)
+      target_points.emplace_back(Eigen::Vector3d(i * col_spacing, j * row_spacing, 0) - target_center);
+    }
+  }
+  return target_points;
+}
+
+std::vector<Eigen::Vector3d> RandomFrontFacingPoints(const int num_points) {
+  std::vector<Eigen::Vector3d> points;
+  for (int i = 0; i < num_points; ++i) {
+    points.emplace_back(RandomFrontFacingPoint());
+  }
+  return points;
+}
+
+Eigen::Vector3d RandomFrontFacingPoint() {
+  static constexpr double rho_min = 1.0;
+  static constexpr double rho_max = 3.0;
+  static constexpr double phi_min = -25.0;
+  static constexpr double phi_max = 25.0;
+  static constexpr double z_rho_scale = 0.5;
+  return RandomFrontFacingPoint(rho_min, rho_max, phi_min, phi_max, z_rho_scale);
+}
+
+Eigen::Vector3d RandomFrontFacingPoint(const double rho_min, const double rho_max, const double phi_min,
+                                       const double phi_max, const double z_rho_scale) {
+  const double rho = lc::RandomDouble(rho_min, rho_max);
+  const double phi = lc::RandomDouble(phi_min, phi_max);
+  const double z = lc::RandomDouble(-1.0 * z_rho_scale * rho, z_rho_scale * rho);
+  // Z and x are swapped so z defines distance from camera rather than height
+  const
Eigen::Vector3d tmp = lc::CylindricalToCartesian(Eigen::Vector3d(rho, phi, z)); + const Eigen::Vector3d random_point(tmp.z(), tmp.y(), tmp.x()); + return random_point; +} + +Eigen::Isometry3d RandomFrontFacingPose() { + static constexpr double rho_min = 1.0; + static constexpr double rho_max = 3.0; + static constexpr double phi_min = -25.0; + static constexpr double phi_max = 25.0; + static constexpr double z_rho_scale = 0.5; + + // Pitch acts like yaw since z axis points outwards in camera frame + static constexpr double yaw_min = -10.0; + static constexpr double yaw_max = 10.0; + static constexpr double pitch_min = -45; + static constexpr double pitch_max = 45; + static constexpr double roll_min = -10; + static constexpr double roll_max = 10; + + return RandomFrontFacingPose(rho_min, rho_max, phi_min, phi_max, z_rho_scale, yaw_min, yaw_max, pitch_min, pitch_max, + roll_min, roll_max); +} + +Eigen::Isometry3d RandomFrontFacingPose(const double rho_min, const double rho_max, const double phi_min, + const double phi_max, const double z_rho_scale, const double yaw_min, + const double yaw_max, const double pitch_min, const double pitch_max, + const double roll_min, const double roll_max) { + const Eigen::Vector3d translation = RandomFrontFacingPoint(rho_min, rho_max, phi_min, phi_max, z_rho_scale); + const double yaw = lc::RandomDouble(yaw_min, yaw_max); + const double pitch = lc::RandomDouble(pitch_min, pitch_max); + const double roll = lc::RandomDouble(roll_min, roll_max); + const Eigen::Matrix3d rotation = lc::RotationFromEulerAngles(yaw, pitch, roll); + return lc::Isometry3d(translation, rotation); +} + +std::vector EvenlySpacedTargetPoses(const int num_rows, const int num_cols, const int num_y_levels) { + // Pitch acts like yaw since z axis points out of camera frame + const lc::Sampler pitch_sampler(-15.0, 15.0, num_cols); + const lc::Sampler roll_sampler(-15.0, 15.0, num_cols); + const lc::Sampler yaw_sampler(-15.0, 15.0, num_cols); + + // Cylindrical coordinates for translation + const lc::Sampler rho_sampler(1.0, 3.0, num_rows); + constexpr double phi = 0.0; + + // Use smaller y_rho scale factor since image is shorter than it is wide + constexpr double y_rho_scale = 0.235; + constexpr double z_rho_scale = 0.5; + + std::vector poses; + for (int i = 0; i < num_rows; ++i) { + const double rho = rho_sampler.Sample(i); + const lc::Sampler z_sampler(-1.0 * rho * z_rho_scale, rho * z_rho_scale, num_cols); + for (int j = 0; j < num_cols; ++j) { + const double pitch = pitch_sampler.Sample(j); + const double roll = roll_sampler.Sample(j); + const double yaw = yaw_sampler.Sample(j); + const double z = z_sampler.Sample(j); + // Z and x are swapped so z defines distance from camera rather than height + const Eigen::Vector3d tmp = lc::CylindricalToCartesian(Eigen::Vector3d(rho, phi, z)); + const Eigen::Matrix3d rotation = lc::RotationFromEulerAngles(yaw, pitch, roll); + const lc::Sampler y_sampler(-1.0 * rho * y_rho_scale, rho * y_rho_scale, num_y_levels); + for (int k = 0; k < num_y_levels; ++k) { + const double y = y_sampler.Sample(k); + const Eigen::Vector3d translation(tmp.z(), y, tmp.x()); + poses.emplace_back(lc::Isometry3d(translation, rotation)); + } + } + } + + return poses; +} + +StateParameters AddNoiseToStateParameters(const StateParameters& state_parameters, const double focal_lengths_stddev, + const double principal_points_stddev, const double distortion_stddev, + const bool fov) { + StateParameters noisy_state_parameters; + noisy_state_parameters.focal_lengths = 
lc::AddNoiseToVector(state_parameters.focal_lengths, focal_lengths_stddev);
+  noisy_state_parameters.principal_points =
+    lc::AddNoiseToVector(state_parameters.principal_points, principal_points_stddev);
+  noisy_state_parameters.distortion = lc::AddNoiseToVector(state_parameters.distortion, distortion_stddev);
+  if (fov) {
+    // Close to zero fov values cause issues for solver, neg values are same as positive values
+    noisy_state_parameters.distortion[0] = std::max(0.1, noisy_state_parameters.distortion[0]);
+  }
+  return noisy_state_parameters;
+}
+}  // namespace calibration
diff --git a/tools/calibration/test/test_utilities.h b/tools/calibration/test/test_utilities.h
new file mode 100644
index 0000000000..7d2d6f1dbc
--- /dev/null
+++ b/tools/calibration/test/test_utilities.h
@@ -0,0 +1,167 @@
+/* Copyright (c) 2017, United States Government, as represented by the
+ * Administrator of the National Aeronautics and Space Administration.
+ *
+ * All rights reserved.
+ *
+ * The Astrobee platform is licensed under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+#ifndef CALIBRATION_TEST_UTILITIES_H_  // NOLINT
+#define CALIBRATION_TEST_UTILITIES_H_  // NOLINT
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+namespace calibration {
+OptimizationParams DefaultOptimizationParams();
+
+RansacPnPParams DefaultRansacPnPParams();
+
+ReprojectionPoseEstimateParams DefaultReprojectionPoseEstimateParams();
+
+CameraTargetBasedIntrinsicsCalibratorParams DefaultCameraTargetBasedIntrinsicsCalibratorParams();
+
+Eigen::VectorXd RandomFovDistortion();
+
+Eigen::VectorXd RandomRadDistortion();
+
+Eigen::VectorXd RandomRadTanDistortion();
+
+// Samples in cylindrical coordinates for pose translation to keep pose in view frustum.
+// Samples z using scaled rho value to prevent large z vals with small rho values
+// that may move the pose out of the view frustum.
+Eigen::Isometry3d RandomFrontFacingPose(const double rho_min, const double rho_max, const double phi_min,
+                                        const double phi_max, const double z_rho_scale, const double yaw_min,
+                                        const double yaw_max, const double pitch_min, const double pitch_max,
+                                        const double roll_min, const double roll_max);
+
+Eigen::Isometry3d RandomFrontFacingPose();
+
+// Spaced out poses for targets which when projected into image space cover
+// the image well with target points.
Poses are sampled for each row/col combination +// and evenly spaced in cylindrical coordinates +std::vector EvenlySpacedTargetPoses(const int num_rows = 3, const int num_cols = 5, + const int num_y_levels = 2); + +std::vector TargetPoints(const int points_per_row, const int points_per_col, + const double row_spacing = 0.1, const double col_spacing = 0.1); + +std::vector RandomFrontFacingPoints(const int num_points); + +Eigen::Vector3d RandomFrontFacingPoint(); + +Eigen::Vector3d RandomFrontFacingPoint(const double rho_min, const double rho_max, const double phi_min, + const double phi_max, const double z_rho_scale); + +template +std::vector RandomTargetMatchSets(const int num_match_sets, const int num_target_points_per_row_and_col, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion = Eigen::VectorXd()); + +template +std::vector EvenlySpacedTargetMatchSets(const int num_pose_rows, const int num_pose_cols, + const int num_pose_y_levels, + const int num_target_points_per_row_and_col, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion = Eigen::VectorXd()); + +template +class RegistrationCorrespondences { + public: + RegistrationCorrespondences(const Eigen::Isometry3d& camera_T_target, const Eigen::Matrix3d& intrinsics, + const std::vector& target_t_target_point, + const Eigen::VectorXd& distortion = Eigen::VectorXd()); + + const localization_common::ImageCorrespondences& correspondences() const { return correspondences_; } + + const Eigen::Isometry3d& camera_T_target() const { return camera_T_target_; } + + const Eigen::Matrix3d& intrinsics() const { return intrinsics_; } + + private: + localization_common::ImageCorrespondences correspondences_; + Eigen::Isometry3d camera_T_target_; + Eigen::Matrix3d intrinsics_; +}; + +template +RegistrationCorrespondences::RegistrationCorrespondences( + const Eigen::Isometry3d& camera_T_target, const Eigen::Matrix3d& intrinsics, + const std::vector& target_t_target_points, const Eigen::VectorXd& distortion) + : camera_T_target_(camera_T_target), intrinsics_(intrinsics) { + for (const auto& target_t_target_point : target_t_target_points) { + const Eigen::Vector3d camera_t_target_point = camera_T_target_ * target_t_target_point; + if (camera_t_target_point.z() <= 0) continue; + const Eigen::Vector2d image_point = + Project3dPointToImageSpaceWithDistortion(camera_t_target_point, intrinsics_, distortion); + correspondences_.AddCorrespondence(image_point, target_t_target_point); + } +} + +template +std::vector RandomTargetMatchSets(const int num_match_sets, const int num_target_points_per_row_and_col, + const Eigen::Matrix3d& intrinsics, const Eigen::VectorXd& distortion) { + std::vector match_sets; + match_sets.reserve(num_match_sets); + for (int i = 0; i < num_match_sets; ++i) { + const auto correspondences = RegistrationCorrespondences( + RandomFrontFacingPose(), intrinsics, + TargetPoints(num_target_points_per_row_and_col, num_target_points_per_row_and_col), distortion); + // Set inliers using correspondence point size since correspondence points with negative z are not + // included in RegistrationCorrespondences + std::vector inliers(correspondences.correspondences().size()); + std::iota(inliers.begin(), inliers.end(), 0); + match_sets.emplace_back(correspondences.correspondences(), correspondences.camera_T_target(), inliers); + } + return match_sets; +} + +template +std::vector EvenlySpacedTargetMatchSets(const int num_pose_rows, const int num_pose_cols, + const int num_pose_y_levels, + const int 
num_target_points_per_row_and_col, + const Eigen::Matrix3d& intrinsics, + const Eigen::VectorXd& distortion) { + const std::vector target_poses = + EvenlySpacedTargetPoses(num_pose_rows, num_pose_cols, num_pose_y_levels); + std::vector match_sets; + const int num_match_sets = target_poses.size(); + match_sets.reserve(num_match_sets); + const std::vector target_points = + TargetPoints(num_target_points_per_row_and_col, num_target_points_per_row_and_col); + for (int i = 0; i < num_match_sets; ++i) { + const auto correspondences = + RegistrationCorrespondences(target_poses[i], intrinsics, target_points, distortion); + // Set inliers using correspondence point size since correspondence points with negative z are not + // included in RegistrationCorrespondences + std::vector inliers(correspondences.correspondences().size()); + std::iota(inliers.begin(), inliers.end(), 0); + match_sets.emplace_back(correspondences.correspondences(), correspondences.camera_T_target(), inliers); + } + return match_sets; +} + +StateParameters AddNoiseToStateParameters(const StateParameters& state_parameters, const double focal_lengths_stddev, + const double principal_points_stddev, const double distortion_stddev, + const bool fov = false); +} // namespace calibration + +#endif // CALIBRATION_TEST_UTILITIES_H_ // NOLINT diff --git a/tools/calibration/tools/create_undistorted_images.cc b/tools/calibration/tools/create_undistorted_images.cc new file mode 100644 index 0000000000..f577994711 --- /dev/null +++ b/tools/calibration/tools/create_undistorted_images.cc @@ -0,0 +1,133 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include + +namespace lc = localization_common; +namespace oc = optimization_common; +namespace po = boost::program_options; +namespace fs = boost::filesystem; + +int main(int argc, char** argv) { + std::string robot_config_file; + std::string world; + std::string output_undistorted_images_directory; + po::options_description desc( + "Creates undistorted images using the intrinsics and distortion params of the selected camera."); + desc.add_options()("help", "produce help message")("distorted-images-directory", po::value()->required(), + "Distorted images directory")( + "distortion-type", po::value()->required(), "fov, rad, or radtan.")( + "camera-name", po::value()->required(), "Selected camera name from robot config file.")( + "config-path", po::value()->required(), "Config path.")( + "output-directory,o", + po::value(&output_undistorted_images_directory)->default_value("undistorted_images"), + "Output directory for the undistorted images.")( + "robot-config-file,r", po::value(&robot_config_file)->default_value("config/robots/bumble.config"), + "Robot config file")("world,w", po::value(&world)->default_value("iss"), "World name."); + po::positional_options_description p; + p.add("distorted-images-directory", 1); + p.add("distortion-type", 1); + p.add("camera-name", 1); + p.add("config-path", 1); + po::variables_map vm; + try { + po::store(po::command_line_parser(argc, argv).options(desc).positional(p).run(), vm); + } catch (std::exception& e) { + std::cerr << "Error: " << e.what() << "\n"; + return 1; + } + + if (vm.count("help")) { + std::cout << desc << "\n"; + return 1; + } + + try { + po::notify(vm); + } catch (std::exception& e) { + std::cerr << "Error: " << e.what() << "\n"; + return 1; + } + + const std::string distorted_images_directory = vm["distorted-images-directory"].as(); + const std::string distortion_type = vm["distortion-type"].as(); + const std::string camera_name = vm["camera-name"].as(); + const std::string config_path = vm["config-path"].as(); + + // Only pass program name to free flyer so that boost command line options + // are ignored when parsing gflags. 
+ int ff_argc = 1; + ff_common::InitFreeFlyerApplication(&ff_argc, &argv); + + lc::SetEnvironmentConfigs(config_path, world, robot_config_file); + config_reader::ConfigReader config; + config.AddFile("geometry.config"); + config.AddFile("cameras.config"); + config.AddFile("tools/camera_target_based_intrinsics_calibrator.config"); + if (!config.ReadFiles()) { + LogFatal("Failed to read config files."); + } + + if (!fs::is_directory(distorted_images_directory)) { + LogFatal("Distorted images directory " << distorted_images_directory << " not found."); + } + if (fs::is_directory(output_undistorted_images_directory)) { + LogFatal("Output undistorted images directory " << output_undistorted_images_directory << " already exists."); + } + fs::create_directory(output_undistorted_images_directory); + + const camera::CameraParameters camera_params(&config, camera_name.c_str()); + std::vector image_file_names; + const std::string images_file_path = (fs::path(distorted_images_directory) / fs::path("*.jpg")).string(); + cv::glob(images_file_path, image_file_names, false); + const Eigen::Matrix3d intrinsics = camera_params.GetIntrinsicMatrix(); + const Eigen::VectorXd distortion_params = camera_params.GetDistortion(); + for (const auto& image_file_name : image_file_names) { + const cv::Mat distorted_image = cv::imread(image_file_name); + cv::Mat undistorted_image; + // TODO(rsoussan): Avoid checking distortion param value/recreating distortion object each time + if (distortion_type == "fov") { + oc::FovDistorter distorter; + undistorted_image = distorter.Undistort(distorted_image, intrinsics, distortion_params); + } else if (distortion_type == "rad") { + oc::RadDistorter distorter; + undistorted_image = distorter.Undistort(distorted_image, intrinsics, distortion_params); + } else if (distortion_type == "radtan") { + oc::RadTanDistorter distorter; + undistorted_image = distorter.Undistort(distorted_image, intrinsics, distortion_params); + } else { + LogFatal("Invalid distortion type provided."); + } + const fs::path image_filepath(image_file_name); + const std::string undistorted_image_filename = + (fs::path(output_undistorted_images_directory) / image_filepath.filename()).string(); + cv::imwrite(undistorted_image_filename, undistorted_image); + } +} diff --git a/tools/calibration/tools/run_camera_target_based_intrinsics_calibrator.cc b/tools/calibration/tools/run_camera_target_based_intrinsics_calibrator.cc new file mode 100644 index 0000000000..eddb0e718a --- /dev/null +++ b/tools/calibration/tools/run_camera_target_based_intrinsics_calibrator.cc @@ -0,0 +1,191 @@ +/* Copyright (c) 2017, United States Government, as represented by the + * Administrator of the National Aeronautics and Space Administration. + * + * All rights reserved. + * + * The Astrobee platform is licensed under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace ca = calibration; +namespace lc = localization_common; +namespace oc = optimization_common; +namespace po = boost::program_options; + +lc::ImageCorrespondences LoadTargetMatches(const std::string& match_file) { + std::vector image_points; + std::vector points_3d; + std::cout << "Reading: " << match_file << std::endl; + std::ifstream cr(match_file.c_str()); + std::string line; + while (std::getline(cr, line)) { + double val; + std::vector vals; + std::istringstream is(line); + while (is >> val) vals.push_back(val); + if (vals.size() != 5) LogFatal("Each line of " << match_file << " must have 5 entries."); + points_3d.emplace_back(Eigen::Vector3d(vals[1], vals[2], 0)); + image_points.emplace_back(Eigen::Vector2d(vals[3], vals[4])); + } + + return lc::ImageCorrespondences(image_points, points_3d); +} + +std::vector LoadAllTargetMatches(const std::string& corners_directory, + const int max_num_match_sets) { + std::vector corners_files; + std::vector all_matches; + ff_common::ListFiles(corners_directory, "txt", &corners_files); + int i = 0; + for (const auto& corners_file : corners_files) { + const auto& matches = LoadTargetMatches(corners_file); + if (matches.size() < 4) { + LogError("Too few matches, only " << matches.size() << "."); + continue; + } + all_matches.emplace_back(matches); + if (++i > max_num_match_sets) break; + } + return all_matches; +} + +void WriteCalibrationResultsToFile(const Eigen::Vector2d& focal_lengths, const Eigen::Vector2d& principal_points, + const Eigen::VectorXd& distortion, const std::string& output_filename) { + std::ofstream output_file; + output_file.open(output_filename); + output_file << focal_lengths[0] << " " << focal_lengths[1] << std::endl; + output_file << principal_points[0] << " " << principal_points[1] << std::endl; + int i = 0; + for (; i < distortion.size() - 1; ++i) { + output_file << distortion[i] << " "; + } + output_file << distortion[i]; + output_file.close(); +} + +template +bool Calibrate(const ca::RunCalibratorParams& params, const std::vector& target_matches, + const std::string& output_file) { + ca::CameraTargetBasedIntrinsicsCalibrator calibrator(params.camera_target_based_intrinsics_calibrator); + ca::StateParameters initial_state_parameters; + initial_state_parameters.focal_lengths = params.camera_params->GetFocalVector(); + initial_state_parameters.principal_points = params.camera_params->GetOpticalOffset(); + initial_state_parameters.distortion = params.camera_params->GetDistortion(); + ca::StateParameters calibrated_state_parameters; + ca::StateParametersCovariances covariances; + const bool success = calibrator.EstimateInitialTargetPosesAndCalibrate(target_matches, initial_state_parameters, + calibrated_state_parameters, covariances); + if (!success) { + LogError("Calibrate: Calibration failed."); + return false; + } + if (params.camera_target_based_intrinsics_calibrator.calibrate_focal_lengths) { + LogInfo("initial focal lengths: " << std::endl << initial_state_parameters.focal_lengths.matrix()); + LogInfo("calibrated focal lengths: " << std::endl << calibrated_state_parameters.focal_lengths.matrix()); + LogInfo("calibrated focal length covariance: " << std::endl << covariances.focal_lengths.matrix()); + } + if (params.camera_target_based_intrinsics_calibrator.calibrate_principal_points) { + LogInfo("initial principal points: " << std::endl << 
initial_state_parameters.principal_points.matrix()); + LogInfo("calibrated principal points: " << std::endl << calibrated_state_parameters.principal_points.matrix()); + LogInfo("calibrated principal points covariance: " << std::endl << covariances.principal_points.matrix()); + } + if (params.camera_target_based_intrinsics_calibrator.calibrate_distortion) { + LogInfo("initial distortion: " << std::endl << initial_state_parameters.distortion.matrix()); + LogInfo("calibrated distortion: " << std::endl << calibrated_state_parameters.distortion.matrix()); + LogInfo("calibrated distortion covariance: " << std::endl << covariances.distortion.matrix()); + } + WriteCalibrationResultsToFile(calibrated_state_parameters.focal_lengths, calibrated_state_parameters.principal_points, + calibrated_state_parameters.distortion, output_file); + return true; +} + +int main(int argc, char** argv) { + std::string robot_config_file; + std::string world; + std::string output_file; + po::options_description desc("Calibrates camera intrinsics using target detections."); + desc.add_options()("help", "produce help message")("corners-directory", po::value()->required(), + "Directory containing target detections to use for calibration.")( + "config-path,c", po::value()->required(), "Config path")( + "robot-config-file,r", po::value(&robot_config_file)->default_value("config/robots/bumble.config"), + "Robot config file")("world,w", po::value(&world)->default_value("iss"), "World name")( + "output-file,o", po::value(&output_file)->default_value("calibrated_params.txt"), + "File where calibrated parameters are saved."); + po::positional_options_description p; + p.add("corners-directory", 1); + p.add("config-path", 1); + po::variables_map vm; + try { + po::store(po::command_line_parser(argc, argv).options(desc).positional(p).run(), vm); + po::notify(vm); + } catch (std::exception& e) { + std::cerr << "Error: " << e.what() << "\n"; + return 1; + } + + if (vm.count("help")) { + std::cout << desc << "\n"; + return 1; + } + + const std::string corners_directory = vm["corners-directory"].as(); + const std::string config_path = vm["config-path"].as(); + + // Only pass program name to free flyer so that boost command line options + // are ignored when parsing gflags. 
+ int ff_argc = 1; + ff_common::InitFreeFlyerApplication(&ff_argc, &argv); + + lc::SetEnvironmentConfigs(config_path, world, robot_config_file); + config_reader::ConfigReader config; + config.AddFile("geometry.config"); + config.AddFile("cameras.config"); + config.AddFile("tools/camera_target_based_intrinsics_calibrator.config"); + if (!config.ReadFiles()) { + LogFatal("Failed to read config files."); + } + ca::RunCalibratorParams params; + LoadRunCalibratorParams(config, params); + + std::vector target_matches; + if (!boost::filesystem::is_directory(corners_directory)) { + LogFatal("Corners directory " << corners_directory << " not found."); + } + LogInfo("Calibrating " << robot_config_file << ", camera: " << params.camera_name); + target_matches = + LoadAllTargetMatches(corners_directory, params.camera_target_based_intrinsics_calibrator.max_num_match_sets); + LogInfo("Number of target match sets: " << target_matches.size()); + if (params.distortion_type == "fov") { + Calibrate(params, target_matches, output_file); + } else if (params.distortion_type == "rad") { + Calibrate(params, target_matches, output_file); + } else if (params.distortion_type == "radtan") { + Calibrate(params, target_matches, output_file); + } else { + LogFatal("Invalid distortion type provided."); + } +} diff --git a/tools/ekf_bag/CMakeLists.txt b/tools/ekf_bag/CMakeLists.txt index 4b22c4f78a..3840dfe592 100644 --- a/tools/ekf_bag/CMakeLists.txt +++ b/tools/ekf_bag/CMakeLists.txt @@ -15,18 +15,89 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(ekf_bag) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + config_reader + camera + ekf + lk_optical_flow + localization_node +) + + +# Find OpenCV +SET(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/../../cmake") +find_package(OpenCV331 REQUIRED) + catkin_package( - CATKIN_DEPENDS roscpp + INCLUDE_DIRS include + LIBRARIES ${PROJECT_NAME} + CATKIN_DEPENDS roscpp config_reader camera ekf lk_optical_flow localization_node +) + +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} ) -create_library(TARGET ekf_bag - LIBS ekf lk_optical_flow localization_node sparse_mapping interest_point camera ff_common config_reader ${OpenCV_LIBRARIES} ${catkin_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} +# Declare C++ libraries +add_library(ekf_bag + src/ekf_bag.cc + src/ekf_bag_csv.cc + src/tracked_features.cc ) +add_dependencies(ekf_bag ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ekf_bag ${catkin_LIBRARIES} ${OpenCV_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(bag_to_csv tools/bag_to_csv.cc) +add_dependencies(bag_to_csv ${catkin_EXPORTED_TARGETS}) +target_link_libraries(bag_to_csv + ekf_bag gflags glog ${catkin_LIBRARIES}) + +## Declare a C++ executable: bag_to_csv +add_executable(sparse_map_bag tools/sparse_map_bag.cc) +add_dependencies(sparse_map_bag ${catkin_EXPORTED_TARGETS}) +target_link_libraries(sparse_map_bag + ekf_bag gflags glog ${catkin_LIBRARIES}) + +############# +## Install ## +############# -create_tool_targets(DIR tools - LIBS ekf_bag - INC ${catkin_INCLUDE_DIRS} +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + 
RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) + +# Mark cpp header files for installation +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE +) + +# Install C++ executables +install(TARGETS bag_to_csv DESTINATION bin) +install(TARGETS sparse_map_bag DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/bag_to_csv share/${PROJECT_NAME} + COMMAND ln -s ../../bin/sparse_map_bag share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") diff --git a/tools/ekf_bag/package.xml b/tools/ekf_bag/package.xml index 8b73eecd62..10ffc14d6c 100644 --- a/tools/ekf_bag/package.xml +++ b/tools/ekf_bag/package.xml @@ -16,6 +16,16 @@ catkin roscpp + config_reader + camera + ekf + lk_optical_flow + localization_node roscpp + config_reader + camera + ekf + lk_optical_flow + localization_node diff --git a/tools/ekf_video/CMakeLists.txt b/tools/ekf_video/CMakeLists.txt index 19f9793bbb..58fbf025ca 100644 --- a/tools/ekf_video/CMakeLists.txt +++ b/tools/ekf_video/CMakeLists.txt @@ -15,24 +15,51 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(ekf_video) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +## Find catkin macros and libraries +find_package(catkin2 REQUIRED COMPONENTS + roscpp + ekf_bag +) + +# System dependencies are found with CMake's conventions +find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Gui) +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../cmake") +find_package(FFMPEG REQUIRED ) + catkin_package( - CATKIN_DEPENDS roscpp + CATKIN_DEPENDS roscpp ekf_bag ) -# copied this from qp planner -find_package(catkin COMPONENTS rviz QUIET) -if(rviz_QT_VERSION) - find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Gui) - - create_library(TARGET ekf_video - LIBS ekf_bag ${FFMPEG_LIBRARIES} ${catkin_LIBRARIES} ${Qt5Gui_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} ${FFMPEG_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} - ) - - create_tool_targets(DIR tools - LIBS ekf_video ${Qt5Gui_LIBRARIES} - INC ${catkin_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} - ) -endif(rviz_QT_VERSION) +########### +## Build ## +########### + +# Specify additional locations of header files +include_directories( + include + ${catkin_INCLUDE_DIRS} + ${Qt5Gui_INCLUDE_DIRS} + ${FFMPEG_INCLUDE_DIRS} +) + +# Declare C++ libraries +add_library(ekf_video + src/ekf_bag_video.cc + src/video_writer.cc +) +add_dependencies(ekf_video ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ekf_video ${catkin_LIBRARIES} ${Qt5Gui_LIBRARIES} ${FFMPEG_LIBRARIES} +) + + +## Declare a C++ executable: inspection_tool +add_executable(ekf_to_video tools/ekf_to_video.cc) +add_dependencies(ekf_to_video ${catkin_EXPORTED_TARGETS}) +target_link_libraries(ekf_to_video + ekf_video gflags glog ${catkin_LIBRARIES}) diff --git a/tools/ekf_video/package.xml b/tools/ekf_video/package.xml index d9fef16565..9cf98e0fee 100644 --- a/tools/ekf_video/package.xml +++ b/tools/ekf_video/package.xml @@ -16,6 +16,8 @@ catkin roscpp + ekf_bag roscpp + ekf_bag diff --git a/tools/faults_during_bag.py b/tools/faults_during_bag.py deleted file mode 100644 index 0bcba6147d..0000000000 --- a/tools/faults_during_bag.py +++ /dev/null @@ -1,53 +0,0 @@ -import sys - -import rosbag -import rospy -from ff_msgs.msg import Fault, FaultState - - -def process(msg, start): - for f in msg.faults: - # Fault 21 is 
perching arm node missing --> ignore - if f.id != 21: - elapsed = f.time_of_fault - start - print( - ( - "secs_from_start=%d fault_id=%d timestamp=%d.%09d" - % ( - int(elapsed.secs), - f.id, - f.time_of_fault.secs, - f.time_of_fault.nsecs, - ) - ) - ) - - -if len(sys.argv) < 3: - print("Usage: faults_during_bag.py short_bag_for_period ars_default_bag") - exit(1) - -short_bag_fn = sys.argv[1] -default_bag_fn = sys.argv[2] - -print(("reading time bounds of %s" % short_bag_fn)) -short_bag = rosbag.Bag(short_bag_fn) -start_ts = short_bag.get_start_time() -end_ts = short_bag.get_end_time() -short_bag.close() - -print( - ( - "will filter events of %s starting at %f to %f" - % (default_bag_fn, start_ts, end_ts) - ) -) - -default_bag = rosbag.Bag(default_bag_fn) - -for topic, msg, time in default_bag.read_messages( - topics=["/mgt/sys_monitor/state"], - start_time=rospy.Time(start_ts), - end_time=rospy.Time(end_ts), -): - process(msg, rospy.Time(start_ts)) diff --git a/tools/gds_helper/CMakeLists.txt b/tools/gds_helper/CMakeLists.txt index 3112012f10..323192567e 100644 --- a/tools/gds_helper/CMakeLists.txt +++ b/tools/gds_helper/CMakeLists.txt @@ -1,8 +1,8 @@ -cmake_minimum_required(VERSION 2.8.3) +cmake_minimum_required(VERSION 3.0) project(gds_helper) -## Compile as C++11, supported in ROS Kinetic and newer -# add_compile_options(-std=c++11) +# Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) ## Find catkin macros and libraries ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) diff --git a/tools/gnc_visualizer/CMakeLists.txt b/tools/gnc_visualizer/CMakeLists.txt index 22fa4fbed0..98b427c09d 100644 --- a/tools/gnc_visualizer/CMakeLists.txt +++ b/tools/gnc_visualizer/CMakeLists.txt @@ -15,10 +15,20 @@ # License for the specific language governing permissions and limitations # under the License. +cmake_minimum_required(VERSION 3.0) project(gnc_visualizer) +## Compile as C++14, supported in ROS Kinetic and newer +add_compile_options(-std=c++14) + +find_package(catkin REQUIRED COMPONENTS + rospy + std_msgs + ff_msgs +) + catkin_package( LIBRARIES - DEPENDS rospy std_msgs vpp_msgs ff_msgs geometry_msgs + DEPENDS rospy std_msgs ff_msgs ) diff --git a/tools/gnc_visualizer/package.xml b/tools/gnc_visualizer/package.xml index 55381b26dc..087a6645de 100644 --- a/tools/gnc_visualizer/package.xml +++ b/tools/gnc_visualizer/package.xml @@ -15,11 +15,9 @@ catkin rospy - vpp_msgs ff_msgs std_msgs rospy - vpp_msgs ff_msgs std_msgs diff --git a/tools/graph_bag/CMakeLists.txt b/tools/graph_bag/CMakeLists.txt index c4982c61b3..827eb7bb1e 100644 --- a/tools/graph_bag/CMakeLists.txt +++ b/tools/graph_bag/CMakeLists.txt @@ -15,25 +15,110 @@ #License for the specific language governing permissions and limitations #under the License. 
-cmake_minimum_required(VERSION 2.8.3) - +cmake_minimum_required(VERSION 3.0) project(graph_bag) if (USE_ROS) -catkin_package( - CATKIN_DEPENDS roscpp rosbag -) + ## Compile as C++14, supported in ROS Kinetic and newer + add_compile_options(-std=c++14) -# include ff_nodelet to get ff_util header files since these aren't exposed elsewhere -create_library(TARGET ${PROJECT_NAME} - LIBS ff_common ${OpenCV_LIBRARIES} ${catkin_LIBRARIES} camera ff_nodelet graph_localizer imu_augmentor imu_bias_tester imu_integration lk_optical_flow localization_common localization_measurements localization_node sparse_mapping msg_conversions - INC ${catkin_INCLUDE_DIRS} -) + ## Find catkin macros and libraries + find_package(catkin2 REQUIRED COMPONENTS + roscpp + rosbag + ff_util + ff_msgs + ff_common + graph_localizer + camera + lk_optical_flow + imu_bias_tester + localization_node + imu_augmentor + ) + + # Find GTSAM + find_package(GTSAM REQUIRED) + + catkin_package( + LIBRARIES ${PROJECT_NAME} + INCLUDE_DIRS include + CATKIN_DEPENDS roscpp rosbag ff_util ff_msgs ff_common graph_localizer camera lk_optical_flow imu_bias_tester localization_node imu_augmentor + ) + + ########### + ## Build ## + ########### + # Specify additional locations of header files + include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ) + + # Declare C++ libraries + add_library(${PROJECT_NAME} + src/bag_imu_filterer.cc + src/graph_bag.cc + src/graph_localizer_simulator.cc + src/imu_bias_tester_adder.cc + src/live_measurement_simulator.cc + src/parameter_reader.cc + src/sparse_mapping_pose_adder.cc + src/utilities.cc + ) + add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) -create_tool_targets(DIR tools - LIBS ${PROJECT_NAME} # profiler - INC ${catkin_INCLUDE_DIRS} + ## Declare a C++ executable: run_bag_imu_filterer + add_executable(run_bag_imu_filterer tools/run_bag_imu_filterer.cc) + add_dependencies(run_bag_imu_filterer ${catkin_EXPORTED_TARGETS}) + target_link_libraries(run_bag_imu_filterer + ${PROJECT_NAME} gflags gtsam ${catkin_LIBRARIES}) + + ## Declare a C++ executable: run_graph_bag + add_executable(run_graph_bag tools/run_graph_bag.cc) + add_dependencies(run_graph_bag ${catkin_EXPORTED_TARGETS}) + target_link_libraries(run_graph_bag + ${PROJECT_NAME} gflags gtsam ${catkin_LIBRARIES}) + + ## Declare a C++ executable: run_imu_bias_tester_adder + add_executable(run_imu_bias_tester_adder tools/run_imu_bias_tester_adder.cc) + add_dependencies(run_imu_bias_tester_adder ${catkin_EXPORTED_TARGETS}) + target_link_libraries(run_imu_bias_tester_adder + ${PROJECT_NAME} gflags gtsam ${catkin_LIBRARIES}) + + ## Declare a C++ executable: run_sparse_mapping_pose_adder + add_executable(run_sparse_mapping_pose_adder tools/run_sparse_mapping_pose_adder.cc) + add_dependencies(run_sparse_mapping_pose_adder ${catkin_EXPORTED_TARGETS}) + target_link_libraries(run_sparse_mapping_pose_adder + ${PROJECT_NAME} gflags gtsam ${catkin_LIBRARIES}) + +############# +## Install ## +############# + +# Mark libraries for installation +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) +# Install C++ executables +install(TARGETS run_bag_imu_filterer DESTINATION bin) +install(TARGETS run_graph_bag DESTINATION bin) +install(TARGETS run_imu_bias_tester_adder DESTINATION bin) +install(TARGETS run_sparse_mapping_pose_adder 
DESTINATION bin) +install(CODE "execute_process( + COMMAND ln -s ../../bin/run_bag_imu_filterer share/${PROJECT_NAME} + COMMAND ln -s ../../bin/run_graph_bag share/${PROJECT_NAME} + COMMAND ln -s ../../bin/run_imu_bias_tester_adder share/${PROJECT_NAME} + COMMAND ln -s ../../bin/run_sparse_mapping_pose_adder share/${PROJECT_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} + OUTPUT_QUIET + ERROR_QUIET + )") + endif (USE_ROS) diff --git a/tools/graph_bag/package.xml b/tools/graph_bag/package.xml index bf259d87db..0ca1691995 100644 --- a/tools/graph_bag/package.xml +++ b/tools/graph_bag/package.xml @@ -17,13 +17,26 @@ eigen_conversions roscpp rosbag - graph_localizer ff_util - tf + ff_msgs + ff_common + graph_localizer + camera + lk_optical_flow + imu_bias_tester + localization_node + imu_augmentor eigen_conversions graph_localizer roscpp rosbag ff_util - tf + ff_msgs + ff_common + graph_localizer + camera + lk_optical_flow + imu_bias_tester + localization_node + imu_augmentor diff --git a/tools/graph_bag/readme.md b/tools/graph_bag/readme.md index 50380f251a..de2df0beab 100644 --- a/tools/graph_bag/readme.md +++ b/tools/graph_bag/readme.md @@ -31,3 +31,23 @@ The imu analyzer tool plots imu data and also filtered imu data. It can use pyt ## merge\_bags Merge bags looks for bag files in the current directory with a provided name prefix and merges these into a single bag file. Bags are assumed to be numbered and are merged in numerical order. + +## run\_graph\_bag\_and\_plot\_results +Generates localization results for a provided bagfile. +See 'rosrun graph\_bag run\_graph\_bag\_and\_plot\_results.py -h' +for further details and usage instructions. + +## make\_groundtruth.py +Generates groundtruth map and bagfile for a given bagfile. +See 'rosrun graph\_bag make\_groundtruth.py -h' +for further details and usage instructions. + +## make\_groundtruth\_map.py +Generates groundtruth map for a given bagfile. +See 'rosrun graph\_bag make\_groundtruth.py -h' +for further details and usage instructions. + +## groundtruth\_sweep.py +Creates groundtruth in parallel for a set of bagfiles. +See 'rosrun graph\_bag groundtruth\_sweep.py -h' +for further details and usage instructions. diff --git a/tools/graph_bag/scripts/groundtruth_sweep.py b/tools/graph_bag/scripts/groundtruth_sweep.py new file mode 100755 index 0000000000..4fc9763d4a --- /dev/null +++ b/tools/graph_bag/scripts/groundtruth_sweep.py @@ -0,0 +1,166 @@ +#!/usr/bin/python +# +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates groundtruth for bagfiles in parallel using a provided config script. +See make_groundtruth.py for more details on the groundtruth map, bagfile, and +pdfs created in this process. 
+""" + +import argparse +import csv +import itertools +import multiprocessing +import os +import sys + +import multiprocessing_helpers +import utilities + + +class GroundtruthParams(object): + def __init__( + self, + bagfile, + base_surf_map, + maps_directory, + loc_map, + config_path, + world, + image_topic, + robot_name, + use_image_features, + ): + self.bagfile = bagfile + self.base_surf_map = base_surf_map + self.maps_directory = maps_directory + self.loc_map = loc_map + self.config_path = config_path + self.world = world + self.image_topic = image_topic + self.robot_name = robot_name + self.use_image_features = use_image_features + + +def load_params(param_file): + groundtruth_params_list = [] + with open(param_file) as param_csvfile: + reader = csv.reader(param_csvfile, delimiter=" ") + for row in reader: + groundtruth_params_list.append( + GroundtruthParams( + row[0], + row[1], + row[2], + row[3], + row[4], + row[5], + row[6], + row[7], + row[8], + ) + ) + + return groundtruth_params_list + + +def check_params(groundtruth_params_list): + for params in groundtruth_params_list: + if not os.path.isfile(params.bagfile): + print(("Bagfile " + params.bagfile + " does not exist.")) + sys.exit() + if not os.path.isfile(params.base_surf_map): + print(("Base surf map " + params.base_surf_map + " does not exist.")) + sys.exit() + if not os.path.isdir(params.maps_directory): + print(("Maps directory " + params.maps_directory + " does not exist.")) + sys.exit() + if not os.path.isfile(params.loc_map): + print(("Loc map " + params.loc_map + " does not exist.")) + sys.exit() + + +# Add traceback so errors are forwarded, otherwise +# some errors are suppressed due to the multiprocessing +# library call +@multiprocessing_helpers.full_traceback +def run_groundtruth(params): + output_directory = utilities.basename(params.bagfile) + "_groundtruth" + groundtruth_command = ( + "rosrun graph_bag make_groundtruth.py " + + params.bagfile + + " " + + params.base_surf_map + + " " + + params.maps_directory + + " " + + params.loc_map + + " " + + params.config_path + + " -o " + + output_directory + + " -w " + + params.world + + " -i " + + params.image_topic + + " -r " + + params.robot_name + ) + if not bool(params.use_image_features): + groundtruth_command += " --generate-image-features" + + output_file = utilities.basename(params.bagfile) + "_groundtruth.txt" + utilities.run_command_and_save_output(groundtruth_command, output_file) + + +def groundtruth_sweep(config_file, num_processes): + groundtruth_params_list = load_params(config_file) + check_params(groundtruth_params_list) + pool = multiprocessing.Pool(num_processes) + pool.map(run_groundtruth, groundtruth_params_list) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + "config_file", + help="Config file containing arguments for each job to run. Should be formatted with one job per line and using a single space between each argument. Arguments for a job in order are: bagfile base_surf_map maps_directory loc_map config_path world image_topic robot_name use_image_features. 
See make_groundtruth.py description for more details on each argument.", + ) + parser.add_argument("-o", "--output-directory", default="groundtruth_sweep") + parser.add_argument( + "-p", + "--num-processes", + type=int, + default=10, + help="Number of concurrent processes to run, where each groundtruth creation job is assigned to one process.", + ) + args = parser.parse_args() + if not os.path.isfile(args.config_file): + print(("Config file " + args.config_file + " does not exist.")) + sys.exit() + if os.path.isdir(args.output_directory): + print(("Output directory " + args.output_directory + " already exists.")) + sys.exit() + config_file = os.path.abspath(args.config_file) + + os.mkdir(args.output_directory) + os.chdir(args.output_directory) + + groundtruth_sweep(config_file, args.num_processes) diff --git a/tools/graph_bag/scripts/make_groundtruth.py b/tools/graph_bag/scripts/make_groundtruth.py new file mode 100755 index 0000000000..ced75861e8 --- /dev/null +++ b/tools/graph_bag/scripts/make_groundtruth.py @@ -0,0 +1,169 @@ +#!/usr/bin/python +# +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates the groundtruth map and groundtruth bagfile containing groundtruth +localization estimates for a given input bagfile. +Also tests the input bagfile against a provided localization map and plots the +results compared with the newly created groundtruth. +""" + +import argparse +import os +import shutil +import sys + +import make_groundtruth_map +import utilities + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("bagfile", help="Input bagfile to generate groundtruth for.") + parser.add_argument( + "base_surf_map", + help="Existing map to use as basis for groundtruth. Should largely overlap area covered in input bagfile.", + ) + parser.add_argument( + "maps_directory", + help="Location of images used for each bagfile use to generate base_surf_map.", + ) + parser.add_argument( + "loc_map", help="Localization map for bagfile to test localization performance." + ) + parser.add_argument("config_path", help="Full path to config path.") + parser.add_argument( + "-o", "--output-directory", default="groundtruth_creation_output" + ) + parser.add_argument("-w", "--world", default="iss") + parser.add_argument( + "-i", + "--image-topic", + default="/mgt/img_sampler/nav_cam/image_record", + help="Image topic.", + ) + parser.add_argument("-r", "--robot-name", default="bumble") + parser.add_argument( + "-m", + "--map-name", + default=None, + help="Prefix for generated map names. 
Defaults to bagfile name.", + ) + parser.add_argument( + "--generate-image-features", + dest="use_image_features", + action="store_false", + help="Use image features msgs from bagfile or generate features from images.", + ) + + args = parser.parse_args() + if not os.path.isfile(args.bagfile): + print("Bag file " + args.bagfile + " does not exist.") + sys.exit() + if not os.path.isfile(args.base_surf_map): + print("Base surf map " + args.base_surf_map + " does not exist.") + sys.exit() + if not os.path.isfile(args.loc_map): + print("Loc map " + args.loc_map + " does not exist.") + sys.exit() + if not os.path.isdir(args.maps_directory): + print("Maps directory " + args.maps_directory + " does not exist.") + sys.exit() + if os.path.isdir(args.output_directory): + print("Output directory " + args.output_directory + " already exists.") + sys.exit() + + bagfile = os.path.abspath(args.bagfile) + base_surf_map = os.path.abspath(args.base_surf_map) + maps_directory = os.path.abspath(args.maps_directory) + + os.mkdir(args.output_directory) + os.chdir(args.output_directory) + + map_name = args.map_name + bag_prefix = utilities.basename(bagfile) + if not args.map_name: + map_name = bag_prefix + "_groundtruth" + + make_groundtruth_map.create_groundtruth( + bagfile, base_surf_map, maps_directory, map_name, args.world, args.robot_name + ) + + robot_config = "config/robots/" + args.robot_name + ".config" + groundtruth_bag = map_name + ".bag" + groundtruth_map_file = map_name + ".brisk.vocabdb.map" + groundtruth_pdf = "groundtruth.pdf" + groundtruth_csv = "groundtruth.csv" + make_groundtruth_command = ( + "rosrun graph_bag run_graph_bag_and_plot_results.py " + + bagfile + + " " + + groundtruth_map_file + + " " + + args.config_path + + " -i " + + args.image_topic + + " -r " + + robot_config + + " -w " + + args.world + + " -o " + + groundtruth_bag + + " --output-file " + + groundtruth_pdf + + " --output-csv-file " + + groundtruth_csv + + " --generate-image-features" + ) + utilities.run_command_and_save_output( + make_groundtruth_command, "make_groundtruth.txt" + ) + os.rename("run_graph_bag_command.txt", "groundtruth_run_graph_bag_command.txt") + + loc_results_bag = bag_prefix + "_results.bag" + loc_pdf = "loc_results.pdf" + loc_csv = "loc_results.csv" + get_loc_results_command = ( + "rosrun graph_bag run_graph_bag_and_plot_results.py " + + bagfile + + " " + + args.loc_map + + " " + + args.config_path + + " -i " + + args.image_topic + + " -r " + + robot_config + + " -w " + + args.world + + " -o " + + loc_results_bag + + " --output-file " + + loc_pdf + + " --output-csv-file " + + loc_csv + + " -g " + + groundtruth_bag + ) + if not args.use_image_features: + get_loc_results_command += " --generate-image-features" + utilities.run_command_and_save_output( + get_loc_results_command, "get_loc_results.txt" + ) + os.rename("run_graph_bag_command.txt", "loc_run_graph_bag_command.txt") diff --git a/tools/graph_bag/scripts/make_groundtruth_map.py b/tools/graph_bag/scripts/make_groundtruth_map.py new file mode 100755 index 0000000000..f3b6998c88 --- /dev/null +++ b/tools/graph_bag/scripts/make_groundtruth_map.py @@ -0,0 +1,175 @@ +#!/usr/bin/python +# +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Generates a groundtruth map for a given input bagfile. The groundtruth map +creation process merges images from the input bagfile with an existing map. +This is the first step for groundtruth creation, where once a groundtruth map +is created for a bagfile the bagfile can then be localized using the groundtruth +map to generate groundtruth poses. +""" + +import argparse +import os +import shutil +import sys + +import utilities + + +def create_groundtruth( + bagfile, base_surf_map, maps_directory, map_name, world, robot_name +): + gt_images_dir = "gt_images_" + utilities.basename(bagfile) + os.mkdir(gt_images_dir) + gt_images = os.path.abspath(gt_images_dir) + extract_images_command = ( + "rosrun localization_node extract_image_bag " + + bagfile + + " -use_timestamp_as_image_name -image_topic /mgt/img_sampler/nav_cam/image_record -output_directory " + + gt_images + ) + utilities.run_command_and_save_output(extract_images_command, "extract_images.txt") + + all_gt_images = os.path.join(gt_images, "*.jpg") + select_images_command = ( + "rosrun sparse_mapping select_images -density_factor 1.4 " + all_gt_images + ) + utilities.run_command_and_save_output(select_images_command, "select_images.txt") + + # Set environment variables + home = os.path.expanduser("~") + robot_config_file = os.path.join("config/robots", robot_name + ".config") + astrobee_path = os.path.join(home, "astrobee/src/astrobee") + os.environ["ASTROBEE_RESOURCE_DIR"] = os.path.join(astrobee_path, "resources") + os.environ["ASTROBEE_CONFIG_DIR"] = os.path.join(astrobee_path, "config") + os.environ["ASTROBEE_ROBOT"] = os.path.join( + astrobee_path, "config/robots/bumble.config" + ) + os.environ["ASTROBEE_WORLD"] = world + + # Build groundtruth + groundtruth_map = map_name + ".map" + build_map_command = ( + "rosrun sparse_mapping build_map " + + all_gt_images + + " -output_map " + + groundtruth_map + + " -feature_detection -feature_matching -track_building -incremental_ba -bundle_adjustment -histogram_equalization -num_subsequent_images 100" + ) + utilities.run_command_and_save_output(build_map_command, "build_map.txt") + + # Merge with base map + groundtruth_surf_map = map_name + ".surf.map" + merge_map_command = ( + "rosrun sparse_mapping merge_maps " + + base_surf_map + + " " + + groundtruth_map + + " -output_map " + + groundtruth_surf_map + + " -num_image_overlaps_at_endpoints 100000000 -skip_bundle_adjustment" + ) + utilities.run_command_and_save_output(merge_map_command, "merge_map.txt") + + # Link maps directory since conversion to BRISK map needs + # image files to appear to be in correct relative path + os.symlink(maps_directory, "maps") + maps_gt_images = os.path.join("maps", gt_images_dir) + os.symlink(gt_images, maps_gt_images) + + # Convert SURF to BRISK map + # Get full path to output file to avoid permission errors when running + # command in maps directory + rebuild_output_file = os.path.join(os.getcwd(), "rebuild_map_as_brisk_map.txt") + groundtruth_brisk_map = map_name + ".brisk.map" + shutil.copyfile(groundtruth_surf_map, groundtruth_brisk_map) + groundtruth_brisk_map_full_path = 
os.path.abspath(groundtruth_brisk_map) + gt_path = os.getcwd() + os.chdir("maps") + rebuild_map_command = ( + "rosrun sparse_mapping build_map -rebuild -histogram_equalization -output_map " + + groundtruth_brisk_map_full_path + ) + utilities.run_command_and_save_output(rebuild_map_command, rebuild_output_file) + # Use gt_path since relative commands would now be wrt maps directory simlink + os.chdir(gt_path) + + # Create vocabdb + groundtruth_brisk_vocabdb_map = map_name + ".brisk.vocabdb.map" + shutil.copyfile(groundtruth_brisk_map, groundtruth_brisk_vocabdb_map) + add_vocabdb_command = ( + "rosrun sparse_mapping build_map -vocab_db -output_map " + + groundtruth_brisk_vocabdb_map + ) + utilities.run_command_and_save_output(add_vocabdb_command, "build_vocabdb.txt") + + # Remove simlinks + os.unlink(maps_gt_images) + os.unlink("maps") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("bagfile", help="Input bagfile to generate groundtruth for.") + parser.add_argument( + "base_surf_map", + help="Existing map to use as basis for groundtruth. Should largely overlap area covered in input bagfile.", + ) + parser.add_argument( + "maps_directory", + help="Location of images used for each bagfile use to generate base_surf_map.", + ) + parser.add_argument( + "-o", "--output-directory", default="groundtruth_creation_output" + ) + parser.add_argument("-w", "--world", default="iss") + parser.add_argument("-r", "--robot-name", default="bumble") + parser.add_argument("-m", "--map-name", default="groundtruth") + + args = parser.parse_args() + if not os.path.isfile(args.bagfile): + print("Bag file " + args.bagfile + " does not exist.") + sys.exit() + if not os.path.isfile(args.base_surf_map): + print("Base surf map " + args.base_surf_map + " does not exist.") + sys.exit() + if not os.path.isdir(args.maps_directory): + print("Maps directory " + args.maps_directory + " does not exist.") + sys.exit() + if os.path.isdir(args.output_directory): + print("Output directory " + args.output_directory + " already exists.") + sys.exit() + + bagfile = os.path.abspath(args.bagfile) + base_surf_map = os.path.abspath(args.base_surf_map) + maps_directory = os.path.abspath(args.maps_directory) + + os.mkdir(args.output_directory) + os.chdir(args.output_directory) + + create_groundtruth( + bagfile, + base_surf_map, + maps_directory, + args.map_name, + args.world, + args.robot_name, + ) diff --git a/tools/graph_bag/scripts/run_graph_bag_and_plot_results.py b/tools/graph_bag/scripts/run_graph_bag_and_plot_results.py new file mode 100755 index 0000000000..808ffbe8b1 --- /dev/null +++ b/tools/graph_bag/scripts/run_graph_bag_and_plot_results.py @@ -0,0 +1,122 @@ +#!/usr/bin/python +# +# Copyright (c) 2017, United States Government, as represented by the +# Administrator of the National Aeronautics and Space Administration. +# +# All rights reserved. +# +# The Astrobee platform is licensed under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with the +# License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Runs localization for a bagfile using the provided map. +Saves results to an output bagfile and generates a pdf +file with detailed results. +""" + +import argparse +import os +import sys + +import utilities + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("bagfile", help="Input bagfile.") + parser.add_argument("map_file", help="Map file.") + parser.add_argument("config_path", help="Full path to config path.") + parser.add_argument( + "-r", + "--robot-config", + default="config/robots/bumble.config", + help="Relative path to robot config.", + ) + parser.add_argument( + "-i", + "--image-topic", + default="/mgt/img_sampler/nav_cam/image_record", + help="Image topic.", + ) + parser.add_argument( + "-o", "--output-bagfile", default="results.bag", help="Output bagfile." + ) + parser.add_argument("-w", "--world", default="iss") + parser.add_argument( + "--generate-image-features", + dest="use_image_features", + action="store_false", + help="Use image features msgs from bagfile or generate features from images.", + ) + parser.add_argument("--output-file", default="output.pdf") + parser.add_argument("--output-csv-file", default="results.csv") + parser.add_argument("-g", "--groundtruth-bagfile", default=None) + parser.add_argument( + "--directory", + default=None, + help="Full path to output directory where files will be saved. If not specified, timestamped directory will be created in current path.", + ) + args = parser.parse_args() + if not os.path.isfile(args.bagfile): + print("Bag file " + args.bagfile + " does not exist.") + sys.exit() + if not os.path.isfile(args.map_file): + print("Map file " + args.map_file + " does not exist.") + sys.exit() + if os.path.isfile(args.output_file): + print("Output file " + args.output_file + " already exist.") + sys.exit() + if os.path.isfile(args.output_csv_file): + print("Output csv file " + args.output_csv_file + " already exist.") + sys.exit() + if args.groundtruth_bagfile and not os.path.isfile(args.groundtruth_bagfile): + print("Groundtruth bag " + args.groundtruth_bagfile + " does not exist.") + sys.exit() + + bagfile = os.path.abspath(args.bagfile) + map_file = os.path.abspath(args.map_file) + + # Run localizer + run_graph_bag_command = ( + "rosrun graph_bag run_graph_bag " + + bagfile + + " " + + map_file + + " " + + args.config_path + + " -r " + + args.robot_config + + " -w " + + args.world + + " --use-image-features " + + (str(args.use_image_features)).lower() + + " -o " + + args.output_bagfile + ) + utilities.run_command_and_save_output( + run_graph_bag_command, "run_graph_bag_command.txt" + ) + + # Plot results + plot_results_command = ( + "rosrun graph_bag plot_results_main.py " + + args.output_bagfile + + " --output-file " + + args.output_file + + " --output-csv-file " + + args.output_csv_file + ) + if args.groundtruth_bagfile: + plot_results_command += " -g " + args.groundtruth_bagfile + utilities.run_command_and_save_output( + plot_results_command, "plot_results_command.txt" + ) diff --git a/tools/graph_bag/scripts/utilities.py b/tools/graph_bag/scripts/utilities.py index 66d4f32e8e..47b15d56dc 100644 --- a/tools/graph_bag/scripts/utilities.py +++ b/tools/graph_bag/scripts/utilities.py @@ -18,6 +18,7 @@ import datetime import glob import os +import subprocess import numpy as np import pandas as pd @@ -75,6 +76,18 @@ 
def load_dataframe(files): return dataframe +def run_command_and_save_output(command, output_filename, print_command=True): + if print_command: + print(command) + with open(output_filename, "w") as output_file: + subprocess.call(command, shell=True, stdout=output_file, stderr=output_file) + + +def basename(filename): + return os.path.splitext(os.path.basename(filename))[0] + + +# TODO(rsoussan): Move these to different utilities file def get_topic_rates( bag_name, topic, diff --git a/tools/imu_bias_tester/CMakeLists.txt b/tools/imu_bias_tester/CMakeLists.txt index 23e44b5bca..1dab1ca189 100644 --- a/tools/imu_bias_tester/CMakeLists.txt +++ b/tools/imu_bias_tester/CMakeLists.txt @@ -15,23 +15,78 @@ #License for the specific language governing permissions and limitations #under the License. +cmake_minimum_required(VERSION 3.0) project(imu_bias_tester) if (USE_ROS) + ## Compile as C++14, supported in ROS Kinetic and newer + add_compile_options(-std=c++14) -catkin_package( - LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} ff_nodelet imu_integration localization_common localization_measurements - INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} - CATKIN_DEPENDS - DEPENDS gtsam -) + ## Find catkin macros and libraries + find_package(catkin2 REQUIRED COMPONENTS + roscpp + nodelet + ff_util + ff_msgs + config_reader + imu_integration + ) -create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ff_nodelet imu_integration localization_common localization_measurements - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} - DEPS gtsam ff_msgs -) + catkin_package( + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} + INCLUDE_DIRS include ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} + CATKIN_DEPENDS roscpp ff_util ff_msgs config_reader imu_integration + ) + + ########### + ## Build ## + ########### + # Specify additional locations of header files + include_directories( + include + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIRS} + ) -install_launch_files() + # Declare C++ libraries + add_library(${PROJECT_NAME} + src/imu_bias_tester.cc + src/imu_bias_tester_nodelet.cc + src/imu_bias_tester_wrapper.cc + ) + add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES}) + + ############# + ## Install ## + ############# + + # Mark libraries for installation + install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} + ) + + # Mark nodelet_plugin for installation + install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} + ) + + # Mark cpp header files for installation + install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.h" + PATTERN ".svn" EXCLUDE + ) + + # Mark launch files for installation + install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch + PATTERN ".svn" EXCLUDE) + +else (USE_ROS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() endif (USE_ROS) diff --git a/tools/imu_bias_tester/package.xml b/tools/imu_bias_tester/package.xml index 6a7d64ec0b..d12a9ccdd6 100644 --- a/tools/imu_bias_tester/package.xml +++ b/tools/imu_bias_tester/package.xml @@ -15,12 +15,18 @@ catkin catkin - ff_msgs - nodelet roscpp - ff_msgs - nodelet + nodelet + ff_util + ff_msgs + config_reader + imu_integration roscpp + nodelet + 
ff_util + ff_msgs + config_reader + imu_integration diff --git a/tools/localization_rviz_plugins/CMakeLists.txt b/tools/localization_rviz_plugins/CMakeLists.txt index de1d5dec8e..ab41a3037d 100644 --- a/tools/localization_rviz_plugins/CMakeLists.txt +++ b/tools/localization_rviz_plugins/CMakeLists.txt @@ -15,26 +15,60 @@ #License for the specific language governing permissions and limitations #under the License. +cmake_minimum_required(VERSION 3.0) project(localization_rviz_plugins) if (USE_ROS AND BUILD_LOC_RVIZ_PLUGINS) - # For Qt - set(CMAKE_AUTOMOC ON) - set(QT_LIBRARIES Qt5::Widgets) + ## Compile as C++14, supported in ROS Kinetic and newer + add_compile_options(-std=c++14) + + + ## Find catkin macros and libraries + find_package(catkin REQUIRED COMPONENTS + roscpp + localization_common + ff_util + graph_localizer + graph_bag + rviz + ) + + # For Qt + set(CMAKE_AUTOMOC ON) + find_package(Qt5 ${rviz_QT_VERSION} REQUIRED Core Widgets) + set(QT_LIBRARIES Qt5::Widgets) catkin_package( - LIBRARIES ${PROJECT_NAME} ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} ${QT_LIBRARIES} camera config_reader graph_bag graph_localizer imu_integration localization_common localization_measurements - INCLUDE_DIRS ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} ${Qt5Core_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} - CATKIN_DEPENDS rviz - DEPENDS gtsam + LIBRARIES ${PROJECT_NAME} ${GLOG_LIBRARIES} ${GTSAM_LIBRARIES} + INCLUDE_DIRS ${GTSAM_INCLUDE_DIR} ${GLOG_INCLUDE_DIRS} + CATKIN_DEPENDS roscpp localization_common ff_util graph_localizer graph_bag rviz ) - create_library(TARGET ${PROJECT_NAME} - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ${QT_LIBRARIES} camera config_reader graph_bag graph_localizer imu_integration localization_common localization_measurements - INC ${catkin_INCLUDE_DIRS} ${GLOG_INCLUDE_DIRS} ${Qt5Core_INCLUDE_DIRS} ${Qt5Gui_INCLUDE_DIRS} - ADD_SRCS src/localization_graph_display.cc src/localization_graph_panel.cc src/pose_display.cc src/utilities.cc src/imu_augmentor_display.cc src/sparse_mapping_display.cc ${MOC_FILES} - DEPS gtsam rviz - ) + ########### + ## Build ## + ########### + # Specify additional locations of header files + include_directories( + include + ${catkin_INCLUDE_DIRS} + ${Qt5Core_INCLUDE_DIRS} + ${Qt5Gui_INCLUDE_DIRS} + ) + + # Declare C++ libraries + add_library(${PROJECT_NAME} + src/localization_graph_display.cc + src/localization_graph_panel.cc + src/pose_display.cc + src/utilities.cc + src/imu_augmentor_display.cc + src/sparse_mapping_display.cc + ) + add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + target_link_libraries(${PROJECT_NAME} ${catkin_LIBRARIES} ${QT_LIBRARIES}) +else (USE_ROS AND BUILD_LOC_RVIZ_PLUGINS) + find_package(catkin REQUIRED COMPONENTS) + catkin_package() endif (USE_ROS AND BUILD_LOC_RVIZ_PLUGINS) diff --git a/tools/localization_rviz_plugins/package.xml b/tools/localization_rviz_plugins/package.xml index 29af693ce5..0850cb201d 100644 --- a/tools/localization_rviz_plugins/package.xml +++ b/tools/localization_rviz_plugins/package.xml @@ -16,6 +16,10 @@ catkin roscpp rviz + localization_common + ff_util + graph_localizer + graph_bag diff --git a/tools/rviz_visualizer/CMakeLists.txt b/tools/rviz_visualizer/CMakeLists.txt deleted file mode 100644 index 0de925fdf4..0000000000 --- a/tools/rviz_visualizer/CMakeLists.txt +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. 
-# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(rviz_visualizer) - -catkin_package( - CATKIN_DEPENDS roscpp visualization_msgs -) - -create_tool_targets(DIR tools - LIBS ${catkin_LIBRARIES} msg_conversions ff_common config_reader - INC ${catkin_INCLUDE_DIRS} -) - -install_launch_files() \ No newline at end of file diff --git a/tools/rviz_visualizer/camera_info/overhead.yaml b/tools/rviz_visualizer/camera_info/overhead.yaml deleted file mode 100644 index 959cde16b2..0000000000 --- a/tools/rviz_visualizer/camera_info/overhead.yaml +++ /dev/null @@ -1,20 +0,0 @@ -image_width: 1280 -image_height: 1024 -camera_name: overhead -camera_matrix: - rows: 3 - cols: 3 - data: [ 7.6946497855829443e+02, 0, 6.3522428948586071e+02, 0, 7.6875198858016370e+02, 4.9172890181774102e+02, 0, 0, 1] -distortion_model: plumb_bob -distortion_coefficients: - rows: 1 - cols: 4 - data: [-3.2402610654612035e-01, 9.4629740227230974e-02, 3.8611413028713553e-04, 2.0158706205913819e-04] -rectification_matrix: - rows: 3 - cols: 3 - data: [1, 0, 0, 0, 1, 0, 0, 0, 1] -projection_matrix: - rows: 3 - cols: 4 - data: [7.6946497855829443e+02, 0, 6.3522428948586071e+02,0, 0, 7.6875198858016370e+02, 4.9172890181774102e+02,0, 0, 0, 1, 0] \ No newline at end of file diff --git a/tools/rviz_visualizer/launch/astrobee_visualizer.launch b/tools/rviz_visualizer/launch/astrobee_visualizer.launch deleted file mode 100644 index ce25f7e9ca..0000000000 --- a/tools/rviz_visualizer/launch/astrobee_visualizer.launch +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/rviz_visualizer/launch/cam.launch b/tools/rviz_visualizer/launch/cam.launch deleted file mode 100644 index db6437a783..0000000000 --- a/tools/rviz_visualizer/launch/cam.launch +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/rviz_visualizer/launch/overhead_info.launch b/tools/rviz_visualizer/launch/overhead_info.launch deleted file mode 100644 index c25dcdddd6..0000000000 --- a/tools/rviz_visualizer/launch/overhead_info.launch +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tools/rviz_visualizer/launch/post_process_overhead.launch b/tools/rviz_visualizer/launch/post_process_overhead.launch deleted file mode 100644 index 2467215636..0000000000 --- a/tools/rviz_visualizer/launch/post_process_overhead.launch +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tools/rviz_visualizer/launch/record_qp.launch b/tools/rviz_visualizer/launch/record_qp.launch deleted file mode 100644 index eb9e568ae0..0000000000 --- a/tools/rviz_visualizer/launch/record_qp.launch +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/tools/rviz_visualizer/model/astrobee_mesh.stl b/tools/rviz_visualizer/model/astrobee_mesh.stl deleted file mode 100644 index 4b6cd9c318..0000000000 Binary files 
a/tools/rviz_visualizer/model/astrobee_mesh.stl and /dev/null differ diff --git a/tools/rviz_visualizer/model/astrobee_model.urdf b/tools/rviz_visualizer/model/astrobee_model.urdf deleted file mode 100644 index 5af14da7e4..0000000000 --- a/tools/rviz_visualizer/model/astrobee_model.urdf +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/tools/rviz_visualizer/readme.md b/tools/rviz_visualizer/readme.md deleted file mode 100644 index 0ea24842a8..0000000000 --- a/tools/rviz_visualizer/readme.md +++ /dev/null @@ -1,39 +0,0 @@ -\page rvizvisualizer RViz Visualizer - -This helper node publishes various topics that aid -in visualization using rviz. It currently publishes -a model of the granite table and the rviz tf2 transform. - -## Getting RVIZ Overhead Cam Display -![RVIZ](../images/mobility/overhead_cropped.png) - -ROS can project the RVIZ display into the camera frame of the overhead camera and overlay it on the image in real time. - -### Prereqs - -You will need the Kumar Robotics version of the flea3 driver (https://github.com/KumarRobotics/flea3, https://github.com/KumarRobotics/camera_base). This needs to be built with the catkin build system. - - mkdir -p catkin_ws/src && cd catkin_ws/src - git clone https://github.com/KumarRobotics/flea3 && git clone https://github.com/KumarRobotics/camera_base - cd .. && source /opt/ros/kinetic/setup.bash && catkin init - catkin config -DCMAKE_BUILD_TYPE=Release - catkin build --jobs 12 - echo source `pwd`/devel/setup.bash --extend >> ~/.bashrc - -You will also need the jsk-visiualization plugins. (Don't try to build these from source, it's doable, but a mess) - - sudo apt-get install ros-kinetic-jsk-visualization - -### Setup - -The camera_info directory inside rviz_visualizer needs to be copied to ~/.ros -The rules.d/40-pgr.rules file needs to be copied to /etc/udev/rules.d/ and the user needs to be added to the plugdev group. 
- -### Launching - roslaunch rviz_visualizer cam.launch - -![RVIZ](../images/mobility/display2_marked_up.png) -After launching, add the CameraOverlay to the rviz window - -![RVIZ](../images/mobility/overhead_marked_up.png) -Select the camera topic and set alpha to 1 diff --git a/tools/rviz_visualizer/rules.d/40-pgr.rules b/tools/rviz_visualizer/rules.d/40-pgr.rules deleted file mode 100644 index a4da77b24c..0000000000 --- a/tools/rviz_visualizer/rules.d/40-pgr.rules +++ /dev/null @@ -1,30 +0,0 @@ -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2000", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2001", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2002", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2003", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2004", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="2005", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3000", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3001", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3004", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3005", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3006", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3007", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3008", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="300A", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="300B", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3100", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3101", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3102", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3103", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3104", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3105", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3106", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3107", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3108", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3109", MODE="0664", GROUP="plugdev" -ATTRS{idVendor}=="1e10", ATTRS{idProduct}=="3300", MODE="0664", GROUP="plugdev" -KERNEL=="raw1394", MODE="0664", GROUP="plugdev" -KERNEL=="video1394*", MODE="0664", GROUP="plugdev" -SUBSYSTEM=="firewire", GROUP="plugdev" -SUBSYSTEM=="usb", GROUP="plugdev" diff --git a/tools/rviz_visualizer/tools/overhead_info.cc b/tools/rviz_visualizer/tools/overhead_info.cc deleted file mode 100644 index d26263eeee..0000000000 --- a/tools/rviz_visualizer/tools/overhead_info.cc +++ /dev/null @@ -1,83 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -#include -#include -#include -#include - -static ros::Publisher camera_info_pub, camera_raw_pub; -static ros::Subscriber camera_sub; - -static sensor_msgs::CameraInfo caminfo; - -// hardcode this, was never ported to lua -void SetInfo() { - caminfo.K = {7.6946497855829443e+02, - 0, - 6.3522428948586071e+02, - 0, - 7.6875198858016370e+02, - 4.9172890181774102e+02, - 0, - 0, - 1}; - caminfo.P = {7.6946497855829443e+02, - 0, - 6.3522428948586071e+02, - 0, - 0, - 7.6875198858016370e+02, - 4.9172890181774102e+02, - 0, - 0, - 0, - 1, - 0}; - caminfo.D = {-3.2402610654612035e-01, 9.4629740227230974e-02, - 3.8611413028713553e-04, 2.0158706205913819e-04}; - caminfo.R = {1, 0, 0, 0, 1, 0, 0, 0, 1}; - caminfo.height = 960; - caminfo.width = 1280; - caminfo.distortion_model = "plumb_bob"; -} - -void imageCB(const sensor_msgs::Image::ConstPtr &img) { - caminfo.header = img->header; - caminfo.header.frame_id = "overhead"; - sensor_msgs::Image img2 = *img; - img2.header.frame_id = "overhead"; - camera_info_pub.publish(caminfo); - camera_raw_pub.publish(img2); -} - -int main(int argc, char **argv) { - ff_common::InitFreeFlyerApplication(&argc, &argv); - ros::init(argc, argv, "overhead_node"); - - ros::NodeHandle nh("~"); - // Load Camera Info - SetInfo(); - - camera_raw_pub = nh.advertise("/overhead/image_raw", 1); - camera_info_pub = - nh.advertise("/overhead/camera_info", 1); - camera_sub = nh.subscribe("/overhead/image", 1, &imageCB); - - ros::spin(); -} diff --git a/tools/rviz_visualizer/tools/visualization_node.cc b/tools/rviz_visualizer/tools/visualization_node.cc deleted file mode 100644 index 40f59c47a5..0000000000 --- a/tools/rviz_visualizer/tools/visualization_node.cc +++ /dev/null @@ -1,121 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -#include -#include -#include - -#include -#include -#include -#include -#include - -#include - -static ros::Publisher pub_vis; -visualization_msgs::MarkerArray marks; - -void PublishMarkers() { - for (unsigned int i = 0; i < marks.markers.size(); i++) - marks.markers[i].header.stamp = ros::Time(); - - pub_vis.publish(marks); -} - -void ReadParams(config_reader::ConfigReader *config) { - // Read config files into lua - if (!config->ReadFiles()) { - ROS_ERROR("Error loading rviz visualizer parameters."); - return; - } - - marks.markers.clear(); - auto boxes_table = std::make_shared(config, "boxes"); - int table_size = boxes_table->GetSize(); - // Go through boxes until there are no more, lua table index starts at 1 - for (int i = 1; i < (table_size + 1); ++i) { - visualization_msgs::Marker marker; - auto box = std::make_shared(boxes_table.get(), i); - auto color = std::make_shared(box.get(), "color"); - auto scale = std::make_shared(box.get(), "scale"); - auto pose = std::make_shared(box.get(), "pose"); - - marker.header.frame_id = "world"; - marker.ns = "table_visualization"; - marker.type = visualization_msgs::Marker::CUBE; - marker.action = visualization_msgs::Marker::ADD; - marker.id = i-1; - marker.frame_locked = true; - - if (!color->GetReal("r", &marker.color.r)) - marker.color.r = 1.0; - if (!color->GetReal("g", &marker.color.g)) - marker.color.g = 0.0; - if (!color->GetReal("b", &marker.color.b)) - marker.color.b = 0.0; - if (!color->GetReal("a", &marker.color.a)) - marker.color.a = 1.0; - - if (!msg_conversions::config_read_vector(scale.get(), &marker.scale)) { - marker.scale.x = 1.0; - marker.scale.y = 1.0; - marker.scale.z = 1.0; - } - - if (!msg_conversions::config_read_vector(pose.get(), &marker.pose.position)) { - marker.pose.position.x = 0.0; - marker.pose.position.y = 0.0; - marker.pose.position.z = 0.0; - } - - marker.pose.orientation.x = 0.0; - marker.pose.orientation.y = 0.0; - marker.pose.orientation.z = 0.0; - marker.pose.orientation.w = 1.0; - - marks.markers.push_back(marker); - } -} - -int main(int argc, char **argv) { - ff_common::InitFreeFlyerApplication(&argc, &argv); - ros::init(argc, argv, "visualization_node"); - - ros::NodeHandle nh("~"); - - // Initialize lua config reader - config_reader::ConfigReader config; - config.AddFile("tools/visualization_node.config"); - ReadParams(&config); - - ros::Timer config_timer = nh.createTimer(ros::Duration(1), [&config](ros::TimerEvent e) { - config.CheckFilesUpdated(std::bind(&ReadParams, &config));}, false, true); - - // Use a ltached topic to prevent unecessary chatter on the messaging backbone - pub_vis = nh.advertise("table", 1, true); - - // Publish the table markers - PublishMarkers(); - - // Keep spinning unti termination - ros::spin(); - - return 0; -} - diff --git a/tools/simulator/CMakeLists.txt b/tools/simulator/CMakeLists.txt deleted file mode 100644 index 7ac0dfaec1..0000000000 --- a/tools/simulator/CMakeLists.txt +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -add_subdirectory(arm_sim) -add_subdirectory(dock_sim) -add_subdirectory(eps_sim) diff --git a/tools/simulator/arm_sim/CMakeLists.txt b/tools/simulator/arm_sim/CMakeLists.txt deleted file mode 100644 index dfeaf58549..0000000000 --- a/tools/simulator/arm_sim/CMakeLists.txt +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(arm_sim) - -catkin_package( - LIBRARIES arm_sim - DEPENDS roscpp ff_msgs nodelet -) - -create_library(TARGET arm_sim - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ff_nodelet - INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs -) - -install_launch_files() diff --git a/tools/simulator/arm_sim/include/arm_sim/arm_sim.h b/tools/simulator/arm_sim/include/arm_sim/arm_sim.h deleted file mode 100644 index 6df9e2769b..0000000000 --- a/tools/simulator/arm_sim/include/arm_sim/arm_sim.h +++ /dev/null @@ -1,82 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - - -#ifndef ARM_SIM_ARM_SIM_H_ -#define ARM_SIM_ARM_SIM_H_ - -#include - -#include -#include - -#include -#include - -#include -#include -#include - -#include - -#include -#include -#include -#include -#include -#include -#include - -#include - -#include -#include -#include -#include - -namespace arm_sim { - -class ArmSim : public nodelet::Nodelet { - public: - ArmSim(); - ~ArmSim(); - - void GoalCallback(ff_msgs::ArmGoalConstPtr const& goal); - - bool Pan(float angle); - - bool Tilt(float angle); - - virtual void onInit(); - - private: - std::shared_ptr> sas_arm_; - - ff_msgs::ArmStateStamped arm_state_; - ff_msgs::JointSampleStamped joint_sample_; - - ros::NodeHandle nh_; - - ros::Publisher arm_state_pub_, joint_sample_pub_; - - int pub_queue_size_; -}; - -} // namespace arm_sim - -#endif // ARM_SIM_ARM_SIM_H_ diff --git a/tools/simulator/arm_sim/nodelet_plugins.xml b/tools/simulator/arm_sim/nodelet_plugins.xml deleted file mode 100644 index d46d78985a..0000000000 --- a/tools/simulator/arm_sim/nodelet_plugins.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - Nodelet to simulate the arm." - - - diff --git a/tools/simulator/arm_sim/readme.md b/tools/simulator/arm_sim/readme.md deleted file mode 100644 index cad320c29c..0000000000 --- a/tools/simulator/arm_sim/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -\page arm_sim Arm Simulator - -This might be legacy code.. \ No newline at end of file diff --git a/tools/simulator/arm_sim/src/arm_sim.cc b/tools/simulator/arm_sim/src/arm_sim.cc deleted file mode 100644 index d752102457..0000000000 --- a/tools/simulator/arm_sim/src/arm_sim.cc +++ /dev/null @@ -1,209 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - - -#include - -namespace arm_sim { -ArmSim::ArmSim() : pub_queue_size_(10) { -} - -ArmSim::~ArmSim() { -} - -bool ArmSim::Pan(float angle) { - float diff = joint_sample_.samples[0].angle_pos - angle; - float step = 1 * M_PI/180; - int i, num_iter = std::abs(diff/step); - ff_msgs::ArmFeedback feedback; - - // Check the direction we need to step in - if (diff < 0) { - step *= -1; - } - - arm_state_.joint_state.state = ff_msgs::ArmJointState::MOVING; - arm_state_pub_.publish(arm_state_); - - for (i = 0; i < num_iter; i++) { - if (sas_arm_->isPreemptRequested()) { - return false; - } - joint_sample_.samples[0].angle_pos -= step; - joint_sample_pub_.publish(joint_sample_); - ros::Duration(0.1).sleep(); - // Don't have to publish fancy feedback since the executive doesn't use it - sas_arm_->publishFeedback(feedback); - } - - // Make pan angle match angle received exactly so that the angle isn't - // toggled when sending the same tilt angle again and again and again - joint_sample_.samples[0].angle_pos = angle; - joint_sample_pub_.publish(joint_sample_); - - if (std::abs(joint_sample_.samples[1].angle_pos - M_PI) < 0.05) { - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOWED; - } else { - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOPPED; - } - arm_state_pub_.publish(arm_state_); - return true; -} - -bool ArmSim::Tilt(float angle) { - float diff = joint_sample_.samples[1].angle_pos - angle; - float step = 1 * M_PI/180; - int i, num_iter = std::abs(diff/step); - ff_msgs::ArmFeedback feedback; - - // Check the direction we need to step in - if (diff < 0) { - step *= -1; - } - - if (std::abs(joint_sample_.samples[1].angle_pos - M_PI) < 0.05) { - arm_state_.joint_state.state = ff_msgs::ArmJointState::DEPLOYING; - } else if (std::abs(angle - M_PI) < 0.05) { - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOWING; - } else { - arm_state_.joint_state.state = ff_msgs::ArmJointState::MOVING; - } - arm_state_pub_.publish(arm_state_); - - for (i = 0; i < num_iter; i++) { - if (sas_arm_->isPreemptRequested()) { - return false; - } - joint_sample_.samples[1].angle_pos -= step; - joint_sample_pub_.publish(joint_sample_); - ros::Duration(0.05).sleep(); - // Don't have to publish fancy feedback since the executive doesn't use it - sas_arm_->publishFeedback(feedback); - } - - // Make tilt angle match angle received exactly so that the angle isn't - // toggled when sending the same tilt angle again and again and again :) - joint_sample_.samples[1].angle_pos = angle; - joint_sample_pub_.publish(joint_sample_); - - if (std::abs(angle - M_PI) < 0.05) { - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOWED; - } else { - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOPPED; - } - arm_state_pub_.publish(arm_state_); - return true; -} - -void ArmSim::GoalCallback(ff_msgs::ArmGoalConstPtr const& goal) { - ff_msgs::ArmFeedback feedback; - ff_msgs::ArmResult result; - - // Don't have to publish fancy feedback since the executive doesn't use it - sas_arm_->publishFeedback(feedback); - - if (goal->command == ff_msgs::ArmGoal::ARM_MOVE) { - // Do tilt first because right now we use tilt to unstow - if (Tilt(goal->tilt)) { - sas_arm_->publishFeedback(feedback); - - Pan(goal->pan); - - sas_arm_->publishFeedback(feedback); - } - } else if (goal->command == ff_msgs::ArmGoal::ARM_PAN) { - Pan(goal->pan); - - sas_arm_->publishFeedback(feedback); - } else if (goal->command == ff_msgs::ArmGoal::ARM_TILT) { - Tilt(goal->tilt); - - sas_arm_->publishFeedback(feedback); - } else 
if (goal->command == ff_msgs::ArmGoal::GRIPPER_OPEN) { - sas_arm_->publishFeedback(feedback); - - joint_sample_.samples[2].angle_pos = 45 * M_PI/180; - joint_sample_pub_.publish(joint_sample_); - - sas_arm_->publishFeedback(feedback); - - arm_state_.gripper_state.state = ff_msgs::ArmGripperState::OPEN; - arm_state_pub_.publish(arm_state_); - } else if (goal->command == ff_msgs::ArmGoal::GRIPPER_CLOSE) { - sas_arm_->publishFeedback(feedback); - - joint_sample_.samples[2].angle_pos = 20 * M_PI/180; - joint_sample_pub_.publish(joint_sample_); - - sas_arm_->publishFeedback(feedback); - - arm_state_.gripper_state.state = ff_msgs::ArmGripperState::CLOSED; - arm_state_pub_.publish(arm_state_); - } else { - ROS_ERROR("Arm simulator node doesn't recognize mode %i!", goal->command); - result.response = ff_msgs::ArmResult::INVALID_COMMAND; - sas_arm_->setAborted(result); - } - result.response = ff_msgs::ArmResult::SUCCESS; - sas_arm_->setSucceeded(result); -} - -void ArmSim::onInit() { - nh_ = getNodeHandle(); - - arm_state_pub_ = nh_.advertise( - TOPIC_BEHAVIORS_ARM_ARM_STATE, pub_queue_size_, true); - joint_sample_pub_ = nh_.advertise( - TOPIC_BEHAVIORS_ARM_JOINT_SAMPLE, pub_queue_size_, true); - - arm_state_.joint_state.state = ff_msgs::ArmJointState::STOWED; - arm_state_.gripper_state.state = ff_msgs::ArmGripperState::CLOSED; - - joint_sample_.samples.resize(3); - for (int i = 0; i < 3; i++) { - joint_sample_.samples[i].angle_pos = 0; - joint_sample_.samples[i].angle_vel = 0; - joint_sample_.samples[i].angle_acc = 0; - joint_sample_.samples[i].current = 0; - joint_sample_.samples[i].torque = 0; - joint_sample_.samples[i].status = ff_msgs::JointSample::JOINT_ENABLED; - } - - joint_sample_.samples[0].name = "pan"; - joint_sample_.samples[0].temperature = 26; - - joint_sample_.samples[1].name = "tilt"; - joint_sample_.samples[1].temperature = 23; - joint_sample_.samples[1].angle_pos = M_PI; - - joint_sample_.samples[2].name = "gripper"; - joint_sample_.samples[2].temperature = 32; - joint_sample_.samples[2].angle_pos = 20 * M_PI/180; - - arm_state_pub_.publish(arm_state_); - joint_sample_pub_.publish(joint_sample_); - - sas_arm_ = std::make_shared> - (nh_, ACTION_BEHAVIORS_ARM, - boost::bind(&ArmSim::GoalCallback, this, _1), - false); - sas_arm_->start(); -} -} // namespace arm_sim - -PLUGINLIB_EXPORT_CLASS(arm_sim::ArmSim, nodelet::Nodelet) diff --git a/tools/simulator/dock_sim/CMakeLists.txt b/tools/simulator/dock_sim/CMakeLists.txt deleted file mode 100644 index dac1777c5e..0000000000 --- a/tools/simulator/dock_sim/CMakeLists.txt +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -project(dock_sim) - -catkin_package( - LIBRARIES dock_sim - DEPENDS roscpp ff_msgs nodelet -) - -create_library(TARGET dock_sim - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ff_nodelet - INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs -) - -install_launch_files() diff --git a/tools/simulator/dock_sim/nodelet_plugins.xml b/tools/simulator/dock_sim/nodelet_plugins.xml deleted file mode 100644 index 5e3aa8bb1e..0000000000 --- a/tools/simulator/dock_sim/nodelet_plugins.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - Nodelet to simulate the dock." - - - diff --git a/tools/simulator/dock_sim/readme.md b/tools/simulator/dock_sim/readme.md deleted file mode 100644 index e5458cc657..0000000000 --- a/tools/simulator/dock_sim/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -\page dock_sim Dock Simulator - -This might be legacy code.. \ No newline at end of file diff --git a/tools/simulator/dock_sim/src/dock_sim.cc b/tools/simulator/dock_sim/src/dock_sim.cc deleted file mode 100644 index 6ea5b53e4f..0000000000 --- a/tools/simulator/dock_sim/src/dock_sim.cc +++ /dev/null @@ -1,95 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - - -#include - -namespace dock_sim { - -DockSim::DockSim() {} - -DockSim::~DockSim() {} - -// TODO(Someone) Create a simulated dock and add code here to actually dock -void DockSim::DockGoalCallback(ff_msgs::DockGoalConstPtr const& goal) { - ff_msgs::DockFeedback feedback; - ff_msgs::DockResult result; - switch (goal->command) { - // DOCK // - case ff_msgs::DockGoal::DOCK: - feedback.state.state = - ff_msgs::DockState::DOCKING_SWITCHING_TO_AR_LOC; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - feedback.state.state = - ff_msgs::DockState::DOCKING_MOVING_TO_APPROACH_POSE; - sas_dock_->publishFeedback(feedback); - ros::Duration(5).sleep(); - feedback.state.state = - ff_msgs::DockState::DOCKING_MOVING_TO_COMPLETE_POSE; - sas_dock_->publishFeedback(feedback); - ros::Duration(5).sleep(); - feedback.state.state = - ff_msgs::DockState::DOCKING_CHECKING_ATTACHED; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - feedback.state.state = - ff_msgs::DockState::DOCKING_WAITING_FOR_SPIN_DOWN; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - feedback.state.state = - ff_msgs::DockState::DOCKING_SWITCHING_TO_NO_LOC; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - result.response = - ff_msgs::DockResult::DOCKED; - sas_dock_->setSucceeded(result); - break; - // UNDOCK // - case ff_msgs::DockGoal::UNDOCK: - feedback.state.state = - ff_msgs::DockState::UNDOCKING_SWITCHING_TO_ML_LOC; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - feedback.state.state = - ff_msgs::DockState::UNDOCKING_WAITING_FOR_SPIN_UP; - sas_dock_->publishFeedback(feedback); - ros::Duration(1).sleep(); - feedback.state.state = - ff_msgs::DockState::UNDOCKING_MOVING_TO_APPROACH_POSE; - sas_dock_->publishFeedback(feedback); - ros::Duration(5).sleep(); - result.response = - ff_msgs::DockResult::UNDOCKED; - sas_dock_->setSucceeded(result); - break; - } -} - -void DockSim::onInit() { - ros::NodeHandle nh = getNodeHandle(); - sas_dock_ = - std::make_shared> - (nh, ACTION_BEHAVIORS_DOCK, - boost::bind(&DockSim::DockGoalCallback, this, _1), - false); - sas_dock_->start(); -} -} // namespace dock_sim - -PLUGINLIB_EXPORT_CLASS(dock_sim::DockSim, nodelet::Nodelet) diff --git a/tools/simulator/eps_sim/CMakeLists.txt b/tools/simulator/eps_sim/CMakeLists.txt deleted file mode 100644 index 5302a1aeb1..0000000000 --- a/tools/simulator/eps_sim/CMakeLists.txt +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -project(eps_sim) - -find_package(catkin2 REQUIRED COMPONENTS - sensor_msgs - message_generation -) - -add_service_files( - FILES - AddRemoveBattery.srv -) - -generate_messages( - DEPENDENCIES sensor_msgs -) - -#create_msg_targets(SDIR srv -# DEPS sensor_msgs -#) - -catkin_package( - LIBRARIES eps_sim - DEPENDS roscpp ff_msgs ff_nodelet -) - -create_library(TARGET eps_sim - LIBS ${catkin_LIBRARIES} ${GLOG_LIBRARIES} ff_nodelet - INC ${catkin_INCLUDES} ${GLOG_INCLUDE_DIRS} - DEPS ff_msgs eps_sim_generate_messages_cpp -) - -install_launch_files() diff --git a/tools/simulator/eps_sim/include/eps_sim/eps_sim.h b/tools/simulator/eps_sim/include/eps_sim/eps_sim.h deleted file mode 100644 index 9f0c1ccfa4..0000000000 --- a/tools/simulator/eps_sim/include/eps_sim/eps_sim.h +++ /dev/null @@ -1,87 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - - -#ifndef EPS_SIM_EPS_SIM_H_ -#define EPS_SIM_EPS_SIM_H_ - -#include - -// ROS includes -#include - -// FSW standard naming -#include - -// Battery services -#include - -// Standard messages -#include -#include -#include - -#include - -// C++ STL includes -#include - -#define MESSAGE_QUEUE_SIZE 10 -#define MAX_TEMPERATURE 42 - -namespace eps_sim { - -// TODO(Katie) Add battery temperature when we figure out what we are doing with -// battery temperatures -class EpsSim : public ff_util::FreeFlyerNodelet { - public: - EpsSim(); - ~EpsSim(); - - bool BatteryService(eps_sim::AddRemoveBattery::Request &req, - eps_sim::AddRemoveBattery::Response &res); - - protected: - virtual void Initialize(ros::NodeHandle *nh); - - private: - void TelemetryCallback(const ros::TimerEvent &event); - - int rate_; - - float tl_charge_dec_, tr_charge_dec_, bl_charge_dec_, br_charge_dec_; - float tl_orig_charge_, tr_orig_charge_, bl_orig_charge_, br_orig_charge_; - float tl_temp_inc_, tr_temp_inc_, bl_temp_inc_, br_temp_inc_; - float tl_orig_temp_, tr_orig_temp_, bl_orig_temp_, br_orig_temp_; - - ros::Publisher battery_state_pub_tl_, battery_state_pub_tr_; - ros::Publisher battery_state_pub_bl_, battery_state_pub_br_; - ros::Publisher battery_temp_pub_tl_, battery_temp_pub_tr_; - ros::Publisher battery_temp_pub_bl_, battery_temp_pub_br_; - - ros::ServiceServer battery_srv_; - - ros::Timer telem_timer; - - sensor_msgs::BatteryState state_tl_, state_tr_, state_bl_, state_br_; - sensor_msgs::Temperature temp_tl_, temp_tr_, temp_bl_, temp_br_; -}; - -} // namespace eps_sim - -#endif // EPS_SIM_EPS_SIM_H_ diff --git a/tools/simulator/eps_sim/nodelet_plugins.xml b/tools/simulator/eps_sim/nodelet_plugins.xml deleted file mode 100644 index be845a4561..0000000000 --- a/tools/simulator/eps_sim/nodelet_plugins.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - Nodelet to simulate the eps." 
- - - diff --git a/tools/simulator/eps_sim/readme.md b/tools/simulator/eps_sim/readme.md deleted file mode 100644 index 3c19b5a99e..0000000000 --- a/tools/simulator/eps_sim/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -\page eps_sim EPS Simulator - -This might be legacy code.. \ No newline at end of file diff --git a/tools/simulator/eps_sim/src/eps_sim.cc b/tools/simulator/eps_sim/src/eps_sim.cc deleted file mode 100644 index aee0ee9d0b..0000000000 --- a/tools/simulator/eps_sim/src/eps_sim.cc +++ /dev/null @@ -1,245 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - - -#include - -namespace eps_sim { -EpsSim::EpsSim() : ff_util::FreeFlyerNodelet(), rate_(10) { -} - -EpsSim::~EpsSim() { -} - -bool EpsSim::BatteryService(eps_sim::AddRemoveBattery::Request &req, - eps_sim::AddRemoveBattery::Response &res) { - if (req.battery.location == ff_hw_msgs::EpsBatteryLocation::TOP_LEFT) { - state_tl_ = req.battery; - if (req.battery.present) { - tl_orig_charge_ = state_tl_.charge; - tl_charge_dec_ = req.battery_charge_decrement_value; - temp_tl_.temperature = req.battery_temperature; - tl_orig_temp_ = req.battery_temperature; - tl_temp_inc_ = req.battery_temperature_increment_value; - state_tl_.percentage = state_tl_.charge / state_tl_.capacity * 100.0; - } - battery_state_pub_tl_.publish(state_tl_); - temp_tl_.header.stamp = ros::Time::now(); - battery_temp_pub_tl_.publish(temp_tl_); - } else if (req.battery.location == - ff_hw_msgs::EpsBatteryLocation::TOP_RIGHT) { - state_tr_ = req.battery; - if (req.battery.present) { - tr_orig_charge_ = state_tr_.charge; - tr_charge_dec_ = req.battery_charge_decrement_value; - temp_tr_.temperature = req.battery_temperature; - tr_orig_temp_ = req.battery_temperature; - tr_temp_inc_ = req.battery_temperature_increment_value; - state_tr_.percentage = state_tr_.charge / state_tr_.capacity * 100.0; - } - battery_state_pub_tr_.publish(state_tr_); - temp_tr_.header.stamp = ros::Time::now(); - battery_temp_pub_tr_.publish(temp_tr_); - } else if (req.battery.location == - ff_hw_msgs::EpsBatteryLocation::BOTTOM_LEFT) { - state_bl_ = req.battery; - if (req.battery.present) { - bl_orig_charge_ = state_bl_.charge; - bl_charge_dec_ = req.battery_charge_decrement_value; - temp_bl_.temperature = req.battery_temperature; - bl_orig_temp_ = req.battery_temperature; - bl_temp_inc_ = req.battery_temperature_increment_value; - state_bl_.percentage = state_bl_.charge / state_bl_.capacity * 100.0; - } - battery_state_pub_bl_.publish(state_bl_); - temp_bl_.header.stamp = ros::Time::now(); - battery_temp_pub_bl_.publish(temp_bl_); - } else if (req.battery.location == - ff_hw_msgs::EpsBatteryLocation::BOTTOM_RIGHT) { - state_br_ = req.battery; - if (req.battery.present) { - br_orig_charge_ = state_br_.charge; - br_charge_dec_ = req.battery_charge_decrement_value; - 
temp_br_.temperature = req.battery_temperature; - br_orig_temp_ = req.battery_temperature; - br_temp_inc_ = req.battery_temperature_increment_value; - state_br_.percentage = state_br_.charge / state_br_.capacity * 100.0; - } - battery_state_pub_br_.publish(state_br_); - temp_br_.header.stamp = ros::Time::now(); - battery_temp_pub_br_.publish(temp_br_); - } else { - ROS_ERROR("Eps sim: Battery location %s not recognized. Battery not added!", - req.battery.location.c_str()); - return false; - } - - return true; -} - -void EpsSim::Initialize(ros::NodeHandle *nh) { - battery_state_pub_tl_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_STATE_TL, MESSAGE_QUEUE_SIZE); - battery_state_pub_tr_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_STATE_TR, MESSAGE_QUEUE_SIZE); - battery_state_pub_bl_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_STATE_BL, MESSAGE_QUEUE_SIZE); - battery_state_pub_br_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_STATE_BR, MESSAGE_QUEUE_SIZE); - - battery_temp_pub_tl_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_TEMP_TL, MESSAGE_QUEUE_SIZE); - battery_temp_pub_tr_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_TEMP_TR, MESSAGE_QUEUE_SIZE); - battery_temp_pub_bl_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_TEMP_BL, MESSAGE_QUEUE_SIZE); - battery_temp_pub_br_ = nh->advertise( - TOPIC_HARDWARE_EPS_BATTERY_TEMP_BR, MESSAGE_QUEUE_SIZE); - - telem_timer = nh->createTimer(ros::Duration(rate_), - &EpsSim::TelemetryCallback, this, false, true); - - battery_srv_ = nh->advertiseService("add_remove_battery", - &EpsSim::BatteryService, - this); - - state_tl_.header.stamp = ros::Time::now(); - state_tl_.present = false; - state_tl_.location = ff_hw_msgs::EpsBatteryLocation::TOP_LEFT; - tl_orig_charge_ = 0; - tl_charge_dec_ = 0; - battery_state_pub_tl_.publish(state_tl_); - temp_tl_.header.stamp = ros::Time::now(); - temp_tl_.temperature = -273; - tl_orig_temp_ = -273; - tl_temp_inc_ = 0; - battery_temp_pub_tl_.publish(temp_tl_); - - state_tr_.header.stamp = ros::Time::now(); - state_tr_.present = false; - state_tr_.location = ff_hw_msgs::EpsBatteryLocation::TOP_RIGHT; - tr_orig_charge_ = 0; - tr_charge_dec_ = 0; - battery_state_pub_tr_.publish(state_tr_); - temp_tr_.header.stamp = ros::Time::now(); - temp_tr_.temperature = -273; - tr_orig_temp_ = -273; - tr_temp_inc_ = 0; - battery_temp_pub_tr_.publish(temp_tr_); - - state_bl_.header.stamp = ros::Time::now(); - state_bl_.present = false; - state_bl_.location = ff_hw_msgs::EpsBatteryLocation::BOTTOM_LEFT; - bl_orig_charge_ = 0; - bl_charge_dec_ = 0; - battery_state_pub_bl_.publish(state_bl_); - temp_bl_.header.stamp = ros::Time::now(); - temp_bl_.temperature = -273; - bl_orig_temp_ = -273; - bl_temp_inc_ = 0; - battery_temp_pub_bl_.publish(temp_bl_); - - state_br_.header.stamp = ros::Time::now(); - state_br_.present = false; - state_br_.location = ff_hw_msgs::EpsBatteryLocation::BOTTOM_RIGHT; - br_orig_charge_ = 0; - br_charge_dec_ = 0; - battery_state_pub_br_.publish(state_br_); - temp_br_.header.stamp = ros::Time::now(); - temp_br_.temperature = -273; - br_orig_temp_ = -273; - br_temp_inc_ = 0; - battery_temp_pub_br_.publish(temp_br_); -} - -void EpsSim::TelemetryCallback(const ros::TimerEvent &event) { - if (state_tl_.present) { - state_tl_.charge -= tl_charge_dec_; - if (state_tl_.charge < 0) { - state_tl_.charge = tl_orig_charge_; - } - state_tl_.percentage = state_tl_.charge / state_tl_.capacity * 100.0; - - temp_tl_.temperature += tl_temp_inc_; - if (temp_tl_.temperature >= MAX_TEMPERATURE) { - temp_tl_.temperature = tl_orig_temp_; 
- } - } - - // Always publish every battery state regardless if it is present or not since - // the actual eps driver does this - state_tl_.header.stamp = ros::Time::now(); - battery_state_pub_tl_.publish(state_tl_); - temp_tl_.header.stamp = ros::Time::now(); - battery_temp_pub_tl_.publish(temp_tl_); - - if (state_tr_.present) { - state_tr_.charge -= tr_charge_dec_; - if (state_tr_.charge < 0) { - state_tr_.charge = tr_orig_charge_; - } - state_tr_.percentage = state_tr_.charge / state_tr_.capacity * 100.0; - - temp_tr_.temperature += tr_temp_inc_; - if (temp_tr_.temperature >= MAX_TEMPERATURE) { - temp_tr_.temperature = tr_orig_temp_; - } - } - state_tr_.header.stamp = ros::Time::now(); - battery_state_pub_tr_.publish(state_tr_); - temp_tr_.header.stamp = ros::Time::now(); - battery_temp_pub_tr_.publish(temp_tr_); - - if (state_bl_.present) { - state_bl_.charge -= bl_charge_dec_; - if (state_bl_.charge < 0) { - state_bl_.charge = bl_orig_charge_; - } - state_bl_.percentage = state_bl_.charge / state_bl_.capacity * 100.0; - - temp_bl_.temperature += bl_temp_inc_; - if (temp_bl_.temperature >= MAX_TEMPERATURE) { - temp_bl_.temperature = bl_orig_temp_; - } - } - state_bl_.header.stamp = ros::Time::now(); - battery_state_pub_bl_.publish(state_bl_); - temp_bl_.header.stamp = ros::Time::now(); - battery_temp_pub_bl_.publish(temp_bl_); - - if (state_br_.present) { - state_br_.charge -= br_charge_dec_; - if (state_br_.charge < 0) { - state_br_.charge = br_orig_charge_; - } - state_br_.percentage = state_br_.charge / state_br_.capacity * 100.0; - - temp_br_.temperature += br_temp_inc_; - if (temp_br_.temperature >= MAX_TEMPERATURE) { - temp_br_.temperature = br_orig_temp_; - } - } - state_br_.header.stamp = ros::Time::now(); - battery_state_pub_br_.publish(state_br_); - temp_br_.header.stamp = ros::Time::now(); - battery_temp_pub_br_.publish(temp_br_); -} - -} // namespace eps_sim - -PLUGINLIB_EXPORT_CLASS(eps_sim::EpsSim, nodelet::Nodelet) diff --git a/tools/simulator/eps_sim/srv/AddRemoveBattery.srv b/tools/simulator/eps_sim/srv/AddRemoveBattery.srv deleted file mode 100644 index 83d0d0406b..0000000000 --- a/tools/simulator/eps_sim/srv/AddRemoveBattery.srv +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
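The EPS simulator deleted above advertises an add_remove_battery service (see Initialize()), whose request fields appear in the AddRemoveBattery.srv definition just below. A hedged client sketch follows; it assumes the generated eps_sim::AddRemoveBattery and ff_hw_msgs::EpsBatteryLocation headers, that the relative service name resolves under the simulator's node namespace, and all numeric values are made up for illustration.

    // Sketch only: add a top-left battery to the deleted EPS simulator through
    // its add_remove_battery service. Field names follow the request fields used
    // in BatteryService(); the values are illustrative.
    #include <ros/ros.h>
    #include <sensor_msgs/BatteryState.h>
    #include <ff_hw_msgs/EpsBatteryLocation.h>
    #include <eps_sim/AddRemoveBattery.h>

    int main(int argc, char** argv) {
      ros::init(argc, argv, "add_battery_client");
      ros::NodeHandle nh;
      ros::ServiceClient client =
          nh.serviceClient<eps_sim::AddRemoveBattery>("add_remove_battery");
      eps_sim::AddRemoveBattery srv;
      srv.request.battery.location = ff_hw_msgs::EpsBatteryLocation::TOP_LEFT;
      srv.request.battery.present = true;
      srv.request.battery.capacity = 3.4;                    // illustrative
      srv.request.battery.charge = 3.4;
      srv.request.battery_charge_decrement_value = 0.01;
      srv.request.battery_temperature = 25.0;
      srv.request.battery_temperature_increment_value = 0.1;
      if (!client.call(srv))                                 // handler returns false for unknown locations
        ROS_ERROR("add_remove_battery call failed");
      return 0;
    }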
-# -# Service to add or remove a battery - -sensor_msgs/BatteryState battery -float32 battery_charge_decrement_value -float32 battery_temperature -float32 battery_temperature_increment_value ---- diff --git a/tools/simulator/readme.md b/tools/simulator/readme.md deleted file mode 100644 index 34aef0b687..0000000000 --- a/tools/simulator/readme.md +++ /dev/null @@ -1,7 +0,0 @@ -\page simulator Simulator - -This might be legacy code - -\subpage arm_sim -\subpage dock_sim -\subpage eps_sim \ No newline at end of file diff --git a/tools/visualeyez/CMakeLists.txt b/tools/visualeyez/CMakeLists.txt deleted file mode 100644 index aaf25d5ca0..0000000000 --- a/tools/visualeyez/CMakeLists.txt +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(visualeyez) - -catkin_package( - CATKIN_DEPENDS - roscpp - nodelet - pluginlib - tf2 - tf2_ros - geometry_msgs - visualization_msgs - std_srvs -) - -create_library(TARGET visualeyez - LIBS ${catkin_LIBRARIES} ${EIGEN3_LIBRARIES} ${CERES_LIBRARIES} config_reader ff_nodelet - INC ${catkin_INCLUDE_DIRS} ${EIGEN3_INCLUDE_DIRS} ${CERES_INCLUDE_DIRS} -) - -create_tool_targets( - DIR tools - LIBS visualeyez -) - -# Only test if it is enabled -if (CATKIN_ENABLE_TESTING) - find_package(rostest REQUIRED) - - # We need to use the common client interface - include_directories(${catkin_INCLUDE_DIRS} include) - - # A simple test to check that the streaming client receives, decodes - # and fails gracefully if a packet is malformed. - add_rostest_gtest(test_visualeyez_stream - test/test_visualeyez_stream.test - test/test_visualeyez_stream.cc) - target_link_libraries(test_visualeyez_stream visualeyez) - - # A simple test to check that the streaming client receives, decodes - # and fails gracefully if a packet is malformed. 
- add_rostest_gtest(test_visualeyez_util - test/test_visualeyez_util.test - test/test_visualeyez_util.cc) - target_link_libraries(test_visualeyez_util visualeyez) - -endif() - -install_launch_files() \ No newline at end of file diff --git a/tools/visualeyez/config/visualeyez.rviz b/tools/visualeyez/config/visualeyez.rviz deleted file mode 100644 index 4685fade56..0000000000 --- a/tools/visualeyez/config/visualeyez.rviz +++ /dev/null @@ -1,149 +0,0 @@ -Panels: - - Class: rviz/Displays - Help Height: 78 - Name: Displays - Property Tree Widget: - Expanded: - - /Global Options1 - - /Status1 - Splitter Ratio: 0.5 - Tree Height: 478 - - Class: rviz/Selection - Name: Selection - - Class: rviz/Tool Properties - Expanded: - - /2D Pose Estimate1 - - /2D Nav Goal1 - - /Publish Point1 - Name: Tool Properties - Splitter Ratio: 0.588679 - - Class: rviz/Views - Expanded: - - /Current View1 - Name: Views - Splitter Ratio: 0.5 - - Class: rviz/Time - Experimental: false - Name: Time - SyncMode: 0 - SyncSource: "" -Visualization Manager: - Class: "" - Displays: - - Alpha: 0.5 - Cell Size: 1 - Class: rviz/Grid - Color: 160; 160; 164 - Enabled: true - Line Style: - Line Width: 0.03 - Value: Lines - Name: Grid - Normal Cell Count: 0 - Offset: - X: 0 - Y: 0 - Z: 0 - Plane: XY - Plane Cell Count: 10 - Reference Frame: - Value: true - - Class: rviz/MarkerArray - Enabled: true - Marker Topic: /visualeyez/rviz/markers - Name: MarkerArray - Namespaces: - visualeyez_: true - Queue Size: 100 - Value: true - - Class: rviz/TF - Enabled: true - Frame Timeout: 15 - Frames: - All Enabled: true - rviz: - Value: true - truth: - Value: true - world: - Value: true - Marker Scale: 1 - Name: TF - Show Arrows: true - Show Axes: true - Show Names: true - Tree: - world: - rviz: - {} - truth: - {} - Update Interval: 0 - Value: true - - Class: rviz/MarkerArray - Enabled: true - Marker Topic: /visualeyez/rviz/grounding - Name: MarkerArray - Namespaces: - visualeyez_grounding: true - Queue Size: 100 - Value: true - Enabled: true - Global Options: - Background Color: 48; 48; 48 - Fixed Frame: rviz - Frame Rate: 30 - Name: root - Tools: - - Class: rviz/Interact - Hide Inactive Objects: true - - Class: rviz/MoveCamera - - Class: rviz/Select - - Class: rviz/FocusCamera - - Class: rviz/Measure - - Class: rviz/SetInitialPose - Topic: /initialpose - - Class: rviz/SetGoal - Topic: /move_base_simple/goal - - Class: rviz/PublishPoint - Single click: true - Topic: /clicked_point - Value: true - Views: - Current: - Class: rviz/Orbit - Distance: 4.68643 - Enable Stereo Rendering: - Stereo Eye Separation: 0.06 - Stereo Focal Distance: 1 - Swap Stereo Eyes: false - Value: false - Focal Point: - X: 0.271304 - Y: -0.546715 - Z: -0.00664104 - Name: Current View - Near Clip Distance: 0.01 - Pitch: 0.130398 - Target Frame: - Value: Orbit (rviz) - Yaw: 4.2754 - Saved: ~ -Window Geometry: - Displays: - collapsed: false - Height: 921 - Hide Left Dock: false - Hide Right Dock: false - QMainWindow State: 
000000ff00000000fd00000004000000000000016a00000695fc0200000007fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000003fc00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000296fc0200000002fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a0056006900650077007303000000440000040a000002e10000019700000003000004380000026dfc0100000003fc00000000000004380000000000fffffffa000000010200000002fb000000100044006900730070006c0061007900730000000000ffffffff000000dd00fffffffb0000000a00560069006500770073010000069b000000b00000000000000000fb0000000800540069006d0065000000000000000780000002c000fffffffb0000000800540069006d00650100000000000004500000000000000000000004380000037c00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000 - Selection: - collapsed: false - Time: - collapsed: false - Tool Properties: - collapsed: false - Views: - collapsed: false - Width: 1080 - X: 1438 - Y: 487 diff --git a/tools/visualeyez/include/visualeyez/visualeyez.h b/tools/visualeyez/include/visualeyez/visualeyez.h deleted file mode 100644 index 5aecdab5b8..0000000000 --- a/tools/visualeyez/include/visualeyez/visualeyez.h +++ /dev/null @@ -1,223 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef VISUALEYEZ_VISUALEYEZ_H_ -#define VISUALEYEZ_VISUALEYEZ_H_ - -// ROS includes -#include - -// ROS messages -#include -#include - -// FSW tools -#include - -// FSW mesages -#include -#include -#include - -// Eigen C++ includes -#include -#include - -// Boost includes -#include -#include -#include -#include -#include - -// STL C++ includes -#include -#include -#include -#include -#include - -// Topic names -#define TOPIC_VISUALEYEZ_DATA "/visualeyez/data" -#define TOPIC_VISUALEYEZ_GROUNDING "/visualeyez/grounding" -#define SERVICE_VISUALEYEZ_CALIBRATE_CONFIG "/visualeyez/calibrate/config" -#define TOPIC_VISUALEYEZ_CALIBRATE_FEEDBACK "/visualeyez/calibrate/feedback" -#define TOPIC_VISUALEYEZ_RVIZ_GROUNDING "/visualeyez/rviz/grounding" -#define TOPIC_VISUALEYEZ_RVIZ_MARKERS "/visualeyez/rviz/markers" - -// Test calibration file -#define TEST_CALIBRATION_FILE "test.bin" -#define TEST_CALIBRATION_DURATION_SECS 60.0 -#define TEST_CALIBRATION_RATE_HZ 10.0 -#define CALIBRATION_XRADS_PER_SEC 0.01 -#define CALIBRATION_YRADS_PER_SEC 0.02 -#define CALIBRATION_ZRADS_PER_SEC 0.03 -#define EPSILON 1e-6 - -// Synthetic room dimensions -#define AREA_WIDTH 4.0 -#define AREA_LENGTH 8.0 -#define AREA_HEIGHT 2.0 -#define ROBOT_WIDTH 0.3 -#define ROBOT_LENGTH 0.3 -#define ROBOT_HEIGHT 0.3 - -// Header of doubles -#define HEADER_DATE 0 -#define HEADER_LEN 1 - -// Packet of doubles -#define PACKET_TCMID 0 -#define PACKET_LEDID 1 -#define PACKET_POS_X 2 -#define PACKET_POS_Y 3 -#define PACKET_POS_Z 4 -#define PACKET_LEN 5 - -// Max number of packets / measurement -#define MAX_NUM_PACKETS 512 - -// Maximum buffer size -#define MAX_BUFFER_SIZE (HEADER_LEN + MAX_NUM_PACKETS * PACKET_LEN) - -using udp = boost::asio::ip::udp; -namespace fs = boost::filesystem; - -/** - * \ingroup tools - */ -namespace visualeyez { - -// Data structure for storing matches -typedef std::pair < uint8_t, uint8_t > VZIndex; -typedef std::vector < VZIndex > VZMatches; -typedef std::map < std::string, VZMatches > VZTargetMatches; - -// Data structure for storing markers -typedef std::map < VZIndex, Eigen::Vector3d > VZMarkers; -typedef std::map < std::string, VZMarkers > VZTargetMarkers; - -// Utility functions for the Visualeyez tracking system -class VisualeyezUtils { - public: - // Helper function to see if an LED value is valid. - static bool IsValidMarkerValue(geometry_msgs::Vector3 const& pt); - - // Helper function for grabbing marker information from a LUA config file - static VZMarkers GetMarkers(config_reader::ConfigReader::Table *table); - - // Write calibration data to a binary file for a given set of targets - static bool WriteConfig(std::string file_name, VZTargetMarkers & targets); - - // Read calibration data from a binary file for a given set of targets - static bool ReadConfig(std::string file_name, VZTargetMarkers & targets); - - // This algorithm solves the Procrustes problem in that it finds an affine transform - // (rotation, translation, scale) that maps the "in" matrix to the "out" matrix - // Code from: https://github.com/oleg-alexandrov/projects/blob/master/eigen/Kabsch.cpp - // License is that this code is release in the public domain... 
Thanks, Oleg :) - template - static bool Kabsch(Eigen::Matrix in, Eigen::Matrix out, - Eigen::Transform &A, bool allowScale) { - // Default output - A.linear() = Eigen::Matrix::Identity(3, 3); - A.translation() = Eigen::Matrix::Zero(); - // A simple check to see that we have a sufficient number of correspondences - if (in.cols() < 4) { - // ROS_WARN("Visualeyez needs to see at least four LEDs to track"); - return false; - } - // A simple check to see that we have a sufficient number of correspondences - if (in.cols() != out.cols()) { - // ROS_ERROR("Same number of points required in input matrices"); - return false; - } - // First find the scale, by finding the ratio of sums of some distances, - // then bring the datasets to the same scale. - T dist_in = T(0.0), dist_out = T(0.0); - for (int col = 0; col < in.cols()-1; col++) { - dist_in += (in.col(col+1) - in.col(col)).norm(); - dist_out += (out.col(col+1) - out.col(col)).norm(); - } - if (dist_in <= T(0.0) || dist_out <= T(0.0)) - return true; - T scale = T(1.0); - if (allowScale) { - scale = dist_out/dist_in; - out /= scale; - } - // Find the centroids then shift to the origin - Eigen::Matrix in_ctr = Eigen::Matrix::Zero(); - Eigen::Matrix out_ctr = Eigen::Matrix::Zero(); - for (int col = 0; col < in.cols(); col++) { - in_ctr += in.col(col); - out_ctr += out.col(col); - } - in_ctr /= T(in.cols()); - out_ctr /= T(out.cols()); - for (int col = 0; col < in.cols(); col++) { - in.col(col) -= in_ctr; - out.col(col) -= out_ctr; - } - // SVD - Eigen::Matrix Cov = in * out.transpose(); - Eigen::JacobiSVD < Eigen::Matrix < T, Eigen::Dynamic, Eigen::Dynamic > > svd(Cov, - Eigen::ComputeThinU | Eigen::ComputeThinV); - // Find the rotation - T d = (svd.matrixV() * svd.matrixU().transpose()).determinant(); - if (d > T(0.0)) - d = T(1.0); - else - d = T(-1.0); - Eigen::Matrix I = Eigen::Matrix::Identity(3, 3); - I(2, 2) = d; - Eigen::Matrix R = svd.matrixV() * I * svd.matrixU().transpose(); - // The final transform - A.linear() = scale * R; - A.translation() = scale*(out_ctr - R*in_ctr); - // Success - return true; - } -}; - -class VisualeyezClient { - public: - // Constructor - VisualeyezClient(); - - // Destructor - ~VisualeyezClient(); - - // Sends a single message - void Send(ff_msgs::VisualeyezDataArray const& msg); - - protected: - // Called when message is sent - void SendCallback(const boost::system::error_code& error, std::size_t bytes_transferred); - - private: - boost::thread thread_; /*!< Service thread. */ - boost::asio::io_service io_service_; /*!< ASIO service. */ - udp::socket socket_; /*!< Network socket. */ - udp::endpoint remote_endpoint_; /*!< Remote endpoint. */ - boost::array tx_buffer_; /*!< Receive buffer */ -}; - -} // namespace visualeyez - -#endif // VISUALEYEZ_VISUALEYEZ_H_ diff --git a/tools/visualeyez/launch/visualeyez.launch b/tools/visualeyez/launch/visualeyez.launch deleted file mode 100644 index b3d04c103d..0000000000 --- a/tools/visualeyez/launch/visualeyez.launch +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tools/visualeyez/matlab/astrobee_ros.m b/tools/visualeyez/matlab/astrobee_ros.m deleted file mode 100644 index 6480c0fe9c..0000000000 --- a/tools/visualeyez/matlab/astrobee_ros.m +++ /dev/null @@ -1,55 +0,0 @@ -% Copyright (c) 2017, United States Government, as represented by the -% Administrator of the National Aeronautics and Space Administration. -% -% All rights reserved. 
-% -% The Astrobee platform is licensed under the Apache License, Version 2.0 -% (the "License"); you may not use this file except in compliance with the -% License. You may obtain a copy of the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations -% under the License. - -% Fixes R2014a on memric -restoredefaultpath; -addpath('./tcp_udp_ip'); -addpath('./vz'); - -%%%%%%%%%%%%%% -% PARAMETERS % -%%%%%%%%%%%%%% - -rate = 100; % Tracking rate -mmax = 512; % Maximum number of trackable markers -ncol = 5; % Number of measurement columns [tcm, led, x, y, z] - -%%%%%%%%%%%%%% -% TRACKING % -%%%%%%%%%%%%%% -udp = pnet('udpsocket', 9091); -offs = datenum('1970-1-1 00:00:00'); -% Tracking loop -while true - % Pause between measurements - pause(1/rate); - % Get the raw data from the VZ system - raw_data = VzGetDat; - % Get the number of measurements - n = size(raw_data, 1); - % Get the time stamp - t = (datenum(clock)-offs)*24*3600.0; - % Only send the data if we actually have some to send - if (n > 0 && n <= mmax) - % Setup the final data,: append a time stamp and convert to vector - final_data = [t, reshape(raw_data(:,1:ncol)', 1, ncol*n)]; - % Write the data to the UDP pipe - pnet(udp,'write', double(final_data), 'native'); - pnet(udp,'writepacket', 'spheresgoat.ndc.nasa.gov', 9090); - end -end -pnet(udp,'close'); \ No newline at end of file diff --git a/tools/visualeyez/nodelet_plugins.xml b/tools/visualeyez/nodelet_plugins.xml deleted file mode 100644 index ea054a0bdc..0000000000 --- a/tools/visualeyez/nodelet_plugins.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - Nodelet for converting UDP client data to ROS messages - - - Nodelet for tracking and calibration - - \ No newline at end of file diff --git a/tools/visualeyez/package.xml b/tools/visualeyez/package.xml deleted file mode 100644 index bd243490aa..0000000000 --- a/tools/visualeyez/package.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - visualeyez - 0.0.0 - - TCP server for receiving visualeyez data from a Windows host - - - Apache License, Version 2.0 - - - Astrobee Flight Software - - - Astrobee Flight Software - - catkin - roscpp - nodelet - pluginlib - geometry_msgs - visualization_msgs - std_srvs - tf2 - tf2_ros - roscpp - nodelet - pluginlib - geometry_msgs - visualization_msgs - std_srvs - tf2 - tf2_ros - - - - \ No newline at end of file diff --git a/tools/visualeyez/readme.md b/tools/visualeyez/readme.md deleted file mode 100644 index 342340363d..0000000000 --- a/tools/visualeyez/readme.md +++ /dev/null @@ -1,200 +0,0 @@ -\page visualeyez Visualeyez marker tracking system - -## Overview - -The Visualeyez tracking system provides the ability to continuously track active LED markers in 3D space. Refer to the image below. The tracker (top left) is a bar containing three cameras. When an LED is in view of all three cameras, the system is able to solve for its position within a reference frame centered on the tracker. The VZSoft software collects raw measurements from the tracker (over a serial line) and plots the data in a GUI for the user. - -![alt text](../images/tools/visualeyez.png "The Visualeyez System") - -The GUI also provides the ability to stream measurements to third party applications. 
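The stream itself is a flat array of IEEE doubles: one header double carrying the timestamp, then five doubles per visible marker (TCM id, LED id, x, y, z), as laid out by the HEADER_*/PACKET_* constants in visualeyez.h and packed the same way by the Matlab script removed above and by visualeyez_bridge further down in this diff. The sketch below is illustrative only; the helper name and container choice are not part of the original sources.

    // Illustrative packing of one Visualeyez UDP datagram: [stamp][tcm led x y z]...
    // Layout mirrors the deleted HEADER_*/PACKET_* constants (PACKET_LEN = 5).
    #include <array>
    #include <vector>

    std::vector<double> PackDatagram(double stamp,
                                     const std::vector<std::array<double, 5>>& markers) {
      std::vector<double> buf;
      buf.push_back(stamp);                         // HEADER_DATE
      for (const auto& m : markers)                 // tcm, led, x, y, z
        buf.insert(buf.end(), m.begin(), m.end());
      return buf;                                   // (1 + 5*n) doubles total
    }
    // For n markers the datagram is (1 + 5*n) * 8 bytes, so the bridge's check
    // size % 40 == 8 validates a packet and n = size / 40 recovers the marker count.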
The company provides Matlab libraries for collecting this data (see ```matlab/vz```. We have written a small Matlab script, called ```astrobee_ros``` to collect and stream this data over UDP to a remote server. In our system configuration the Visualeyez tracker, VZSoft and Matlab script run on a PC called *memric*, and the data is streamed to another PC called *spheresgoat*. - -Clearly, the server (*spheresgoat*) must also be able to interpret these data packets. For the sake of simplicity we have written a ROS node called ```visualeyez_bridge```, which listens for the UDP packets and forwards the data as ROS messages. This makes it much easier to interpret and plot the data. - -Knowing the absolute position of the markers is only the first part of the problem. For the tracking system to be truly useful, we need to be able to transform the marker positions from the *visualeyez* reference frame to the *world* reference frame. The *grounding LEDs* and their world coordinates -- measured accurately by a theodolite -- are provided in the ```visualeyez.config``` LUA config file, along with the declaration of *target LED* groupings. - -The excerpt below from the LUA config file instructs the ```visualeyez_server``` that LEDS 1-4 on TCM1 are rigidly attached to the environment, and their world coordinates are known. One rigid body is declared with name `honey`, which is comprised of 24 LEDs connected to TCM2. The coordinate for each LED is an initial estimate of its position in the world frame when the body and world frames are aligned (origins and alignment equal). The actual positions are solved for through a calibration procedure, which will be explained later. - - grounding = { - { tcm=1, led=1, x=0.9140106314862838, y=-3.219100563007645, z=-0.6207210724524649 }, - { tcm=1, led=2, x=0.1835635587737848, y=-3.219100563007645, z=-0.2319006270806013 }, - { tcm=1, led=3, x=1.043904870248541, y=-3.219100563007645, z= 0.6628505519526655 }, - { tcm=1, led=4, x=1.110810070079649, y=-2.050167430479354, z=-0.2681018912672751 } - }; - - targets = { - { - name = "honey", - markers = { - { tcm=2, led=1, x=0.15282164, y=0.1529461, z=-0.12371832 }, - { tcm=2, led=2, x=0.15282164, y=0.1529461, z=-0.10805668 }, - { tcm=2, led=3, x=0.15282164, y=0.1393825, z=-0.117475 }, - { tcm=2, led=4, x=0.15282164, y=0.1529461, z=0.10805668 }, - { tcm=2, led=5, x=0.15282164, y=0.1529461, z=0.12371832 }, - { tcm=2, led=6, x=0.15282164, y=0.1393825, z=0.117475 }, - { tcm=2, led=7, x=0.15282164, y=-0.1529461, z=-0.12371832 }, - { tcm=2, led=8, x=0.15282164, y=-0.1529461, z=-0.10805668 }, - { tcm=2, led=9, x=0.15282164, y=-0.1393825, z=-0.117475 }, - { tcm=2, led=10, x=0.15282164, y=-0.1529461, z=0.10805668 }, - { tcm=2, led=11, x=0.15282164, y=-0.1529461, z=0.12371832 }, - { tcm=2, led=12, x=0.15282164, y=-0.1393825, z=0.117475 }, - { tcm=2, led=13, x=-0.15282164, y=0.1529461, z=-0.12371832 }, - { tcm=2, led=14, x=-0.15282164, y=0.1529461, z=-0.10805668 }, - { tcm=2, led=15, x=-0.15282164, y=0.1393825, z=-0.117475 }, - { tcm=2, led=16, x=-0.15282164, y=0.1529461, z=0.10805668 }, - { tcm=2, led=17, x=-0.15282164, y=0.1529461, z=0.12371832 }, - { tcm=2, led=18, x=-0.15282164, y=0.1393825, z=0.117475 }, - { tcm=2, led=19, x=-0.15282164, y=-0.1529461, z=-0.12371832 }, - { tcm=2, led=20, x=-0.15282164, y=-0.1529461, z=-0.10805668 }, - { tcm=2, led=21, x=-0.15282164, y=-0.1393825, z=-0.117475 }, - { tcm=2, led=22, x=-0.15282164, y=-0.1529461, z=0.10805668 }, - { tcm=2, led=23, x=-0.15282164, y=-0.1529461, z=0.12371832 }, - { tcm=2, 
led=24, x=-0.15282164, y=-0.1393825, z=0.117475 } - } - } - } - -Once the calibration procedure has been carried out the results are saved to a file called ```calibration.bin```. This file is simply a serialized ROS message containing the best estimate of the world position of each of the targets LEDs when the target's body frame and the world frame are aligned. - -## Operating guidelines - -### Configuring memric - -Firstly, log onto *memric* and make sure you have copied the contents of the the ```./tools/visualeyez/matlab``` folder in FSW to the Desktop. You will also need to obtain the the tarballs ```vz.tar.gz``` an ```tcp_udp_ip.tar.gz``` from ```volar.ndc.nasa.gov``` before running the script: - - /home/p-free-flyer/free-flyer/FSW/files/vz.tar.gz - /home/p-free-flyer/free-flyer/FSW/files/tcp_udp_ip.tar.gz` - -Secondly, turn on the Visualeyez tracker and that it is plugged into the serial port on *memric*. An orange light indicates that it is powered on. - -Thirdly, power all visualeyez TCMs by plugging in the white 14.8v lithium ion batteries and checking the small black switch is in the on position. A single green LED indicates that the TCM is operating correctly. Always make sure you unplug the batteries when you are finished, or when the TCM buzzers sound. This is because the TCMs continue to drain the battery even if their switch is turned off. If the battery falls below a 8v its cells become unstable. - -Now, run the Visualeyez VZSoft 2 software: ```C:\Program Files (x86)\Phoneix Technologies\VZ Soft\Vzsoftv2.exe``` - -When the GUI launches it should look something like the screen shot below. The first thing you'll want to is enable all TCMs and LEDs specified in your LUA file. In the example configuration given in the previous section you'll want TCM1 LED1-4 and TCM2 LED1-24 enabled. - -![alt text](../images/tools/vz_leds.png "") - -Verify that your TCMs are set to wireless (tetherless) operation: - -![alt text](../images/tools/vz_tcm.png "") - -Then, verify that you are listening for data ```COM8```. - -![alt text](../images/tools/vz_source.png "") - -Then, check that your operating options match those given below: - -![alt text](../images/tools/vz_operating.png "") - -Then, check that your sample timings are correct. This is probably the trickiest part of setting up the system to track correctly. One important thing to note before we proceed is that the LEDs can be damaged if you configure their sample period below 2.5ms. - -![alt text](../images/tools/vz_sampling.png "") - -Check that your exposure settings match those given below: - -![alt text](../images/tools/vz_exposure.png "") - -Check that your capture process settings match those given below: - -![alt text](../images/tools/vz_capture.png "") - -Finally, check that your stream settings match those given below: - -![alt text](../images/tools/vz_stream.png "") - -You are now ready to track! If you click the green circular button at the bottom of the GUI you should start to see red dots appear on the two sets of axes. Note that you can use ctrl plus the mouse button to zoom. - -![alt text](../images/tools/vz_track.png "") - -The final step it to launch Matlab R2014b 32-bit and run the astrobee_ros script in the folder you copied to **memric**. - -![alt text](../images/tools/matlab.png "") - -### Configuring spheresgoat - -Follow the instruction to checkout, build and source flight software on *spheresgoat*. 
Then launch the ```visualeyez_bridge``` and ```visualeyez_server``` on spheresgoat using the following command: - - roslaunch visualeyez visualeyez.launch rviz:=true - -The system starts in tracking mode by default. A snapshot of the tracking test is shown below The green dots represent static grounding LED positions in the world frame. The red dots are the target LEDs, which move as a rigid group. The set of axes in the center of the red LEDs shows the pose of the body frame with respect to the world frame. If calibration has not been carried out, you will notice that the estimate jumps frantically. - -![alt text](../images/tools/tracking.png "The tracking GUI") - -### Running the visualeyez ROS tools on another machine (optional) - -The NASA firewall prevents **memric** UDP packets from being forwarded to any laptop on the wireless. We'll therefore have to setup an SSH tunnel over which to forward the UDP packets. First, you'll need to set up your laptop to always use **m** as a proxy for **spheresgoat**. You should have this in your ***~/.ssh/config*** directory: - - Host m - Hostname m.ndc.nasa.gov - - Host spheresgoat - ProxyCommand ssh -q m nc spheresgoat.ndc.nasa.gov 22 - -Then, in one terminal set up an SSH tunnel to forward TCP packets to and from port 11000 on spheresgoat to port 11000 on your local machine. This command also starts ***socat** to receive UDP packets on **spheresgoat** port 9090 and forward them to TCP port 11000. Note that this command must be started BEFORE you launch ROS otherwise you will have the connection refused. - - ssh -L 11000:spheresgoat:11000 spheresgoat "socat -u udp-recvfrom:9090 tcp-listen:11000,fork" - -Then, in another terminal window on your local machine type: - - socat -u tcp:localhost:11000,fork udp-sendto:localhost:9090 - -This will pull TCP packets off the SSH tunnel and repackage them as UDP packets. If visualeyez is running, you should see lots of binary output when you type this command on your local machine: - - nc -ul 9090 - -If you are having issues with ports I find it extremely useful to be able to to check which ports are currently active: - - netstat -plun - netstat -pltn - -### Calibration procedure - -The calibration procedure makes two key assumptions: - -1. The center of rotation (gimbal origin) is the same as the body frame origin. -2. The first measurement taken corresponds to a roll, pitch and yaw of zero. - -You will first need to follow the instructions in the previous two sections to start pushing data from *memric* and consuming it on *spheresgoat*. - -It is critical that you now ensure the freeflyer has exactly zero roll, zero pitch and zero yaw before starting the calibration procedure. You can piggy-back of the gantry zeroing procedure, or perform a manual adjustment. - -Once the freeflyer is aligned you can use the ```visualeyez_tool``` to switch ```visualeyez_server``` to calibration mode. - - rosrun visualeyez visualeyez_tool -n honey -w calibration.bin -s 100 - -This command instructs the system to calibrate the markers for target "honey" and write the results to ```calibration.bin```. The `-s 100` argument instructs the calibration algorithm to run only when at least 100 sample measurements are received for each marker. If things go well you should see the following output on the command line. - - Recording: - Calibrating... - 0065 0000 0119 0040 0114 0066 0312 0161 0114 0180 0225 0191 0197 0172 0240 0145 0173 0355 0394 0195 0164 - -This output tells us that we have seen LED1 65 times, LED2 zero times, etc. 
It will keep updating dynamically with time. Your GUI will switch to blue markers to show they are measurements. Note that only the subset of markers currently in view will be plotted on the GUI. - -![alt text](../images/tools/calibration.png "The calibration GUI") - -At this point you will now need to rotate the gimbal slowly around all three degrees of freedom, so that each LED comes in view for at least 100 samples. When this threshold is met, you will see the following output. - - Saving... - Done! - -At this point the system will automatically switch back to tracking mode, using the reference marker positions solved for in the calibration procedure. If all went well the red dots should move smoothly in response to orientation changes. - -## Performance evaluation - -### Experimental design - -As with all localization systems, in order to measure the performance of the Visualeyez tracking system one must compare it against a "ground truth" produced by another tracking system with a stated accuracy at least one order of magnitude higher than the predicted accuracy of the system under test. - -To measure the performance of the Visualeyez system we will use a Leica TotalStation TPS1200 theodolite to track a 360 degree prism mounted rigidly on the underside of the freeflyer, while in motion. This robotic theodolite is able to track the center of the prism at 8Hz with at most 3mm error. - -Refer to the image below. The prism is mounted in the area underneath the freeflyer in place of a payload. A 25th LED is placed underneath the prism by a known distance *h*. This enables us to estimate the body-frame position of the prism center by simply subtracting *h* from the LED25 calibrated position. If we transform this coordinate to the world frame using the TF2 transform, we can compare it directly to the output data from the total station. - -![alt text](../images/tools/gantry.png "The gantry") - -The first step is to place the theodolite at a fixed position within view of all grounding markers, but sufficiently far from the freeflyer to avoid sharp freeflyer accelerations from affecting its ability to track the prism. The markers are then surveyed, and used jointly to set the grounding coordinates, but also to build the localization map (see \ref localization). - -The Visualeyez LEDs are then fixed to the grounding targets, and calibration is performed to learn the reference positions of the target markers (see the previous section). Once this is complete we can use the visualeyez server to track the pose of the freeflyer in the world frame. - -The theodolite is then configured to autonomously track and record the position of the prism at 8Hz as the freeflyer is moved by the gantry. We simultaneously record the TF2 transform produced by our tracking system. - -After the experiment completes we use the recorded TF2 transforms to estimate the trajectory of the prism, which is simply POS(LED25) - (0,0,h) transformed from the body frame to the world frame using the TF2 transforms. We use cross-correlation to determine the time offset between the theodolite (ground truth) and the Visualeyez data (system under test), and compare error statistics over the Leica time indices. 
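A compact sketch of the trajectory comparison described above: the prism center in the body frame is the calibrated LED25 position minus (0, 0, h), which is then mapped into the world frame with the recorded TF2 transform and compared against the theodolite track. Assuming the transform has been converted to an Eigen isometry (the function and argument names below are illustrative):

    // Sketch: world-frame prism position from the calibrated LED25 body-frame
    // position, the prism offset h, and a recorded world<-body transform.
    #include <Eigen/Geometry>

    Eigen::Vector3d PrismInWorld(const Eigen::Isometry3d& T_world_body,  // from the recorded TF2 data
                                 const Eigen::Vector3d& led25_body,      // calibrated LED25 position
                                 double h) {                             // prism-to-LED25 offset
      const Eigen::Vector3d prism_body = led25_body - Eigen::Vector3d(0.0, 0.0, h);
      return T_world_body * prism_body;  // compare this trajectory against the theodolite output
    }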
\ No newline at end of file diff --git a/tools/visualeyez/src/visualeyez.cc b/tools/visualeyez/src/visualeyez.cc deleted file mode 100644 index 3eb254c8e9..0000000000 --- a/tools/visualeyez/src/visualeyez.cc +++ /dev/null @@ -1,219 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -// ROS includes -#include - -// Eigen C++ includes -#include -#include - -/** - * \ingroup tools - */ -namespace visualeyez { - - // Helper function to see if an LED value is valid. - bool VisualeyezUtils::IsValidMarkerValue(geometry_msgs::Vector3 const& pt) { - if (pt.x == 0.0 || pt.y == 0.0 || pt.z == 0.0) - return false; - return true; - } - - // Helper function for grabbing marker information from a LUA config file - VZMarkers VisualeyezUtils::GetMarkers(config_reader::ConfigReader::Table *table) { - VZMarkers markers; - for (int i = 0; i < table->GetSize(); i++) { - // Grab the row in the table - config_reader::ConfigReader::Table group; - if (!table->GetTable(i + 1, &group)) { - ROS_ERROR_STREAM("Could not read parameter table row" << i + 1); - continue; - } - // Extract values from the table - int ledid, tcmid; - double x, y, z; - if (!group.GetInt("tcm", &tcmid)) { - ROS_ERROR_STREAM("Could not read parameter: tcm"); - continue; - } - if (!group.GetInt("led", &ledid)) { - ROS_ERROR_STREAM("Could not read parameter: led"); - continue; - } - if (!group.GetReal("x", &x)) { - ROS_ERROR_STREAM("Could not read parameter: x"); - continue; - } - if (!group.GetReal("y", &y)) { - ROS_ERROR_STREAM("Could not read parameter: y"); - continue; - } - if (!group.GetReal("z", &z)) { - ROS_ERROR_STREAM("Could not read parameter: z"); - continue; - } - // Generate a map index from the tcm and led id number - VZIndex idx(tcmid, ledid); - // If all parameters were grabbed, add the row - markers[idx](0) = x; - markers[idx](1) = y; - markers[idx](2) = z; - } - return markers; - } - - // Write calibration data to a binary file for a given set of targets - bool VisualeyezUtils::WriteConfig(std::string file_name, VZTargetMarkers & targets) { - // Convert the incoming targets to a calibration message - ff_msgs::VisualeyezCalibration msg; - for (VZTargetMarkers::iterator it = targets.begin(); it != targets.end(); it++) { - ff_msgs::VisualeyezDataArray target; - target.header.stamp = ros::Time::now(); - target.header.frame_id = it->first; - for (VZMarkers::iterator jt = it->second.begin(); jt != it->second.end(); jt++) { - ff_msgs::VisualeyezData measurement; - measurement.tcmid = jt->first.first; // TCMID - measurement.ledid = jt->first.second; // LEDID - measurement.position.x = jt->second(0); // X POS - measurement.position.y = jt->second(1); // Y POS - measurement.position.z = jt->second(2); // Z POS - target.measurements.push_back(measurement); - } - msg.targets.push_back(target); - } - // Serialize the 
calibration data to file - std::ofstream ofs(file_name, std::ios::out | std::ios::binary); - if (!ofs.is_open()) { - ROS_INFO_STREAM("Cannot write to file " << file_name); - return false; - } - uint32_t serial_size = ros::serialization::serializationLength(msg); - boost::shared_array obuffer(new uint8_t[serial_size]); - ros::serialization::OStream ostream(obuffer.get(), serial_size); - ros::serialization::serialize(ostream, msg); - ofs.write(reinterpret_cast(obuffer.get()), serial_size); - ofs.close(); - // Success! - return true; - } - - // Read calibration data from a binary file for a given set of targets - bool VisualeyezUtils::ReadConfig(std::string file_name, VZTargetMarkers & targets) { - // Convert the calibration file into a calibration message - ff_msgs::VisualeyezCalibration msg; - std::ifstream ifs(file_name, std::ios::in | std::ios::binary); - if (!ifs.good()) { - ROS_INFO_STREAM("Cannot read from file " << file_name); - return false; - } - ifs.seekg(0, std::ios::end); - std::streampos end = ifs.tellg(); - ifs.seekg(0, std::ios::beg); - std::streampos begin = ifs.tellg(); - uint32_t file_size = end - begin; - boost::shared_array ibuffer(new uint8_t[file_size]); - ifs.read(reinterpret_cast(ibuffer.get()), file_size); - ros::serialization::IStream istream(ibuffer.get(), file_size); - ros::serialization::deserialize(istream, msg); - ifs.close(); - // Push all the data into a temporary data structure - VZTargetMarkers tmp; - for (size_t i = 0; i < msg.targets.size(); i++) { - std::string name = msg.targets[i].header.frame_id; - for (size_t j = 0; j < msg.targets[i].measurements.size(); j++) { - VZIndex index(msg.targets[i].measurements[j].tcmid, // TCMID - msg.targets[i].measurements[j].ledid); // LEDID - tmp[name][index](0) = msg.targets[i].measurements[j].position.x; // X POS - tmp[name][index](1) = msg.targets[i].measurements[j].position.y; // Y POS - tmp[name][index](2) = msg.targets[i].measurements[j].position.z; // Z POS - } - } - // Now check that we have a record for each input - for (VZTargetMarkers::iterator it = targets.begin(); it != targets.end(); it++) { - if (tmp.find(it->first) == tmp.end()) { - ROS_WARN_STREAM("Cannot locate target " << it->first); - return false; - } - targets[it->first] = tmp[it->first]; - } - // Success! 
- return true; - } - - VisualeyezClient::VisualeyezClient() : socket_(io_service_) { - // Extract the port from the config file - std::string host; - unsigned int port; - config_reader::ConfigReader config_params; - config_params.AddFile("tools/visualeyez.config"); - if (!config_params.ReadFiles()) - ROS_FATAL("Couldn't read config file"); - if (!config_params.GetUInt("client_port", &port)) - ROS_FATAL("Server port not specified in config file."); - if (!config_params.GetStr("client_host", &host)) - ROS_FATAL("Server address not specified in config file."); - // Open the socket - socket_ = udp::socket(io_service_, udp::endpoint(udp::v4(), 0)); - // Bind the socket to a specific endpoint - udp::resolver resolver(io_service_); - udp::resolver::query query( - udp::v4(), host, std::to_string(port)); - udp::resolver::iterator iterator = resolver.resolve(query); - // Use the first remote endpoint ad the one we want - remote_endpoint_ = *iterator; - // Start a listen thread - thread_ = boost::thread(boost::bind(&boost::asio::io_service::run, &io_service_)); - } - - // Destructor - VisualeyezClient::~VisualeyezClient() {} - - // Sends a single message - void VisualeyezClient::Send(ff_msgs::VisualeyezDataArray const& msg) { - // Avoid a buffer overrun - if (msg.measurements.size() > MAX_NUM_PACKETS || msg.measurements.empty()) { - ROS_WARN("Cannot process more than MAX_NUM_PACKETS at once"); - return; - } - // Prepare the header - tx_buffer_[HEADER_DATE] = msg.header.stamp.toSec(); - // Add the data - for (size_t i = 0; i < msg.measurements.size(); i++) { - tx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_TCMID] = msg.measurements[i].tcmid; - tx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_LEDID] = msg.measurements[i].ledid; - tx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_X] = msg.measurements[i].position.x; - tx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_Y] = msg.measurements[i].position.y; - tx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_Z] = msg.measurements[i].position.z; - } - // Calculate the final length - size_t len = HEADER_LEN + msg.measurements.size() * PACKET_LEN; - // Send the message - socket_.async_send_to(boost::asio::buffer(tx_buffer_, sizeof(double) * len), remote_endpoint_, - boost::bind(&VisualeyezClient::SendCallback, this, - boost::asio::placeholders::error, - boost::asio::placeholders::bytes_transferred)); - } - - // Called when messahe is sent - void VisualeyezClient::SendCallback(const boost::system::error_code& error, - std::size_t bytes_transferred) { - /* Do nothing */ - } - -} // namespace visualeyez diff --git a/tools/visualeyez/src/visualeyez_bridge.cc b/tools/visualeyez/src/visualeyez_bridge.cc deleted file mode 100644 index ecb3e2e6ab..0000000000 --- a/tools/visualeyez/src/visualeyez_bridge.cc +++ /dev/null @@ -1,137 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef VISUALEYEZ_VISUALEYEZ_BRIDGE_H_ -#define VISUALEYEZ_VISUALEYEZ_BRIDGE_H_ - -// ROS includes -#include -#include -#include - -// Boost includes -#include -#include -#include -#include - -// FSW includes -#include - -// FSW messages -#include -#include - -// Visualeyez includes -#include - -// STL includes -#include - -using udp = boost::asio::ip::udp; - -/** - * \ingroup tools - */ -namespace visualeyez { - -//! A class for receiving UDP visualeyez measurements -/*! - To be written -*/ -class VisualeyezBridge : public nodelet::Nodelet { - public: - // Constructor - VisualeyezBridge() : socket_(io_service_) { - // Extract the port from the config file - std::string addr; unsigned int port; - config_reader::ConfigReader config_params; - config_params.AddFile("tools/visualeyez.config"); - if (!config_params.ReadFiles()) - ROS_FATAL("Couldn't read config file"); - if (!config_params.GetUInt("server_port", &port)) - ROS_FATAL("Server port not specified in config file."); - if (!config_params.GetStr("server_addr", &addr)) - ROS_FATAL("Server address not specified in config file."); - // Open the socket - socket_ = udp::socket(io_service_, udp::endpoint(udp::v4(), port)); - // Start listening - Listen(); - } - - // Destructor - ~VisualeyezBridge() {} - - // Start listening for incoming packets - void Listen() { - socket_.async_receive_from( - boost::asio::buffer(rx_buffer_, MAX_BUFFER_SIZE), rx_endpoint_, - boost::bind(&VisualeyezBridge::Receive, this, - boost::asio::placeholders::error, - boost::asio::placeholders::bytes_transferred)); - } - - protected: - // Called when ROS is loaded and ready - void onInit() { - // Setup the publisher before be start the ASIO service. - pub_ = getNodeHandle().advertise( - TOPIC_VISUALEYEZ_DATA, 1); - // Now that the publisher is ready we can start the ASIO run service. - thread_ = boost::thread(boost::bind(&boost::asio::io_service::run, &io_service_)); - } - - // Presumably the messages arrive in checksummed packets, and these packets get reassembled in order. Since - // we know each LED measurement has a specific size, we can determine the size of the measurement matrix by - // just looking at the size of the packet - void Receive(const boost::system::error_code& error, std::size_t size) { - if (!error && size % (PACKET_LEN * sizeof(double)) == (HEADER_LEN * sizeof(double))) { - size_t n = size / (PACKET_LEN * sizeof(double)); - msg_.header.stamp = ros::Time(rx_buffer_[HEADER_DATE]); - msg_.measurements.resize(n); - for (size_t i = 0; i < n; i++) { - msg_.measurements[i].tcmid = static_cast(rx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_TCMID]); - msg_.measurements[i].ledid = static_cast(rx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_LEDID]); - msg_.measurements[i].position.x = rx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_X]; - msg_.measurements[i].position.y = rx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_Y]; - msg_.measurements[i].position.z = rx_buffer_[HEADER_LEN+i*PACKET_LEN+PACKET_POS_Z]; - } - pub_.publish(msg_); - } else { - ROS_WARN_STREAM("Malformed packet of " << size << " bytes"); - } - // Restart listening - Listen(); - } - - private: - boost::thread thread_; /*!< Service thread. */ - boost::asio::io_service io_service_; /*!< ASIO service. */ - udp::socket socket_; /*!< Network socket. */ - udp::endpoint rx_endpoint_; /*!< Remote endpoint. 
*/ - boost::array rx_buffer_; /*!< Receive buffer */ - ros::Publisher pub_; /*!< ROS publisher */ - ff_msgs::VisualeyezDataArray msg_; /*!< ROS message */ -}; - -// Register the nodelet with the system -PLUGINLIB_EXPORT_CLASS(visualeyez::VisualeyezBridge, nodelet::Nodelet); - -} // namespace visualeyez - -#endif // VISUALEYEZ_VISUALEYEZ_BRIDGE_H_ diff --git a/tools/visualeyez/src/visualeyez_server.cc b/tools/visualeyez/src/visualeyez_server.cc deleted file mode 100644 index 5c761d7615..0000000000 --- a/tools/visualeyez/src/visualeyez_server.cc +++ /dev/null @@ -1,626 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -#ifndef VISUALEYEZ_VISUALEYEZ_SERVER_H_ -#define VISUALEYEZ_VISUALEYEZ_SERVER_H_ - -// ROS includes -#include -#include -#include -#include - -// To visualize in RVIZ -#include -#include - -// Ceres and logging -#include -#include - -// Google logging -#include - -// FSW utils -#include - -// FSW messages -#include -#include - -// Visualeyez implementation -#include - -// C math -#include - -// C++ includes -#include - -using ceres::CostFunction; -using ceres::DynamicAutoDiffCostFunction; -using ceres::Problem; -using ceres::Solver; -using ceres::Solve; - -namespace visualeyez { - -// Marker -class Marker { - public: - Marker() { - pos[0] = 0; - pos[1] = 0; - pos[2] = 0; - } - explicit Marker(Eigen::Vector3d const& _pos) { - pos[0] = _pos(0); - pos[1] = _pos(1); - pos[2] = _pos(2); - } - explicit Marker(double x, double y, double z) { - pos[0] = x; - pos[1] = y; - pos[2] = z; - } - ~Marker() {} - double pos[3]; -}; -typedef std::map < VZIndex, Marker > MarkerMap; -typedef std::vector < Marker > ObservationVec; - -// This is an error representing LED projections -class ProjectionError { - public: - // Constructor for this error function - explicit ProjectionError(ObservationVec const& obs) : obs_(obs) {} - - // Called every solver iteration state = [t:3, R:3, X1:3 ... 
Xn:3] - template < typename T > - bool operator()(T const * const * states, T* res) const { - // Transform the measurements - T x_rot[3]; - for (size_t i = 0; i < obs_.size(); i++) { - ceres::AngleAxisRotatePoint(states[1], states[2+i], x_rot); - res[3*i+0] = (x_rot[0] + states[0][0] - obs_[i].pos[0]); - res[3*i+1] = (x_rot[1] + states[0][1] - obs_[i].pos[1]); - res[3*i+2] = (x_rot[2] + states[0][2] - obs_[i].pos[2]); - } - return true; - } - - // Add a residual block for the given observation - static ceres::CostFunction* Create(ObservationVec const& obs) { - ceres::DynamicAutoDiffCostFunction *cost_func = - new ceres::DynamicAutoDiffCostFunction (new ProjectionError(obs)); - cost_func->AddParameterBlock(3); // Translation - cost_func->AddParameterBlock(3); // Rotation - for (size_t i = 0; i < obs.size(); i++) - cost_func->AddParameterBlock(3); // Estimated position of marker obs(i) - cost_func->SetNumResiduals(3 * obs.size()); // Residual error for the observations - return static_cast (cost_func); - } - - private: - ObservationVec obs_; -}; - -// Possible states the system can be in -enum VisualeyezServerState { - GROUNDING, // Occurs once before - TRACKING, // Tracking in progress - RECORDING, // Recording in progress - CALIBRATING, // Calibration in progress -}; - -class VisualeyezServer : public nodelet::Nodelet { - public: - VisualeyezServer() : state_(GROUNDING) { - // Initialize Google Logging - google::InitGoogleLogging("visualeyez_calibrate"); - // Read the config file to get the targets - config_reader::ConfigReader config_params; - config_params.AddFile("tools/visualeyez.config"); - if (!config_params.ReadFiles()) - ROS_FATAL("Couldn't read config file"); - // Publish a TF2 message with the truth of the robot - if (!config_params.GetBool("pub_tf", &pub_tf_)) - ROS_FATAL("Could not get the put_tf parameters"); - // Groundings - config_reader::ConfigReader::Table table; - if (!config_params.GetTable("grounding", &table)) - ROS_FATAL("Could not get the grounding parameters"); - grounding_ = VisualeyezUtils::GetMarkers(&table); - if (grounding_.empty()) - ROS_FATAL("Could not read the grounding data"); - // Read the calibration file - std::string cfgfile; - if (!config_params.GetStr("calibration_file", &cfgfile)) - ROS_FATAL("Could not read the calibration file"); - // Read the target information - if (!config_params.GetTable("targets", &table)) - ROS_FATAL("Could not get the targets parameters"); - for (int i = 0; i < table.GetSize(); i++) { - config_reader::ConfigReader::Table group, mtable; - if (!table.GetTable(i + 1, &group)) - ROS_FATAL("Could not read parameter table row"); - std::string name; - if (!group.GetStr("name", &name)) - ROS_FATAL("Could not read parameter: name"); - if (!group.GetTable("markers", &mtable)) - ROS_FATAL("Could not read parameter: markers"); - targets_[name] = VisualeyezUtils::GetMarkers(&mtable); - if (targets_[name].empty()) - ROS_FATAL("Could not get parse target matches"); - } - // Read the calibration for the given set of targets - if (!VisualeyezUtils::ReadConfig(cfgfile, targets_)) - ROS_INFO("No calibration file"); - } - ~VisualeyezServer() {} - - protected: - // Called when nodelet boots - void onInit() { - sub_ = getNodeHandle().subscribe(TOPIC_VISUALEYEZ_DATA, 10, - &VisualeyezServer::DataCallback, this); - pub_feedback_ = getNodeHandle().advertise < ff_msgs::VisualeyezFeedbackArray > ( - TOPIC_VISUALEYEZ_CALIBRATE_FEEDBACK, 1, false); - pub_grounding_ = getNodeHandle().advertise < visualization_msgs::MarkerArray > ( - 
TOPIC_VISUALEYEZ_RVIZ_GROUNDING, 1, true); - pub_markers_ = getNodeHandle().advertise < visualization_msgs::MarkerArray > ( - TOPIC_VISUALEYEZ_RVIZ_MARKERS, 1, false); - // Add a publisher for every target - for (VZTargetMarkers::iterator it = targets_.begin(); it != targets_.end(); it++) { - pub_truth_[it->first] = getNodeHandle().advertise < geometry_msgs::PoseStamped > ( - std::string("/") + it->first + std::string("/") + TOPIC_LOCALIZATION_TRUTH, 1, false); - } - } - - // Simple callback to store a data sample - void DataCallback(const ff_msgs::VisualeyezDataArray::ConstPtr& msg) { - // If we are busy calibrating, then ignore the message - if (state_ == CALIBRATING) - return; - - ///////////////////////////////////////////// - // Separate into ground and target markers // - ///////////////////////////////////////////// - - // Collect the grounding and target matches - VZMatches grounding_matches; - VZTargetMatches target_matches; - // Iterate over the number of rows of raw data - VZMarkers measurement; - for (size_t i = 0; i < msg->measurements.size(); i++) { - // We are only interested in valid measurements - if (!VisualeyezUtils::IsValidMarkerValue(msg->measurements[i].position)) - continue; - // Get the VZIndex for this measurement - VZIndex index(msg->measurements[i].tcmid, msg->measurements[i].ledid); - // Now, set the value of the point - measurement[index](0) = msg->measurements[i].position.x; - measurement[index](1) = msg->measurements[i].position.y; - measurement[index](2) = msg->measurements[i].position.z; - // Record all calibration match indices - if (grounding_.find(index) != grounding_.end()) - grounding_matches.push_back(index); - // Record all target match indices - for (VZTargetMarkers::iterator it = targets_.begin(); it != targets_.end(); it++) { - if (it->second.find(index) != it->second.end()) - target_matches[it->first].push_back(index); - } - } - - //////////////////// - // Grounding mode // - //////////////////// - - if (state_ == GROUNDING) { - if (grounding_matches.size() < 3) - return; - // Preallocate correspondence matrices - Eigen::Matrix3Xd ptsI(3, grounding_matches.size()); - Eigen::Matrix3Xd ptsO(3, grounding_matches.size()); - // Extract the correct measurements from the raw data - for (size_t i = 0; i < grounding_matches.size(); i++) { - ptsI.block<3, 1>(0, i) = measurement[grounding_matches[i]]; // Measured position - ptsO.block<3, 1>(0, i) = grounding_[grounding_matches[i]]; // Truthful position - } - // Find the transform that explains the observation - if (VisualeyezUtils::Kabsch (ptsI, ptsO, transform_, true)) { - // Shift to tracking state - state_ = TRACKING; - // Only allow configuration now - srv_ = getNodeHandle().advertiseService(SERVICE_VISUALEYEZ_CALIBRATE_CONFIG, - &VisualeyezServer::ConfigureCallback, this); - // Publish the grounding to RVIZ - visualization_msgs::MarkerArray marker_array; - for (size_t i = 0; i < grounding_matches.size(); i++) { - visualization_msgs::Marker marker; - marker.header.frame_id = "world"; - marker.header.stamp = ros::Time::now(); - marker.ns = "visualeyez_grounding"; - marker.id = i; - marker.type = visualization_msgs::Marker::SPHERE; - marker.action = visualization_msgs::Marker::ADD; - marker.pose.position.x = ptsO(0, i); - marker.pose.position.y = ptsO(1, i); - marker.pose.position.z = ptsO(2, i); - marker.pose.orientation.x = 0.0; - marker.pose.orientation.y = 0.0; - marker.pose.orientation.z = 0.0; - marker.pose.orientation.w = 1.0; - marker.scale.x = 0.1; - marker.scale.y = 0.1; - marker.scale.z = 0.1; 
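// Editorial aside (illustrative sketch, not project code): the grounding step above
// aligns the Visualeyez sensor frame to the world frame by solving the orthogonal
// Procrustes problem over matched point sets. The deleted code calls
// VisualeyezUtils::Kabsch for this; Eigen's built-in Eigen::umeyama solves the same
// closed-form problem and is used here only to show the idea. The function name and
// signature are placeholders, not the project's API.
#include <Eigen/Dense>

Eigen::Affine3d AlignToWorld(const Eigen::Matrix3Xd& measured,   // points in sensor frame
                             const Eigen::Matrix3Xd& world,      // matched points in world frame
                             bool allow_scale) {
  // umeyama returns the 4x4 homogeneous transform T minimizing ||world - T * measured||.
  Eigen::Matrix4d T = Eigen::umeyama(measured, world, allow_scale);
  Eigen::Affine3d transform;
  transform.matrix() = T;
  return transform;
}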
- marker.color.r = 0.0f; - marker.color.g = 1.0f; - marker.color.b = 0.0f; - marker.color.a = 1.0; - marker.lifetime = ros::Duration(); - marker_array.markers.push_back(marker); - } - pub_grounding_.publish(marker_array); - } - } - - /////////////////// - // Tracking mode // - /////////////////// - - if (state_ == TRACKING) { - // Loop over only the targets having markers that were actually in view - for (VZTargetMatches::iterator it = target_matches.begin(); it != target_matches.end(); it++) { - if (it->second.size() < 3) - continue; - // Get correspondence matrices - Eigen::Matrix3Xd ptsI(3, it->second.size()); - Eigen::Matrix3Xd ptsO(3, it->second.size()); - for (size_t i = 0; i < it->second.size(); i++) { - ptsI.block<3, 1>(0, i) = targets_[it->first][it->second[i]]; // WORLD REFERENCE - ptsO.block<3, 1>(0, i) = transform_ * measurement[it->second[i]]; // VISUALEYEZ -> WORLD - } - // Execute the Kabsch algorithm on the measurements/truths to find a transform, - // from WORLD TO BODY, disallowing the scale component of the transform. - Eigen::Affine3d transform; - if (VisualeyezUtils::Kabsch (ptsI, ptsO, transform, false)) { - // Extract the translation and rotation components - Eigen::Vector3d translation(transform.translation()); - Eigen::Quaterniond rotation(transform.rotation()); - // Publish a ground truth for this robot - if (pub_tf_) { - static tf2_ros::TransformBroadcaster tf; - static geometry_msgs::TransformStamped msg_tf; - msg_tf.header.stamp = msg->header.stamp; - msg_tf.header.frame_id = "world"; - msg_tf.child_frame_id = it->first + "/truth"; - msg_tf.transform.translation.x = translation(0); - msg_tf.transform.translation.y = translation(1); - msg_tf.transform.translation.z = translation(2); - msg_tf.transform.rotation.w = rotation.w(); - msg_tf.transform.rotation.x = rotation.x(); - msg_tf.transform.rotation.y = rotation.y(); - msg_tf.transform.rotation.z = rotation.z(); - tf.sendTransform(msg_tf); - } - // Publish the pose on the robot truth topic - if (pub_truth_.find(it->first) != pub_truth_.end()) { - static geometry_msgs::PoseStamped msg_ps; - msg_ps.header.stamp = msg->header.stamp; - msg_ps.header.frame_id = "world"; - msg_ps.pose.position.x = translation(0); - msg_ps.pose.position.y = translation(1); - msg_ps.pose.position.z = translation(2); - msg_ps.pose.orientation.w = rotation.w(); - msg_ps.pose.orientation.x = rotation.x(); - msg_ps.pose.orientation.y = rotation.y(); - msg_ps.pose.orientation.z = rotation.z(); - pub_truth_[it->first].publish(msg_ps); - } else { - NODELET_WARN_STREAM("Found pose but no publisher for " << it->first); - } - // Publish the complete marker set to RVIZ in RED - size_t counter = 0; - Eigen::Matrix3Xd ptsA(3, targets_[it->first].size()); - for (VZMarkers::iterator jt = targets_[it->first].begin(); jt != targets_[it->first].end(); jt++) - ptsA.block<3, 1>(0, counter++) = jt->second; - ptsA = transform * ptsA; - visualization_msgs::MarkerArray marker_array; - for (size_t i = 0; i < targets_[it->first].size(); i++) { - visualization_msgs::Marker marker; - marker.header.frame_id = "world"; - marker.header.stamp = ros::Time::now(); - marker.ns = "visualeyez_" + it->first; - marker.id = i; - marker.type = visualization_msgs::Marker::SPHERE; - marker.action = visualization_msgs::Marker::ADD; - marker.pose.position.x = ptsA(0, i); - marker.pose.position.y = ptsA(1, i); - marker.pose.position.z = ptsA(2, i); - marker.pose.orientation.x = 0.0; - marker.pose.orientation.y = 0.0; - marker.pose.orientation.z = 0.0; - marker.pose.orientation.w = 
1.0; - marker.scale.x = 0.1; - marker.scale.y = 0.1; - marker.scale.z = 0.1; - marker.color.r = 1.0f; - marker.color.g = 0.0f; - marker.color.b = 0.0f; - marker.color.a = 1.0; - marker.lifetime = ros::Duration(); - marker_array.markers.push_back(marker); - } - pub_markers_.publish(marker_array); - } - } - } - - //////////////////// - // Recording mode // - //////////////////// - if (state_ == RECORDING) { - // Create an observation of the target from the full set of measurements - VZMarkers observation; - for (VZMatches::iterator it = target_matches[name_].begin(); it != target_matches[name_].end(); it++) - observation[*it] = transform_ * measurement[*it]; - // Only add the measurement if there are three or more correspondences - if (observation.size() < 3) - return; - measurements_.push_back(observation); - for (VZMarkers::iterator it = observation.begin(); it != observation.end(); it++) - count_[it->first]++; - - // Keep track of how many valid measurements were received so that the - // callee knows when to start calibration - ff_msgs::VisualeyezFeedbackArray msg; - msg.header.stamp = ros::Time::now(); - msg.header.frame_id = "visualeyez"; - for (VZMarkers::iterator it = targets_[name_].begin(); it != targets_[name_].end(); it++) { - ff_msgs::VisualeyezFeedback marker; - marker.tcmid = it->first.first; - marker.ledid = it->first.second; - if (count_.find(it->first) != count_.end()) - marker.count = count_[it->first]; - msg.feedback.push_back(marker); - } - pub_feedback_.publish(msg); - - // Publish markers to RVIZ - visualization_msgs::MarkerArray marker_array; - size_t counter = 0; - for (VZMarkers::iterator it = targets_[name_].begin(); it != targets_[name_].end(); it++) { - visualization_msgs::Marker marker; - marker.header.frame_id = "world"; - marker.header.stamp = ros::Time::now(); - marker.ns = "visualeyez_" + name_; - marker.id = counter++; - marker.type = visualization_msgs::Marker::SPHERE; - if (observation.find(it->first) == observation.end()) { - marker.action = visualization_msgs::Marker::DELETE; - } else { - marker.action = visualization_msgs::Marker::ADD; - marker.pose.position.x = observation[it->first](0); - marker.pose.position.y = observation[it->first](1); - marker.pose.position.z = observation[it->first](2); - } - marker.pose.orientation.x = 0.0; - marker.pose.orientation.y = 0.0; - marker.pose.orientation.z = 0.0; - marker.pose.orientation.w = 1.0; - marker.scale.x = 0.1; - marker.scale.y = 0.1; - marker.scale.z = 0.1; - marker.color.r = 0.0f; - marker.color.g = 0.0f; - marker.color.b = 1.0f; - marker.color.a = 1.0; - marker.lifetime = ros::Duration(); - marker_array.markers.push_back(marker); - } - pub_markers_.publish(marker_array); - } - } - - // Calibrate callback simply puts the system back in calibration mode - bool ConfigureCallback(ff_msgs::VisualeyezConfig::Request &req, ff_msgs::VisualeyezConfig::Response &res) { - // Special case: avoid configuring before gorounding - if (state_ == GROUNDING) { - ROS_WARN_STREAM("Configuration cannot be performed until grounding has completed"); - return false; - } - // Toggle the TF2 publisher if needed - pub_tf_ = req.pub_tf; - // Grounding has completed, so we can now support actions - switch (req.action) { - default: - return false; - // Start recording data - case ff_msgs::VisualeyezConfig::Request::TRACK: - ROS_WARN_STREAM("Return to target tracking mode"); - state_ = TRACKING; - return true; - // Start recording data - case ff_msgs::VisualeyezConfig::Request::RECORD: - if (targets_.find(req.name) == 
targets_.end()) { - ROS_WARN_STREAM("Cannot find target " << req.name); - return false; - } - ROS_INFO_STREAM("Recording data for target " << req.name); - name_ = req.name; - count_.clear(); - markers_.clear(); - measurements_.clear(); - state_ = RECORDING; - return true; - // Load calibration data - case ff_msgs::VisualeyezConfig::Request::LOAD: - if (!VisualeyezUtils::ReadConfig(req.name, targets_)) { - ROS_INFO_STREAM("Calibration file " << req.name << " could not be read"); - return false; - } - ROS_INFO_STREAM("Reading calibration from file " << req.name); - return true; - // Save calibration data - case ff_msgs::VisualeyezConfig::Request::SAVE: - if (!VisualeyezUtils::WriteConfig(req.name, targets_)) - return false; - ROS_INFO_STREAM("Writing calibration to " << req.name); - return true; - // Perform calibration - case ff_msgs::VisualeyezConfig::Request::CALIBRATE: - ROS_INFO_STREAM("Calibrating target " << name_ << " using recorded data"); - state_ = CALIBRATING; - break; - } - - // We will be solving for the transform (to the world rotation origin), relative marker positions - // and the rotation from the reference positions to the observation frame - ceres::Problem problem; - double *transform = new double[3]; - double *rotations = new double[3*measurements_.size()]; - size_t counter = 0; - - // Bootstrap the transform center - counter = 0; - Eigen::Vector3d centroid; - for (std::vector::iterator it = measurements_.begin(); it != measurements_.end(); it++) { - for (VZMarkers::iterator jt = it->begin(); jt != it->end(); jt++) { - centroid += jt->second; - counter++; - } - } - centroid /= counter; - transform[0] = centroid(0); - transform[1] = centroid(1); - transform[2] = centroid(2); - - // Create a ceres problem to solve for the relative marker coordinates - Eigen::Affine3d R = Eigen::Affine3d::Identity(); - for (size_t i = 0; i < measurements_.size(); i++) { - ObservationVec observations; - std::vector < double* > parameters; - parameters.push_back(transform); - parameters.push_back(&rotations[3*i]); - for (VZMarkers::iterator jt = measurements_[i].begin(); jt != measurements_[i].end(); jt++) { - parameters.push_back(markers_[jt->first].pos); - observations.push_back(Marker(jt->second)); - } - problem.AddResidualBlock(ProjectionError::Create(observations), new ceres::HuberLoss(1.0), parameters); - if (i > 0) { - // Get the correspondences with the last frame - std::vector correspondences; - for (VZMarkers::iterator jt = measurements_[i-1].begin(); jt != measurements_[i-1].end(); jt++) - if (measurements_[i].find(jt->first) != measurements_[i].end()) - correspondences.push_back(jt->first); - if (correspondences.size() > 3) { - Eigen::Matrix < double, 3, Eigen::Dynamic > prev(3, correspondences.size()); - Eigen::Matrix < double, 3, Eigen::Dynamic > curr(3, correspondences.size()); - for (size_t j = 0; j < correspondences.size(); j++) { - prev.block<3, 1>(0, j) = measurements_[i-1][correspondences[j]]; - curr.block<3, 1>(0, j) = measurements_[i][correspondences[j]]; - } - // Perform a KABSCH transform on the two matrices - Eigen::Affine3d A; - if (VisualeyezUtils::Kabsch < double > (prev, curr, A, false)) - R = R.rotate(A.linear()); - } - } - Eigen::AngleAxisd aa(R.linear()); - rotations[3*i+0] = aa.angle() * aa.axis()(0); - rotations[3*i+1] = aa.angle() * aa.axis()(1); - rotations[3*i+2] = aa.angle() * aa.axis()(2); - } - - // We fix the first rotation to (0,0,0) - problem.SetParameterBlockConstant(rotations); - - // Solve the problem - ceres::Solver::Options options; - 
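// Editorial aside (illustrative sketch): formatting damage stripped the template
// arguments from the Ceres types in the calibration code above. The generic wiring of a
// ceres::DynamicAutoDiffCostFunction -- add one parameter block per state, set the
// residual count, then solve -- normally looks like this; the functor, stride and solver
// choices below are placeholders rather than the project's exact values.
#include <ceres/ceres.h>

struct ToyResidual {
  template <typename T>
  bool operator()(T const* const* params, T* residual) const {
    residual[0] = params[0][0] - T(1.0);  // drive the single parameter toward 1.0
    return true;
  }
};

int main() {
  double x = 0.0;
  auto* cost = new ceres::DynamicAutoDiffCostFunction<ToyResidual, 4>(new ToyResidual);
  cost->AddParameterBlock(1);   // one block holding one parameter
  cost->SetNumResiduals(1);     // one residual produced per evaluation

  ceres::Problem problem;
  problem.AddResidualBlock(cost, nullptr, &x);

  ceres::Solver::Options options;
  options.linear_solver_type = ceres::DENSE_QR;
  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem, &summary);
  return summary.IsSolutionUsable() ? 0 : 1;
}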
options.linear_solver_type = ceres::DENSE_SCHUR; - options.minimizer_progress_to_stdout = true; - options.max_num_iterations = 100; - // options.function_tolerance = 1e-9; - ceres::Solver::Summary summary; - ceres::Solve(options, &problem, &summary); - std::cout << summary.FullReport() << "\n"; - if (!summary.IsSolutionUsable()) { - ROS_WARN("The solver completed but no solution was found"); - } else { - ROS_WARN("The solver found a solution"); - for (VZMarkers::iterator it = targets_[name_].begin(); it != targets_[name_].end(); it++) { - ROS_INFO_STREAM("[" << (int)it->first.first << ":" << (int)it->first.second << "] " - << markers_[it->first].pos[0] - it->second(0) << ", " - << markers_[it->first].pos[1] - it->second(1) << ", " - << markers_[it->first].pos[2] - it->second(2)); - it->second = Eigen::Vector3d( - markers_[it->first].pos[0], // POS_X - markers_[it->first].pos[1], // POS_Y - markers_[it->first].pos[2]); // POS_Z - } - } - - // Print out the final centroid guess - ROS_INFO_STREAM("Centroid initial guess: " << centroid); - centroid(0) = transform[0]; - centroid(1) = transform[1]; - centroid(2) = transform[2]; - ROS_INFO_STREAM("Centroid final guess: " << centroid); - - // Clear up memory - delete[] rotations; - delete[] transform; - - // return to tracking mode - state_ = TRACKING; - - // Make for great success - return true; - } - - private: - // Publish a TF2 message - bool pub_tf_; // Publish a TF2 message? - // ROS stuff - ros::Subscriber sub_; // Data callback - ros::ServiceServer srv_; // Trigger service - ros::Publisher pub_grounding_; // Grounding - ros::Publisher pub_markers_; // Markers - ros::Publisher pub_feedback_; // Counter feedback - std::map pub_truth_; // Truth publisher (1 per target) - // System state - VisualeyezServerState state_; // What state are we in - std::string name_; // Name of the target currently being tracked - // Targets and grounding - VZTargetMarkers targets_; // The targets we wish to track - VZMarkers grounding_; // Grounding points - std::vector < VZMarkers > measurements_; // Measurements - // Calibration-specific variables - MarkerMap markers_; // A list of markers - std::map count_; // Keep track of the valid marker count - // Projection from VISUALEYEZ -> WORLD frame - Eigen::Affine3d transform_; // Transform: VISUALEYEZ -> WORLD -}; - -// Register the nodelet witht he system -PLUGINLIB_EXPORT_CLASS(visualeyez::VisualeyezServer, nodelet::Nodelet); - -} // namespace visualeyez - -#endif // VISUALEYEZ_VISUALEYEZ_SERVER_H_ diff --git a/tools/visualeyez/test/test_visualeyez_stream.cc b/tools/visualeyez/test/test_visualeyez_stream.cc deleted file mode 100644 index babef136cc..0000000000 --- a/tools/visualeyez/test/test_visualeyez_stream.cc +++ /dev/null @@ -1,104 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -// Test Visualeyez stream: -// Sets up a fake client to send well-formed visualeyez messages at 1Hz -// using TimerCallback, if they start being received using the VisualeyezCallback -// within a 10 second timer the test passes, if 10 seconds passes and there is -// no response, then TimeoutCallback is called and the test fails. - -// Required for the test framework -#include - -// Required for the test cases -#include - -// Required messages -#include -#include - -// Visualeyez interface -#include - -// The client generates synthetic UDP data -visualeyez::VisualeyezClient vz_client_; - -// The synthetic data that will be sent -ff_msgs::VisualeyezDataArray msg_; - -// Timer callback -void TimerCallback(const ros::TimerEvent& event) { - vz_client_.Send(msg_); -} - -// Timeout callback -void TimeoutCallback(const ros::TimerEvent& event) { - EXPECT_FALSE(true); - ros::shutdown(); -} - -// Simple callback -void VisualeyezCallback(const ff_msgs::VisualeyezDataArray::ConstPtr& msg) { - // Perform tests on the received data - EXPECT_FALSE(msg == nullptr); - EXPECT_EQ(msg->header.stamp.toSec(), msg_.header.stamp.toSec()); - EXPECT_EQ(msg->measurements.size(), msg_.measurements.size()); - for (size_t i = 0; i < msg_.measurements.size(); i++) { - EXPECT_EQ(msg->measurements[i].tcmid, msg_.measurements[i].tcmid); - EXPECT_EQ(msg->measurements[i].ledid, msg_.measurements[i].ledid); - EXPECT_EQ(msg->measurements[i].position.x, msg_.measurements[i].position.x); - EXPECT_EQ(msg->measurements[i].position.y, msg_.measurements[i].position.y); - EXPECT_EQ(msg->measurements[i].position.z, msg_.measurements[i].position.z); - } - // Shutdown ROS, for now, test passed :) - ros::shutdown(); -} - -// Stream test - set up a fake client to send well-formed visualeyez messages at 1Hz, -// and ensure that they start being received within ten seconds. 
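// Editorial aside (illustrative sketch): the stream test below drives the bridge with a
// VisualeyezClient, which wraps a UDP socket. Sending a raw datagram with boost::asio
// looks roughly like this; the function name is a placeholder, and the address and port
// would come from tools/visualeyez.config rather than being hard-coded.
#include <boost/asio.hpp>
#include <cstddef>
#include <string>

void SendDatagram(const std::string& host, unsigned short port,
                  const void* data, std::size_t size) {
  boost::asio::io_service io_service;
  boost::asio::ip::udp::socket socket(io_service);
  socket.open(boost::asio::ip::udp::v4());
  boost::asio::ip::udp::endpoint endpoint(
      boost::asio::ip::address::from_string(host), port);
  socket.send_to(boost::asio::buffer(data, size), endpoint);  // fire-and-forget UDP send
}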
-TEST(test_visualeyez_stream, Stream) { - // Get a node handle - ros::NodeHandle nh("~"); - // Add some data to test - msg_.header.stamp = ros::Time::now(); - ff_msgs::VisualeyezData data; - data.tcmid = 1.0; - data.ledid = 2.0; - data.position.x = 3.0; - data.position.y = 4.0; - data.position.z = 5.0; - msg_.measurements.push_back(data); - // Subscribe to the visualeyez topic - ros::Subscriber sub = nh.subscribe(TOPIC_VISUALEYEZ_DATA, 1, VisualeyezCallback); - // Create a periodic timer to send data - ros::Timer timer = nh.createTimer(ros::Duration(1.0), TimerCallback, false, true); - // Create a periodic timer to send data - ros::Timer timout = nh.createTimer(ros::Duration(10), TimeoutCallback, false, true); - // Wait until shutdown - ros::spin(); -} - -// Run all the tests that were declared with TEST() -int main(int argc, char **argv) { - // Initialize the gtesttest framework - testing::InitGoogleTest(&argc, argv); - // Initialize ROS - ros::init(argc, argv, "test_visualeyez_stream", ros::init_options::AnonymousName); - // Run all test procedures - return RUN_ALL_TESTS(); -} diff --git a/tools/visualeyez/test/test_visualeyez_stream.test b/tools/visualeyez/test/test_visualeyez_stream.test deleted file mode 100644 index a9118072e3..0000000000 --- a/tools/visualeyez/test/test_visualeyez_stream.test +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tools/visualeyez/test/test_visualeyez_util.cc b/tools/visualeyez/test/test_visualeyez_util.cc deleted file mode 100644 index f6b1a8357e..0000000000 --- a/tools/visualeyez/test/test_visualeyez_util.cc +++ /dev/null @@ -1,132 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -// Test Visualeyez util: -// Writes a calibration file, then opens and reads it. Check that the written -// content matches the original content. - -// Required for the test framework -#include - -// Required for the test cases -#include - -// Visualeyez interface -#include - -// Check that targets_o[name] matches targets_i[name] for some given name -void Evaluate(visualeyez::VZTargetMarkers & targets_i, - visualeyez::VZTargetMarkers & targets_o, std::string const& name) { - EXPECT_FALSE(targets_i.find(name) == targets_i.end()); - EXPECT_FALSE(targets_o.find(name) == targets_o.end()); - for (visualeyez::VZMarkers::iterator it = targets_i[name].begin(); - it != targets_i[name].end(); it++) { - EXPECT_EQ(targets_i[name][it->first](0), targets_o[name][it->first](0)); - EXPECT_EQ(targets_i[name][it->first](1), targets_o[name][it->first](1)); - EXPECT_EQ(targets_i[name][it->first](2), targets_o[name][it->first](2)); - } -} - -// Write an calibration file, then open and read it. Check that the written content -// matches the original content. 
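// Editorial aside (illustrative sketch): the IO test below round-trips marker data through
// VisualeyezUtils::WriteConfig / ReadConfig. The project's on-disk calibration format is
// not reproduced here; a generic binary round-trip of (tcmid, ledid, x, y, z) records is
// sketched purely to show the pattern, with placeholder names and layout.
#include <cstdint>
#include <fstream>
#include <string>
#include <vector>

struct MarkerRecord {
  uint8_t tcmid;
  uint8_t ledid;
  double x, y, z;  // raw struct writes include padding; acceptable for a same-machine round-trip
};

bool WriteRecords(const std::string& path, const std::vector<MarkerRecord>& records) {
  std::ofstream out(path, std::ios::binary);
  for (const MarkerRecord& r : records)
    out.write(reinterpret_cast<const char*>(&r), sizeof(r));
  return out.good();
}

bool ReadRecords(const std::string& path, std::vector<MarkerRecord>* records) {
  std::ifstream in(path, std::ios::binary);
  MarkerRecord r;
  while (in.read(reinterpret_cast<char*>(&r), sizeof(r)))
    records->push_back(r);
  return in.eof();  // success when the loop stopped at end-of-file, not on a short read
}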
-TEST(test_visualeyez_util, IO) { - // Prepare some input data - visualeyez::VZTargetMarkers targets_i; - targets_i["alpha"][visualeyez::VZIndex(1, 1)] = Eigen::Vector3d(1.0, 2.0, 3.0); - targets_i["alpha"][visualeyez::VZIndex(1, 2)] = Eigen::Vector3d(4.0, 5.0, 6.0); - targets_i["alpha"][visualeyez::VZIndex(1, 3)] = Eigen::Vector3d(7.0, 8.0, 9.0); - targets_i["bravo"][visualeyez::VZIndex(2, 1)] = Eigen::Vector3d(9.0, 8.0, 7.0); - targets_i["bravo"][visualeyez::VZIndex(2, 2)] = Eigen::Vector3d(6.0, 5.0, 4.0); - targets_i["bravo"][visualeyez::VZIndex(2, 3)] = Eigen::Vector3d(3.0, 2.0, 1.0); - // Write to the binary file - EXPECT_TRUE(visualeyez::VisualeyezUtils::WriteConfig("/tmp/test.bin", targets_i)); - // If we have an empty input, then we should get nothing back - visualeyez::VZTargetMarkers targets_o; - EXPECT_TRUE(visualeyez::VisualeyezUtils::ReadConfig("/tmp/test.bin", targets_o)); - EXPECT_EQ(targets_o.size(), 0); - // If we just add one, then we should just get one back - targets_o["alpha"] = visualeyez::VZMarkers(); - EXPECT_TRUE(visualeyez::VisualeyezUtils::ReadConfig("/tmp/test.bin", targets_o)); - EXPECT_EQ(targets_o.size(), 1); - Evaluate(targets_i, targets_o, "alpha"); - // If we add another, then we should just get two back (the original should be copied over) - targets_o["bravo"] = visualeyez::VZMarkers(); - EXPECT_TRUE(visualeyez::VisualeyezUtils::ReadConfig("/tmp/test.bin", targets_o)); - EXPECT_EQ(targets_o.size(), 2); - Evaluate(targets_i, targets_o, "alpha"); - Evaluate(targets_i, targets_o, "bravo"); - // Now if we add something that doesn't exist we should get a failure - targets_o["charlie"] = visualeyez::VZMarkers(); - EXPECT_FALSE(visualeyez::VisualeyezUtils::ReadConfig("/tmp/test.bin", targets_o)); - // Fail correctly - ros::shutdown(); -} - -// Kabsch algorithm with scaling -TEST(test_visualeyez_util, KabschWithScale) { - Eigen::Matrix3Xd in(3, 100), out(3, 100); - Eigen::Quaternion Q(1, 3, 5, 2); - Q.normalize(); - Eigen::Matrix3d R = Q.toRotationMatrix(); - double scale = 2.0; - for (int row = 0; row < in.rows(); row++) { - for (int col = 0; col < in.cols(); col++) { - in(row, col) = log(2*row + 10.0)/sqrt(1.0*col + 4.0) + sqrt(col*1.0)/(row + 1.0); - } - } - Eigen::Vector3d S; - S << -5, 6, -27; - for (int col = 0; col < in.cols(); col++) - out.col(col) = scale*R*in.col(col) + S; - Eigen::Affine3d A; - EXPECT_TRUE(visualeyez::VisualeyezUtils::Kabsch (in, out, A, true)); - EXPECT_LT((scale*R-A.linear()).cwiseAbs().maxCoeff(), 1e-13); - EXPECT_LT((S-A.translation()).cwiseAbs().maxCoeff(), 1e-13); -} - -// Kabsch algorithm without scaling -TEST(test_visualeyez_util, KabschWithoutScale) { - Eigen::Matrix3Xd in(3, 100), out(3, 100); - Eigen::Quaternion Q(1, 3, 5, 2); - Q.normalize(); - Eigen::Matrix3d R = Q.toRotationMatrix(); - for (int row = 0; row < in.rows(); row++) { - for (int col = 0; col < in.cols(); col++) { - in(row, col) = log(2*row + 10.0)/sqrt(1.0*col + 4.0) + sqrt(col*1.0)/(row + 1.0); - } - } - Eigen::Vector3d S; - S << -5, 6, -27; - for (int col = 0; col < in.cols(); col++) - out.col(col) = R*in.col(col) + S; - Eigen::Affine3d A; - EXPECT_TRUE(visualeyez::VisualeyezUtils::Kabsch < double > (in, out, A, false)); - EXPECT_LT((R-A.linear()).cwiseAbs().maxCoeff(), 1e-13); - EXPECT_LT((S-A.translation()).cwiseAbs().maxCoeff(), 1e-13); -} - -// Run all the tests that were declared with TEST() -int main(int argc, char **argv) { - // Initialize ROS and create a single node handle in order to use ros::Time::now() - ros::init(argc, argv, 
"test_visualeyez_util", ros::init_options::AnonymousName); - ros::NodeHandle nh("~"); - // Initialize the gtest framework - testing::InitGoogleTest(&argc, argv); - // Run all test procedures - return RUN_ALL_TESTS(); -} diff --git a/tools/visualeyez/test/test_visualeyez_util.test b/tools/visualeyez/test/test_visualeyez_util.test deleted file mode 100644 index 10c461f3d6..0000000000 --- a/tools/visualeyez/test/test_visualeyez_util.test +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tools/visualeyez/tools/visualeyez_tool.cc b/tools/visualeyez/tools/visualeyez_tool.cc deleted file mode 100644 index 2aa29b5f24..0000000000 --- a/tools/visualeyez/tools/visualeyez_tool.cc +++ /dev/null @@ -1,115 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -// Visualeyez header -#include - -// Config and feedback messages -#include -#include - -// Default argument values -std::string name_ = ""; -std::string file_ = "calibration.bin"; -uint32_t samples_ = 3; -ros::ServiceClient srv_; -ros::Subscriber sub_; - -// Configure the calibration engine -bool Configure(uint8_t action, std::string const& name = "") { - ff_msgs::VisualeyezConfig msg; - msg.request.action = action; - msg.request.name = name; - msg.request.pub_tf = true; - return srv_.call(msg); -} - -// Called when new feedback data arrives -void FeedbackCallback(const ff_msgs::VisualeyezFeedbackArray::ConstPtr& msg) { - std::vector::const_iterator it; - for (it = msg->feedback.begin(); it != msg->feedback.end(); it++) - std::cout << std::setfill('0') << std::setw(4) << it->count << ' '; - std::cout << '\r' << std::flush; - for (it = msg->feedback.begin(); it != msg->feedback.end(); it++) - if (it->count < samples_) return; - // Calibrate and save - std::cout << "Calibrating..." << std::endl; - Configure(ff_msgs::VisualeyezConfig::Request::CALIBRATE, name_); - std::cout << "Saving..." << std::endl; - Configure(ff_msgs::VisualeyezConfig::Request::SAVE, file_); - std::cout << "Done!" << std::endl; - // Clean up - srv_ = ros::ServiceClient(); - sub_ = ros::Subscriber(); - // Shutdown - ros::shutdown(); -} - -// Main entry point for application -int main(int argc, char *argv[]) { - // Command line parsing - char c; - while ((c = getopt(argc, argv, "hn:s:w:")) != -1) { - switch (c) { - case 'n': { - name_ = std::string(optarg); - break; - } - case 's': { - int nsamples = atoi(optarg); - if (nsamples <= 0) { - std::cout << "Number of samples must be greater than zero" << std::endl; - return -2; - } - samples_ = static_cast(nsamples); - break; - } - case 'w': { - file_ = std::string(optarg); - break; - } - case 'h': - default: { - std::cout << std::endl; - std::cout << "Usage: visualeyez_tool [OPTIONS]..." 
<< std::endl; - std::cout << "Calibrate the visualeyez system" << std::endl; - std::cout << " -h Show usage and help" << std::endl; - std::cout << " -n name Freeflyer name ('" << name_ <<"')" << std::endl; - std::cout << " -s number Minimum number of samples per marker (" << samples_ << ")" << std::endl; - std::cout << " -w file Write to calibration file (" << file_ << ")" << std::endl; - std::cout << std::endl; - return -1; - } - } - } - // Initialize ros and get a callback - ros::init(argc, argv, "visualeyez_tool"); - ros::NodeHandle nh; - // Create a service client - srv_ = nh.serviceClient < ff_msgs::VisualeyezConfig > ( - SERVICE_VISUALEYEZ_CALIBRATE_CONFIG); - // Subscribe to the visualeyez topic - sub_ = nh.subscribe(TOPIC_VISUALEYEZ_CALIBRATE_FEEDBACK, 1, FeedbackCallback); - // Start the calibration - std::cout << "Recording:" << std::endl; - Configure(ff_msgs::VisualeyezConfig::Request::RECORD, name_); - // Keep blocking until - ros::spin(); - // Success - return 0; -} diff --git a/wdock/CMakeLists.txt b/wdock/CMakeLists.txt deleted file mode 100644 index 4e12e8cb9f..0000000000 --- a/wdock/CMakeLists.txt +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2017, United States Government, as represented by the -# Administrator of the National Aeronautics and Space Administration. -# -# All rights reserved. -# -# The Astrobee platform is licensed under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -project(wdock) - -set(DEPS - rapidExtAstrobee -) - -set(LIBS - Qt4::QtXml - rapidIo - rapidExtAstrobee - ff_common) - -set(INCLUDES - ${CMAKE_CURRENT_SOURCE_DIR}/include - ${SORACORE_INCLUDE_DIRS} -) - -create_library(TARGET wdock - LIBS ${LIBS} - INC ${INCLUDES} - DEPS ${DEPS} -) - -create_tool_targets(DIR tools - LIBS ${LIBS} wdock - INC ${INCLUDES} - DEPS ${DEPS} -) - -# Determine our module name -get_filename_component(MODULE_NAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) - -install(CODE "execute_process( - COMMAND mkdir -p share/${MODULE_NAME} - COMMAND ln -s ../../bin/dds_ros_bridge share/${MODULE_NAME} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} - OUTPUT_QUIET - ERROR_QUIET - )") diff --git a/wdock/include/wdock/wdock.h b/wdock/include/wdock/wdock.h deleted file mode 100644 index a9db98318f..0000000000 --- a/wdock/include/wdock/wdock.h +++ /dev/null @@ -1,109 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - - -#ifndef WDOCK_WDOCK_H_ -#define WDOCK_WDOCK_H_ - -#include -#include - -#include -#include -#include - -#include "AstrobeeConstants.h" - -#include "ff_common/init.h" - -#include "DockStateSupport.h" - -#include "knDds/DdsEntitiesFactory.h" -#include "knDds/DdsEntitiesFactorySvc.h" -#include "knDds/DdsEventLoop.h" -#include "knDds/DdsSupport.h" -#include "knDds/DdsTypedSupplier.h" - -#include "miro/Configuration.h" -#include "miro/Robot.h" -#include "miro/Log.h" - -#include "rapidDds/RapidConstants.h" -#include "rapidDds/AckSupport.h" -#include "rapidDds/CommandSupport.h" - -#include "rapidUtil/RapidHelper.h" - -namespace kn { - class DdsEntitiesFactorySvc; -} // end namespace kn - -namespace w_dock { -class WDock { - public: - WDock(int argc, char** argv, std::string const& entity_name); - ~WDock(); - - // call back for ddsEventLoop - void operator() (rapid::Command const* cmd); - - int64_t MakeTimestamp(); - - void ProcessDdsEventLoop(); - - void PublishCmdAck(std::string const& cmd_id, - rapid::AckStatus status = rapid::ACK_COMPLETED, - rapid::AckCompletedStatus completed_status = - rapid::ACK_COMPLETED_OK, - std::string const& message = ""); - - void PublishDockState(); - - // TODO(Katie) Remove the following functions once real code is in - void SetBerthOne(); - void SetBerthTwo(); - - private: - kn::DdsEventLoop dds_event_loop_; - - using AckStateSupplier = kn::DdsTypedSupplier; - using AckStateSupplierPtr = std::unique_ptr; - - AckStateSupplierPtr ack_state_supplier_; - - using DockStateSupplier = - kn::DdsTypedSupplier; - using DockStateSupplierPtr = std::unique_ptr; - - DockStateSupplierPtr dock_state_supplier_; - - using CommandEchoSupplier = - kn::DdsTypedSupplier; - using CommandEchoSupplierPtr = std::unique_ptr; - - CommandEchoSupplierPtr command_echo_supplier_; - - std::shared_ptr dds_entities_factory_; - - std::string ack_pub_suffix_, dock_state_pub_suffix_, - sub_suffix_, echo_suffix_; -}; - -} // end namespace w_dock - -#endif // WDOCK_WDOCK_H_ diff --git a/wdock/src/wdock.cc b/wdock/src/wdock.cc deleted file mode 100644 index fcf8d3ab5f..0000000000 --- a/wdock/src/wdock.cc +++ /dev/null @@ -1,277 +0,0 @@ -/* Copyright (c) 2017, United States Government, as represented by the - * Administrator of the National Aeronautics and Space Administration. - * - * All rights reserved. - * - * The Astrobee platform is licensed under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - - -#include "wdock/wdock.h" - -namespace { - -typedef std::vector NodeVector; - -void SubstituteROBOT_NAME(kn::DdsEntitiesFactorySvcParameters * params) { - { - NodeVector::iterator first, last = params->publishers.end(); - for (first = params->publishers.begin(); first != last; ++first) { - if (first->name == "") { - first->name = Miro::RobotParameters::instance()->name.c_str(); - } - if (first->partition == "") { - first->partition = Miro::RobotParameters::instance()->name.c_str(); - } - } - } - - { - NodeVector::iterator first, last = params->subscribers.end(); - for (first = params->subscribers.begin(); first != last; ++first) { - if (first->name == "") { - first->name = Miro::RobotParameters::instance()->name.c_str(); - } - if (first->partition == "") { - first->partition = Miro::RobotParameters::instance()->name.c_str(); - } - } - } -} - -} // namespace - -namespace w_dock { - -WDock::WDock(int argc, char** argv, std::string const& entity_name) : - dds_event_loop_(entity_name), - ack_pub_suffix_(""), - dock_state_pub_suffix_(""), - sub_suffix_(""), - echo_suffix_("-echo") { - int fakeArgc = 1; - - // Make path to QOS and NDDS files - std::string config_path = ff_common::GetConfigDir(); - config_path += "/communications/dds/"; - - /* fake miro log into thinking we have no arguments */ - Miro::Log::init(fakeArgc, argv); - Miro::Log::level(9); - - /* fake miro configuration into thinking we have no arguments */ - Miro::Configuration::init(fakeArgc, argv); - - Miro::RobotParameters *robotParams = Miro::RobotParameters::instance(); - kn::DdsEntitiesFactorySvcParameters *ddsParams = - kn::DdsEntitiesFactorySvcParameters::instance(); - - /* get the defaults for *all the things!* */ - Miro::ConfigDocument *config = Miro::Configuration::document(); - config->setSection("Robot"); - config->getParameters("Miro::RobotParameters", *robotParams); - config->getParameters("Miro::DdsEntitiesFactorySvcParameters", *ddsParams); - - robotParams->name = "SmartDock"; - robotParams->namingContextName = robotParams->name; - - SubstituteROBOT_NAME(ddsParams); - - // Clear config files so that dds only looks for the files we add - ddsParams->participants[0].discoveryPeersFiles.clear(); - ddsParams->configFiles.clear(); - - ddsParams->participants[0].participantName = argv[0]; - ddsParams->participants[0].domainId = 37; - ddsParams->participants[0].discoveryPeersFiles.push_back( - (config_path + "NDDS_DISCOVERY_PEERS")); - ddsParams->configFiles.push_back((config_path + "RAPID_QOS_PROFILES.xml")); - - dds_entities_factory_.reset(new kn::DdsEntitiesFactorySvc()); - dds_entities_factory_->init(ddsParams); - - // Set up the dock state publisher - dock_state_supplier_.reset(new w_dock::WDock::DockStateSupplier( - rapid::ext::astrobee::DOCK_STATE_TOPIC + dock_state_pub_suffix_, // topic - "", // name - "AstrobeeDockStateProfile", // profile - "")); // library - - // Initialize rapid message - rapid::RapidHelper::initHeader(dock_state_supplier_->event().hdr); - - dock_state_supplier_->event().berthOne.occupied = false; - dock_state_supplier_->event().berthOne.astrobeeName = ""; - dock_state_supplier_->event().berthOne.awake = false; - dock_state_supplier_->event().berthOne.numBatteries = 0; - dock_state_supplier_->event().berthOne.maxCapacity = 0; - dock_state_supplier_->event().berthOne.currentCapacity = 0; - - dock_state_supplier_->event().berthTwo.occupied = false; - dock_state_supplier_->event().berthTwo.astrobeeName = ""; - dock_state_supplier_->event().berthTwo.awake = false; - 
dock_state_supplier_->event().berthTwo.numBatteries = 0; - dock_state_supplier_->event().berthTwo.maxCapacity = 0; - dock_state_supplier_->event().berthTwo.currentCapacity = 0; - - PublishDockState(); - - // Set up the ack publisher - ack_state_supplier_.reset(new w_dock::WDock::AckStateSupplier( - rapid::ACK_TOPIC + ack_pub_suffix_, // topic - "", // name - "RapidAckProfile", // profile - "")); // library - - rapid::RapidHelper::initHeader(ack_state_supplier_->event().hdr); - - // Set up the command-echo publisher - command_echo_supplier_.reset(new w_dock::WDock::CommandEchoSupplier( - rapid::COMMAND_TOPIC + echo_suffix_, // topic - "", // name - "RapidCommandProfile", // profile - "")); // library - - rapid::RapidHelper::initHeader(command_echo_supplier_->event().hdr); - - // Set up the command subscriber - try { - dds_event_loop_.connect(this, - rapid::COMMAND_TOPIC + - sub_suffix_, // topic - "", // name - "RapidCommandProfile", // profile - ""); // library - } catch (std::exception& e) { - std::cout << "Rapid command exception: " << e.what() << std::endl; - throw; - } catch (...) { - std::cout << "Rapid command exeception unknown." << std::endl; - throw; - } -} - -WDock::~WDock() { -} - -void WDock::operator() (rapid::Command const* rapid_cmd) { - std::cout << "Received command from GDS." << std::endl; - - command_echo_supplier_->event() = *rapid_cmd; - command_echo_supplier_->sendEvent(); - - if (strcmp(rapid_cmd->cmdName, "wake") == 0) { - std::cout << "Received command wake. Berth: " << rapid_cmd->arguments[0]._u.i << std::endl; - // TODO(Someone) Add code to send wake command to astrobee over i2c - if (rapid_cmd->arguments.length() == 1 && - rapid_cmd->arguments[0]._d == rapid::RAPID_INT) { - if (rapid_cmd->arguments[0]._u.i == 1) { - std::cout << "Waking berth one!" << std::endl; - dock_state_supplier_->event().berthOne.occupied = true; - dock_state_supplier_->event().berthOne.astrobeeName = "Honey"; - dock_state_supplier_->event().berthOne.awake = true; - dock_state_supplier_->event().berthOne.numBatteries = 0; - dock_state_supplier_->event().berthOne.maxCapacity = 0; - dock_state_supplier_->event().berthOne.currentCapacity = 0; - } else if (rapid_cmd->arguments[0]._u.i == 2) { - std::cout << "Waking berth two!" << std::endl; - dock_state_supplier_->event().berthTwo.occupied = true; - dock_state_supplier_->event().berthTwo.astrobeeName = "Bumble"; - dock_state_supplier_->event().berthTwo.awake = true; - dock_state_supplier_->event().berthTwo.numBatteries = 0; - dock_state_supplier_->event().berthTwo.maxCapacity = 0; - dock_state_supplier_->event().berthTwo.currentCapacity = 0; - } else { - std::cout << "Berth name not recognized!" << std::endl; - PublishCmdAck(rapid_cmd->cmdId, rapid::ACK_COMPLETED, - rapid::ACK_COMPLETED_BAD_SYNTAX, - "Berth number not recognized! Needs to be 1 or 2!"); - return; - } - PublishDockState(); - PublishCmdAck(rapid_cmd->cmdId); - } else { - std::cout << "Wake command has invalid syntax." 
<< std::endl; - PublishCmdAck(rapid_cmd->cmdId, rapid::ACK_COMPLETED, - rapid::ACK_COMPLETED_BAD_SYNTAX, - "Wake command has invalid syntax!"); - } - } else { - // TODO(Katie) put cmd name in msg - std::string msg = rapid_cmd->cmdName; - msg += " command not recognized by the dock."; - std::cout << msg << std::endl; - PublishCmdAck(rapid_cmd->cmdId, rapid::ACK_COMPLETED, - rapid::ACK_COMPLETED_BAD_SYNTAX, msg); - } -} - -int64_t WDock::MakeTimestamp() { - int64_t microseconds; - struct timeval tv; - - gettimeofday(&tv, NULL); - - microseconds = (tv.tv_sec * 1000000) + tv.tv_usec; - - return microseconds; -} - -void WDock::ProcessDdsEventLoop() { - // process events as fast as possible since we are controlling the wait else - // where - dds_event_loop_.processEvents(kn::milliseconds(0)); -} - -void WDock::PublishCmdAck(std::string const& cmd_id, rapid::AckStatus status, - rapid::AckCompletedStatus completed_status, - std::string const& message) { - ack_state_supplier_->event().hdr.timeStamp = MakeTimestamp(); - std::strncpy(ack_state_supplier_->event().cmdId, cmd_id.data(), 64); - ack_state_supplier_->event().status = status; - ack_state_supplier_->event().completedStatus = completed_status; - std::strncpy(ack_state_supplier_->event().message, message.data(), 128); - std::cout << "Sending command ack!" << std::endl; - ack_state_supplier_->sendEvent(); -} - -void WDock::PublishDockState() { - dock_state_supplier_->event().hdr.timeStamp = MakeTimestamp(); - std::cout << "Sending dock state!" << std::endl; - dock_state_supplier_->sendEvent(); -} - -// TODO(Katie) Remove following functions once real code is in -void WDock::SetBerthOne() { - dock_state_supplier_->event().berthOne.occupied = true; - dock_state_supplier_->event().berthOne.astrobeeName = ""; - dock_state_supplier_->event().berthOne.awake = false; - dock_state_supplier_->event().berthOne.numBatteries = 4; - dock_state_supplier_->event().berthOne.maxCapacity = 8000; - dock_state_supplier_->event().berthOne.currentCapacity = 8000; - - PublishDockState(); -} - -void WDock::SetBerthTwo() { - dock_state_supplier_->event().berthTwo.occupied = true; - dock_state_supplier_->event().berthTwo.astrobeeName = "Bumble"; - dock_state_supplier_->event().berthTwo.awake = false; - dock_state_supplier_->event().berthTwo.numBatteries = 2; - dock_state_supplier_->event().berthTwo.maxCapacity = 4000; - dock_state_supplier_->event().berthTwo.currentCapacity = 4000; - - PublishDockState(); -} - -} // end namespace w_dock
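// Editorial aside (illustrative sketch): MakeTimestamp() above builds a
// microseconds-since-epoch value from gettimeofday(); the same quantity can be obtained
// portably with std::chrono, shown here only as a modern equivalent.
#include <chrono>
#include <cstdint>

int64_t MicrosecondsSinceEpoch() {
  using namespace std::chrono;
  return duration_cast<microseconds>(system_clock::now().time_since_epoch()).count();
}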