diff --git a/.gitattributes b/.gitattributes index 2906be97..84b10fd6 100644 --- a/.gitattributes +++ b/.gitattributes @@ -23,6 +23,10 @@ **/resources/**/*.zstd filter=lfs diff=lfs merge=lfs -text **/resources/**/*.db3 filter=lfs diff=lfs merge=lfs -text **/resources/**/*.yaml filter=lfs diff=lfs merge=lfs -text +**/resources/**/*.bag filter=lfs diff=lfs merge=lfs -text + +# DNN Model files +*.onnx filter=lfs diff=lfs merge=lfs -text # trtexec trtexec_x86_64 filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md index b07b8641..329c7a7e 100644 --- a/README.md +++ b/README.md @@ -31,4 +31,4 @@ Please visit the [Isaac ROS Documentation](https://nvidia-isaac-ros.github.io/re ## Latest -Update 2024-09-26: Updated for Isaac ROS 3.1 +Update 2024-12-10: Refactored Dockerfiles diff --git a/docker/Dockerfile.aarch64 b/docker/Dockerfile.aarch64 deleted file mode 100644 index c02c3f6a..00000000 --- a/docker/Dockerfile.aarch64 +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (c) 2021-2024, NVIDIA CORPORATION. All rights reserved. -# -# NVIDIA CORPORATION and its licensors retain all intellectual property -# and proprietary rights in and to this software, related documentation -# and any modifications thereto. Any use, reproduction, disclosure or -# distribution of this software and related documentation without an express -# license agreement from NVIDIA CORPORATION is strictly prohibited. 
- -# Docker file for aarch64 based Jetson device -ARG BASE_IMAGE="nvcr.io/nvidia/l4t-cuda:12.2.12-devel" -FROM ${BASE_IMAGE} - -# Store list of packages (must be first) -RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/aarch64-start-packages.csv - -# Disable terminal interaction for apt -ENV DEBIAN_FRONTEND=noninteractive -ENV SHELL /bin/bash -SHELL ["/bin/bash", "-c"] - -# Ensure we have universe -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - software-properties-common \ -&& add-apt-repository universe \ -&& apt-get update - -# Fundamentals -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - apt-utils \ - bash-completion \ - build-essential \ - ca-certificates \ - curl \ - git \ - git-lfs \ - gnupg2 \ - iputils-ping \ - libgoogle-glog-dev \ - locales \ - lsb-release \ - software-properties-common \ - sudo \ - tar \ - unzip \ - vim \ - wget \ - zlib1g-dev - -# Add Isaac apt repository -RUN --mount=type=cache,target=/var/cache/apt \ - wget -qO - https://isaac.download.nvidia.com/isaac-ros/repos.key | apt-key add - && \ - grep -qxF "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" /etc/apt/sources.list || \ - echo "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" | tee -a /etc/apt/sources.list \ - && apt-get update - -# Setup Jetson debian repositories -RUN --mount=type=cache,target=/var/cache/apt \ - apt-key adv --fetch-keys https://repo.download.nvidia.com/jetson/jetson-ota-public.asc \ - && apt-key adv --fetch-keys http://l4t-repo.nvidia.com/jetson-ota-internal.key \ - && echo 'deb https://repo.download.nvidia.com/jetson/common r36.3 main' > /etc/apt/sources.list.d/nvidia-l4t-apt-source.list \ - && echo 'deb https://repo.download.nvidia.com/jetson/t234 r36.3 main' >> /etc/apt/sources.list.d/nvidia-l4t-apt-source.list \ - && apt-get update - -# Python 
basics -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - python3-dev \ - python3-distutils \ - python3-flake8 \ - python3-pip \ - python3-pytest-cov \ - python3-venv \ - python3-zmq \ - python3.10 \ - python3.10-venv - -# Set Python3 as default -RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 - -# Core dev libraries -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - ffmpeg \ - gfortran \ - graphicsmagick-libmagick-dev-compat \ - jq \ - kmod \ - lcov \ - libasio-dev \ - libassimp-dev \ - libatlas-base-dev \ - libblas3 \ - libatlas3-base \ - libboost-all-dev \ - libboost-dev \ - libceres-dev \ - libbullet-dev \ - libcunit1-dev \ - libffi7 \ - libfreetype6 \ - libgraphicsmagick++1-dev \ - libhidapi-libusb0 \ - libinput10 \ - libjpeg8 \ - liblapack3 \ - libmnl0 \ - libmnl-dev \ - libncurses5-dev \ - libode-dev \ - libopenblas0 \ - libopencv-dev=4.5.4+dfsg-9ubuntu4 \ - libopenmpi3 \ - libpcap-dev \ - libpcl-dev \ - libsuitesparse-dev \ - libtinyxml2-dev \ - libturbojpeg \ - linuxptp \ - libunwind8 \ - libv4l-0 \ - libx264-dev \ - libxaw7-dev \ - libyaml-cpp-dev \ - llvm-14 \ - nlohmann-json3-dev \ - python3-opencv=4.5.4+dfsg-9ubuntu4 \ - python3-scipy - -# Additional Python dependencies -RUN python3 -m pip install -U \ - Cython \ - pymongo \ - wheel \ - scikit-learn \ - ninja \ - networkx \ - "numpy>=1.24.4,<2" \ - numpy-quaternion \ - pyyaml \ - "setuptools_scm>=6.2" \ - trimesh \ - "yourdfpy>=0.0.53" \ - "warp-lang>=0.9.0" \ - "scipy>=1.7.0" \ - tqdm \ - importlib_resources - - -# Update environment -RUN update-alternatives --install /usr/bin/llvm-config llvm-config /usr/bin/llvm-config-14 14 -ENV LD_LIBRARY_PATH="/opt/nvidia/vpi3/lib64:${LD_LIBRARY_PATH}" -ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra:${LD_LIBRARY_PATH}" -ENV LD_LIBRARY_PATH="/usr/local/cuda-12.2/targets/aarch64-linux/lib:${LD_LIBRARY_PATH}" -ENV 
LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra-egl:${LD_LIBRARY_PATH}" -ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra/weston:${LD_LIBRARY_PATH}" -ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/lib/aarch64-linux-gnu-host" -ENV PATH="/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/src/tensorrt/bin:${PATH}" - -# Install CUDA packages -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y --no-install-recommends \ - cuda-cudart-12-2 \ - cuda-libraries-12-2 \ - cuda-nvml-dev-12-2 \ - cuda-sanitizer-12-2 \ - cuda-toolkit-12-2 \ - libcublas-12-2 \ - libcudnn8 \ - libcusparse-12-2 \ - libnpp-12-2 - -# Install TensorRT and VPI -RUN --mount=type=cache,target=/var/cache/apt \ -mkdir -p /lib/firmware && \ -apt-get update && apt-get install -y \ - libnvvpi3 \ - tensorrt \ - vpi3-dev - -# Install Tao converter -RUN mkdir -p /opt/nvidia/tao && cd /opt/nvidia/tao && \ - wget --content-disposition 'https://api.ngc.nvidia.com/v2/resources/org/nvidia/team/tao/tao-converter/v5.1.0_jp6.0_aarch64/files?redirect=true&path=tao-converter' -O tao-converter && \ - chmod 755 tao-converter - -ENV PATH="${PATH}:/opt/nvidia/tao" -ENV TRT_LIB_PATH="/usr/lib/aarch64-linux-gnu" -ENV TRT_INCLUDE_PATH="/usr/include/aarch64-linux-gnu" - -# PyTorch (NV CUDA edition) -# https://docs.nvidia.com/deeplearning/frameworks/install-pytorch-jetson-platform/index.html -RUN python3 -m pip install --no-cache \ - https://developer.download.nvidia.com/compute/redist/jp/v60dp/pytorch/torch-2.2.0a0+6a974be.nv23.11-cp310-cp310-linux_aarch64.whl - -# Install Triton server from https://github.com/triton-inference-server/server/releases/tag/v2.40.0 -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y --no-install-recommends \ - libb64-0d \ - libre2-9 \ - rapidjson-dev \ - libopenblas-dev \ - libarchive-dev - -RUN --mount=type=cache,target=/var/cache/apt \ - cd /opt \ - && wget 
https://github.com/triton-inference-server/server/releases/download/v2.40.0/tritonserver2.40.0-igpu.tar.gz \ - && tar -xzvf tritonserver2.40.0-igpu.tar.gz \ - && chmod 644 /opt/tritonserver/backends/tensorflow/libtensorflow_cc.so.2 \ - && chmod 644 /opt/tritonserver/backends/tensorflow/libtensorflow_framework.so.2 \ - && rm tritonserver2.40.0-igpu.tar.gz - -ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/tritonserver/lib" - -# Install boost version >= 1.78 for boost::span -# Current libboost-dev apt packages are < 1.78, so install from tar.gz -RUN --mount=type=cache,target=/var/cache/apt \ - wget -O /tmp/boost.tar.gz \ - https://boostorg.jfrog.io/artifactory/main/release/1.80.0/source/boost_1_80_0.tar.gz \ - && (cd /tmp && tar xzf boost.tar.gz) \ - && cd /tmp/boost_1_80_0 \ - && ./bootstrap.sh --prefix=/usr \ - && ./b2 install \ - && rm -rf /tmp/boost* - -# Install CV-CUDA -RUN --mount=type=cache,target=/var/cache/apt \ - cd /tmp && \ - wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-lib-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ - dpkg -i nvcv-lib-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ - wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-dev-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ - dpkg -i nvcv-dev-0.5.0_beta_DP-cuda12-aarch64-linux.deb - -# Add MQTT binaries and libraries -RUN --mount=type=cache,target=/var/cache/apt \ -apt-add-repository ppa:mosquitto-dev/mosquitto-ppa \ -&& apt-get update && apt-get install -y \ - mosquitto \ - mosquitto-clients - -# Install jtop -RUN python3 -m pip install -U \ - jetson-stats - -# Store list of packages (must be last) -RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/aarch64-end-packages.csv \ No newline at end of file diff --git a/docker/Dockerfile.aarch64 b/docker/Dockerfile.aarch64 new file mode 120000 index 00000000..bf38c169 --- /dev/null +++ b/docker/Dockerfile.aarch64 @@ -0,0 +1 @@ +Dockerfile.base \ No newline 
at end of file diff --git a/docker/Dockerfile.base b/docker/Dockerfile.base new file mode 100644 index 00000000..a26474e7 --- /dev/null +++ b/docker/Dockerfile.base @@ -0,0 +1,461 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. +# + +ARG PLATFORM=amd64 +ARG BASE_IMAGE=nvidia/cuda:12.6.1-devel-ubuntu22.04 + +# -------------------------------------------------------------------------------------------------- +# https://docs.nvidia.com/deeplearning/frameworks/user-guide/index.html +# https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html +FROM nvcr.io/nvidia/tritonserver:24.08-py3 AS base-amd64 + +FROM nvcr.io/nvidia/l4t-cuda:12.6.11-devel AS base-arm64 +# -------------------------------------------------------------------------------------------------- + +FROM base-${PLATFORM} AS common +ARG PLATFORM + +# Store list of packages (must be first) +RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/base-start-packages.csv + +# disable terminal interaction for apt +ENV DEBIAN_FRONTEND=noninteractive +ENV SHELL=/bin/bash +SHELL ["/bin/bash", "-c"] + +# Ensure we have universe +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + software-properties-common \ +&& add-apt-repository universe \ +&& apt-get update + +# Fundamentals +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + apt-transport-https \ + apt-utils \ + bash-completion \ + build-essential \ + ca-certificates \ + clang-format \ + cmake \ + curl \ + git \ + git-lfs \ + 
gnupg2 \ + iputils-ping \ + libgoogle-glog-dev \ + locales \ + lsb-release \ + mlocate \ + rsync \ + tar \ + unzip \ + vim \ + wget \ + zlib1g-dev + +# Python basics +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + python3-dev \ + python3-distutils \ + python3-flake8 \ + python3-pip \ + python3-pybind11 \ + python3-pytest \ + python3-pytest-repeat \ + python3-pytest-rerunfailures \ + python3-pytest-cov \ + python3-venv \ + python3-zmq \ + python3.10 \ + python3.10-venv + +# Set Python3 as default +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 + +# Add Isaac apt repository +RUN --mount=type=cache,target=/var/cache/apt \ + wget -qO - https://isaac.download.nvidia.com/isaac-ros/repos.key | apt-key add - && \ + grep -qxF "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" /etc/apt/sources.list || \ + echo "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" | tee -a /etc/apt/sources.list \ + && apt-get update + +# Core dev libraries +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + ffmpeg \ + gfortran \ + graphicsmagick-libmagick-dev-compat \ + jq \ + kmod \ + lcov \ + libasio-dev \ + libassimp-dev \ + libatlas3-base \ + libatlas-base-dev \ + libblas3 \ + libboost-all-dev \ + libboost-dev \ + libbullet-dev \ + libceres-dev \ + libcunit1-dev \ + libffi7 \ + libfreetype6 \ + libgraphicsmagick++1-dev \ + libhidapi-libusb0 \ + libinput10 \ + libjpeg8 \ + liblapack3 \ + libmnl0 \ + libmnl-dev \ + libncurses5-dev \ + libode-dev \ + libopenblas0 \ + libopencv-dev=4.5.4+dfsg-9ubuntu4 \ + libopenmpi3 \ + libpcap-dev \ + libpcl-dev \ + libpython3.10 \ + libsuitesparse-dev \ + libtinyxml2-dev \ + libturbojpeg \ + libunwind8 \ + libv4l-0 \ + libv4l-dev \ + libx264-dev \ + libxaw7-dev \ + libyaml-cpp-dev \ + linuxptp \ + llvm-14 \ + nlohmann-json3-dev \ + patchelf \ + 
python3-opencv=4.5.4+dfsg-9ubuntu4 \ + python3-scipy + +# Downgrade to setuptools < 70.0.0 +# https://github.com/pypa/setuptools/issues/4483 +RUN python3 -m pip install -U \ +setuptools==65.7.0 + +# Python3 (PIP) +RUN python3 -m pip install -U \ + argcomplete \ + autopep8 \ + Cython \ + flake8 \ + flake8-blind-except \ + flake8-builtins \ + flake8-class-newline \ + flake8-comprehensions \ + flake8-deprecated \ + flake8-docstrings \ + flake8-import-order \ + flake8-quotes \ + gpustat==0.6.0 \ + importlib_resources \ + networkx \ + ninja \ + "numpy>=1.24.4,<2" \ + numpy-quaternion \ + onnx \ + pydocstyle \ + pymongo \ + pyyaml \ + "scipy>=1.7.0" \ + scikit-image \ + scikit-learn \ + "setuptools_scm>=6.2" \ + tqdm \ + trimesh \ + "warp-lang>=0.9.0" \ + wheel \ + "yourdfpy>=0.0.53" + +# Add MQTT binaries and libraries +RUN --mount=type=cache,target=/var/cache/apt \ +apt-add-repository ppa:mosquitto-dev/mosquitto-ppa \ +&& apt-get update && apt-get install -y \ + mosquitto \ + mosquitto-clients + +# Install Node.js +RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \ + apt-get install -y nodejs + +# Install Yarn +RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \ + echo "deb https://dl.yarnpkg.com/debian/ stable main" > /etc/apt/sources.list.d/yarn.list && \ + apt-get update && apt-get install -y yarn && \ + corepack enable + +# Install CuPy and HDBSCAN +RUN python3 -m pip install -U \ + cupy-cuda12x \ + hdbscan + +# Setup Jetson debian repositories +RUN --mount=type=cache,target=/var/cache/apt \ +apt-key adv --fetch-keys https://repo.download.nvidia.com/jetson/jetson-ota-public.asc ; \ +if [[ ${PLATFORM} == 'arm64' ]]; then \ + echo 'deb https://repo.download.nvidia.com/jetson/common r36.4 main' > /etc/apt/sources.list.d/nvidia-l4t-apt-source.list \ + && echo 'deb https://repo.download.nvidia.com/jetson/t234 r36.4 main' >> /etc/apt/sources.list.d/nvidia-l4t-apt-source.list ; \ +elif [[ ${PLATFORM} == 'amd64' ]]; then \ + 
add-apt-repository "deb http://repo.download.nvidia.com/jetson/x86_64/$(lsb_release -cs) r36.4 main" ; \ +else \ + echo "Unrecognized platform: ${PLATFORM}" && exit 1 ; \ +fi ; \ +apt-get update + +# Setup CUDA repositories +RUN --mount=type=cache,target=/var/cache/apt \ +if [[ ${PLATFORM} == 'arm64' ]]; then \ + wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/arm64/cuda-keyring_1.1-1_all.deb ; \ +elif [[ ${PLATFORM} == 'amd64' ]]; then \ + wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb ; \ +else \ + echo "Unrecognized platform: ${PLATFORM}" && exit 1 ; \ +fi ; \ +dpkg -i cuda-keyring_1.1-1_all.deb && rm -Rf cuda-keyring_1.1-1_all.deb ; \ +apt-get update + +# Install VPI +RUN --mount=type=cache,target=/var/cache/apt \ +if [[ ${PLATFORM} == 'arm64' ]]; then \ + # This is a temporary workaround required to install pva-allow-2 in docker which will not be necessary next release + apt-get install pva-allow-2 || true ; \ + rm /var/lib/dpkg/info/pva-allow-2.post* ; \ + dpkg --configure pva-allow-2 ; \ + mkdir -p /etc/pva/allow.d && mkdir -p /tmp/vpi_dev \ + && cd /tmp/vpi_dev \ + && wget https://repo.download.nvidia.com/jetson/common/pool/main/v/vpi3-dev/vpi3-dev_3.2.4_arm64.deb \ + && wget https://repo.download.nvidia.com/jetson/common/pool/main/libn/libnvvpi3/libnvvpi3_3.2.4_arm64.deb \ + && dpkg -i libnvvpi3_3.2.4_arm64.deb \ + && dpkg -i vpi3-dev_3.2.4_arm64.deb \ + && cd /tmp && rm -Rf /tmp/vpi_dev ; \ +fi ; \ +apt-get update && apt-get install -y \ + libnvvpi3 \ + vpi3-dev + +# Install cuDSS +RUN --mount=type=cache,target=/var/cache/apt \ +if [[ ${PLATFORM} == 'arm64' ]]; then \ + wget https://developer.download.nvidia.com/compute/cudss/redist/libcudss/linux-aarch64/libcudss-linux-aarch64-0.3.0.9_cuda12-archive.tar.xz && \ + tar -xvf libcudss-linux-aarch64-0.3.0.9_cuda12-archive.tar.xz -C /usr/local && \ + rm libcudss-linux-aarch64-0.3.0.9_cuda12-archive.tar.xz && \ + cp -r 
/usr/local/libcudss-linux-aarch64-0.3.0.9_cuda12-archive /usr/local/libcudss-linux-0.3.0.9_cuda12-archive ; \ +elif [[ ${PLATFORM} == 'amd64' ]]; then \ + wget https://developer.download.nvidia.com/compute/cudss/redist/libcudss/linux-x86_64/libcudss-linux-x86_64-0.3.0.9_cuda12-archive.tar.xz && \ + tar -xvf libcudss-linux-x86_64-0.3.0.9_cuda12-archive.tar.xz -C /usr/local && \ + rm libcudss-linux-x86_64-0.3.0.9_cuda12-archive.tar.xz && \ + cp -r /usr/local/libcudss-linux-x86_64-0.3.0.9_cuda12-archive /usr/local/libcudss-linux-0.3.0.9_cuda12-archive ; \ +else \ + echo "Unrecognized platform: ${PLATFORM}" && exit 1 ; \ +fi + +# Based on cuDSS version (0.3.0.9) and architecture which is not expected to change frequently +ENV CUDSS_DIR=/usr/local/libcudss-linux-0.3.0.9_cuda12-archive +ENV CMAKE_PREFIX_PATH=$CUDSS_DIR:$CMAKE_PREFIX_PATH +ENV LD_LIBRARY_PATH=$CUDSS_DIR/lib:$LD_LIBRARY_PATH + +# Install CV-CUDA +RUN --mount=type=cache,target=/var/cache/apt \ +cd /tmp ; \ +if [[ ${PLATFORM} == 'arm64' ]]; then \ + wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-lib-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ + dpkg -i nvcv-lib-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ + wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-dev-0.5.0_beta_DP-cuda12-aarch64-linux.deb && \ + dpkg -i nvcv-dev-0.5.0_beta_DP-cuda12-aarch64-linux.deb ; \ +elif [[ ${PLATFORM} == 'amd64' ]]; then \ + wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-lib-0.5.0_beta-cuda12-x86_64-linux.deb && \ + dpkg -i nvcv-lib-0.5.0_beta-cuda12-x86_64-linux.deb && \ + wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-dev-0.5.0_beta-cuda12-x86_64-linux.deb && \ + dpkg -i nvcv-dev-0.5.0_beta-cuda12-x86_64-linux.deb ; \ +else \ + echo "Unrecognized platform: ${PLATFORM}" && exit 1 ; \ +fi + +# -------------------------------------------------------------------------------------------------- + +FROM common AS extended-arm64 
+ +# Update environment +RUN update-alternatives --install /usr/bin/llvm-config llvm-config /usr/bin/llvm-config-14 14 +ENV LD_LIBRARY_PATH="/opt/nvidia/vpi3/lib64:${LD_LIBRARY_PATH}" +ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra:${LD_LIBRARY_PATH}" +ENV LD_LIBRARY_PATH="/usr/local/cuda-12.6/targets/aarch64-linux/lib:${LD_LIBRARY_PATH}" +ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra-egl:${LD_LIBRARY_PATH}" +ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu/tegra/weston:${LD_LIBRARY_PATH}" +ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/lib/aarch64-linux-gnu-host" +ENV PATH="/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/src/tensorrt/bin:${PATH}" + +# Install CUDA packages +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y --no-install-recommends --force-yes \ + cuda-cudart-12-6 \ + cuda-libraries-12-6 \ + cuda-nvml-dev-12-6 \ + cuda-sanitizer-12-6 \ + cuda-toolkit-12-6 \ + libcublas-12-6 \ + libcudnn9 \ + libcusparse-12-6 \ + libnpp-12-6 + +# Install TensorRT +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + libnvinfer10 \ + libnvinfer-plugin10 \ + libnvonnxparsers10 \ + libnvinfer-dispatch10 \ + libnvinfer-bin \ + tensorrt + +ENV TRT_LIB_PATH="/usr/lib/aarch64-linux-gnu" +ENV TRT_INCLUDE_PATH="/usr/include/aarch64-linux-gnu" + +# Install pva-allow-2 as a workaround +RUN --mount=type=cache,target=/var/cache/apt \ + mkdir -p /tmp/pva && cd /tmp/pva \ + && wget https://repo.download.nvidia.com/jetson/common/pool/main/p/pva-allow-2/pva-allow-2_2.0.0~rc3_all.deb \ + && dpkg -i pva-allow-2_2.0.0~rc3_all.deb || true \ + && rm /var/lib/dpkg/info/pva-allow-2.post* \ + && dpkg --configure pva-allow-2 \ + && cd /tmp && rm -Rf /tmp/pva + + +# PyTorch (NV CUDA edition) +# https://docs.nvidia.com/deeplearning/frameworks/install-pytorch-jetson-platform/index.html +RUN python3 -m pip install --no-cache \ + 
https://developer.download.nvidia.cn/compute/redist/jp/v61/pytorch/torch-2.5.0a0+872d972e41.nv24.08.17622132-cp310-cp310-linux_aarch64.whl + +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y --no-install-recommends \ + libb64-0d \ + libre2-9 \ + rapidjson-dev \ + libopenblas-dev \ + libarchive-dev \ + libcusparselt0 \ + libcusparselt-dev + +# Install Triton server from https://github.com/triton-inference-server/server/releases/tag/v2.49.0 +RUN --mount=type=cache,target=/var/cache/apt \ + cd /opt \ + && wget https://github.com/triton-inference-server/server/releases/download/v2.49.0/tritonserver2.49.0-igpu.tar.gz \ + && tar -xzvf tritonserver2.49.0-igpu.tar.gz \ + && chmod 644 /opt/tritonserver/backends/tensorflow/libtensorflow_cc.so.2 \ + && chmod 644 /opt/tritonserver/backends/tensorflow/libtensorflow_framework.so.2 \ + && rm tritonserver2.49.0-igpu.tar.gz + +ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/tritonserver/lib" + +# Install boost version >= 1.78 for boost::span +# Current libboost-dev apt packages are < 1.78, so install from tar.gz +RUN --mount=type=cache,target=/var/cache/apt \ + wget -O /tmp/boost.tar.gz \ + https://boostorg.jfrog.io/artifactory/main/release/1.80.0/source/boost_1_80_0.tar.gz \ + && (cd /tmp && tar xzf boost.tar.gz) \ + && cd /tmp/boost_1_80_0 \ + && ./bootstrap.sh --prefix=/usr \ + && ./b2 install \ + && rm -rf /tmp/boost* + + +# Install jtop +RUN python3 -m pip install -U \ + jetson-stats + +# -------------------------------------------------------------------------------------------------- + +FROM common AS extended-amd64 + +# Update environment +ENV TRT_LIB_PATH="/usr/lib/x86_64-linux-gnu" +ENV TRT_INC_PATH="/usr/include/x86_64-linux-gnu" + + +# Install nvv4l2 for GXF Multimedia h264 codec +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + nvv4l2 \ +&& ln -s /usr/lib/x86_64-linux-gnu/libnvcuvid.so.1 /usr/lib/x86_64-linux-gnu/libnvcuvid.so \ +&& ln -s 
/usr/lib/x86_64-linux-gnu/libnvidia-encode.so.1 /usr/lib/x86_64-linux-gnu/libnvidia-encode.so + +# Pytorch +RUN python3 -m pip install -U --extra-index-url https://download.pytorch.org/whl/cu121 \ + torch \ + torchvision \ + torchaudio + +# Update environment +ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/tritonserver/lib" + +# -------------------------------------------------------------------------------------------------- + +FROM extended-${PLATFORM} AS base + +# Install Ceres +RUN git clone https://github.com/ceres-solver/ceres-solver.git && \ + cd ceres-solver && \ + git checkout 6fb3dae4eeef855568e47ebbb29a8ba4f3c9153f && \ + mkdir build && \ + cd build && \ + cmake .. -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DBUILD_BENCHMARKS=OFF -DUSE_CUDA=ON -Dcudss_DIR=$CUDSS_DIR && \ + make -j 4 && \ + make install && \ + sed -i 's/find_dependency(cudss 0.3.0)/find_dependency(cudss)/' /usr/local/lib/cmake/Ceres/CeresConfig.cmake + +# Install Protobuf +# v5.26.0 is same tag as v26.0 +RUN git clone https://github.com/protocolbuffers/protobuf.git -b v5.26.0 && \ + cd protobuf && \ + git submodule update --init --recursive && \ + mkdir build && \ + cd build && \ + cmake .. \ + -Dprotobuf_BUILD_TESTS=OFF \ + -Dprotobuf_MSVC_STATIC_RUNTIME=OFF \ + -DABSL_PROPAGATE_CXX_STD=ON \ + -Dprotobuf_BUILD_SHARED_LIBS=ON \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON && \ + cmake --build . 
--config Release && \ + make install + +# Specify non-root admin user for container +ARG USERNAME=admin +ENV USERNAME=${USERNAME} + +# Install prerequisites +RUN --mount=type=cache,target=/var/cache/apt \ +apt-get update && apt-get install -y \ + gosu \ + sudo \ + udev + +# Copy scripts +RUN mkdir -p /usr/local/bin/scripts +COPY scripts/*entrypoint.sh /usr/local/bin/scripts/ +RUN chmod +x /usr/local/bin/scripts/*.sh || true + +# Copy script additions +RUN mkdir -p /usr/local/bin/scripts/entrypoint_additions +COPY scripts/entrypoint_addition[s]/*.sh /usr/local/bin/scripts/entrypoint_additions/ +RUN chmod +x /usr/local/bin/scripts/entrypoint_additions/*.sh || true + +# Copy middleware profiles +RUN mkdir -p /usr/local/share/middleware_profiles +COPY middleware_profile[s]/*profile.xml /usr/local/share/middleware_profiles/ + +# Store list of packages (must be last) +RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/base-end-packages.csv diff --git a/docker/Dockerfile.realsense b/docker/Dockerfile.realsense index 15bdf87e..2c91710d 100644 --- a/docker/Dockerfile.realsense +++ b/docker/Dockerfile.realsense @@ -8,10 +8,12 @@ # Dockerfile for setting up Realsense driver # https://github.com/jetsonhacks/installRealSenseSDK -ARG BASE_IMAGE +ARG BASE_IMAGE=ubuntu:22.04 FROM ${BASE_IMAGE} ARG LIBREALSENSE_SOURCE_VERSION=v2.55.1 +ARG REALSENSE_ROS_GIT_URL=https://github.com/NVIDIA-ISAAC-ROS/realsense-ros.git +ARG REALSENSE_ROS_VERSION=release/4.51.1-isaac COPY scripts/build-librealsense.sh /opt/realsense/build-librealsense.sh COPY scripts/install-realsense-dependencies.sh /opt/realsense/install-realsense-dependencies.sh @@ -24,3 +26,19 @@ RUN chmod +x /opt/realsense/install-realsense-dependencies.sh && \ RUN mkdir -p /opt/realsense/ COPY scripts/hotplug-realsense.sh /opt/realsense/hotplug-realsense.sh COPY udev_rules/99-realsense-libusb-custom.rules /etc/udev/rules.d/99-realsense-libusb-custom.rules + +# Install realsense-ros 
ROS 2 package +RUN --mount=type=cache,target=/var/cache/apt \ + mkdir -p ${ROS_ROOT}/src && cd ${ROS_ROOT}/src \ + && git clone ${REALSENSE_ROS_GIT_URL} -b ${REALSENSE_ROS_VERSION} \ + && cd realsense-ros && source ${ROS_ROOT}/setup.bash \ + && cd realsense2_camera_msgs && bloom-generate rosdebian && fakeroot debian/rules binary \ + && cd ../ && apt-get install -y ./*.deb && rm ./*.deb \ + && cd realsense2_description && bloom-generate rosdebian && fakeroot debian/rules binary \ + && cd ../ && apt-get install -y ./*.deb && rm ./*.deb \ + && cd realsense2_camera && bloom-generate rosdebian \ + && sed -i 's/dh_shlibdeps -/dh_shlibdeps --dpkg-shlibdeps-params=--ignore-missing-info -/g' debian/rules \ + && sed -i 's/ros-humble-librealsense2, //g' debian/control \ + && fakeroot debian/rules binary \ + && cd ../ && apt-get install -y ./*.deb && rm ./*.deb \ + && cd ../ && rm -Rf realsense-ros diff --git a/docker/Dockerfile.ros2_humble b/docker/Dockerfile.ros2_humble index 929454c5..22ca57e9 100644 --- a/docker/Dockerfile.ros2_humble +++ b/docker/Dockerfile.ros2_humble @@ -6,7 +6,7 @@ # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. 
-ARG BASE_IMAGE +ARG BASE_IMAGE=ubuntu:22.04 FROM $BASE_IMAGE # Store list of packages (must be first) @@ -14,7 +14,7 @@ RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidi # disable terminal interaction for apt ENV DEBIAN_FRONTEND=noninteractive -ENV SHELL /bin/bash +ENV SHELL=/bin/bash SHELL ["/bin/bash", "-c"] # Env setup @@ -66,6 +66,7 @@ RUN python3 -m pip install -U \ matplotlib \ pandas \ rosbags \ + boto3 \ setuptools==65.7.0 # Install ROS 2 Humble @@ -131,7 +132,6 @@ COPY rosdep/extra_rosdeps.yaml /etc/ros/rosdep/sources.list.d/nvidia-isaac.yaml RUN --mount=type=cache,target=/var/cache/apt \ rosdep init \ && echo "yaml file:///etc/ros/rosdep/sources.list.d/nvidia-isaac.yaml" | tee /etc/ros/rosdep/sources.list.d/00-nvidia-isaac.list \ - && sed -i 's|gbpdistro https://raw.githubusercontent.com/ros/rosdistro/master/releases/fuerte.yaml fuerte||g' /etc/ros/rosdep/sources.list.d/20-default.list \ && rosdep update ####### -- Install updated packages over installed debians @@ -216,8 +216,14 @@ apt-get update && apt-get install -y \ ros-humble-tf2-geometry-msgs \ ros-humble-tf2-ros \ ros-humble-topic-based-ros2-control \ + ros-humble-ur \ + ros-humble-ur-bringup \ + ros-humble-ur-calibration \ + ros-humble-ur-client-library \ + ros-humble-ur-controllers \ ros-humble-ur-description \ ros-humble-ur-moveit-config \ + ros-humble-ur-robot-driver \ ros-humble-ur-msgs \ ros-humble-xacro @@ -275,11 +281,49 @@ RUN --mount=type=cache,target=/var/cache/apt \ RUN python3 -m pip install -U \ paho-mqtt==1.6.1 +# Install cuda-python for isaac_ros_pynitros +RUN python3 -m pip install \ + cuda-python +# Install fake cuda-python Debian package to satisfy apt install check +COPY rosdep/ros-humble-cuda-python-placeholder /tmp/ros-humble-cuda-python-placeholder +RUN --mount=type=cache,target=/var/cache/apt \ + cd /tmp && source ${ROS_ROOT}/setup.bash \ + && dpkg-deb --build ros-humble-cuda-python-placeholder && apt-get install -y 
./ros-humble-cuda-python-placeholder.deb \ + && rm -f ./ros-humble-cuda-python-placeholder.deb + # Patch gtest to make it work with CXX 17 RUN sudo sed -i '917i #ifdef GTEST_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL' /usr/src/googletest/googletest/include/gtest/internal/gtest-internal.h \ && sudo sed -i '920i #endif' /usr/src/googletest/googletest/include/gtest/internal/gtest-internal.h \ && sudo sed -i '2392i #if defined(GTEST_INTERNAL_CPLUSPLUS_LANG) && \\\n GTEST_INTERNAL_CPLUSPLUS_LANG < 201703L\n#define GTEST_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL 1\n#endif' \ /usr/src/googletest/googletest/include/gtest/internal/gtest-port.h +# Install MCAP CLI +ARG TARGETPLATFORM +RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \ + wget https://github.com/foxglove/mcap/releases/download/releases%2Fmcap-cli%2Fv0.0.47/mcap-linux-amd64 && \ + chmod +x mcap-linux-amd64 && \ + mv mcap-linux-amd64 /opt/ros/humble/bin/mcap; \ + elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \ + wget https://github.com/foxglove/mcap/releases/download/releases%2Fmcap-cli%2Fv0.0.47/mcap-linux-arm64 && \ + chmod +x mcap-linux-arm64 && \ + mv mcap-linux-arm64 /opt/ros/humble/bin/mcap; \ + else \ + echo "Unknown architecture, can't install MCAP CLI" && \ + exit -1; \ + fi + +# Install custom vcstool with --delay flag to be robust against +# GitHub rate-limiting (nvbugs/4872446) +RUN mkdir -p /opt/ros/humble && cd /opt/ros/humble \ + && git clone https://github.com/andrewbest-tri/vcstool.git -b andrewbest/delay \ + && echo 'source /opt/ros/humble/vcstool/setup.sh' | tee --append /etc/bash.bashrc + +# Make sure that the workspace is always sourced +RUN echo "source /opt/ros/${ROS_DISTRO}/setup.bash" | sudo tee --append /etc/bash.bashrc + +# Colcon auto complete +RUN echo "source /usr/share/colcon_argcomplete/hook/colcon-argcomplete.bash" | sudo tee --append /etc/bash.bashrc + # Store list of packages (must be last) RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > 
/opt/nvidia/isaac_ros_dev_base/ros2_humble-end-packages.csv + diff --git a/docker/Dockerfile.user b/docker/Dockerfile.user deleted file mode 100644 index fc0baa66..00000000 --- a/docker/Dockerfile.user +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# NVIDIA CORPORATION and its licensors retain all intellectual property -# and proprietary rights in and to this software, related documentation -# and any modifications thereto. Any use, reproduction, disclosure or -# distribution of this software and related documentation without an express -# license agreement from NVIDIA CORPORATION is strictly prohibited. - -ARG BASE_IMAGE -FROM ${BASE_IMAGE} - -# Setup non-root admin user -ARG USERNAME=admin -ARG USER_UID=1000 -ARG USER_GID=1000 - -# Install prerequisites -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - sudo \ - udev - -# Reuse triton-server user as 'admin' user if exists -RUN if [ $(getent group triton-server) ]; then \ - groupmod -o --gid ${USER_GID} -n ${USERNAME} triton-server ; \ - usermod -l ${USERNAME} -u ${USER_UID} -m -d /home/${USERNAME} triton-server ; \ - mkdir -p /home/${USERNAME} ; \ - sudo chown ${USERNAME}:${USERNAME} /home/${USERNAME} ; \ - # Wipe files that may create issues for users with large uid numbers. - rm -f /var/log/lastlog /var/log/faillog ; \ - fi - -# Create the 'admin' user if not already exists -RUN if [ ! 
$(getent passwd ${USERNAME}) ]; then \ - groupadd --gid ${USER_GID} ${USERNAME} ; \ - useradd --no-log-init --uid ${USER_UID} --gid ${USER_GID} -m ${USERNAME} ; \ - fi - -# Update 'admin' user -RUN echo ${USERNAME} ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/${USERNAME} \ - && chmod 0440 /etc/sudoers.d/${USERNAME} \ - && adduser ${USERNAME} video && adduser ${USERNAME} plugdev && adduser ${USERNAME} sudo - -# Copy scripts -RUN mkdir -p /usr/local/bin/scripts -COPY scripts/*entrypoint.sh /usr/local/bin/scripts/ -RUN chmod +x /usr/local/bin/scripts/*.sh - -# Copy middleware profiles -RUN mkdir -p /usr/local/share/middleware_profiles -COPY middleware_profiles/*profile.xml /usr/local/share/middleware_profiles/ - -ENV USERNAME=${USERNAME} -ENV USER_GID=${USER_GID} -ENV USER_UID=${USER_UID} - -# Switch to non-root user and return to root -USER ${USERNAME} -RUN --mount=type=cache,target=/var/cache/apt \ - rosdep update -USER root \ No newline at end of file diff --git a/docker/Dockerfile.x86_64 b/docker/Dockerfile.x86_64 deleted file mode 100644 index 429d60a4..00000000 --- a/docker/Dockerfile.x86_64 +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright (c) 2021-2024, NVIDIA CORPORATION. All rights reserved. -# -# NVIDIA CORPORATION and its licensors retain all intellectual property -# and proprietary rights in and to this software, related documentation -# and any modifications thereto. Any use, reproduction, disclosure or -# distribution of this software and related documentation without an express -# license agreement from NVIDIA CORPORATION is strictly prohibited. 
-# -# Docker file to build on x86_64 -# https://docs.nvidia.com/deeplearning/frameworks/user-guide/index.html -# https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html -ARG BASE_IMAGE=nvcr.io/nvidia/tritonserver:23.10-py3 -FROM ${BASE_IMAGE} - -# Store list of packages (must be first) -RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/x86_64-start-packages.csv - -# disable terminal interaction for apt -ENV DEBIAN_FRONTEND=noninteractive -ENV SHELL /bin/bash -SHELL ["/bin/bash", "-c"] - -# Ensure we have universe -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - software-properties-common \ -&& add-apt-repository universe \ -&& apt-get update - -# Add Isaac apt repository -RUN --mount=type=cache,target=/var/cache/apt \ - wget -qO - https://isaac.download.nvidia.com/isaac-ros/repos.key | apt-key add - && \ - grep -qxF "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" /etc/apt/sources.list || \ - echo "deb https://isaac.download.nvidia.com/isaac-ros/release-3 $(lsb_release -cs) release-3.0" | tee -a /etc/apt/sources.list \ - && apt-get update - -# Fundamentals -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - apt-transport-https \ - bash-completion \ - build-essential \ - ca-certificates \ - clang-format \ - cmake \ - curl \ - git \ - git-lfs \ - gnupg2 \ - iputils-ping \ - locales \ - lsb-release \ - rsync \ - software-properties-common \ - wget \ - vim \ - unzip \ - mlocate \ - libgoogle-glog-dev - -# Python basics -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - python3-pip \ - python3-pybind11 \ - python3-pytest \ - python3-pytest-repeat \ - python3-pytest-rerunfailures \ - python3-pytest-cov - -# Set Python3 as default -RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 - -# Core dev libraries -RUN 
--mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - ffmpeg \ - libasio-dev \ - libbullet-dev \ - libtinyxml2-dev \ - libcunit1-dev \ - libmnl0 \ - libmnl-dev \ - libv4l-dev \ - libyaml-cpp-dev \ - libopencv-dev \ - libpython3.10 \ - libx264-dev \ - kmod \ - patchelf \ - python3-opencv \ - nlohmann-json3-dev - -# Python3 (PIP) -RUN python3 -m pip install -U \ - argcomplete \ - autopep8 \ - flake8==4.0.1 \ - flake8-blind-except \ - flake8-builtins \ - flake8-class-newline \ - flake8-comprehensions \ - flake8-deprecated \ - flake8-docstrings \ - flake8-import-order \ - flake8-quotes \ - gpustat==0.6.0 \ - onnx \ - pydocstyle \ - scikit-learn \ - ninja \ - networkx \ - "numpy>=1.24.4,<2" \ - numpy-quaternion \ - pyyaml \ - "setuptools_scm>=6.2" \ - trimesh \ - "yourdfpy>=0.0.53" \ - "warp-lang>=0.9.0" \ - "scipy>=1.7.0" \ - tqdm \ - importlib_resources - -# Install nvv4l2 for GXF Multimedia h264 codec -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - nvv4l2 \ -&& ln -s /usr/lib/x86_64-linux-gnu/libnvcuvid.so.1 /usr/lib/x86_64-linux-gnu/libnvcuvid.so \ -&& ln -s /usr/lib/x86_64-linux-gnu/libnvidia-encode.so.1 /usr/lib/x86_64-linux-gnu/libnvidia-encode.so - -# Install VPI packages -ARG HAS_GPU="true" -RUN --mount=type=cache,target=/var/cache/apt \ - if [ "$HAS_GPU" = "true" ]; then \ - set -e ; \ - apt-key adv --fetch-key https://repo.download.nvidia.com/jetson/jetson-ota-public.asc ; \ - add-apt-repository "deb http://repo.download.nvidia.com/jetson/x86_64/$(lsb_release -cs) r36.3 main" ; \ - apt-get update ; \ - apt-get install libnvvpi3 vpi3-dev ; \ - fi - -# Pytorch -RUN python3 -m pip install -U --extra-index-url https://download.pytorch.org/whl/cu121 \ - torch \ - torchvision \ - torchaudio - -# Install Tao converter -RUN mkdir -p /opt/nvidia/tao && cd /opt/nvidia/tao && \ - wget --content-disposition 
'https://api.ngc.nvidia.com/v2/resources/org/nvidia/team/tao/tao-converter/v5.1.0_8.6.3.1_x86/files?redirect=true&path=tao-converter' -O tao-converter && \ - chmod 755 tao-converter - -ENV PATH="${PATH}:/opt/nvidia/tao" -ENV TRT_LIB_PATH="/usr/lib/x86_64-linux-gnu" -ENV TRT_INC_PATH="/usr/include/x86_64-linux-gnu" - -# Update environment -ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/tritonserver/lib" - -# Install CV-CUDA -RUN --mount=type=cache,target=/var/cache/apt \ - cd /tmp && \ - wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-lib-0.5.0_beta-cuda12-x86_64-linux.deb && \ - dpkg -i nvcv-lib-0.5.0_beta-cuda12-x86_64-linux.deb && \ - wget https://github.com/CVCUDA/CV-CUDA/releases/download/v0.5.0-beta/nvcv-dev-0.5.0_beta-cuda12-x86_64-linux.deb && \ - dpkg -i nvcv-dev-0.5.0_beta-cuda12-x86_64-linux.deb - -# Add MQTT binaries and libraries -RUN --mount=type=cache,target=/var/cache/apt \ -apt-add-repository ppa:mosquitto-dev/mosquitto-ppa \ -&& apt-get update && apt-get install -y \ - mosquitto \ - mosquitto-clients - -# Store list of packages (must be last) -RUN mkdir -p /opt/nvidia/isaac_ros_dev_base && dpkg-query -W | sort > /opt/nvidia/isaac_ros_dev_base/x86_64-end-packages.csv \ No newline at end of file diff --git a/docker/Dockerfile.x86_64 b/docker/Dockerfile.x86_64 new file mode 120000 index 00000000..bf38c169 --- /dev/null +++ b/docker/Dockerfile.x86_64 @@ -0,0 +1 @@ +Dockerfile.base \ No newline at end of file diff --git a/docker/Dockerfile.zed b/docker/Dockerfile.zed deleted file mode 100644 index 35fb532c..00000000 --- a/docker/Dockerfile.zed +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# NVIDIA CORPORATION and its licensors retain all intellectual property -# and proprietary rights in and to this software, related documentation -# and any modifications thereto. 
Any use, reproduction, disclosure or -# distribution of this software and related documentation without an express -# license agreement from NVIDIA CORPORATION is strictly prohibited. - -ARG BASE_IMAGE -FROM ${BASE_IMAGE} - -ARG ZED_SDK_MAJOR=4 -ARG ZED_SDK_MINOR=0 - -# zed-ros2-wrapper dependencies -RUN --mount=type=cache,target=/var/cache/apt \ -apt-get update && apt-get install -y \ - libgeographic-dev \ - ros-humble-geographic-info \ - ros-humble-nmea-msgs \ - ros-humble-robot-localization \ - ros-humble-xacro - -# The zed installation script expects to be run as non-root user and needs the USER ENV variable to be set -ENV USER=${USERNAME} -USER ${USERNAME} - -COPY scripts/install-zed-x86_64.sh /opt/zed/install-zed-x86_64.sh -COPY scripts/install-zed-aarch64.sh /opt/zed/install-zed-aarch64.sh - -RUN --mount=type=cache,target=/var/cache/apt \ - if [ "$(uname -m)" = "x86_64" ]; then \ - sudo chmod +x /opt/zed/install-zed-x86_64.sh; \ - /opt/zed/install-zed-x86_64.sh; \ - else \ - sudo chmod +x /opt/zed/install-zed-aarch64.sh; \ - /opt/zed/install-zed-aarch64.sh; \ - fi - -# Revert to root user -USER root diff --git a/docker/rosdep/extra_rosdeps.yaml b/docker/rosdep/extra_rosdeps.yaml index f439399c..175b6997 100644 --- a/docker/rosdep/extra_rosdeps.yaml +++ b/docker/rosdep/extra_rosdeps.yaml @@ -5,6 +5,9 @@ # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. 
+ +# Reformat this file with `yq 'sort_keys(.)' -i ` + aandd_ekew_driver_py: ubuntu: focal: [ros-humble-aandd-ekew-driver-py] @@ -1087,6 +1090,10 @@ create_robot: ubuntu: focal: [ros-humble-create-robot] jammy: [ros-humble-create-robot] +cuda_python: + ubuntu: + focal: [ros-humble-cuda-python-placeholder] + jammy: [ros-humble-cuda-python-placeholder] cudnn_cmake_module: ubuntu: focal: [ros-humble-cudnn-cmake-module] @@ -2759,14 +2766,42 @@ irobot_create_toolbox_dbgsym: ubuntu: focal: [ros-humble-irobot-create-toolbox-dbgsym] jammy: [ros-humble-irobot-create-toolbox-dbgsym] +isaac_common: + ubuntu: + focal: [ros-humble-isaac-common] + jammy: [ros-humble-isaac-common] +isaac_common_py: + ubuntu: + focal: [ros-humble-isaac-common-py] + jammy: [ros-humble-isaac-common-py] isaac_manipulator_bringup: ubuntu: focal: [ros-humble-isaac-manipulator-bringup] jammy: [ros-humble-isaac-manipulator-bringup] -isaac_mapping_and_localization: +isaac_manipulator_interfaces: + ubuntu: + focal: [ros-humble-isaac-manipulator-interfaces] + jammy: [ros-humble-isaac-manipulator-interfaces] +isaac_manipulator_pick_and_place: + ubuntu: + focal: [ros-humble-isaac-manipulator-pick-and-place] + jammy: [ros-humble-isaac-manipulator-pick-and-place] +isaac_manipulator_ros_python_utils: + ubuntu: + focal: [ros-humble-isaac-manipulator-ros-python-utils] + jammy: [ros-humble-isaac-manipulator-ros-python-utils] +isaac_manipulator_servers: + ubuntu: + focal: [ros-humble-isaac-manipulator-servers] + jammy: [ros-humble-isaac-manipulator-servers] +isaac_mapping_and_localization_ros: ubuntu: - focal: [ros-humble-isaac-mapping-and-localization] - jammy: [ros-humble-isaac-mapping-and-localization] + focal: [ros-humble-isaac-mapping-and-localization-ros] + jammy: [ros-humble-isaac-mapping-and-localization-ros] +isaac_mapping_ros: + ubuntu: + focal: [ros-humble-isaac-mapping-ros] + jammy: [ros-humble-isaac-mapping-ros] isaac_ros_apriltag: ubuntu: focal: [ros-humble-isaac-ros-apriltag] @@ -2815,10 +2850,10 @@ 
isaac_ros_bi3d_msgs: ubuntu: focal: [ros-noetic-isaac-ros-bi3d-msgs] jammy: [ros-noetic-isaac-ros-bi3d-msgs] -isaac_ros_camera_localization: +isaac_ros_calibration_validation: ubuntu: - focal: [ros-humble-isaac-ros-camera-localization] - jammy: [ros-humble-isaac-ros-camera-localization] + focal: [ros-humble-isaac-ros-calibration-validation] + jammy: [ros-humble-isaac-ros-calibration-validation] isaac_ros_centerpose: ubuntu: focal: [ros-humble-isaac-ros-centerpose] @@ -2843,10 +2878,22 @@ isaac_ros_cumotion_examples: ubuntu: focal: [ros-humble-isaac-ros-cumotion-examples] jammy: [ros-humble-isaac-ros-cumotion-examples] +isaac_ros_cumotion_interfaces: + ubuntu: + focal: [ros-humble-isaac-ros-cumotion-interfaces] + jammy: [ros-humble-isaac-ros-cumotion-interfaces] isaac_ros_cumotion_moveit: ubuntu: focal: [ros-humble-isaac-ros-cumotion-moveit] jammy: [ros-humble-isaac-ros-cumotion-moveit] +isaac_ros_cumotion_object_attachment: + ubuntu: + focal: [ros-humble-isaac-ros-cumotion-object-attachment] + jammy: [ros-humble-isaac-ros-cumotion-object-attachment] +isaac_ros_cumotion_python_utils: + ubuntu: + focal: [ros-humble-isaac-ros-cumotion-python-utils] + jammy: [ros-humble-isaac-ros-cumotion-python-utils] isaac_ros_cumotion_robot_description: ubuntu: focal: [ros-humble-isaac-ros-cumotion-robot-description] @@ -2871,6 +2918,10 @@ isaac_ros_deepmap_data_converter: ubuntu: focal: [ros-humble-isaac-ros-deepmap-data-converter] jammy: [ros-humble-isaac-ros-deepmap-data-converter] +isaac_ros_deepmap_data_converter: + ubuntu: + focal: [ros-humble-isaac-ros-deepmap-data-converter] + jammy: [ros-humble-isaac-ros-deepmap-data-converter] isaac_ros_depth_image_proc: ubuntu: focal: [ros-humble-isaac-ros-depth-image-proc] @@ -2903,6 +2954,10 @@ isaac_ros_dope_benchmark: ubuntu: focal: [ros-humble-isaac-ros-dope-benchmark] jammy: [ros-humble-isaac-ros-dope-benchmark] +isaac_ros_esdf_visualizer: + ubuntu: + focal: [ros-humble-isaac-ros-esdf-visualizer] + jammy: 
[ros-humble-isaac-ros-esdf-visualizer] isaac_ros_ess: ubuntu: focal: [ros-humble-isaac-ros-ess] @@ -2923,18 +2978,6 @@ isaac_ros_examples: ubuntu: focal: [ros-humble-isaac-ros-examples] jammy: [ros-humble-isaac-ros-examples] -isaac_ros_goal_setter_interfaces: - ubuntu: - focal: [ros-humble-isaac-ros-goal-setter-interfaces] - jammy: [ros-humble-isaac-ros-goal-setter-interfaces] -isaac_ros_usb_cam: - ubuntu: - focal: [ros-humble-isaac-ros-usb-cam] - jammy: [ros-humble-isaac-ros-usb-cam] -isaac_ros_realsense: - ubuntu: - focal: [ros-humble-isaac-ros-realsense] - jammy: [ros-humble-isaac-ros-realsense] isaac_ros_foundationpose: ubuntu: focal: [ros-humble-isaac-ros-foundationpose] @@ -2943,6 +2986,22 @@ isaac_ros_foundationpose_benchmark: ubuntu: focal: [ros-humble-isaac-ros-foundationpose-benchmark] jammy: [ros-humble-isaac-ros-foundationpose-benchmark] +isaac_ros_franka_cumotion_benchmark: + ubuntu: + focal: [ros-humble-isaac-ros-franka-cumotion-benchmark] + jammy: [ros-humble-isaac-ros-franka-cumotion-benchmark] +isaac_ros_franka_ompl_benchmark: + ubuntu: + focal: [ros-humble-isaac-ros-franka-ompl-benchmark] + jammy: [ros-humble-isaac-ros-franka-ompl-benchmark] +isaac_ros_goal_setter_interfaces: + ubuntu: + focal: [ros-humble-isaac-ros-goal-setter-interfaces] + jammy: [ros-humble-isaac-ros-goal-setter-interfaces] +isaac_ros_ground_calibration: + ubuntu: + focal: [ros-humble-isaac-ros-ground-calibration] + jammy: [ros-humble-isaac-ros-ground-calibration] isaac_ros_gxf: ubuntu: focal: [ros-humble-isaac-ros-gxf] @@ -3023,6 +3082,10 @@ isaac_ros_launch_utils: ubuntu: focal: [ros-humble-isaac-ros-launch-utils] jammy: [ros-humble-isaac-ros-launch-utils] +isaac_ros_lidar_camera_projection: + ubuntu: + focal: [ros-humble-isaac-ros-lidar-camera-projection] + jammy: [ros-humble-isaac-ros-lidar-camera-projection] isaac_ros_managed_nitros: ubuntu: focal: [ros-humble-isaac-ros-managed-nitros] @@ -3031,6 +3094,10 @@ isaac_ros_mission_client: ubuntu: focal: 
[ros-humble-isaac-ros-mission-client] jammy: [ros-humble-isaac-ros-mission-client] +isaac_ros_moveit_benchmark: + ubuntu: + focal: [ros-humble-isaac-ros-moveit-benchmark] + jammy: [ros-humble-isaac-ros-moveit-benchmark] isaac_ros_moveit_goal_setter: ubuntu: focal: [ros-humble-isaac-ros-moveit-goal-setter] @@ -3039,18 +3106,10 @@ isaac_ros_mqtt_bridge: ubuntu: focal: [ros-humble-isaac-ros-mqtt-bridge] jammy: [ros-humble-isaac-ros-mqtt-bridge] -isaac_ros_nav_goal_generator: - ubuntu: - focal: [ros-humble-isaac-ros-nav-goal-generator] - jammy: [ros-humble-isaac-ros-nav-goal-generator] isaac_ros_nitros: ubuntu: focal: [ros-humble-isaac-ros-nitros] jammy: [ros-humble-isaac-ros-nitros] -isaac_ros_nitros_april_tag_detection_array_type: - ubuntu: - focal: [ros-humble-isaac-ros-nitros-april-tag-detection-array-type] - jammy: [ros-humble-isaac-ros-nitros-april-tag-detection-array-type] isaac_ros_nitros_battery_state_type: ubuntu: focal: [ros-humble-isaac-ros-nitros-battery-state-type] @@ -3123,10 +3182,6 @@ isaac_ros_nitros_imu_type: ubuntu: focal: [ros-humble-isaac-ros-nitros-imu-type] jammy: [ros-humble-isaac-ros-nitros-imu-type] -isaac_ros_nitros_interfaces: - ubuntu: - focal: [ros-humble-isaac-ros-nitros-interfaces] - jammy: [ros-humble-isaac-ros-nitros-interfaces] isaac_ros_nitros_msgs: ubuntu: focal: [ros-noetic-isaac-ros-nitros-msgs] @@ -3167,10 +3222,18 @@ isaac_ros_nitros_twist_type: ubuntu: focal: [ros-humble-isaac-ros-nitros-twist-type] jammy: [ros-humble-isaac-ros-nitros-twist-type] +isaac_ros_nova: + ubuntu: + focal: [ros-humble-isaac-ros-nova] + jammy: [ros-humble-isaac-ros-nova] isaac_ros_nova_interfaces: ubuntu: focal: [ros-humble-isaac-ros-nova-interfaces] jammy: [ros-humble-isaac-ros-nova-interfaces] +isaac_ros_nova_recorder: + ubuntu: + focal: [ros-humble-isaac-ros-nova-recorder] + jammy: [ros-humble-isaac-ros-nova-recorder] isaac_ros_nvblox: ubuntu: focal: [ros-humble-isaac-ros-nvblox] @@ -3199,18 +3262,22 @@ isaac_ros_owl: ubuntu: focal: 
[ros-humble-isaac-ros-owl] jammy: [ros-humble-isaac-ros-owl] +isaac_ros_peoplenet_models_install: + ubuntu: + focal: [ros-humble-isaac-ros-peoplenet-models-install] + jammy: [ros-humble-isaac-ros-peoplenet-models-install] isaac_ros_peoplesemseg_models_install: ubuntu: focal: [ros-humble-isaac-ros-peoplesemseg-models-install] jammy: [ros-humble-isaac-ros-peoplesemseg-models-install] -isaac_ros_perceptor_constants: - ubuntu: - focal: [ros-humble-isaac-ros-perceptor-constants] - jammy: [ros-humble-isaac-ros-perceptor-constants] isaac_ros_perceptor_bringup: ubuntu: focal: [ros-humble-isaac-ros-perceptor-bringup] jammy: [ros-humble-isaac-ros-perceptor-bringup] +isaac_ros_perceptor_python_utils: + ubuntu: + focal: [ros-humble-isaac-ros-perceptor-python-utils] + jammy: [ros-humble-isaac-ros-perceptor-python-utils] isaac_ros_perceptor_nova_benchmark: ubuntu: focal: [ros-humble-isaac-ros-perceptor-nova-benchmark] @@ -3231,6 +3298,22 @@ isaac_ros_pointcloud_utils: ubuntu: focal: [ros-humble-isaac-ros-pointcloud-utils] jammy: [ros-humble-isaac-ros-pointcloud-utils] +isaac_ros_pose_proc: + ubuntu: + focal: [ros-humble-isaac-ros-pose-proc] + jammy: [ros-humble-isaac-ros-pose-proc] +isaac_ros_pynitros: + ubuntu: + focal: [ros-humble-isaac-ros-pynitros] + jammy: [ros-humble-isaac-ros-pynitros] +isaac_ros_r2b_galileo: + ubuntu: + focal: [ros-humble-isaac-ros-r2b-galileo] + jammy: [ros-humble-isaac-ros-r2b-galileo] +isaac_ros_realsense: + ubuntu: + focal: [ros-humble-isaac-ros-realsense] + jammy: [ros-humble-isaac-ros-realsense] isaac_ros_realsense_ess_benchmark: ubuntu: focal: [ros-humble-isaac-ros-realsense-ess-benchmark] @@ -3247,6 +3330,10 @@ isaac_ros_ros2_converter: ubuntu: focal: [ros-humble-isaac-ros-ros2-converter] jammy: [ros-humble-isaac-ros-ros2-converter] +isaac_ros_rosbag_utils: + ubuntu: + focal: [ros-humble-isaac-ros-rosbag-utils] + jammy: [ros-humble-isaac-ros-rosbag-utils] isaac_ros_rtdetr: ubuntu: focal: [ros-humble-isaac-ros-rtdetr] @@ -3315,6 +3402,10 @@ 
isaac_ros_test: ubuntu: focal: [ros-humble-isaac-ros-test] jammy: [ros-humble-isaac-ros-test] +isaac_ros_test_cmake: + ubuntu: + focal: [ros-humble-isaac-ros-test-cmake] + jammy: [ros-humble-isaac-ros-test-cmake] isaac_ros_triton: ubuntu: focal: [ros-humble-isaac-ros-triton] @@ -3331,6 +3422,18 @@ isaac_ros_unet_benchmark: ubuntu: focal: [ros-humble-isaac-ros-unet-benchmark] jammy: [ros-humble-isaac-ros-unet-benchmark] +isaac_ros_ur5_cumotion_benchmark: + ubuntu: + focal: [ros-humble-isaac-ros-ur5-cumotion-benchmark] + jammy: [ros-humble-isaac-ros-ur5-cumotion-benchmark] +isaac_ros_ur5_ompl_benchmark: + ubuntu: + focal: [ros-humble-isaac-ros-ur5-ompl-benchmark] + jammy: [ros-humble-isaac-ros-ur5-ompl-benchmark] +isaac_ros_usb_cam: + ubuntu: + focal: [ros-humble-isaac-ros-usb-cam] + jammy: [ros-humble-isaac-ros-usb-cam] isaac_ros_vda5050_nav2_client: ubuntu: focal: [ros-humble-isaac-ros-vda5050-nav2-client] @@ -3339,6 +3442,10 @@ isaac_ros_vda5050_nav2_client_bringup: ubuntu: focal: [ros-humble-isaac-ros-vda5050-nav2-client-bringup] jammy: [ros-humble-isaac-ros-vda5050-nav2-client-bringup] +isaac_ros_visual_global_localization: + ubuntu: + focal: [ros-humble-isaac-ros-visual-global-localization] + jammy: [ros-humble-isaac-ros-visual-global-localization] isaac_ros_visual_slam: ubuntu: focal: [ros-humble-isaac-ros-visual-slam] @@ -3355,6 +3462,18 @@ isaac_ros_visual_slam_msgs: ubuntu: focal: [ros-noetic-isaac-ros-visual-slam-msgs] jammy: [ros-noetic-isaac-ros-visual-slam-msgs] +isaac_ros_wifi_common: + ubuntu: + focal: [ros-humble-isaac-ros-wifi-common] + jammy: [ros-humble-isaac-ros-wifi-common] +isaac_ros_wifi_localizer: + ubuntu: + focal: [ros-humble-isaac-ros-wifi-localizer] + jammy: [ros-humble-isaac-ros-wifi-localizer] +isaac_ros_wifi_mapping: + ubuntu: + focal: [ros-humble-isaac-ros-wifi-mapping] + jammy: [ros-humble-isaac-ros-wifi-mapping] isaac_ros_wifi_scan: ubuntu: focal: [ros-humble-isaac-ros-wifi-scan] @@ -4109,6 +4228,10 @@ mimick_vendor: ubuntu: focal: 
[ros-humble-mimick-vendor] jammy: [ros-humble-mimick-vendor] +mission_monitor: + ubuntu: + focal: [ros-humble-mission-monitor] + jammy: [ros-humble-mission-monitor] mobileye_560_660_msgs: ubuntu: focal: [ros-humble-mobileye-560-660-msgs] @@ -4741,38 +4864,6 @@ navigation2: ubuntu: focal: [ros-humble-navigation2] jammy: [ros-humble-navigation2] -navperf: - ubuntu: - focal: [ros-humble-navperf] - jammy: [ros-humble-navperf] -navperf_bringup: - ubuntu: - focal: [ros-humble-navperf-bringup] - jammy: [ros-humble-navperf-bringup] -navperf_compute_monitor: - ubuntu: - focal: [ros-humble-navperf-compute-monitor] - jammy: [ros-humble-navperf-compute-monitor] -navperf_commander: - ubuntu: - focal: [ros-humble-navperf-commander] - jammy: [ros-humble-navperf-commander] -navperf_evaluator: - ubuntu: - focal: [ros-humble-navperf-evaluator] - jammy: [ros-humble-navperf-evaluator] -navperf_msgs: - ubuntu: - focal: [ros-humble-navperf-msgs] - jammy: [ros-humble-navperf-msgs] -mission_monitor: - ubuntu: - focal: [ros-humble-mission-monitor] - jammy: [ros-humble-mission-monitor] -navperf_utils: - ubuntu: - focal: [ros-humble-navperf-utils] - jammy: [ros-humble-navperf-utils] negotiated: ubuntu: focal: [ros-humble-negotiated] @@ -4853,14 +4944,14 @@ nova_carter_docking: ubuntu: focal: [ros-humble-nova-carter-docking] jammy: [ros-humble-nova-carter-docking] -nova_carter_navigation: - ubuntu: - focal: [ros-humble-nova-carter-navigation] - jammy: [ros-humble-nova-carter-navigation] nova_carter_example_data: ubuntu: focal: [ros-humble-nova-carter-example-data] jammy: [ros-humble-nova-carter-example-data] +nova_carter_navigation: + ubuntu: + focal: [ros-humble-nova-carter-navigation] + jammy: [ros-humble-nova-carter-navigation] nova_developer_kit_bringup: ubuntu: focal: [ros-humble-nova-developer-kit-bringup] @@ -4969,6 +5060,10 @@ nvblox_test: ubuntu: focal: [ros-humble-nvblox-test] jammy: [ros-humble-nvblox-test] +nvblox_test_data: + ubuntu: + focal: [ros-humble-nvblox-test-data] + 
jammy: [ros-humble-nvblox-test-data] object_recognition_msgs: ubuntu: focal: [ros-humble-object-recognition-msgs] @@ -5641,6 +5736,10 @@ position_controllers_dbgsym: ubuntu: focal: [ros-humble-position-controllers-dbgsym] jammy: [ros-humble-position-controllers-dbgsym] +posix_ipc: + ubuntu: + focal: [python3-posix-ipc] + jammy: [python3-posix-ipc] proxsuite: ubuntu: focal: [ros-humble-proxsuite] @@ -5677,10 +5776,34 @@ pybind11_vendor: ubuntu: focal: [ros-humble-pybind11-vendor] jammy: [ros-humble-pybind11-vendor] +python3-av-pip: + ubuntu: + focal: + pip: + packages: [av] + jammy: + pip: + packages: [av] +python3-pydantic-pip: + ubuntu: + focal: + pip: + packages: [pydantic] + jammy: + pip: + packages: [pydantic] python3-pymupdf: ubuntu: focal: [python3-fitz] jammy: [python3-fitz] +python3-pytransform3d-pip: + ubuntu: + focal: + pip: + packages: [pytransform3d] + jammy: + pip: + packages: [pytransform3d] python_cmake_module: ubuntu: focal: [ros-humble-python-cmake-module] @@ -6517,6 +6640,38 @@ rmw_implementation_dbgsym: ubuntu: focal: [ros-humble-rmw-implementation-dbgsym] jammy: [ros-humble-rmw-implementation-dbgsym] +roboeval: + ubuntu: + focal: [ros-humble-roboeval] + jammy: [ros-humble-roboeval] +roboeval_bringup: + ubuntu: + focal: [ros-humble-roboeval-bringup] + jammy: [ros-humble-roboeval-bringup] +roboeval_commander: + ubuntu: + focal: [ros-humble-roboeval-commander] + jammy: [ros-humble-roboeval-commander] +roboeval_compute_monitor: + ubuntu: + focal: [ros-humble-roboeval-compute-monitor] + jammy: [ros-humble-roboeval-compute-monitor] +roboeval_evaluator: + ubuntu: + focal: [ros-humble-roboeval-evaluator] + jammy: [ros-humble-roboeval-evaluator] +roboeval_interfaces: + ubuntu: + focal: [ros-humble-roboeval-interfaces] + jammy: [ros-humble-roboeval-interfaces] +roboeval_runner: + ubuntu: + focal: [ros-humble-roboeval-runner] + jammy: [ros-humble-roboeval-runner] +roboeval_utils: + ubuntu: + focal: [ros-humble-roboeval-utils] + jammy: 
[ros-humble-roboeval-utils] robot_calibration: ubuntu: focal: [ros-humble-robot-calibration] @@ -7129,6 +7284,10 @@ rospy: ubuntu: focal: [ros-noetic-rospy] jammy: [ros-noetic-rospy] +rosx_introspection: + ubuntu: + focal: [ros-humble-rosx-introspection] + jammy: [ros-humble-rosx-introspection] rot_conv: ubuntu: focal: [ros-humble-rot-conv] @@ -8737,6 +8896,14 @@ ur_robot_driver: ubuntu: focal: [ros-humble-ur-robot-driver] jammy: [ros-humble-ur-robot-driver] +ur5_gripper_moveit_config: + ubuntu: + focal: [ros-humble-ur5-gripper-moveit-config] + jammy: [ros-humble-ur5-gripper-moveit-config] +ur5_robotiq_85_description: + ubuntu: + focal: [ros-humble-ur5-robotiq-85-description] + jammy: [ros-humble-ur5-robotiq-85-description] ur_robot_driver_dbgsym: ubuntu: focal: [ros-humble-ur-robot-driver-dbgsym] @@ -9081,3 +9248,11 @@ zstd_vendor_dbgsym: ubuntu: focal: [ros-humble-zstd-vendor-dbgsym] jammy: [ros-humble-zstd-vendor-dbgsym] +nodejs: + ubuntu: + focal: [nodejs] + jammy: [nodejs] +yarnpkg: + ubuntu: + focal: [yarn] + jammy: [yarn] diff --git a/docker/rosdep/ros-humble-cuda-python-placeholder/DEBIAN/control b/docker/rosdep/ros-humble-cuda-python-placeholder/DEBIAN/control new file mode 100644 index 00000000..975f4550 --- /dev/null +++ b/docker/rosdep/ros-humble-cuda-python-placeholder/DEBIAN/control @@ -0,0 +1,7 @@ +Package: ros-humble-cuda-python-placeholder +Version: 0.0.0-0jammy +Architecture: all +Maintainer: Isaac ROS Maintainers +Section: misc +Priority: optional +Description: Placeholder package to satisfy dependency for cuda-python diff --git a/docker/scripts/install-zed-aarch64.sh b/docker/scripts/install-zed-aarch64.sh index 9699d895..efc8cf3a 100644 --- a/docker/scripts/install-zed-aarch64.sh +++ b/docker/scripts/install-zed-aarch64.sh @@ -7,9 +7,9 @@ sudo apt-get install --no-install-recommends lsb-release wget less zstd udev sud # Download zed SDK installation RUN file to /tmp directory cd /tmp -wget -q --no-check-certificate -O ZED_SDK_Linux.run +wget -q 
--no-check-certificate -O ZED_SDK_Linux.run https://stereolabs.sfo2.cdn.digitaloceanspaces.com/zedsdk/4.1/ZED_SDK_Tegra_L4T36.3_v4.1.3.zstd.run -chmod +x ZED_SDK_Linux.run ; ./ZED_SDK_Linux.run silent skip_od_module skip_python skip_drivers - +wget -q --no-check-certificate -O ZED_SDK_Linux.run +wget -q --no-check-certificate -O ZED_SDK_Linux.run https://stereolabs.sfo2.cdn.digitaloceanspaces.com/zedsdk/4.2/ZED_SDK_Tegra_L4T36.4_v4.2.2.zstd.run +sudo chmod 777 ./ZED_SDK_Linux.run +sudo -u admin ./ZED_SDK_Linux.run silent skip_od_module skip_python skip_drivers # Symlink required to use the streaming features on Jetson inside a container, based on # https://github.com/stereolabs/zed-docker/blob/fd514606174d8bb09f21a229f1099205b284ecb6/4.X/l4t/devel/Dockerfile#L27C5-L27C95 sudo ln -sf /usr/lib/aarch64-linux-gnu/tegra/libv4l2.so.0 /usr/lib/aarch64-linux-gnu/libv4l2.so @@ -17,4 +17,4 @@ sudo ln -sf /usr/lib/aarch64-linux-gnu/tegra/libv4l2.so.0 /usr/lib/aarch64-linux # Cleanup sudo rm -rf /usr/local/zed/resources/* rm -rf ZED_SDK_Linux.run -sudo rm -rf /var/lib/apt/lists/* \ No newline at end of file +sudo rm -rf /var/lib/apt/lists/* diff --git a/docker/scripts/install-zed-x86_64.sh b/docker/scripts/install-zed-x86_64.sh index 484babc8..4dd83bd6 100644 --- a/docker/scripts/install-zed-x86_64.sh +++ b/docker/scripts/install-zed-x86_64.sh @@ -3,6 +3,7 @@ # Extract ubuntu release year from /etc/lsb-release # Expects "/etc/lsb-release" to contain a line similar to "DISTRIB_RELEASE=20.04" export UBUNTU_RELEASE_YEAR="$(grep -o -P 'DISTRIB_RELEASE=.{0,2}' /etc/lsb-release | cut -d= -f2)" +export ZED_SDK_MAJOR=4 ZED_SDK_MINOR=2 # Extract cuda major and minor version from nvcc --version # Expects "nvcc --version" to contain a line similar to "release 11.8" @@ -14,19 +15,15 @@ export CUDA_MINOR="$(nvcc --version | grep -o -P ' release .{0,4}' | cut -d. 
-f2 sudo apt-get update -y || true sudo apt-get install --no-install-recommends lsb-release wget less udev sudo zstd build-essential cmake libpng-dev libgomp1 -y -# TODO: Remove this when zed-ros2-wrapper has a compatible version with ZED_SDK 4.1 (which supports cuda 12.2). -CUDA_MAJOR=12 -CUDA_MINOR=1 - # Download zed SDK installation RUN file to /tmp directory cd /tmp -wget -q -O ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run https://download.stereolabs.com/zedsdk/${ZED_SDK_MAJOR}.${ZED_SDK_MINOR}/cu${CUDA_MAJOR}${CUDA_MINOR%.*}/ubuntu${UBUNTU_RELEASE_YEAR} -chmod +x ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run ; ./ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run -- silent skip_od_module skip_python skip_cuda +wget -q -O ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run https://download.stereolabs.com/zedsdk/${ZED_SDK_MAJOR}.${ZED_SDK_MINOR}/cu${CUDA_MAJOR}/ubuntu${UBUNTU_RELEASE_YEAR} +chmod +x ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run ; sudo -u admin ./ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run -- silent skip_od_module skip_python skip_cuda -# Symlink required for zed SDK, based on +# Symlink required for zed SDK, based on # https://github.com/stereolabs/zed-docker/blob/fd514606174d8bb09f21a229f1099205b284ecb6/4.X/ubuntu/devel/Dockerfile#L24 sudo ln -sf /lib/x86_64-linux-gnu/libusb-1.0.so.0 /usr/lib/x86_64-linux-gnu/libusb-1.0.so # Cleanup rm ZED_SDK_Linux_Ubuntu${UBUNTU_RELEASE_YEAR}.run -sudo rm -rf /var/lib/apt/lists/* \ No newline at end of file +sudo rm -rf /var/lib/apt/lists/* diff --git a/docker/scripts/workspace-entrypoint.sh b/docker/scripts/workspace-entrypoint.sh index 50c76cfd..2dfd97e4 100755 --- a/docker/scripts/workspace-entrypoint.sh +++ b/docker/scripts/workspace-entrypoint.sh @@ -8,11 +8,53 @@ # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. 
-# Build ROS dependency -echo "source /opt/ros/${ROS_DISTRO}/setup.bash" >> ~/.bashrc -source /opt/ros/${ROS_DISTRO}/setup.bash +echo "Creating non-root container '${USERNAME}' for host user uid=${HOST_USER_UID}:gid=${HOST_USER_GID}" + +if [ ! $(getent group ${HOST_USER_GID}) ]; then + groupadd --gid ${HOST_USER_GID} ${USERNAME} &>/dev/null +else + CONFLICTING_GROUP_NAME=`getent group ${HOST_USER_GID} | cut -d: -f1` + groupmod -o --gid ${HOST_USER_GID} -n ${USERNAME} ${CONFLICTING_GROUP_NAME} +fi + +if [ ! $(getent passwd ${HOST_USER_UID}) ]; then + useradd --no-log-init --uid ${HOST_USER_UID} --gid ${HOST_USER_GID} -m ${USERNAME} &>/dev/null +else + CONFLICTING_USER_NAME=`getent passwd ${HOST_USER_UID} | cut -d: -f1` + usermod -l ${USERNAME} -u ${HOST_USER_UID} -m -d /home/${USERNAME} ${CONFLICTING_USER_NAME} &>/dev/null + mkdir -p /home/${USERNAME} + # Wipe files that may create issues for users with large uid numbers. + rm -f /var/log/lastlog /var/log/faillog +fi + +# Update 'admin' user +chown ${USERNAME}:${USERNAME} /home/${USERNAME} +echo ${USERNAME} ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/${USERNAME} +chmod 0440 /etc/sudoers.d/${USERNAME} +adduser ${USERNAME} video >/dev/null +adduser ${USERNAME} plugdev >/dev/null +adduser ${USERNAME} sudo >/dev/null + +# If jtop present, give the user access +if [ -S /run/jtop.sock ]; then + JETSON_STATS_GID="$(stat -c %g /run/jtop.sock)" + addgroup --gid ${JETSON_STATS_GID} jtop >/dev/null + adduser ${USERNAME} jtop >/dev/null +fi + +# Run all entrypoint additions +shopt -s nullglob +for addition in /usr/local/bin/scripts/entrypoint_additions/*.sh; do + if [[ "${addition}" =~ ".user." 
]]; then + echo "Running entrypoint extension: ${addition} as user ${USERNAME}" + gosu ${USERNAME} ${addition} + else + echo "Sourcing entrypoint extension: ${addition}" + source ${addition} + fi +done # Restart udev daemon -sudo service udev restart +service udev restart -$@ +exec gosu ${USERNAME} "$@" diff --git a/isaac_common/CMakeLists.txt b/isaac_common/CMakeLists.txt new file mode 100644 index 00000000..99743a90 --- /dev/null +++ b/isaac_common/CMakeLists.txt @@ -0,0 +1,47 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2021-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +cmake_minimum_required(VERSION 3.22.1) +project(isaac_common) + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() + + ament_add_gtest(${PROJECT_NAME}_test_message_buffer test/test_message_buffer.cpp) + target_include_directories(${PROJECT_NAME}_test_message_buffer PUBLIC + $ + $ + ) + + ament_add_gtest(${PROJECT_NAME}_test_message_stream_synchronizer test/test_message_stream_synchronizer.cpp) + target_include_directories(${PROJECT_NAME}_test_message_stream_synchronizer PUBLIC + $ + $ + ) +endif() + + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + +ament_auto_package() diff --git a/isaac_common/include/isaac_common/messaging/message_buffer.hpp b/isaac_common/include/isaac_common/messaging/message_buffer.hpp new file mode 100644 index 00000000..46c4f1b3 --- /dev/null +++ b/isaac_common/include/isaac_common/messaging/message_buffer.hpp @@ -0,0 +1,166 @@ +// SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +// Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// SPDX-License-Identifier: Apache-2.0 + +#ifndef ISAAC_COMMON__MESSAGING__MESSAGE_BUFFER_HPP_ +#define ISAAC_COMMON__MESSAGING__MESSAGE_BUFFER_HPP_ + +#include +#include +#include +#include + +namespace nvidia::isaac_common::messaging +{ + +template +class MessageBuffer +{ +public: + explicit MessageBuffer(int maxsize); + void Push(const int64_t & timestamp, const T & msg); + T Pop(); + T Peek() const; + int Size() const; + bool IsEmpty() const; + int64_t GetLastTimeStamp() const; + int64_t GetCurrentTimeStamp() const; + int64_t GetNextTimeStamp() const; + void ClearAll(); + void ClearUpto(const int64_t & timestamp); + std::vector ClearAndGetUpto(const int64_t & timestamp); + std::vector GetAll() const; + std::vector GetUpto(const int64_t & start_time, const int64_t & end_time) const; + +private: + // Buffer size + int max_size_; + // Timestamp of the last processed / removed message + int64_t last_msg_ts_; + // Timestamp of the lastest message that is added to the buffer + int64_t current_msg_ts_; + // Timestamp of the next message to be processed + int64_t next_msg_ts_; + // Buffer to store a message along with a timestamp field. + std::list> buffer_; +}; + +template +MessageBuffer::MessageBuffer(int maxsize) +: max_size_{maxsize}, last_msg_ts_{-1}, current_msg_ts_{-1}, next_msg_ts_{-1} {} + +template +void MessageBuffer::Push(const int64_t & timestamp, const T & msg) +{ + if (static_cast(buffer_.size()) >= max_size_) { + buffer_.pop_front(); + } + buffer_.emplace_back(std::make_pair(timestamp, msg)); + current_msg_ts_ = timestamp; + next_msg_ts_ = buffer_.front().first; +} + +template +T MessageBuffer::Pop() +{ + auto result = buffer_.front(); + buffer_.pop_front(); + last_msg_ts_ = result.first; + next_msg_ts_ = buffer_.empty() ? 
-1 : buffer_.front().first; + return result.second; +} + +template +T MessageBuffer::Peek() const +{ + return buffer_.front().second; +} + +template +int MessageBuffer::Size() const +{ + return buffer_.size(); +} + +template +bool MessageBuffer::IsEmpty() const +{ + return buffer_.empty(); +} + +template +int64_t MessageBuffer::GetLastTimeStamp() const +{ + return last_msg_ts_; +} + +template +int64_t MessageBuffer::GetCurrentTimeStamp() const +{ + return current_msg_ts_; +} + +template +int64_t MessageBuffer::GetNextTimeStamp() const +{ + return next_msg_ts_; +} + +template +void MessageBuffer::ClearAll() +{ + buffer_.clear(); +} + +template +void MessageBuffer::ClearUpto(const int64_t & timestamp) +{ + buffer_.remove_if([timestamp](std::pair msg) {return msg.first <= timestamp;}); +} + +template +std::vector MessageBuffer::ClearAndGetUpto(const int64_t & timestamp) +{ + const auto values = GetUpto(-1, timestamp); + ClearUpto(timestamp); + return values; +} + +template +std::vector MessageBuffer::GetUpto(const int64_t & start_time, const int64_t & end_time) const +{ + std::vector result; + for (auto msg : buffer_) { + if (msg.first >= start_time && msg.first <= end_time) { + result.emplace_back(msg.second); + } + } + return result; +} + +template +std::vector MessageBuffer::GetAll() const +{ + std::vector result; + for (auto msg : buffer_) { + result.emplace_back(msg.second); + } + return result; +} + +} // namespace nvidia::isaac_common::messaging + +#endif // ISAAC_COMMON__MESSAGING__MESSAGE_BUFFER_HPP_ diff --git a/isaac_common/include/isaac_common/messaging/message_stream_synchronizer.hpp b/isaac_common/include/isaac_common/messaging/message_stream_synchronizer.hpp new file mode 100644 index 00000000..a8ed4c8e --- /dev/null +++ b/isaac_common/include/isaac_common/messaging/message_stream_synchronizer.hpp @@ -0,0 +1,199 @@ +// SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +// Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 + +#ifndef ISAAC_COMMON__MESSAGING__MESSAGE_STREAM_SYNCHRONIZER_HPP_ +#define ISAAC_COMMON__MESSAGING__MESSAGE_STREAM_SYNCHRONIZER_HPP_ + +#include +#include +#include +#include +#include + +#include "isaac_common/messaging/message_buffer.hpp" + +namespace nvidia::isaac_common::messaging +{ + +// This class is designed to synchronize n topics, where n can be set dynamically +// at runtime. This is different to the synchronizers from ROS' message_filters +// where n has to be set at compile time. All topics have to use the same +// message type. If some topics have message drops the synchronizer can also +// return the subset of matching messages. +template +class MessageStreamSynchronizer +{ +public: + using CallbackFunction = + std::function> &)>; + + explicit MessageStreamSynchronizer( + int num_topics, int timestamp_delta_threshold_ns, int min_num_messages, int buffer_size); + + // Functions to add new incoming messages. Should be called from the + // subscriber's callback. + void AddMessage( + int idx, int64_t timestamp, const Message & message, + bool trigger_callback = true); + + Message PeekNextMessage(int idx) const; + + // Update all buffers by popping all messages that can no longer be a match. + // And if we have a match, trigger the callback function. 
+ void PopBuffersAndTriggerCallback(); + + // Callback that is called whenever we have a new matching set of messages. + void RegisterCallback(CallbackFunction callback); + + // Clear all the buffers + void ClearBuffers(); + +private: + // Get the timestamps of the earliest message of every message buffer. + std::vector PeekMessageTimestamps() const; + void TriggerCallbackWithMessages(const std::vector & buffer_indices); + + // Threshold used to consider if timestamps of multiple messages are matching. + const int timestamp_delta_threshold_ns_; + // Minimum number of messages used to trigger the callback. + size_t min_num_messages_; + + std::vector> message_buffers_; + CallbackFunction callback_ = [](int64_t, const std::vector> &) {}; +}; + +namespace +{ +std::vector GetIndicesOfMessagesBeforeThreshold( + const std::vector & values, int64_t threshold) +{ + if (values.empty()) { + return {}; + } + + const int64_t min_value = *std::min_element(values.begin(), values.end()); + + std::vector indices; + indices.reserve(values.size()); + + for (size_t i = 0; i < values.size(); ++i) { + if (values[i] <= min_value + threshold) { + indices.push_back(i); + } + } + return indices; +} + +} // namespace + +template +MessageStreamSynchronizer::MessageStreamSynchronizer( + int num_topics, int timestamp_delta_threshold_ns, int min_num_messages, int buffer_size) +: timestamp_delta_threshold_ns_(timestamp_delta_threshold_ns), + min_num_messages_(min_num_messages), + message_buffers_(num_topics, MessageBuffer(buffer_size)) {} + +template +void MessageStreamSynchronizer::AddMessage( + int idx, int64_t timestamp, const Message & message, bool trigger_callback) +{ + message_buffers_[idx].Push(timestamp, message); + if (trigger_callback) { + PopBuffersAndTriggerCallback(); + } +} + +template +auto MessageStreamSynchronizer::PeekNextMessage(int idx) const -> Message +{ + return message_buffers_[idx].Peek(); +} + +template +std::vector MessageStreamSynchronizer::PeekMessageTimestamps() 
const +{ + std::vector timestamps; + timestamps.reserve(message_buffers_.size()); + for (const auto & buffer : message_buffers_) { + if (buffer.IsEmpty()) { + return {}; + } + timestamps.push_back(buffer.GetNextTimeStamp()); + } + return timestamps; +} + +template +void MessageStreamSynchronizer::TriggerCallbackWithMessages( + const std::vector & indices) +{ + int64_t max_timestamp = -1; + std::vector messages; + std::vector> idx_and_images; + idx_and_images.reserve(indices.size()); + + for (auto & idx : indices) { + const int64_t timestamp = message_buffers_[idx].GetNextTimeStamp(); + max_timestamp = std::max(timestamp, max_timestamp); + idx_and_images.push_back({idx, message_buffers_[idx].Peek()}); + } + + callback_(max_timestamp, idx_and_images); +} + +template +void MessageStreamSynchronizer::PopBuffersAndTriggerCallback() +{ + // Strategy: We look at the front message of every buffer and use all messages that are within the + // timestamp threshold. + while (true) { + const std::vector timestamps = PeekMessageTimestamps(); + // If empty we don't yet have enough values. 
+ if (timestamps.empty()) { + return; + } + + const std::vector front_match_indices = + GetIndicesOfMessagesBeforeThreshold(timestamps, timestamp_delta_threshold_ns_); + + if (front_match_indices.size() >= min_num_messages_) { + TriggerCallbackWithMessages(front_match_indices); + } + + for (const int idx : front_match_indices) { + message_buffers_[idx].Pop(); + } + } +} + +template +void MessageStreamSynchronizer::RegisterCallback(CallbackFunction callback) +{ + callback_ = std::move(callback); +} + +template +void MessageStreamSynchronizer::ClearBuffers() +{ + for (auto & buffer : message_buffers_) { + buffer.ClearAll(); + } +} + +} // namespace nvidia::isaac_common::messaging + +#endif // ISAAC_COMMON__MESSAGING__MESSAGE_STREAM_SYNCHRONIZER_HPP_ diff --git a/isaac_common/package.xml b/isaac_common/package.xml new file mode 100644 index 00000000..80baf84d --- /dev/null +++ b/isaac_common/package.xml @@ -0,0 +1,34 @@ + + + + + + + isaac_common + 3.2.0 + Common utilities for Isaac that are not depending on ROS. + + Isaac ROS Maintainers + NVIDIA Isaac ROS Software License + https://developer.nvidia.com/isaac-ros-gems/ + Lionel Gulich + + ament_cmake_auto + isaac_ros_common + + ament_lint_auto + ament_lint_common + ament_cmake_gtest + + + ament_cmake + + diff --git a/isaac_common/test/test_message_buffer.cpp b/isaac_common/test/test_message_buffer.cpp new file mode 100644 index 00000000..9c7e3ead --- /dev/null +++ b/isaac_common/test/test_message_buffer.cpp @@ -0,0 +1,170 @@ +// SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +// Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 +#include + +#include "isaac_common/messaging/message_buffer.hpp" + +namespace nvidia::isaac_common::messaging +{ + +namespace +{ +struct ExampleMessage +{ + int64_t sec; + int64_t nanosec; +}; +} // namespace + +// This test fixture provides a message buffer of size = 5 with header msgs and timestamp vectors +// for easy access in the test cases +class MessageBufferTest : public ::testing::Test +{ +protected: + void SetUp() override + { + for (uint8_t i = 0; i < 5; i++) { + ExampleMessage msg; + msg.sec = timestamps[i]; + msg.nanosec = 0; + msgs.emplace_back(msg); + } + } + std::vector timestamps{1, 2, 3, 4, 5}; + std::vector msgs; + MessageBuffer msg_buffer{5}; +}; + +// This test is verifying the push and pop functionality. 
+// We are pushing 5 messages and popping out 5 +TEST_F(MessageBufferTest, TestPushAndPop) { + // Initial size should be zero + EXPECT_EQ(msg_buffer.Size(), static_cast(0)); + for (uint8_t i = 0; i < 5; i++) { + msg_buffer.Push(timestamps[i], msgs[i]); + } + ASSERT_FALSE(msg_buffer.IsEmpty()); + EXPECT_EQ(msg_buffer.Size(), static_cast(5)); + + auto msg = msg_buffer.Pop(); + ASSERT_FALSE(msg_buffer.IsEmpty()); + EXPECT_EQ(msg_buffer.Size(), static_cast(4)); + msg_buffer.Pop(); + msg_buffer.Pop(); + msg_buffer.Pop(); + msg_buffer.Pop(); + ASSERT_TRUE(msg_buffer.IsEmpty()); + EXPECT_EQ(msg_buffer.Size(), static_cast(0)); +} + +// This test is verifying the timestamps in the buffer when performing push and pop operations +// LastTimeStamp = Timestamp of last message popped +// CurrentTimeStamp = Timestamp of lastest message pushed +// NextTimeStamp = Timestamp of first / earliest message in the buffer +TEST_F(MessageBufferTest, TestTimestamps) { + // Cheking timestamps before push + ASSERT_EQ(msg_buffer.GetLastTimeStamp(), -1); + ASSERT_EQ(msg_buffer.GetCurrentTimeStamp(), -1); + ASSERT_EQ(msg_buffer.GetNextTimeStamp(), -1); + + for (uint8_t i = 0; i < 5; i++) { + msg_buffer.Push(timestamps[i], msgs[i]); + } + // Checking timestamps when only push is done + EXPECT_EQ(msg_buffer.GetLastTimeStamp(), -1); + EXPECT_EQ(msg_buffer.GetNextTimeStamp(), 1); + EXPECT_EQ(msg_buffer.GetCurrentTimeStamp(), 5); + + auto msg = msg_buffer.Pop(); + + // Checking timestamps after pop + EXPECT_EQ(msg_buffer.GetLastTimeStamp(), 1); + EXPECT_EQ(msg_buffer.GetNextTimeStamp(), 2); + EXPECT_EQ(msg_buffer.GetCurrentTimeStamp(), 5); + + msg_buffer.Push(timestamps[4] + 1, msgs[4]); + + // Checking timestamps after pop + EXPECT_EQ(msg_buffer.GetLastTimeStamp(), 1); + EXPECT_EQ(msg_buffer.GetNextTimeStamp(), 2); + EXPECT_EQ(msg_buffer.GetCurrentTimeStamp(), 6); + + for (uint8_t i = 0; i < 4; i++) { + msg_buffer.Pop(); + EXPECT_EQ(msg_buffer.GetLastTimeStamp(), 2 + i); + 
EXPECT_EQ(msg_buffer.GetNextTimeStamp(), 2 + (i + 1)); + EXPECT_EQ(msg_buffer.GetCurrentTimeStamp(), 6); + } + msg_buffer.Pop(); + ASSERT_TRUE(msg_buffer.IsEmpty()); + EXPECT_EQ(msg_buffer.GetLastTimeStamp(), msg_buffer.GetCurrentTimeStamp()); + EXPECT_EQ(msg_buffer.GetNextTimeStamp(), -1); + EXPECT_EQ(msg_buffer.GetCurrentTimeStamp(), 6); +} + +// This test is verifying the fixed width buffer is correct and size never exceeds 5 +TEST_F(MessageBufferTest, TestFixedBufferSize) { + for (uint8_t i = 0; i < 7; i++) { + ExampleMessage msg; + msg.sec = i; + msg.nanosec = 0; + msg_buffer.Push(i, msg); + } + ASSERT_FALSE(msg_buffer.IsEmpty()); + EXPECT_EQ(msg_buffer.Size(), static_cast(5)); + + auto all_msgs = msg_buffer.GetAll(); + for (uint8_t i = 0; i < 5; i++) { + EXPECT_EQ(all_msgs[i].sec, i + 2); + } +} + +// This test is verifying the range access and range delete functionality +// GetUpto includes a time range which is inclusive of [start_time, end_time] +// ClearUpto takes a timepoint uptill which all data should be cleared including the timepoint +// ClearAndGetUpto is the combination of ClearUpto and GetUpto +TEST_F(MessageBufferTest, TestRangeAccessAndDelete) { + for (uint8_t i = 0; i < 5; i++) { + msg_buffer.Push(timestamps[i], msgs[i]); + } + + auto all_msgs = msg_buffer.GetAll(); + ASSERT_EQ(all_msgs.size(), static_cast(5)); + for (uint8_t i = 0; i < 5; i++) { + EXPECT_EQ(all_msgs[i].sec, i + 1); + } + + auto range_msgs1 = msg_buffer.GetUpto(2, 4); + ASSERT_EQ(range_msgs1.size(), static_cast(3)); + EXPECT_EQ(range_msgs1[0].sec, 2); + EXPECT_EQ(range_msgs1[1].sec, 3); + EXPECT_EQ(range_msgs1[2].sec, 4); + + auto range_msgs2 = msg_buffer.ClearAndGetUpto(3); + ASSERT_EQ(range_msgs2.size(), static_cast(3)); + EXPECT_EQ(range_msgs2[0].sec, 1); + EXPECT_EQ(range_msgs2[1].sec, 2); + EXPECT_EQ(range_msgs2[2].sec, 3); + + msg_buffer.ClearUpto(4); + ASSERT_EQ(msg_buffer.Size(), static_cast(1)); + + msg_buffer.ClearAll(); + ASSERT_EQ(msg_buffer.Size(), static_cast(0)); 
+} + +} // namespace nvidia::isaac_common::messaging diff --git a/isaac_common/test/test_message_stream_synchronizer.cpp b/isaac_common/test/test_message_stream_synchronizer.cpp new file mode 100644 index 00000000..743f013b --- /dev/null +++ b/isaac_common/test/test_message_stream_synchronizer.cpp @@ -0,0 +1,122 @@ +// SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +// Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// SPDX-License-Identifier: Apache-2.0 +#include +#include + +#include // NOLINT (build/include_order) + +#include "isaac_common/messaging/message_stream_synchronizer.hpp" + +namespace nvidia::isaac_common::messaging +{ + +namespace +{ + +struct ExampleMessage +{ + int64_t sec; + int64_t nanosec; +}; + +ExampleMessage CreateMessage(int time_ns) +{ + ExampleMessage message; + message.sec = time_ns / 1e9; + message.nanosec = time_ns % static_cast(1e9); + return message; +} + +template +void ExpectTimestamps( + const std::vector> & messages, const std::vector & indices, + const std::vector & timestamps) +{ + ASSERT_EQ(timestamps.size(), indices.size()); + ASSERT_EQ(timestamps.size(), messages.size()); + + for (size_t i = 0; i < timestamps.size(); ++i) { + const int msg_idx = messages[i].first; + const int msg_stamp = messages[i].second.sec * 1e9 + messages[i].second.nanosec; + EXPECT_EQ(indices[i], msg_idx) << "Message " << i << " has incorrect index."; + EXPECT_EQ(timestamps[i], msg_stamp) << "Message " << i << "i has incorrect stamp."; + } +} + +} // namespace + +TEST(MessageStreamSynchronizerTests, EverythingTest) { + constexpr int kNumTopics = 2; + constexpr int kBufferSize = 10; + constexpr double kTimestepDeltaThresholdNs = 4; + MessageStreamSynchronizer sync( + kNumTopics, kTimestepDeltaThresholdNs, kNumTopics, kBufferSize); + + // Use a lambda to store the messages from the callback. + std::vector> synced_msgs; + sync.RegisterCallback( + [&synced_msgs](int64_t /*timestamp_ns*/, const auto & callback_msgs) { + synced_msgs = callback_msgs; + }); + + // Check that initially everything is empty. + EXPECT_EQ(synced_msgs.size(), 0); + + // Now we simulate incoming messages. We assume the synced_msgs arrive every 5ns, + + // Warmup period, we fill every buffer. 
+ sync.AddMessage(0, 0, CreateMessage(0)); + sync.AddMessage(1, 0, CreateMessage(0)); + ExpectTimestamps(synced_msgs, {0, 1}, {0, 0}); + synced_msgs.clear(); + + // No data drops, but timestamps have offsets: we expect a match. + sync.AddMessage(0, 10, CreateMessage(10)); + sync.AddMessage(1, 12, CreateMessage(12)); + ExpectTimestamps(synced_msgs, {0, 1}, {10, 12}); + synced_msgs.clear(); + + // Heavily delayed data, we dont expect a match yet. + // Data from camera 0 is delayed. + sync.AddMessage(1, 20, CreateMessage(20)); + ExpectTimestamps(synced_msgs, {}, {}); + + // Now the delayed data from before arrives. We expect 2 matches. + sync.AddMessage(1, 30, CreateMessage(30)); + // This is the delayed data from camera 0: + sync.AddMessage(0, 20, CreateMessage(20)); + ExpectTimestamps(synced_msgs, {0, 1}, {20, 20}); + synced_msgs.clear(); + + sync.AddMessage(0, 30, CreateMessage(30)); + ExpectTimestamps(synced_msgs, {0, 1}, {30, 30}); + synced_msgs.clear(); + + // This time camera 1 has drops. We expect no matches. + sync.AddMessage(0, 40, CreateMessage(40)); + ExpectTimestamps(synced_msgs, {}, {}); + synced_msgs.clear(); + + // We have data from everything. We expect the algorithm to recover and have + // a match. + sync.AddMessage(0, 50, CreateMessage(50)); + sync.AddMessage(1, 50, CreateMessage(50)); + ExpectTimestamps(synced_msgs, {0, 1}, {50, 50}); + synced_msgs.clear(); +} + +} // namespace nvidia::isaac_common::messaging diff --git a/isaac_common_py/isaac_common_py/__init__.py b/isaac_common_py/isaac_common_py/__init__.py new file mode 100644 index 00000000..34d9bfea --- /dev/null +++ b/isaac_common_py/isaac_common_py/__init__.py @@ -0,0 +1,16 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/isaac_common_py/isaac_common_py/filesystem_utils.py b/isaac_common_py/isaac_common_py/filesystem_utils.py new file mode 100644 index 00000000..de6a51c1 --- /dev/null +++ b/isaac_common_py/isaac_common_py/filesystem_utils.py @@ -0,0 +1,25 @@ +import pathlib +import os + + +def create_workdir(base_path: pathlib.Path, version: str, allow_sudo=False) -> pathlib.Path: + """ Create a versioned workdir with a latest symlink. """ + work_path = base_path / version + try: + work_path.mkdir(exist_ok=True, parents=True) + except PermissionError as e: + if allow_sudo: + os.system(f'sudo mkdir -p {base_path}') + else: + raise e + + if not os.access(base_path, os.W_OK): + if allow_sudo: + os.system(f'sudo chown {os.getuid()} {base_path}') + # If sudo is not allowed we don't raise an error here, since we expect + # one of the commands below to raise the correct PermissionError. + + latest_work_path = base_path / "latest" + latest_work_path.unlink(missing_ok=True) + latest_work_path.symlink_to(work_path, target_is_directory=True) + return work_path diff --git a/isaac_common_py/isaac_common_py/io_utils.py b/isaac_common_py/isaac_common_py/io_utils.py new file mode 100644 index 00000000..c9757ca5 --- /dev/null +++ b/isaac_common_py/isaac_common_py/io_utils.py @@ -0,0 +1,31 @@ +import sys + + +def print_green(text: str): + """ Print text in green. """ + print(f"\033[32m{text}\033[0m") + + +def print_yellow(text: str): + """ Print text in yellow. 
""" + print(f"\033[33m{text}\033[0m") + + +def print_blue(text: str): + """ Print text in blue. """ + print(f"\033[34m{text}\033[0m") + + +def print_gray(text: str): + """ Print text in gray. """ + print(f"\033[90m{text}\033[0m") + + +def print_red(text: str): + """ Print text in red. """ + print(f"\033[91m{text}\033[0m") + + +def delete_last_lines_in_stdout(n: int): + """ Delete the last n lines in stdout. """ + sys.stdout.write("\033[F\033[K" * n) diff --git a/isaac_common_py/isaac_common_py/subprocess_utils.py b/isaac_common_py/isaac_common_py/subprocess_utils.py new file mode 100644 index 00000000..a6a1e70d --- /dev/null +++ b/isaac_common_py/isaac_common_py/subprocess_utils.py @@ -0,0 +1,170 @@ +import collections +import pathlib +import select +import subprocess +import time +from typing import Literal +from datetime import datetime, timedelta + +from isaac_common_py import io_utils + + +def log_process_all(process: subprocess.Popen, log_file: pathlib.Path) -> list[str]: + """ Log all output from the process to stdout and the log file. """ + full_output = [] + + with log_file.open('w') as f: + while process.poll() is None: + ready, _, _ = select.select([process.stdout], [], [], 0.1) + if ready: + output = process.stdout.readline().strip('\n') + full_output.append(output) + io_utils.print_gray(output) + f.write(output + '\n') + f.flush() + + stdout, _ = process.communicate() + full_output.extend(stdout.splitlines()) + io_utils.print_gray(stdout) + f.write(stdout) + f.flush() + + process.wait() + return full_output + + +def log_process_tail(process: subprocess.Popen, log_file: pathlib.Path, tail: int, + timeout=None) -> list[str]: + """ + Log only the last n lines from the process output to stdout, but everything to the log file. + """ + tail_output = collections.deque(maxlen=tail) + full_output = [] + warning = '' + start = time.time() + with log_file.open('w') as f: + while process.poll() is None: + + # Store the current output line. 
+ ready, _, _ = select.select([process.stdout], [], [], 0.1) + if ready: + # clear the previous warning + if len(warning) > 0: + io_utils.delete_last_lines_in_stdout(1) + warning = '' + + io_utils.delete_last_lines_in_stdout(len(tail_output)) + + output = process.stdout.readline().strip('\n') + tail_output.append(output) + full_output.append(output) + + # Print output and also add to log file. + io_utils.print_gray(' ' + '\n '.join(tail_output)) + f.write(output + '\n') + f.flush() + + if timeout and time.time() - start > timeout: + # clear previous warning + if len(warning) > 0: + io_utils.delete_last_lines_in_stdout(1) + + warning = ('WARNING: The command has exceeded the timeout ' + + f'limit of {timeout} seconds.') + io_utils.print_yellow(warning) + + stdout, _ = process.communicate() + full_output.extend(stdout.splitlines()) + f.write(stdout) + f.flush() + + process.wait() + if len(warning) > 0: + io_utils.delete_last_lines_in_stdout(1) + io_utils.delete_last_lines_in_stdout(len(tail_output)) + return full_output + + +def log_process_none(process: subprocess.Popen, log_file: pathlib.Path) -> list[str]: + """ Log nothing to stdout, but everything to the log file. """ + full_output = [] + with log_file.open('w') as f: + while process.poll() is None: + ready, _, _ = select.select([process.stdout], [], [], 0.1) + if ready: + output = process.stdout.readline().strip('\n') + full_output.append(output) + f.write(output + '\n') + f.flush() + + stdout, _ = process.communicate() + f.write(stdout) + f.flush() + + process.wait() + return full_output + + +def run_command( + mnemonic: str, + command: str | list, + log_file: pathlib.Path, + print_mode: Literal['all', 'tail', 'none'], + allow_failure=False, + timeout=None, + **kwargs, +) -> list[str]: + """ Run a command and log its outputs. 
""" + assert print_mode in ['all', 'tail', 'none'] + if timeout: + end = datetime.now() + timedelta(seconds=timeout) + io_utils.print_blue( + f'{mnemonic}: Estimated completion at {end.strftime("%H:%M:%S")}. Running...⏳') + else: + io_utils.print_blue(f'{mnemonic} Running...⏳') + + if not isinstance(command, list): + command = [command] + command = [str(c) for c in command] + + log_file.unlink(missing_ok=True) + + start = time.time() + + process = subprocess.Popen( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + **kwargs, + ) + + if print_mode == 'all': + full_output = log_process_all(process, log_file) + elif print_mode == 'tail': + full_output = log_process_tail(process, log_file, 10, timeout) + elif print_mode == 'none': + full_output = log_process_none(process, log_file) + + end = time.time() + duration = end - start + + success = True if allow_failure else process.returncode == 0 + status = (f'{mnemonic}: Success ✅️ [{duration:.2f}s]' + if success else f'{mnemonic}: Fail ❌ [{duration:.2f}s]') + + if print_mode == 'all': + io_utils.print_blue(status) + elif print_mode in ['tail', 'none']: + io_utils.delete_last_lines_in_stdout(1) + io_utils.print_blue(status) + if not success: + io_utils.print_blue("Logs:") + io_utils.print_gray(' ' + '\n '.join(full_output)) + + if not success: + command_str = ' '.join(command) + io_utils.print_red(f"Failed to run command '{command_str}'.") + raise subprocess.CalledProcessError(returncode=process.returncode, cmd=[command_str]) + + return full_output diff --git a/isaac_common_py/package.xml b/isaac_common_py/package.xml new file mode 100644 index 00000000..c7336bec --- /dev/null +++ b/isaac_common_py/package.xml @@ -0,0 +1,16 @@ + + + + isaac_common_py + 3.2.0 + ROS-free python utilities + Isaac ROS Maintainers + Apache-2.0 + https://developer.nvidia.com/isaac-ros-gems/ + Lionel Gulich + isaac_ros_common + + + ament_python + + diff --git a/isaac_common_py/resource/isaac_common_py 
b/isaac_common_py/resource/isaac_common_py new file mode 100644 index 00000000..e69de29b diff --git a/isaac_common_py/setup.cfg b/isaac_common_py/setup.cfg new file mode 100644 index 00000000..1baad78f --- /dev/null +++ b/isaac_common_py/setup.cfg @@ -0,0 +1,4 @@ +[develop] +script_dir=$base/lib/isaac_common_py +[install] +install_scripts=$base/lib/isaac_common_py diff --git a/isaac_common_py/setup.py b/isaac_common_py/setup.py new file mode 100644 index 00000000..f093f38a --- /dev/null +++ b/isaac_common_py/setup.py @@ -0,0 +1,71 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +import importlib.util +from pathlib import Path +import sys + +from ament_index_python.packages import get_resource +from setuptools import setup + +ISAAC_ROS_COMMON_PATH = get_resource( + 'isaac_ros_common_scripts_path', + 'isaac_ros_common' +)[0] + +ISAAC_ROS_COMMON_VERSION_INFO = Path(ISAAC_ROS_COMMON_PATH) / 'isaac_ros_common-version-info.py' + +spec = importlib.util.spec_from_file_location( + 'isaac_ros_common_version_info', + ISAAC_ROS_COMMON_VERSION_INFO +) + +isaac_ros_common_version_info = importlib.util.module_from_spec(spec) +sys.modules['isaac_ros_common_version_info'] = isaac_ros_common_version_info +spec.loader.exec_module(isaac_ros_common_version_info) + +from isaac_ros_common_version_info import GenerateVersionInfoCommand # noqa: E402, I100 +PACKAGE_NAME = 'isaac_common_py' + +LICENSE = """ +Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +NVIDIA CORPORATION and its licensors retain all intellectual property +and proprietary rights in and to this software, related documentation +and any modifications thereto. Any use, reproduction, disclosure or +distribution of this software and related documentation without an express +license agreement from NVIDIA CORPORATION is strictly prohibited. 
+""" + +setup( + name=PACKAGE_NAME, + version='3.0.1', + packages=[PACKAGE_NAME], + data_files=[ + ('share/ament_index/resource_index/packages', ['resource/' + PACKAGE_NAME]), + ('share/' + PACKAGE_NAME, ['package.xml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='Isaac ROS Maintainers', + maintainer_email='isaac-ros-maintainers@nvidia.com', + description='General purpose utilities for python', + license=LICENSE, + tests_require=['pytest'], + cmdclass={ + 'build_py': GenerateVersionInfoCommand, + }, +) diff --git a/isaac_common_py/tests/test_subprocess_utils.py b/isaac_common_py/tests/test_subprocess_utils.py new file mode 100644 index 00000000..cf1588e4 --- /dev/null +++ b/isaac_common_py/tests/test_subprocess_utils.py @@ -0,0 +1,23 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. 
+ +import sys +import pathlib +import shutil + +from isaac_common_py import subprocess_utils + + +def test_run_command(tmp_path: pathlib.Path): + output = subprocess_utils.run_command( + mnemonic='Example Command', + command='ping google.com -c 10'.split(), + log_file=tmp_path / 'log.txt', + print_mode='tail', + ) + assert len(output) > 10 diff --git a/isaac_ros_apriltag_interfaces/CMakeLists.txt b/isaac_ros_apriltag_interfaces/CMakeLists.txt index 0dd5ed02..86c029cb 100644 --- a/isaac_ros_apriltag_interfaces/CMakeLists.txt +++ b/isaac_ros_apriltag_interfaces/CMakeLists.txt @@ -44,4 +44,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_apriltag_interfaces/package.xml b/isaac_ros_apriltag_interfaces/package.xml index 0e19a676..53a27a3e 100644 --- a/isaac_ros_apriltag_interfaces/package.xml +++ b/isaac_ros_apriltag_interfaces/package.xml @@ -11,7 +11,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_apriltag_interfaces - 3.1.0 + 3.2.0 Interfaces for performing Isaac ROS AprilTag detection Isaac ROS Maintainers @@ -22,6 +22,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. 
ament_cmake_auto rosidl_default_generators + isaac_ros_common rosidl_default_runtime geometry_msgs diff --git a/isaac_ros_bi3d_interfaces/CMakeLists.txt b/isaac_ros_bi3d_interfaces/CMakeLists.txt index bed59d3a..1b6814b3 100644 --- a/isaac_ros_bi3d_interfaces/CMakeLists.txt +++ b/isaac_ros_bi3d_interfaces/CMakeLists.txt @@ -43,4 +43,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_bi3d_interfaces/package.xml b/isaac_ros_bi3d_interfaces/package.xml index 8889a790..e783e2da 100644 --- a/isaac_ros_bi3d_interfaces/package.xml +++ b/isaac_ros_bi3d_interfaces/package.xml @@ -11,7 +11,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_bi3d_interfaces - 3.1.0 + 3.2.0 Interfaces for Isaac ROS Bi3D Isaac ROS Maintainers @@ -20,6 +20,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. 
Peter Du rosidl_default_generators + isaac_ros_common rosidl_default_runtime std_msgs diff --git a/isaac_ros_common/CMakeLists.txt b/isaac_ros_common/CMakeLists.txt index d9ed26fa..c6e1a2dc 100644 --- a/isaac_ros_common/CMakeLists.txt +++ b/isaac_ros_common/CMakeLists.txt @@ -41,14 +41,20 @@ target_link_libraries(${PROJECT_NAME} vpi) ament_index_register_resource(isaac_ros_common_cmake_path CONTENT "${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}/cmake") list(APPEND ${PROJECT_NAME}_CONFIG_EXTRAS + cmake/isaac_ros_common-version-info.cmake cmake/isaac_ros_common-extras.cmake cmake/isaac_ros_common-extras-modules.cmake cmake/isaac_ros_common-extras-assets.cmake ) +ament_python_install_package(${PROJECT_NAME}) + +ament_index_register_resource(isaac_ros_common_scripts_path CONTENT + "${CMAKE_INSTALL_PREFIX}/bin") install( DIRECTORY scripts/ DESTINATION bin + USE_SOURCE_PERMISSIONS ) if(BUILD_TESTING) @@ -59,4 +65,9 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/isaac_ros_common-version-info.cmake) +generate_version_info(${PROJECT_NAME}) + ament_auto_package(INSTALL_TO_SHARE cmake) diff --git a/isaac_ros_common/cmake/isaac_ros_common-extras-assets.cmake b/isaac_ros_common/cmake/isaac_ros_common-extras-assets.cmake index b5c89862..07ea70a8 100644 --- a/isaac_ros_common/cmake/isaac_ros_common-extras-assets.cmake +++ b/isaac_ros_common/cmake/isaac_ros_common-extras-assets.cmake @@ -47,8 +47,26 @@ function(install_isaac_ros_asset TARGET_NAME) # Hook the command up with a target add_custom_target("${TARGET_NAME}" ALL DEPENDS ${OUTPUT_PATHS}) -endfunction() + # Register the install script as an ament resource such that it can + # be retrieved by other packages. 
+ message(STATUS "Registering ament resource: ${INSTALL_SCRIPT} as ${TARGET_NAME}") + ament_index_register_resource("${TARGET_NAME}" CONTENT "${INSTALL_SCRIPT}") +endfunction() +# Return the install script path registered when installing the asset. +# +# :param VAR: The the output variable that will hold the path. +# :type VAR: string +# :param SCRIPT_NAME: The name of the script installed with install_isaac_ros_asset() +# :type VAR: string +# :param PACKAGE_NAME: The ROS2 package that installed the asset. +# :type VAR: string +function(get_isaac_ros_asset_install_script_path VAR SCRIPT_NAME PACKAGE_NAME) + # Retrieve the script path from the ament index, under the name registered + # during install_isaac_ros_asset(). + ament_index_get_resource(SCRIPT_PATH "${SCRIPT_NAME}" "${PACKAGE_NAME}") + set(${VAR} ${SCRIPT_PATH} PARENT_SCOPE) +endfunction() diff --git a/isaac_ros_common/cmake/isaac_ros_common-extras.cmake b/isaac_ros_common/cmake/isaac_ros_common-extras.cmake index 60106c6c..fa00b2dd 100644 --- a/isaac_ros_common/cmake/isaac_ros_common-extras.cmake +++ b/isaac_ros_common/cmake/isaac_ros_common-extras.cmake @@ -39,9 +39,16 @@ set(CUDA_MIN_VERSION "11.4") find_package(CUDA REQUIRED) include_directories("${CUDA_INCLUDE_DIRS}") +# Setup cuda architectures # Target Ada is CUDA 11.8 or greater -if( ${CUDA_VERSION} GREATER_EQUAL 11.8) - set(CMAKE_CUDA_ARCHITECTURES "89;87;86;80;75;72;70;61;60") -else() - set(CMAKE_CUDA_ARCHITECTURES "87;86;80;75;72;70;61;60") +if(NOT DEFINED CMAKE_CUDA_ARCHITECTURES) + if(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64") + set(CMAKE_CUDA_ARCHITECTURES "87") + elseif(${CUDA_VERSION} GREATER_EQUAL 11.8) + set(CMAKE_CUDA_ARCHITECTURES "89;86;80;75;70") + else() + set(CMAKE_CUDA_ARCHITECTURES "86;80;75;70") + endif() endif() +message(STATUS "CUDA architectures: ${CMAKE_CUDA_ARCHITECTURES}") + diff --git a/isaac_ros_common/cmake/isaac_ros_common-version-info.cmake b/isaac_ros_common/cmake/isaac_ros_common-version-info.cmake new file mode 
100644 index 00000000..8711f5cf --- /dev/null +++ b/isaac_ros_common/cmake/isaac_ros_common-version-info.cmake @@ -0,0 +1,52 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +# isaac_ros_common/cmake/isaac_ros_common-version-info.cmake + +function(generate_version_info PROJECT_NAME) + find_package(Python3 REQUIRED COMPONENTS Interpreter) + + # Assume that this macro is being called from another package that has + # a build dependency on isaac_ros_common. + + # Check if the project is 'isaac_ros_common' to use relative pathing + if("${PROJECT_NAME}" STREQUAL "isaac_ros_common") + # Use relative pathing + set(SCRIPT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/scripts/isaac_ros_version_embed.py") + else() + # Use the package path resolution + find_package(ament_cmake REQUIRED) + ament_index_get_resource(ISAAC_ROS_COMMON_SCRIPTS_PATH isaac_ros_common_scripts_path isaac_ros_common) + set(SCRIPT_PATH "${ISAAC_ROS_COMMON_SCRIPTS_PATH}/isaac_ros_version_embed.py") + endif() + + # Output path for the version_info.yaml file + set(OUTPUT_PATH "${CMAKE_CURRENT_BINARY_DIR}/version_info.yaml") + + # Install destination for the generated YAML file + set(INSTALL_DESTINATION "share/${PROJECT_NAME}") + + # Add a custom command to generate the version info YAML file + add_custom_command( + OUTPUT ${OUTPUT_PATH} + COMMAND ${Python3_EXECUTABLE} ${SCRIPT_PATH} --output ${OUTPUT_PATH} --source-dir ${CMAKE_CURRENT_SOURCE_DIR} + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + DEPENDS ${SCRIPT_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/package.xml + COMMENT "Generating version information as YAML" + ) + + # Add a custom target that depends on the 
output file + add_custom_target( + generate_version_info_target_${PROJECT_NAME} ALL + DEPENDS ${OUTPUT_PATH} + ) + + # Install the generated YAML file to the install directory + install(FILES ${OUTPUT_PATH} DESTINATION ${INSTALL_DESTINATION}) + +endfunction() diff --git a/isaac_ros_common/cmake/modules/FindTENSORRT.cmake b/isaac_ros_common/cmake/modules/FindTENSORRT.cmake index 6844bbe3..2085b083 100644 --- a/isaac_ros_common/cmake/modules/FindTENSORRT.cmake +++ b/isaac_ros_common/cmake/modules/FindTENSORRT.cmake @@ -28,6 +28,8 @@ # is recommended instead in CMake. # [1] https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#config-file-packages # [2] https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#find-module-packages +# +# From: https://github.com/nvidia-holoscan/holoscan-sdk/blob/main/cmake/modules/FindTensorRT.cmake # Find headers find_path(TENSORRT_INCLUDE_DIR NAMES NvInferVersion.h REQUIRED) @@ -35,14 +37,14 @@ mark_as_advanced(TENSORRT_INCLUDE_DIR) # Find version function(read_version name str) - string(REGEX MATCH "${name} ([0-9]\\d*)" _ ${str}) + string(REGEX MATCH "${name} ([0-9]+)" _ "${str}") set(${name} ${CMAKE_MATCH_1} PARENT_SCOPE) endfunction() file(READ "${TENSORRT_INCLUDE_DIR}/NvInferVersion.h" _TRT_VERSION_FILE) -read_version(NV_TENSORRT_MAJOR ${_TRT_VERSION_FILE}) -read_version(NV_TENSORRT_MINOR ${_TRT_VERSION_FILE}) -read_version(NV_TENSORRT_PATCH ${_TRT_VERSION_FILE}) +read_version(NV_TENSORRT_MAJOR "${_TRT_VERSION_FILE}") +read_version(NV_TENSORRT_MINOR "${_TRT_VERSION_FILE}") +read_version(NV_TENSORRT_PATCH "${_TRT_VERSION_FILE}") set(TENSORRT_VERSION "${NV_TENSORRT_MAJOR}.${NV_TENSORRT_MINOR}.${NV_TENSORRT_PATCH}") unset(_TRT_VERSION_FILE) @@ -59,9 +61,7 @@ endmacro() find_trt_library(nvinfer) find_trt_library(nvinfer_plugin) -find_trt_library(nvcaffe_parser) find_trt_library(nvonnxparser) -find_trt_library(nvparsers) # Generate TENSORRT_FOUND include(FindPackageHandleStandardArgs) @@ -69,4 +69,4 @@ 
find_package_handle_standard_args(TENSORRT FOUND_VAR TENSORRT_FOUND VERSION_VAR TENSORRT_VERSION REQUIRED_VARS TENSORRT_INCLUDE_DIR # no need for libs/targets, since find_library is REQUIRED -) +) \ No newline at end of file diff --git a/isaac_ros_common/include/isaac_ros_common/qos.hpp b/isaac_ros_common/include/isaac_ros_common/qos.hpp index cc757052..48e8dd8d 100644 --- a/isaac_ros_common/include/isaac_ros_common/qos.hpp +++ b/isaac_ros_common/include/isaac_ros_common/qos.hpp @@ -30,9 +30,10 @@ namespace common rclcpp::QoS AddQosParameter( rclcpp::Node & node, std::string default_qos = "SYSTEM_DEFAULT", - std::string parameter_name = "qos"); + std::string parameter_name = "qos", + const int default_depth = 0); -rclcpp::QoS ParseQosString(const std::string & str); +rclcpp::QoS ParseQosString(const std::string & str, const int depth = 0); } // namespace common } // namespace isaac_ros diff --git a/isaac_ros_common/isaac_ros_common/__init__.py b/isaac_ros_common/isaac_ros_common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/isaac_ros_common/isaac_ros_common/qos.py b/isaac_ros_common/isaac_ros_common/qos.py new file mode 100644 index 00000000..d6f2c1d7 --- /dev/null +++ b/isaac_ros_common/isaac_ros_common/qos.py @@ -0,0 +1,46 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES', +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +import rclpy +from rclpy.node import Node +import rclpy.qos + + +def add_qos_parameter(node: Node, default_qos='SYSTEM_DEFAULT', parameter_name='qos'): + return parse_qos_string( + node.declare_parameter(parameter_name, default_qos).get_parameter_value().string_value) + + +def parse_qos_string(qos_str: str): + profile = qos_str.upper() + + if profile == 'SYSTEM_DEFAULT': + return rclpy.qos.qos_profile_system_default + if profile == 'DEFAULT': + return rclpy.qos.QoSProfile(depth=10) + if profile == 'PARAMETER_EVENTS': + return rclpy.qos.qos_profile_parameter_events + if profile == 'SERVICES_DEFAULT': + return rclpy.qos.qos_profile_services_default + if profile == 'PARAMETERS': + return rclpy.qos.qos_profile_parameters + if profile == 'SENSOR_DATA': + return rclpy.qos.qos_profile_sensor_data + + Node('parseQoSString').get_logger().warn( + f'Unknown QoS profile: {profile}. Returning profile: DEFAULT') + return rclpy.qos.QoSProfile(depth=10) diff --git a/isaac_ros_common/package.xml b/isaac_ros_common/package.xml index 1fb25538..b9eadcac 100644 --- a/isaac_ros_common/package.xml +++ b/isaac_ros_common/package.xml @@ -13,7 +13,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_common - 3.1.0 + 3.2.0 Utilities for performing common functions in Isaac ROS packages Isaac ROS Maintainers diff --git a/isaac_ros_common/scripts/isaac_ros_common-version-info.py b/isaac_ros_common/scripts/isaac_ros_common-version-info.py new file mode 100644 index 00000000..8e75fd9b --- /dev/null +++ b/isaac_ros_common/scripts/isaac_ros_common-version-info.py @@ -0,0 +1,87 @@ +# setup.py +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. 
Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +import os +import subprocess +import sys + +from setuptools.command.build_py import build_py + + +class GenerateVersionInfoCommand(build_py): + """Generate version_info.yaml before building.""" + + def run(self): + project_name = self.distribution.get_name() + + # Get the current working directory where setup.py is executed + project_path = os.getcwd() + + # Log the project path for debugging + print(f'Project path for {project_name}: {project_path}') + + # Call generate_version_info with the correct project_path + output_path, install_destination = generate_version_info(project_name, project_path) + + # Add the generated file to the package data + if self.distribution.data_files is None: + self.distribution.data_files = [] + self.distribution.data_files.append((install_destination, [output_path])) + + # Continue with the build process + super().run() + + +def generate_version_info(project_name, source_dir): + from ament_index_python.packages import get_resource + + # Determine the script path + if project_name == 'isaac_ros_common': + # Use relative pathing + script_path = os.path.join(os.path.dirname(__file__), 'scripts', + 'isaac_ros_version_embed.py') + else: + # Use the package path resolution + try: + script_path = os.path.join( + get_resource( + 'isaac_ros_common_scripts_path', + 'isaac_ros_common' + )[0], + 'isaac_ros_version_embed.py' + ) + except ImportError: + print('Error: isaac_ros_common package not found.') + sys.exit(1) + except Exception as e: + print(f'Error finding isaac_ros_version_embed.py: {e}') + sys.exit(1) + + # Output path for the version_info.yaml file + build_dir = os.path.join(os.getcwd(), 'build') + os.makedirs(build_dir, exist_ok=True) + output_path = os.path.join(build_dir, 'version_info.yaml') + + # Install destination for the generated YAML file + 
install_destination = os.path.join('share', project_name) + + # Run the script to generate the version info YAML file + command = [ + sys.executable, script_path, + '--output', output_path, + '--source-dir', source_dir + ] + try: + subprocess.check_call(command, cwd=source_dir) + print('Generating version information as YAML') + except subprocess.CalledProcessError as e: + print(f'Error generating version information: {e}') + sys.exit(1) + + return output_path, install_destination diff --git a/isaac_ros_common/scripts/isaac_ros_generate_bug_report.py b/isaac_ros_common/scripts/isaac_ros_generate_bug_report.py new file mode 100755 index 00000000..ac8ceab8 --- /dev/null +++ b/isaac_ros_common/scripts/isaac_ros_generate_bug_report.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. +import argparse + + +def main(isaac_package): + """ + Generate a bug report for the specified `isaac_package`. + + Args + ---- + isaac_package (str): The name of the `isaac_package` + for which the bug report is being generated. + + Returns + ------- + None + + Raises + ------ + FileNotFoundError: If the `version_info.yaml` file is not found in the package share path. 
+ + """ + from ament_index_python.packages import get_package_share_path + + print(f'\nGenerating bug report for isaac_package: {isaac_package}') + package_share_path = get_package_share_path(isaac_package) + + print(f'\nPackage share path: {package_share_path}') + + with open(f'{package_share_path}/version_info.yaml', 'r') as file: + package_yaml = file.read() + + print(package_yaml) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Generate bug report for given isaac_package') + parser.add_argument('isaac_package', + help='isaac_package for which bug report is to be generated') + + args = parser.parse_args() + + main(args.isaac_package) diff --git a/isaac_ros_common/scripts/isaac_ros_version_embed.py b/isaac_ros_common/scripts/isaac_ros_version_embed.py new file mode 100644 index 00000000..3d33482e --- /dev/null +++ b/isaac_ros_common/scripts/isaac_ros_version_embed.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python3 +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +import argparse +import datetime +import os +import subprocess +import xml.etree.ElementTree as ET + +import yaml # Requires PyYAML to be installed + + +def get_git_info(repo_dir): + """ + Retrieve git information for a given repository directory. + + Args + ---- + repo_dir (str): The path to the repository directory. + + Returns + ------- + dict: A dictionary containing the following git information: + - commit_hash (str): The commit hash. + - commit_date (str): The commit date. + - commit_message (str): The commit message. + - is_dirty (str): 'Yes' if the repository is dirty, 'No' otherwise. 
+ + Raises + ------ + subprocess.CalledProcessError: If there is an error executing the git commands. + + """ + git_info = { + 'commit_hash': 'N/A', + 'commit_date': 'N/A', + 'commit_message': 'N/A', + 'git_branch': 'N/A', + 'is_dirty': 'N/A' + } + + try: + # Check if inside a git repository + subprocess.check_output(['git', 'rev-parse', '--is-inside-work-tree'], + cwd=repo_dir, stderr=subprocess.DEVNULL) + + # Get commit hash + commit_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD'], + cwd=repo_dir).decode().strip() + git_info['commit_hash'] = commit_hash + + # Get commit date + commit_date = subprocess.check_output(['git', 'show', '-s', '--format=%ci', 'HEAD'], + cwd=repo_dir).decode().strip() + git_info['commit_date'] = commit_date + + # Get commit message + commit_message = subprocess.check_output(['git', 'log', '-1', '--pretty=%B'], + cwd=repo_dir).decode().strip() + git_info['commit_message'] = commit_message + + # Get git branch + git_branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], + cwd=repo_dir).decode().strip() + git_info['git_branch'] = git_branch + + # Check if workspace is dirty + status = subprocess.check_output(['git', 'status', '--porcelain'], + cwd=repo_dir).decode().strip() + git_info['is_dirty'] = 'Yes' if status else 'No' + + except subprocess.CalledProcessError: + pass # Not in a git repository + + return git_info + + +def get_version_from_package_xml(package_xml_path): + """ + Retrieve the version from a package.xml file. + + Args + ---- + package_xml_path (str): The path to the package.xml file. + + Returns + ------- + str: The version number extracted from the package.xml file. + If an error occurs, returns 'N/A'. + + Raises + ------ + Exception: If an error occurs while parsing the package.xml file. 
+ + """ + try: + tree = ET.parse(package_xml_path) + root = tree.getroot() + version = root.find('version').text.strip() + return version + except: # noqa: E722 + return 'N/A' + + +def main(): + parser = argparse.ArgumentParser(description='Generate version information as a YAML file.') + parser.add_argument('--output', '-o', required=True, help='Output YAML file path.') + parser.add_argument('--source-dir', required=True, help='Source directory of the package.') + args = parser.parse_args() + + # Get the directory of the package being built + package_dir = os.path.abspath(args.source_dir) + + # Get version from package.xml + package_xml_path = os.path.join(package_dir, 'package.xml') + version = get_version_from_package_xml(package_xml_path) + + # Get git info + git_info = get_git_info(package_dir) + + # Get current datetime + build_datetime = datetime.datetime.now().isoformat() + + # Prepare data + data = { + 'version': version, + 'build_datetime': build_datetime, + 'git_branch': git_info['git_branch'], + 'git_commit_hash': git_info['commit_hash'], + 'git_commit_date': git_info['commit_date'], + 'git_commit_message': git_info['commit_message'], + 'git_workspace_dirty': git_info['is_dirty'] + } + + # Write YAML data to the specified output path + output_path = args.output + os.makedirs(os.path.dirname(output_path), exist_ok=True) + with open(output_path, 'w') as f: + yaml.dump(data, f) + + +if __name__ == '__main__': + main() diff --git a/isaac_ros_common/src/qos.cpp b/isaac_ros_common/src/qos.cpp index 8fc837c2..cc073a33 100644 --- a/isaac_ros_common/src/qos.cpp +++ b/isaac_ros_common/src/qos.cpp @@ -30,39 +30,43 @@ namespace common rclcpp::QoS AddQosParameter( rclcpp::Node & node, std::string default_qos, - std::string parameter_name) + std::string parameter_name, + const int default_depth) { - return ParseQosString(node.declare_parameter(parameter_name, default_qos)); + std::string qos_str = node.declare_parameter(parameter_name, default_qos); + const int depth 
= node.declare_parameter(parameter_name + "_depth", default_depth); + return ParseQosString(qos_str, depth); } -rclcpp::QoS ParseQosString(const std::string & str) + +rclcpp::QoS ParseQosString(const std::string & str, const int depth) { std::string profile = str; // Convert to upper case. std::transform(profile.begin(), profile.end(), profile.begin(), ::toupper); + rmw_qos_profile_t rmw_qos = rmw_qos_profile_default; + if (profile == "SYSTEM_DEFAULT") { - return rclcpp::QoS(rclcpp::SystemDefaultsQoS()); - } - if (profile == "DEFAULT") { - return rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_default)); - } - if (profile == "PARAMETER_EVENTS") { - return rclcpp::QoS(rclcpp::ParameterEventsQoS()); - } - if (profile == "SERVICES_DEFAULT") { - return rclcpp::QoS(rclcpp::ServicesQoS()); - } - if (profile == "PARAMETERS") { - return rclcpp::QoS(rclcpp::ParametersQoS()); - } - if (profile == "SENSOR_DATA") { - return rclcpp::QoS(rclcpp::SensorDataQoS()); + rmw_qos = rmw_qos_profile_system_default; + } else if (profile == "DEFAULT") { + rmw_qos = rmw_qos_profile_default; + } else if (profile == "PARAMETER_EVENTS") { + rmw_qos = rmw_qos_profile_parameter_events; + } else if (profile == "SERVICES_DEFAULT") { + rmw_qos = rmw_qos_profile_services_default; + } else if (profile == "PARAMETERS") { + rmw_qos = rmw_qos_profile_parameters; + } else if (profile == "SENSOR_DATA") { + rmw_qos = rmw_qos_profile_sensor_data; + } else { + RCLCPP_WARN_STREAM( + rclcpp::get_logger("parseQoSString"), + "Unknown QoS profile: " << profile << ". Returning profile: DEFAULT"); } - RCLCPP_WARN_STREAM( - rclcpp::get_logger("parseQoSString"), - "Unknown QoS profile: " << profile << ". Returning profile: DEFAULT"); - return rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_default)); + auto qos_init = depth == + 0 ? 
rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_default) : rclcpp::KeepLast(depth); + return rclcpp::QoS(qos_init, rmw_qos); } } // namespace common diff --git a/isaac_ros_common/src/vpi_utilities.cpp b/isaac_ros_common/src/vpi_utilities.cpp index 1a43b174..ab57b2ab 100644 --- a/isaac_ros_common/src/vpi_utilities.cpp +++ b/isaac_ros_common/src/vpi_utilities.cpp @@ -33,9 +33,11 @@ const std::unordered_map g_str_to_vpi_backend({ {"CPU", VPI_BACKEND_CPU}, {"CUDA", VPI_BACKEND_CUDA}, {"PVA", VPI_BACKEND_PVA}, + {"OFA", VPI_BACKEND_OFA}, {"VIC", VPI_BACKEND_VIC}, {"NVENC", VPI_BACKEND_NVENC}, {"TEGRA", VPI_BACKEND_TEGRA}, + {"ORIN", VPI_BACKEND_OFA | VPI_BACKEND_PVA | VPI_BACKEND_VIC}, {"ALL", VPI_BACKEND_ALL}, }); diff --git a/isaac_ros_launch_utils/isaac_ros_launch_utils/core.py b/isaac_ros_launch_utils/isaac_ros_launch_utils/core.py index c199ffd8..cf9d94d7 100644 --- a/isaac_ros_launch_utils/isaac_ros_launch_utils/core.py +++ b/isaac_ros_launch_utils/isaac_ros_launch_utils/core.py @@ -19,13 +19,17 @@ import pathlib import platform import uuid -from typing import Any, Callable, List, Tuple, Dict +from typing import Any, Callable import yaml import os from ament_index_python.packages import get_package_share_directory -from isaac_ros_launch_utils.all_types import * +import isaac_ros_launch_utils.all_types as lut + +# For backcompatibility we import with *. All new files should not use this and +# instead use the import statement above. +from isaac_ros_launch_utils.all_types import * # noqa: F401, F403 class NovaRobot(Enum): @@ -37,13 +41,25 @@ class NovaRobot(Enum): UNKNOWN = 4 -def _add_delay_if_set(action: Action, delay: Any = None) -> Action: +def _add_delay_if_set(action: lut.Action, delay: Any = None) -> lut.Action: + """ Automatically a delay to a launch action. 
""" if is_valid(delay): delay = float(delay) if isinstance(delay, str) else delay - return TimerAction(period=delay, actions=[action]) + return lut.TimerAction(period=delay, actions=[action]) return action +def _try_convert_string_to_primitive(value_str: str): + """ Try to convert a string to a primitive type. If not possible returns the same string. """ + primitives = (bool, str, int, float, list, dict, type(None)) + try: + value = eval(value_str) # pylint: disable=eval-used, + except: # noqa: E722 pylint: disable=bare-except + return value_str + + return value if isinstance(value, primitives) else value_str + + class ArgumentContainer(argparse.Namespace): """ A helper class to make it easier to define launch arguments and easier to see what arguments @@ -51,6 +67,7 @@ class ArgumentContainer(argparse.Namespace): """ def __init__(self): + super().__init__() self._launch_configurations = [] self._cli_launch_args = [] self._opaque_functions = [] @@ -60,15 +77,15 @@ def add_arg(self, default: Any = None, description: str | None = None, choices: list[str] | None = None, - cli: bool = False) -> LaunchConfiguration: + cli: bool = False) -> lut.LaunchConfiguration: """ Add an argument to the arg container. """ default = str(default) if default is not None else None - launch_configuration = LaunchConfiguration(name, default=default) + launch_configuration = lut.LaunchConfiguration(name, default=default) self._launch_configurations.append(launch_configuration) setattr(self, name, launch_configuration) if cli: self._cli_launch_args.append( - DeclareLaunchArgument( + lut.DeclareLaunchArgument( name, default_value=default, description=description, @@ -76,33 +93,29 @@ def add_arg(self, )) return launch_configuration - def get_launch_actions(self) -> list[Action]: + def get_launch_actions(self) -> list[lut.Action]: """ Get all launch actions contained in this argument container. 
""" return self._cli_launch_args + self._opaque_functions def add_opaque_function( self, - function: Callable[['ArgumentContainer'], list[Action] | None], - ) -> OpaqueFunction: + function: Callable[['ArgumentContainer'], list[lut.Action] | None], + condition: lut.Condition = None, + ) -> lut.OpaqueFunction: """ Helper function to add an opaque function that has access to all the evaluated arguments. """ - def helper_function(context: LaunchContext): + def helper_function(context: lut.LaunchContext): evaluated_args = argparse.Namespace() for launch_configuration in self._launch_configurations: name = launch_configuration.variable_name[0].perform(context) value_str = launch_configuration.perform(context) - try: - #pylint: disable=eval-used, - value = eval(value_str) - #pylint: disable=bare-except, - except: - value = value_str + value = _try_convert_string_to_primitive(value_str) setattr(evaluated_args, name, value) return function(evaluated_args) - opaque_function = OpaqueFunction(function=helper_function) + opaque_function = lut.OpaqueFunction(function=helper_function, condition=condition) self._opaque_functions.append(opaque_function) return opaque_function @@ -115,10 +128,11 @@ def get_path(package: str, path: str) -> pathlib.Path: def add_robot_description( - nominals_package: Any, - nominals_file: Any, + nominals_package: Any = None, + nominals_file: Any = None, robot_calibration_path: Any = "/etc/nova/calibration/isaac_calibration.urdf", - override_path: Any = None) -> Action: + override_path: Any = None, + condition: lut.Substitution = None) -> lut.Action: """ Loads an URDF file and adds a robot state publisher node. 
We select the first existing URDF file based on the following priorities: @@ -134,7 +148,7 @@ def add_robot_description( """ - def impl(context: LaunchContext) -> Action: + def impl(context: lut.LaunchContext) -> lut.Action: nominals_package_str = perform_context(context, nominals_package) nominals_file_str = perform_context(context, nominals_file) robot_calibration_path_str = perform_context(context, robot_calibration_path) @@ -142,12 +156,15 @@ def impl(context: LaunchContext) -> Action: override_urdf = pathlib.Path(override_path_str or '') calibrated_urdf = pathlib.Path(robot_calibration_path_str) - nominals_urdf = get_path(nominals_package_str, nominals_file_str) + if nominals_package_str and nominals_file_str: + nominals_urdf = get_path(nominals_package_str, nominals_file_str) + else: + nominals_urdf = pathlib.Path() if is_valid(override_path_str): if not override_urdf.is_file(): - raise FileNotFoundError(f'[add_robot_description]: Path of override URDF ' \ - f'{override_path_str} does not exist.') + raise FileNotFoundError('[add_robot_description]: Path of override URDF ' + + f'{override_path_str} does not exist.') print(f"Using override URDF from: {override_path_str}") urdf_path = override_urdf elif calibrated_urdf.is_file(): @@ -157,63 +174,70 @@ def impl(context: LaunchContext) -> Action: print("Using nominals URDF") urdf_path = nominals_urdf else: - raise Exception(f'No robot description found.') + raise Exception('No robot description found.') - robot_description = Command(['xacro ', str(urdf_path)]) + robot_description = lut.Command(['xacro ', str(urdf_path)]) - robot_state_publisher = Node( + robot_state_publisher = lut.Node( package='robot_state_publisher', executable='robot_state_publisher', name='robot_state_publisher', parameters=[{ - 'robot_description': ParameterValue(robot_description, value_type=str), + 'robot_description': lut.ParameterValue(robot_description, value_type=str), }], ) return [robot_state_publisher] - return 
OpaqueFunction(function=impl) + return lut.OpaqueFunction(function=impl, condition=condition) def include(package: str, path: str, launch_arguments: dict | None = None, - condition: Condition = None, - delay: float | None = None): + condition: lut.Condition = None, + delay: float | None = None, + scoped: bool = False, + forwarding: bool = True): """ Include another launch file. """ launch_path = get_path(package, path) if path.endswith('.py'): - source = PythonLaunchDescriptionSource([str(launch_path)]) + source = lut.PythonLaunchDescriptionSource([str(launch_path)]) elif path.endswith('.xml'): - source = XMLLaunchDescriptionSource([str(launch_path)]) + source = lut.XMLLaunchDescriptionSource([str(launch_path)]) # Convert values to strings because launch arguments can only be strings. def make_valid_launch_argument(value: Any): - if isinstance(value, (LaunchConfiguration, Substitution)): + if isinstance(value, (lut.LaunchConfiguration, lut.Substitution)): return value return str(value) launch_arguments = { - k: make_valid_launch_argument(v) for k, v in (launch_arguments or {}).items() + k: make_valid_launch_argument(v) + for k, v in (launch_arguments or {}).items() } - include_action = IncludeLaunchDescription( + include_action = lut.IncludeLaunchDescription( source, launch_arguments=(launch_arguments or {}).items(), condition=condition, ) action = _add_delay_if_set(include_action, delay) + if scoped or not forwarding: + action = lut.GroupAction([action], scoped=scoped, forwarding=forwarding) return action def load_composable_nodes(container_name: str, - composable_nodes: list[ComposableNode], + composable_nodes: list[lut.ComposableNode], log_message: Any = None, - condition: Condition = None) -> Action: - """" Add a GroupAction that loads composable nodes and a log info depending on a condition. """ + condition: lut.Condition = None) -> lut.Action: + """" + Add a lut.GroupAction that loads composable nodes and a log info depending on a condition. 
+ """ actions = [] actions.append( - LoadComposableNodes( + lut.LoadComposableNodes( target_container=container_name, composable_node_descriptions=composable_nodes, )) @@ -234,7 +258,7 @@ def load_composable_nodes(container_name: str, actions.append(log_info(log_message)) - return GroupAction(actions, condition=condition) + return lut.GroupAction(actions, condition=condition) def get_default_negotiation_time(x86_negotiation_time_s: int = 5, @@ -278,39 +302,40 @@ def component_container(container_name: str, container_executable = 'component_container_mt' arguments.extend(['--ros-args', '--log-level', log_level]) print(f"Using container type: {container_executable}, with arguments: {arguments}") - return Node( - name=container_name, - package='rclcpp_components', - executable=container_executable, - on_exit=Shutdown(), - prefix=prefix, - arguments=arguments, - output='screen', - condition=condition) + return lut.Node(name=container_name, + package='rclcpp_components', + executable=container_executable, + on_exit=lut.Shutdown(), + prefix=prefix, + arguments=arguments, + output='screen', + condition=condition) def service_call( service: str, type: str, # pylint: disable=redefined-builtin content: str, - delay: float | None = None) -> Action: + delay: float | None = None) -> lut.Action: """ Add a service call to a launch graph. 
""" - actions: list[Action] = [] + actions: list[lut.Action] = [] actions.append( - ExecuteProcess( - cmd=[FindExecutable(name='ros2'), ' service call ', service, type, content], + lut.ExecuteProcess( + cmd=[lut.FindExecutable(name='ros2'), ' service call ', service, type, content], shell=True, )) actions.append(log_info(['Calling service ', service, '.'])) - action = GroupAction(actions=actions) + action = lut.GroupAction(actions=actions) action = _add_delay_if_set(action, delay) return action -def perform_context(context: LaunchContext, expression: Any) -> Any: - """ If the expression is a substitution perform its substitution else just return the expression. """ - if isinstance(expression, (Substitution)): +def perform_context(context: lut.LaunchContext, expression: Any) -> Any: + """ + If the expression is a substitution perform its substitution else just return the expression. + """ + if isinstance(expression, (lut.Substitution)): return expression.perform(context) else: return expression @@ -323,14 +348,15 @@ def play_rosbag(bag_path: Any, delay: Any = None, shutdown_on_exit: bool = False, additional_bag_play_args: Any = None, - condition: Substitution = None) -> Action: + condition: lut.Substitution = None) -> lut.Action: """ Add a process playing back a ros2bag to the launch graph. 
""" - def impl(context: LaunchContext) -> Action: + def impl(context: lut.LaunchContext) -> lut.Action: bag_path_str = perform_context(context, bag_path) loop_str = perform_context(context, loop) clock_str = perform_context(context, clock) rate_str = perform_context(context, rate) + shutdown_str = perform_context(context, shutdown_on_exit) delay_str = perform_context(context, delay) bag_args_str = perform_context(context, additional_bag_play_args) @@ -347,14 +373,17 @@ def impl(context: LaunchContext) -> Action: if is_valid(bag_args_str): cmd.extend((bag_args_str).split()) - print("[play_rosbag]: Running the following command:", ' '.join(cmd)) on_exit_func = None - if shutdown_on_exit: - on_exit_func = Shutdown() - bag_play_action = ExecuteProcess(cmd=cmd, output='screen', on_exit=on_exit_func) - return [_add_delay_if_set(bag_play_action, delay_str)] + if is_true(shutdown_str): + on_exit_func = lut.Shutdown() + bag_play_action = lut.ExecuteProcess(cmd=cmd, output='screen', on_exit=on_exit_func) + log_action = log_info(['Now starting playback of rosbag "', bag_path, '" with command: '] + + cmd) + + action = lut.GroupAction(actions=[log_action, bag_play_action]) + return [_add_delay_if_set(action, delay_str)] - return OpaqueFunction(function=impl, condition=condition) + return lut.OpaqueFunction(function=impl, condition=condition) def record_rosbag(topics: Any = '--all', @@ -362,10 +391,10 @@ def record_rosbag(topics: Any = '--all', bag_path: Any = None, additional_bag_record_args: Any = None, storage='mcap', - condition: Substitution = None) -> Action: + condition: lut.Substitution = None) -> lut.Action: """ Add a process recording a ros2bag to the launch graph. 
""" - def impl(context: LaunchContext) -> Action: + def impl(context: lut.LaunchContext) -> lut.Action: topics_str = perform_context(context, topics) delay_str = perform_context(context, delay) bag_path_str = perform_context(context, bag_path) @@ -386,10 +415,10 @@ def impl(context: LaunchContext) -> Action: print("[record_rosbag]: Running the following command:", ' '.join(cmd)) - bag_play_action = ExecuteProcess(cmd=cmd, output='screen') + bag_play_action = lut.ExecuteProcess(cmd=cmd, output='screen') return [_add_delay_if_set(bag_play_action, delay_str)] - return OpaqueFunction(function=impl, condition=condition) + return lut.OpaqueFunction(function=impl, condition=condition) def static_transform(parent: str, @@ -408,7 +437,7 @@ def static_transform(parent: str, translation = [str(x) for x in translation] orientation = [str(x) for x in orientation] - return Node( + return lut.Node( package='tf2_ros', name='my_stat_tf_pub', executable='static_transform_publisher', @@ -418,18 +447,18 @@ def static_transform(parent: str, ) -def shutdown_if_stderr(action: Action) -> Action: +def shutdown_if_stderr(action: lut.Action) -> lut.Action: """ Stop the app if the passed actions prints to stderr. """ - def handler(event) -> Action: + def handler(event) -> lut.Action: # pylint: disable=protected-access source_action = event._RunningProcessEvent__action error = event._ProcessIO__text - reason = f"Action '{source_action}' failed with error '{error}'." - print('Shutdown reason:', reason) - return Shutdown(reason=reason) + reason = f"lut.Action '{source_action}' failed with error '{error}'." 
+ print('lut.Shutdown reason:', reason) + return lut.Shutdown(reason=reason) - return RegisterEventHandler(OnProcessIO( + return lut.RegisterEventHandler(lut.OnProcessIO( target_action=action, on_stderr=handler, )) @@ -437,7 +466,7 @@ def handler(event) -> Action: def set_parameter(parameter: str, value: str, namespace='', condition=None): if not namespace: - return SetParameter(parameter, value, condition=condition) + return lut.SetParameter(parameter, value, condition=condition) yaml_dict = {namespace: {'ros__parameters': {parameter: value}}} path = pathlib.Path(f'/tmp/{uuid.uuid4()}.yaml') @@ -445,153 +474,161 @@ def set_parameter(parameter: str, value: str, namespace='', condition=None): with open(path, 'w') as file: yaml.dump(yaml_dict, file) - return SetParametersFromFile(str(path), condition=condition) + return lut.SetParametersFromFile(str(path), condition=condition) -def has_substring(expression: Any, substring: str) -> bool | Substitution: +def has_substring(expression: Any, substring: str) -> bool | lut.Substitution: """ A condition that's true if the expression contains a substring. Returns a substitution if the expression is a substitution else returns a boolean. """ - if isinstance(expression, (Substitution)): - return PythonExpression(['"', str(substring), '" in "', expression, '"']) + if isinstance(expression, (lut.Substitution)): + return lut.PythonExpression(['"', str(substring), '" in "', expression, '"']) else: return str(substring) in expression -def is_not(expression: Any) -> bool | Substitution: +def is_not(expression: Any) -> bool | lut.Substitution: """ Inverts and expression. Returns a substitution if the expression is a substitution else returns a boolean. 
""" - if isinstance(expression, (Substitution)): - return NotSubstitution(expression) + if isinstance(expression, (lut.Substitution)): + return lut.NotSubstitution(expression) else: return not expression -def is_empty(expression: Any) -> bool | Substitution: +def is_empty(expression: Any) -> bool | lut.Substitution: """ Checks if the expression is empty. Returns a substitution if the expression is a substitution else returns a boolean. """ - if isinstance(expression, (Substitution)): - return PythonExpression(['len("', expression, '") == 0']) + if isinstance(expression, (lut.Substitution)): + return lut.PythonExpression(['len("', expression, '") == 0']) else: return len(str(expression)) == 0 -def is_not_empty(expression: Any) -> bool | Substitution: +def is_not_empty(expression: Any) -> bool | lut.Substitution: """ - Deprecated: Use `NotSubstitution(is_empty(...))` instead. + Deprecated: Use `lut.NotSubstitution(is_empty(...))` instead. A substitution that's true if the expression is not empty. """ - return NotSubstitution(is_empty(expression)) + return lut.NotSubstitution(is_empty(expression)) -def is_none_or_null(expression: Any) -> bool | Substitution: +def is_none_or_null(expression: Any) -> bool | lut.Substitution: """ Checks if the expression is 'null' or 'none' or 'False'. Returns a substitution if the expression is a substitution else returns a boolean. 
""" - if isinstance(expression, (Substitution)): - is_none = PythonExpression(['"', expression, '".lower() == "none"']) - is_null = PythonExpression(['"', expression, '".lower() == "null"']) - return OrSubstitution(is_none, is_null) + if isinstance(expression, (lut.Substitution)): + is_none = lut.PythonExpression(['"', expression, '".lower() == "none"']) + is_null = lut.PythonExpression(['"', expression, '".lower() == "null"']) + return lut.OrSubstitution(is_none, is_null) else: return expression is None or str(expression).lower() in ['none', 'null'] -def is_true(expression: Any) -> bool | Substitution: +def is_true(expression: Any) -> bool | lut.Substitution: """ Checks if the expression is true. Returns a substitution if the expression is a substitution else returns a boolean. """ - if isinstance(expression, (Substitution)): - return PythonExpression(['"', expression, '".lower() == "true"']) + if isinstance(expression, (lut.Substitution)): + return lut.PythonExpression(['"', expression, '".lower() == "true"']) elif isinstance(expression, str): return expression.lower() == 'true' else: return bool(expression) -def is_false(expression: Any) -> bool | Substitution: +def is_false(expression: Any) -> bool | lut.Substitution: """ Checks if the expression is false. Returns a substitution if the expression is a substitution else returns a boolean. """ - if isinstance(expression, (Substitution)): - return PythonExpression(['"', expression, '".lower() == "false"']) + if isinstance(expression, (lut.Substitution)): + return lut.PythonExpression(['"', expression, '".lower() == "false"']) elif isinstance(expression, str): return expression.lower() == 'false' else: return not bool(expression) -def is_valid(expression: Any) -> bool | Substitution: +def is_valid(expression: Any) -> bool | lut.Substitution: """ Checks if the expression is valid. We define a valid expression as not being empty and not being null or none. 
Returns a substitution if the expression is a substitution else returns a boolean. """ - if isinstance(expression, (Substitution)): - return AndSubstitution( - AndSubstitution( - NotSubstitution(is_none_or_null(expression)), - NotSubstitution(is_empty(expression)), + if isinstance(expression, (lut.Substitution)): + return lut.AndSubstitution( + lut.AndSubstitution( + lut.NotSubstitution(is_none_or_null(expression)), + lut.NotSubstitution(is_empty(expression)), ), - NotSubstitution(is_false(expression)), + lut.NotSubstitution(is_false(expression)), ) else: return not is_none_or_null(expression) and not is_empty(expression) and not is_false( expression) -def is_equal(lhs: Any, rhs: Any) -> bool | Substitution: +def is_equal(lhs: Any, rhs: Any) -> bool | lut.Substitution: """ Checks if the two expressions are equal. """ - if isinstance(lhs, Substitution) or isinstance(rhs, Substitution): - return PythonExpression(["'", lhs, "' == '", rhs, "'"]) + if isinstance(lhs, lut.Substitution) or isinstance(rhs, lut.Substitution): + return lut.PythonExpression(["'", lhs, "' == '", rhs, "'"]) else: return lhs == rhs -def both_false(a: Substitution, b: Substitution) -> Substitution: +def both_false(a: lut.Substitution, b: lut.Substitution) -> lut.Substitution: """ Return substitution which is true if both arguments are false. """ - return AndSubstitution(is_not(a), is_not(b)) + return lut.AndSubstitution(is_not(a), is_not(b)) -def to_bool(expression: Substitution) -> bool | Substitution: +def to_bool(expression: lut.Substitution) -> bool | lut.Substitution: """ Returns a substitution which is the argument converted to a bool. """ return is_true(expression) -def union(a: Any, b: Any) -> Substitution: - """ Unite the expressions a and b. A and be are expected to contain comma-separated strings. """ - return PythonExpression(["','.join(list(set(('", a, "'+','+'", b, "').split(','))))"]) +def union(a: Any, b: Any) -> lut.Substitution: + """ + Unite the expressions a and b. 
A and be are expected to contain comma-separated strings. + """ + return lut.PythonExpression(["','.join(list(set(('", a, "'+','+'", b, "').split(','))))"]) -def if_else_substitution(condition: LaunchConfiguration, if_value: Any, else_value: Any) -> Any: +def if_else_substitution(condition: lut.LaunchConfiguration, if_value: Any, + else_value: Any) -> Any: """ Return if_value if the condition is true, else it returns else_value. """ - return PythonExpression( - ['"', if_value, '"if "', condition, '".lower() == "true" else"', else_value, '"']) + if isinstance(condition, lut.Substitution): + return lut.PythonExpression( + ['"', if_value, '"if "', condition, '".lower() == "true" else"', else_value, '"']) + else: + return if_value if is_true(condition) else else_value def get_dict_value(dictionary: Any, key: Any) -> Any: """ Returns the value of the item with the specified key. """ - return PythonExpression(['str(', dictionary, '.get("', key, '"))']) + return lut.PythonExpression(['str(', dictionary, '.get("', key, '"))']) def dict_values_contain_substring(dictionary: Any, substring: str) -> Any: - """ A substitution that's true if the dictionary holds a value which contains the substring. """ - return PythonExpression(["'", substring, "' in ','.join(list(", dictionary, ".values()))"]) + """ + A substitution that's true if the dictionary holds a value which contains the substring. + """ + return lut.PythonExpression(["'", substring, "' in ','.join(list(", dictionary, ".values()))"]) def get_keys_with_substring_in_value(dictionary: Any, substring: str) -> Any: """ Return all keys of the items with a value containing the substring. 
""" - return PythonExpression([ + return lut.PythonExpression([ "','.join(list(key for key, value in ", dictionary, ".items() if '", substring, "' in value))" ]) @@ -599,11 +636,12 @@ def get_keys_with_substring_in_value(dictionary: Any, substring: str) -> Any: def remove_substring_from_dict_values(dictionary: Any, substring: str) -> Any: """ Return a dict with a substring being removed from all values of an input dict. """ - return PythonExpression([ + return lut.PythonExpression([ "{ key: ','.join(filter(lambda x: x != '", substring, "', value.split(','))) for key, value in", dictionary, ".items()}" ]) + def remove_substrings_from_dict_values(dictionary: Any, substrings: str) -> Any: """ Return a dict with all substrings being removed from all values of an input dict. """ for substring in substrings: @@ -611,34 +649,34 @@ def remove_substrings_from_dict_values(dictionary: Any, substrings: str) -> Any: return dictionary -def assert_path_exists(expression: LaunchConfiguration, condition=None) -> Action: +def assert_path_exists(expression: lut.LaunchConfiguration, condition=None) -> lut.Action: """ A condition that's true if the expression evalutes to 'False' in Python. Note that the default UnlessCondition is only true if the expression is 'false' or '0'. This is more generic, ie. it would also return true for a None type or an empty string. """ - def impl(context: LaunchContext) -> None: + def impl(context: lut.LaunchContext) -> None: path = pathlib.Path(expression.perform(context)) assert path.exists(), f'Path {path} does not exist.' - return OpaqueFunction(function=impl, condition=condition) + return lut.OpaqueFunction(function=impl, condition=condition) -def assert_condition(assert_message: str, condition: Condition) -> Action: +def assert_condition(assert_message: str, condition: lut.Condition) -> lut.Action: """ Asserting the condition and printing the assert message if the condition is false. 
""" - def impl(context: LaunchContext) -> None: + def impl(context: lut.LaunchContext) -> None: assert False, assert_message - return OpaqueFunction(function=impl, condition=condition) + return lut.OpaqueFunction(function=impl, condition=condition) -def log_info(msg, condition=None) -> Action: +def log_info(msg, condition=None) -> lut.Action: """ Helper to create a message that is logged from ros launch. """ - return LogInfo(msg=msg, condition=condition) + return lut.LogInfo(msg=msg, condition=condition) def get_nova_system_info(path: str = '/etc/nova/systeminfo.yaml') -> dict: @@ -653,8 +691,8 @@ def get_nova_robot(path: str = '/etc/nova/manager_selection') -> NovaRobot: """ Get the nova robot name stored in the manager_selection file created by nova init. """ pathlib_path = pathlib.Path(path) if not pathlib_path.exists(): - raise FileNotFoundError(f'[get_nova_robot]: manager selection file ' \ - f'{pathlib_path} does not exist.') + raise FileNotFoundError('[get_nova_robot]: manager selection file ' + + f'{pathlib_path} does not exist.') name = pathlib_path.read_text().strip('\n') if name == 'nova-carter': print(f'Detected NovaRobot: {NovaRobot.NOVA_CARTER.name}') @@ -674,8 +712,6 @@ def get_isaac_ros_ws_path() -> str: isaac_ros_ws_path = os.environ.get('ISAAC_ROS_WS') if isaac_ros_ws_path is None: isaac_ros_ws_path = "/workspaces/isaac_ros-dev" - print( - f"Warning: Isaac ROS workspace path requested, but environment variable ISAAC_ROS_WS " - "not set. Returning default path {isaac_ros_ws_path}" - ) + print("Warning: Isaac ROS workspace path requested, but environment variable ISAAC_ROS_WS " + f"not set. 
Returning default path {isaac_ros_ws_path}") return isaac_ros_ws_path diff --git a/isaac_ros_launch_utils/package.xml b/isaac_ros_launch_utils/package.xml index be26e1fa..5a8cecee 100644 --- a/isaac_ros_launch_utils/package.xml +++ b/isaac_ros_launch_utils/package.xml @@ -2,12 +2,13 @@ isaac_ros_launch_utils - 3.1.0 + 3.2.0 Helper functions to simplify ROS2 launch files. Isaac ROS Maintainers Apache-2.0 https://developer.nvidia.com/isaac-ros-gems/ Lionel Gulich + isaac_ros_common ament_python diff --git a/isaac_ros_launch_utils/setup.py b/isaac_ros_launch_utils/setup.py index 86e24b69..c5d685d3 100644 --- a/isaac_ros_launch_utils/setup.py +++ b/isaac_ros_launch_utils/setup.py @@ -15,7 +15,29 @@ # # SPDX-License-Identifier: Apache-2.0 from setuptools import setup +import sys +import importlib.util +from ament_index_python.packages import get_resource +from pathlib import Path + +ISAAC_ROS_COMMON_PATH = get_resource( + 'isaac_ros_common_scripts_path', + 'isaac_ros_common' +)[0] + +ISAAC_ROS_COMMON_VERSION_INFO = Path(ISAAC_ROS_COMMON_PATH) / 'isaac_ros_common-version-info.py' + +spec = importlib.util.spec_from_file_location( + 'isaac_ros_common_version_info', + ISAAC_ROS_COMMON_VERSION_INFO +) + +isaac_ros_common_version_info = importlib.util.module_from_spec(spec) +sys.modules['isaac_ros_common_version_info'] = isaac_ros_common_version_info +spec.loader.exec_module(isaac_ros_common_version_info) + +from isaac_ros_common_version_info import GenerateVersionInfoCommand # noqa: E402, I100 PACKAGE_NAME = 'isaac_ros_launch_utils' LICENSE = """ @@ -43,4 +65,7 @@ license=LICENSE, tests_require=[], entry_points={}, + cmdclass={ + 'build_py': GenerateVersionInfoCommand, + }, ) diff --git a/isaac_ros_nitros_bridge_interfaces/CMakeLists.txt b/isaac_ros_nitros_bridge_interfaces/CMakeLists.txt index b1575ceb..3bef8ae5 100644 --- a/isaac_ros_nitros_bridge_interfaces/CMakeLists.txt +++ b/isaac_ros_nitros_bridge_interfaces/CMakeLists.txt @@ -34,7 +34,8 @@ 
ament_auto_find_build_dependencies() find_package(rosidl_default_generators REQUIRED) rosidl_generate_interfaces(${PROJECT_NAME} msg/NitrosBridgeImage.msg - DEPENDENCIES std_msgs + msg/NitrosBridgeTensorList.msg + DEPENDENCIES std_msgs isaac_ros_tensor_list_interfaces ) ament_export_dependencies(rosidl_default_runtime) @@ -43,4 +44,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeImage.msg b/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeImage.msg index 0c703fd3..aeecc72b 100644 --- a/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeImage.msg +++ b/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeImage.msg @@ -4,5 +4,9 @@ uint32 width string encoding uint8 is_bigendian uint32 step + # Filled with PID and file descriptor(exported from GPU memory) -int32[] data +int32[] data # (PID, FD) +uint8[] cuda_event_handle +string uid +uint32 device_id \ No newline at end of file diff --git a/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeTensorList.msg b/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeTensorList.msg new file mode 100644 index 00000000..bb7d7a58 --- /dev/null +++ b/isaac_ros_nitros_bridge_interfaces/msg/NitrosBridgeTensorList.msg @@ -0,0 +1,11 @@ +std_msgs/Header header + +# A list of tensors +isaac_ros_tensor_list_interfaces/Tensor[] tensors + +int32 pid +int32 fd + +uint8[] cuda_event_handle +string uid +uint32 device_id \ No newline at end of file diff --git a/isaac_ros_nitros_bridge_interfaces/package.xml b/isaac_ros_nitros_bridge_interfaces/package.xml index 92817be9..5c00a46d 100644 --- a/isaac_ros_nitros_bridge_interfaces/package.xml +++ 
b/isaac_ros_nitros_bridge_interfaces/package.xml @@ -13,7 +13,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_nitros_bridge_interfaces - 3.1.0 + 3.2.0 Interfaces for Isaac ROS NITROS Bridge Msgs Isaac ROS Maintainers @@ -24,10 +24,12 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. ament_cmake_auto rosidl_default_generators + isaac_ros_common rosidl_default_runtime std_msgs + isaac_ros_tensor_list_interfaces ament_lint_auto ament_lint_common diff --git a/isaac_ros_nova_interfaces/CMakeLists.txt b/isaac_ros_nova_interfaces/CMakeLists.txt index de5cf668..ba85f1e4 100644 --- a/isaac_ros_nova_interfaces/CMakeLists.txt +++ b/isaac_ros_nova_interfaces/CMakeLists.txt @@ -44,4 +44,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_nova_interfaces/package.xml b/isaac_ros_nova_interfaces/package.xml index 87e34248..de5a3a57 100644 --- a/isaac_ros_nova_interfaces/package.xml +++ b/isaac_ros_nova_interfaces/package.xml @@ -13,7 +13,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_nova_interfaces - 3.1.0 + 3.2.0 Interfaces for Isaac ROS Nova Isaac ROS Maintainers @@ -27,6 +27,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. 
ament_cmake_auto rosidl_default_generators + isaac_ros_common ament_lint_auto ament_lint_common diff --git a/isaac_ros_pointcloud_interfaces/CMakeLists.txt b/isaac_ros_pointcloud_interfaces/CMakeLists.txt index 94ff2650..fa00e2e4 100644 --- a/isaac_ros_pointcloud_interfaces/CMakeLists.txt +++ b/isaac_ros_pointcloud_interfaces/CMakeLists.txt @@ -46,4 +46,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_pointcloud_interfaces/package.xml b/isaac_ros_pointcloud_interfaces/package.xml index fc75b663..6d40acdc 100644 --- a/isaac_ros_pointcloud_interfaces/package.xml +++ b/isaac_ros_pointcloud_interfaces/package.xml @@ -13,7 +13,7 @@ isaac_ros_pointcloud_interfaces - 3.1.0 + 3.2.0 Pointcloud interfaces for Isaac ROS NITROS Isaac ROS Maintainers @@ -25,6 +25,7 @@ std_msgs rosidl_default_generators + isaac_ros_common rosidl_default_runtime ament_lint_auto diff --git a/isaac_ros_r2b_galileo/CMakeLists.txt b/isaac_ros_r2b_galileo/CMakeLists.txt new file mode 100644 index 00000000..c130622a --- /dev/null +++ b/isaac_ros_r2b_galileo/CMakeLists.txt @@ -0,0 +1,50 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +cmake_minimum_required(VERSION 3.22.1) +project(isaac_ros_r2b_galileo) + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +include(FetchContent) + +set(DATASET_NAME r2b_galileo) +set(BASE_URL https://api.ngc.nvidia.com/v2/resources/org/nvidia/team/isaac/r2bdataset2024/1/files?redirect=true&path=r2b_galileo) + +FetchContent_Declare( + metadata + URL ${BASE_URL}/metadata.yaml + DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/data/${DATASET_NAME}/ + DOWNLOAD_NO_EXTRACT TRUE +) + +FetchContent_Declare( + mcap + URL ${BASE_URL}/${DATASET_NAME}_0.mcap + DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/data/${DATASET_NAME}/ + DOWNLOAD_NO_EXTRACT TRUE +) + +FetchContent_MakeAvailable(metadata mcap) + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + +ament_auto_package(INSTALL_TO_SHARE ${CMAKE_CURRENT_BINARY_DIR}/data) diff --git a/isaac_ros_r2b_galileo/package.xml b/isaac_ros_r2b_galileo/package.xml new file mode 100644 index 00000000..d11fb845 --- /dev/null +++ b/isaac_ros_r2b_galileo/package.xml @@ -0,0 +1,17 @@ + + + + isaac_ros_r2b_galileo + 3.2.0 + Package to provide the r2b_galileo dataset for testing + Isaac ROS Maintainers + Apache-2.0 + https://developer.nvidia.com/isaac-ros-gems/ + + + ament_cmake + + + ament_cmake_auto + isaac_ros_common + diff --git a/isaac_ros_rosbag_utils/config/edex_extraction_nova.yaml b/isaac_ros_rosbag_utils/config/edex_extraction_nova.yaml new file mode 100644 index 00000000..18a8b3f1 --- /dev/null +++ b/isaac_ros_rosbag_utils/config/edex_extraction_nova.yaml @@ -0,0 +1,27 @@ +# Topic names: +camera_info_topics: + - /front_stereo_camera/left/camera_info 
+ - /front_stereo_camera/right/camera_info + - /back_stereo_camera/left/camera_info + - /back_stereo_camera/right/camera_info + - /left_stereo_camera/left/camera_info + - /left_stereo_camera/right/camera_info + - /right_stereo_camera/left/camera_info + - /right_stereo_camera/right/camera_info +image_topics: + - /front_stereo_camera/left/image_compressed + - /front_stereo_camera/right/image_compressed + - /back_stereo_camera/left/image_compressed + - /back_stereo_camera/right/image_compressed + - /left_stereo_camera/left/image_compressed + - /left_stereo_camera/right/image_compressed + - /right_stereo_camera/left/image_compressed + - /right_stereo_camera/right/image_compressed + +# Frame names: +rig_frame: base_link +imu_frame: front_stereo_camera_imu + +# Resize images to use less space. +output_width: 768 +output_height: 480 diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/__init__.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/__init__.py new file mode 100644 index 00000000..34d9bfea --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/__init__.py @@ -0,0 +1,16 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_edex_extraction.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_edex_extraction.py new file mode 100644 index 00000000..3c7ddcc1 --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_edex_extraction.py @@ -0,0 +1,401 @@ +import json +import logging +import math +import os +import pathlib +import shutil +from typing import Any + +import numpy as np +import pandas as pd +import pydantic + +from rosbags import highlevel +from pytransform3d import transform_manager + +from isaac_ros_rosbag_utils import rosbag_image_extraction +from isaac_ros_rosbag_utils import rosbag_video_extraction +from isaac_ros_rosbag_utils import rosbag_tf_extraction +from isaac_ros_rosbag_utils import rosbag_urdf_extraction + +CV_T_ROS = np.array([ + [00, -1, 0, 0], + [00, 00, 1, 0], + [-1, 00, 0, 0], + [00, 00, 0, 1], +]) +ROS_T_CV = np.linalg.inv(CV_T_ROS) +CAMERA_OPTICAL_ROS_T_CAMERA_OPTICAL_CV = np.array([ + [1, 00, 00, 0], + [0, -1, 00, 0], + [0, 00, -1, 0], + [0, 00, 00, 1], +]) + + +class Config(pydantic.BaseModel): + """Configuration for the bag to edex converter.""" + # Path of the rosbag used for extraction. + rosbag_path: pathlib.Path + # Path of the generated edex. + edex_path: pathlib.Path + # Topics used to get the camera's intrinsics (and extrinsics if frames are not set explicitly). + camera_info_topics: list[str] + # Topics used to extract images. Must be the same length as camera_info_topics. + image_topics: list[str] + # Topic used to get IMU measurements. + imu_topic: str | None = None + # Frames used to acquire the extrinsics. If not set the frames from the messages will be used: + rig_frame: str + camera_optical_frames: list[str] | None = None + imu_frame: str | None = None + # Number of workers used in image extraction. + num_workers: int | None = None + # Threshold used for syncing images in the same frame. 
+ sync_threshold_ns: int = int(0.001 * 10**9) + # Width and height used to resize the extracted images. + output_width: int | None = None + output_height: int | None = None + output_format: str | None = None + + @pydantic.model_validator(mode='after') + def check_fields(self): + """ Preprocess the values and then validate that all members are valid. """ + if not self.rosbag_path.exists(): + raise ValueError(f"Path '{self.rosbag_path}' does not exist") + if len(self.image_topics) != len(self.camera_info_topics): + raise ValueError("Need same number of image topics as camera info topics.") + if self.camera_optical_frames: + if len(self.camera_optical_frames) != len(self.camera_info_topics): + raise ValueError( + "Need same number of camera optical frames as camera info topics.") + return self + + +def to_edex_format(pose_matrix: np.array) -> list[list[float]]: + """ Convert a 4x4 pose matrix to the 3x4 format that edex expects. """ + # Do some safety checks to make sure that the matrix is a valid pose matrix. + assert pose_matrix.shape == (4, 4) + assert math.isclose(np.linalg.det(pose_matrix), 1.0) + assert pose_matrix[3, 3] == 1.0 + return pose_matrix[0:3, :].tolist() + + +def log_rosbag_info(reader: highlevel.AnyReader): + """Log the topics and message types of all message channels in the rosbag.""" + logs = [f'\t- {c.topic}: {c.msgtype}' for c in reader.connections] + logs = sorted(logs) + # pylint: disable=logging-not-lazy + logging.info('Found the following topics in rosbag:\n' + '\n'.join(logs)) + + +def get_first_message(reader: highlevel.AnyReader, topics: list[str]) -> list[object]: + """ Get the first message of every topic. 
""" + connections = [c for c in reader.connections if c.topic in topics] + topic_and_first_msg = {} + for connection, _, rawdata in reader.messages(connections): + msg = reader.deserialize(rawdata, connection.msgtype) + topic_and_first_msg[connection.topic] = msg + if len(topic_and_first_msg) == len(topics): + break + + # Generate the list in the same order as the input topics. + return [topic_and_first_msg[topic] for topic in topics] + + +def synchronize_images(timestamps_df: pd.DataFrame, images_base_path: pathlib.Path, + sync_threshold_ns: int) -> pd.DataFrame: + """ + Synchronize the images based on their timestamp. This will also modify/move the images on + disk. Returns a dataframe with the synchronized timestamps. + """ + # Our strategy is to iterate through the timestamps from the front. If all front stamps are + # inside of the threshold we have a match. Else we increment the index of the earliest + # timestamp in the front set. + + # Setup helper objects. + topics = timestamps_df.columns + front_idx = {topic: 0 for topic in topics} + frame_idx = 0 + synced_timestamps: dict[str, list[int]] = {topic: [] for topic in topics} + + # Iterate until we reach the end of one image stream. + while all(front_idx[topic] < timestamps_df.shape[0] for topic in topics): + # Update the front values list. + front = [timestamps_df[topic][idx] for topic, idx in front_idx.items()] + + # Values are nan if we reached the end of an image stream. + if any(np.isnan(front)): + break + + argmin = np.argmin(front) + argmax = np.argmax(front) + if front[argmax] - front[argmin] < sync_threshold_ns: + # Rename images on disk. 
+ for topic, old_frame_idx in front_idx.items(): + old_path = rosbag_image_extraction.get_image_path(images_base_path, topic, + old_frame_idx) + new_path = rosbag_image_extraction.get_image_path(images_base_path, topic, + frame_idx) + logging.debug(f'Renaming {old_path} to {new_path}.') + os.rename(old_path, new_path) + synced_timestamps[topic].append(timestamps_df[topic][old_frame_idx]) + # Bump all frame indices. + front_idx = {topic: front_idx[topic] + 1 for topic in topics} + frame_idx += 1 + else: + front_idx[topics[argmin]] += 1 + + # Remove the leftover images. + for topic in topics: + if np.isnan(front_idx[topic]): + continue + for old_frame_idx in range(front_idx[topic], timestamps_df.shape[0]): + old_path = rosbag_image_extraction.get_image_path(images_base_path, topic, + old_frame_idx) + old_path.unlink(missing_ok=True) + + # Store the synchronized timestamps. + synced_timestamp_df = pd.DataFrame(synced_timestamps) + synced_timestamp_df.to_csv(images_base_path / 'synced_timestamps.csv') + return synced_timestamp_df + + +def extract_images(reader: highlevel.AnyReader, config: Config) -> pd.DataFrame: + """ Extract the images from the bag deterministically. """ + timestamps_df = rosbag_image_extraction.extract_images( + reader=reader, + topics=config.image_topics, + width=config.output_width, + height=config.output_height, + format=config.output_format, + images_base_path=config.edex_path / 'images', + ) + synced_timestamps_df = synchronize_images(timestamps_df, config.edex_path / 'images', + config.sync_threshold_ns) + return synced_timestamps_df + + +def extract_imu_stream(reader: highlevel.AnyReader, config: Config): + """ Extract all imu messages from the bag and store to disk. 
""" + imu_path = config.edex_path / 'imu.jsonl' + logging.info(f"Writing imu data '{imu_path}'.") + with open(imu_path, 'w', encoding='utf-8') as file: + connections = [c for c in reader.connections if c.topic == config.imu_topic] + for connection, _, rawdata in reader.messages(connections): + msg = reader.deserialize(rawdata, connection.msgtype) + + imu_data = { + 'timestamp': msg.header.stamp.sec * 10**9 + msg.header.stamp.nanosec, + 'AngularVelocityX': msg.angular_velocity.x, + 'AngularVelocityY': msg.angular_velocity.y, + 'AngularVelocityZ': msg.angular_velocity.z, + 'LinearAccelerationX': msg.linear_acceleration.x, + 'LinearAccelerationY': msg.linear_acceleration.y, + 'LinearAccelerationZ': msg.linear_acceleration.z, + } + + json.dump(imu_data, file) + file.write('\n') + + +def extract_frame_metadata(synced_timestamps_df: pd.DataFrame, config: Config) -> int: + """ Extract all frame metadata and store to disk. Returns the number of found frames. """ + topics = synced_timestamps_df.columns + num_frames = synced_timestamps_df.shape[0] + + with (config.edex_path / 'frame_metadata.jsonl').open('w') as outfile: + for frame_idx in range(num_frames): + timestamps = synced_timestamps_df.iloc[frame_idx] + + cams_list = [] + for camera_idx, topic in enumerate(topics): + path = rosbag_image_extraction.get_image_path(pathlib.Path('images'), topic, + frame_idx) + cams_list.append({ + 'id': camera_idx, + 'filename': str(path), + 'timestamp': int(timestamps.iloc[camera_idx]), + }) + + out_line = {'frame_id': frame_idx, 'cams': cams_list} + json.dump(out_line, outfile) + outfile.write('\n') + + return num_frames + + +def get_imu_metadata(imu_msg: Any, tf_manager: transform_manager.TransformManager, + config: Config) -> dict: + """ Create the imu metadata needed for the stereo.edex file. 
""" + if not config.imu_frame: + config.imu_frame = imu_msg.header.frame_id + + rig_ros_T_imu_ros = tf_manager.get_transform(config.imu_frame, config.rig_frame) + rig_cv_T_imu_cv = CV_T_ROS @ rig_ros_T_imu_ros @ ROS_T_CV + imu_metadata = { + 'g': [0.0, -9.81, 0.0], + 'measurements': 'imu.jsonl', + 'transform': to_edex_format(rig_cv_T_imu_cv), + } + return imu_metadata + + +def get_camera_metadata(camera_idx: int, camera_msg: Any, + tf_manager: transform_manager.TransformManager, config: Config) -> dict: + """ Create the camera metadata needed for the stereo.edex file. """ + assert camera_msg.distortion_model == 'rational_polynomial', \ + ('Bag to edex converter cannot (yet) handle other distortion models than ' + + "'rational_polynomial'") + + if config.camera_optical_frames: + camera_optical_frame = config.camera_optical_frames[camera_idx] + else: + camera_optical_frame = camera_msg.header.frame_id + + rig_ros_T_camera_optical_ros = tf_manager.get_transform(camera_optical_frame, config.rig_frame) + rig_cv_T_camera_optical_cv = \ + CV_T_ROS @ rig_ros_T_camera_optical_ros @ CAMERA_OPTICAL_ROS_T_CAMERA_OPTICAL_CV + + width_ratio = config.output_width / camera_msg.width if config.output_width else 1.0 + height_ratio = config.output_height / camera_msg.height if config.output_height else 1.0 + if width_ratio != height_ratio: + logging.warning('The resized images do not have the same aspect ratio. This may lead ' + 'to incorrect results.') + + sx, sy = int(width_ratio * camera_msg.width), int(height_ratio * camera_msg.height) + # Focal length and principal point of the raw camera. + # [fx 0 cx] + # K = [ 0 fy cy] + # [ 0 0 1] + fx, fy = (width_ratio * camera_msg.k[0]), (height_ratio * camera_msg.k[4]) # noqa: F841 + cx, cy = (width_ratio * camera_msg.k[2]), (height_ratio * camera_msg.k[5]) # noqa: F841 + # Focal length and principal point of the rectified camera. 
+ # [fx' 0 cx' Tx] + # P = [ 0 fy' cy' Ty] + # [ 0 0 1 0] + # pylint: disable=unused-variable + fx_, fy_ = (width_ratio * camera_msg.p[0]), (height_ratio * camera_msg.p[5]) # noqa: F841 + cx_, cy_ = (width_ratio * camera_msg.p[2]), (height_ratio * camera_msg.p[6]) # noqa: F841 + tx_, ty_ = camera_msg.p[3], camera_msg.p[7] # noqa: F841 + + camera_metadata = { + 'transform': to_edex_format(rig_cv_T_camera_optical_cv), + 'intrinsics': { + 'distortion_model': 'polynomial', + 'distortion_params': camera_msg.d.tolist(), + 'focal': [fx, fy], + 'principal': [cx, cy], + 'size': [sx, sy], + }, + } + return camera_metadata + + +def extract_edex_metadata( + reader: highlevel.AnyReader, + tf_manager: transform_manager.TransformManager, + config: Config, + num_frames: int, +): + """ Create the stereo.edex metadata file. """ + camera_info_msgs = get_first_message(reader, config.camera_info_topics) + cameras_metadata = [] + for idx, msg in enumerate(camera_info_msgs): + camera_metadata = get_camera_metadata(idx, msg, tf_manager, config) + cameras_metadata.append(camera_metadata) + + a_metadata = { + 'version': '0.9', + 'frame_start': 0, + 'frame_end': num_frames, + 'cameras': cameras_metadata, + } + + if config.imu_topic: + imu_msg = get_first_message(reader, [config.imu_topic])[0] + imu_metadata = get_imu_metadata(imu_msg, tf_manager, config) + a_metadata['imu'] = imu_metadata + + sequence_paths = [ + rosbag_image_extraction.get_image_path(pathlib.Path('images'), topic, 0) + for topic in config.image_topics + ] + sequence_paths = [str(path).lstrip('/') for path in sequence_paths] + b_metadata = { + 'frame_metadata': 'frame_metadata.jsonl', + 'sequence': sequence_paths, + } + edex_metadata = [a_metadata, b_metadata] + + edex_metadata_path = config.edex_path / 'stereo.edex' + logging.info(f"Writing edex metadata to '{edex_metadata_path}'.") + json_string = json.dumps(edex_metadata, indent=2) + edex_metadata_path.write_text(json_string) + + +def extract_edex(config: Config): + """ 
Extract the entire edex using the config. """ + # Create edex path. + shutil.rmtree(config.edex_path, ignore_errors=True) + config.edex_path.mkdir(parents=True) + + # Extract the URDF from the rosbag. + tf_manager = rosbag_tf_extraction.get_static_transform_manager_from_bag(config.rosbag_path) + urdf_content = rosbag_urdf_extraction.get_urdf_from_tf_manager('robot', tf_manager) + (config.edex_path / 'robot.urdf').write_text(urdf_content) + + with highlevel.AnyReader([config.rosbag_path]) as reader: + log_rosbag_info(reader) + + # Do some quick checks that all the required data is present in the rosbag. + # If not we ignore the inexistent topics. + bag_topics = [c.topic for c in reader.connections] + image_topics = [] + camera_info_topics = [] + camera_optical_frames = [] + for idx, (image_topic, info_topic) in enumerate(zip( + config.image_topics, config.camera_info_topics)): + if image_topic not in bag_topics: + logging.warning(f"Could not find topic '{image_topic}' in rosbag. Ignoring it.") + elif info_topic not in bag_topics: + logging.warning(f"Could not find topic '{info_topic}' in rosbag. Ignoring it.") + else: + image_topics.append(image_topic) + camera_info_topics.append(info_topic) + if config.camera_optical_frames: + camera_optical_frames.append(config.camera_optical_frames[idx]) + + config.image_topics = image_topics + config.camera_info_topics = camera_info_topics + config.camera_optical_frames = camera_optical_frames or None + + # Extract all data and store to disk. + synced_timestamps_df = extract_images(reader, config) + num_frames = extract_frame_metadata(synced_timestamps_df, config) + extract_edex_metadata(reader, tf_manager, config, num_frames) + + if config.imu_topic: + extract_imu_stream(reader, config) + + logging.info(f"Finished extracting edex to '{config.edex_path}'.") + + +def extract_videos(config: Config): + """ Extract only the videos using the config. """ + # Create edex path. 
+ shutil.rmtree(config.edex_path, ignore_errors=True) + config.edex_path.mkdir(parents=True) + + with highlevel.AnyReader([config.rosbag_path]) as reader: + log_rosbag_info(reader) + + # Do some quick checks that all the required data is present in the rosbag. + bag_topics = [c.topic for c in reader.connections] + for topic in config.image_topics: + assert topic in bag_topics, f"Could not find topic '{topic}' in rosbag." + + rosbag_video_extraction.extract_videos(reader, config.image_topics, + config.edex_path / 'videos') + + logging.info(f"Finished extracting videos to '{config.edex_path}'.") diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_image_extraction.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_image_extraction.py new file mode 100644 index 00000000..cd697427 --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_image_extraction.py @@ -0,0 +1,210 @@ +""" +Contains functions to extract the images of multiple h264 encoded videos in parallel. +""" +import concurrent.futures +import logging +import os +import pathlib +import queue +import sys +import threading +import time + +import av +import pandas as pd +from rosbags import highlevel + +SYNC_THRESHOLD_NS = 0.001 * 10**9 + + +def progress_bar(iteration: int, total: int, prefix='', suffix='', line_length=80, fill='█'): + length = line_length - len(prefix) - len(suffix) + percent = ("{0:.1f}").format(100 * (iteration / float(total))) + filled_length = int(length * iteration // total) + bar = fill * filled_length + '-' * (length - filled_length) + sys.stdout.write(f'\r{prefix} |{bar}| {percent}% {suffix}') + sys.stdout.flush() + + +def pyav_format_from_ros_encoding(encoding: str) -> tuple[str, int]: + """ Convert a ros encoding to a pyav format string and num color channels. 
""" + ros_to_pyav = { + 'mono8': ('gray8', 1), + 'bgr8': ('bgr24', 3), + 'rgb8': ('rgb24', 3), + } + return ros_to_pyav[encoding] + + +def get_image_path(base_path: pathlib.Path, topic: str, frame_idx: int) -> pathlib.Path: + """ Get the path to the image with the given index in the given camera. """ + # Remove basename from the topic and remove leading slash. + last_slash_idx = topic.rfind('/') + topic = topic[:last_slash_idx] + topic = topic.lstrip('/') + if topic == '': + return base_path / f'{frame_idx:06d}.png' + return base_path / f'{topic}/{frame_idx:06d}.png' + + +def _producer( + reader: highlevel.AnyReader, + topics: list[str], + width: int | None, + height: int | None, + format: str | None, + images_base_path: pathlib.Path, + frame_queue: queue.Queue, + shutdown_event: threading.Event, +) -> pd.DataFrame: + """ A function that fills a queue with frames that should be written to disk. """ + if width or height: + assert width and height, 'Both width and height must be specified.' + + logging.debug('Started producer thread.') + logging.info(f"Writing images to '{images_base_path}'.") + + # Setup required helper objects. + decoders: dict[str, av.CodecContext] = {} + timestamps: dict[str, list[int]] = {} + camera_indices: dict[str, int] = {} + for idx, topic in enumerate(topics): + decoders[topic] = av.CodecContext.create('h264', 'r') + timestamps[topic] = [] + camera_indices[topic] = idx + # If the parent directory of the images does not exist it will fail silently. + get_image_path(images_base_path, topic, 0).parent.mkdir(parents=True, exist_ok=True) + + # Incrementally extract the images from the h264 streams. 
+ connections = [c for c in reader.connections if c.topic in topics] + logging.info('Starting to extract images from rosbag.') + + num_messages = sum(1 for _ in reader.messages(connections)) + + for idx, (connection, _, rawdata) in enumerate(reader.messages(connections)): + if idx % 100 == 0: + progress_bar(idx, num_messages, prefix='Extracting images...', suffix='Done') + + # Deserialize the ROS message. + topic = connection.topic + msg = reader.deserialize(rawdata, connection.msgtype) + + if connection.msgtype == 'sensor_msgs/msg/Image': + # Directly use the uncompressed frame. + format, num_color_channels = pyav_format_from_ros_encoding(msg.encoding) + if num_color_channels == 1: + decoded_frame = av.VideoFrame.from_ndarray( + msg.data.reshape(msg.height, msg.width), + format=format, + ) + else: + decoded_frame = av.VideoFrame.from_ndarray( + msg.data.reshape(msg.height, msg.width, num_color_channels), + format=format, + ) + elif connection.msgtype == 'sensor_msgs/msg/CompressedImage': + # Decode the h264 frame. + encoded_frame_bytes = msg.data.tobytes() + try: + encoded_packet = av.packet.Packet(encoded_frame_bytes) + decoded_frames = decoders[topic].decode(encoded_packet) + except av.error.InvalidDataError: + continue + + assert len(decoded_frames) == 1 + decoded_frame = decoded_frames[0] + else: + raise ValueError(f"Unknown message type '{connection.msgtype}' in topic '{topic}'.") + + decoded_frame = decoded_frame.reformat( + width=width or decoded_frame.width, + height=height or decoded_frame.height, + format=format or decoded_frame.format, + ) + + # Store the timestamp corresponding to the frame. + frame_idx = len(timestamps[topic]) + timestamps[topic].append(msg.header.stamp.sec * 10**9 + msg.header.stamp.nanosec) + + frame_queue.put((images_base_path, topic, frame_idx, decoded_frame)) + + # Print new line to finish progress bar. 
+ print("") + + shutdown_event.set() + logging.info('Finished extracting images from rosbag.') + + # Append -1 to all timestamps lists to make them the same length. + max_len = max(len(timestamps_list) for timestamps_list in timestamps.values()) + for timestamps_list in timestamps.values(): + timestamps_list += [-1] * (max_len - len(timestamps_list)) + + timestamp_df = pd.DataFrame(timestamps) + timestamp_df.to_csv(images_base_path / 'raw_timestamps.csv') + + return timestamp_df + + +def _consumer(thread_id: int, frame_queue: queue.Queue, shutdown_event: threading.Event) -> None: + """ A function that consumes the next frame from the queue and writes it to disk. """ + logging.info(f'Started consumer thread {thread_id}.') + while True: + try: + images_base_path, topic, frame_idx, frame = frame_queue.get(timeout=1) + image_path = get_image_path(images_base_path, topic, frame_idx) + logging.debug(f'Writing frame {topic}/{frame_idx} to {image_path}.') + frame.to_image().save(str(image_path)) + except queue.Empty: + # If the queue is empty and the shutdown event is set the producer is done, thus we can + # stop. + if shutdown_event.is_set(): + break + logging.info(f'Finished consumer thread {thread_id}.') + + +def extract_images( + reader: highlevel.AnyReader, + topics: list[str], + width: int, + height: int, + format: str, + images_base_path: pathlib.Path, + num_workers: int = -1, +) -> pd.DataFrame: + """ + Extract all images from a rosbag. + """ + if num_workers == -1: + num_workers = 2 * (os.cpu_count() or 1) + + if num_workers < 2: + logging.warning(f"Need at least 2' workers, but " + f'only has {num_workers} workers. 
Increasing num_workers') + num_workers = 2 + + shutdown_event = threading.Event() + frame_queue: queue.Queue[tuple] = queue.Queue(maxsize=num_workers * 2) + + start = time.time() + logging.info(f'Starting thread pool with {num_workers} workers.') + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + # Start 1 thread for the producer. + producer_future = executor.submit(_producer, reader, topics, width, height, format, + images_base_path, frame_queue, shutdown_event) + + # Use all remaining resources for the consumers. + consumer_futures = [ + executor.submit(_consumer, i, frame_queue, shutdown_event) + for i in range(num_workers - 1) + ] + + # Wait for the producers and consumers to finish. + all_futures = [producer_future] + consumer_futures + for future in concurrent.futures.as_completed(all_futures): + future.result() + timestamps_df = producer_future.result() + + end = time.time() + duration_s = end - start + logging.info(f'Finished extracting all images. Took {duration_s} seconds.') + return timestamps_df diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_tf_extraction.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_tf_extraction.py new file mode 100644 index 00000000..cc9c0bcc --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_tf_extraction.py @@ -0,0 +1,72 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. 
+ +import collections +import pathlib + +import pandas as pd +from pytransform3d import trajectories +from pytransform3d import transform_manager + +from rosbags import highlevel + + +def _extract_tf_dataframe_from_bag(rosbag_path: pathlib.Path) -> pd.DataFrame: + """ + Read a bag and return a pandas dataframe containing the transforms from /tf and /tf_static. + """ + data = collections.defaultdict(list) + with highlevel.AnyReader([rosbag_path]) as reader: + connections = [x for x in reader.connections if x.topic in ['/tf_static', '/tf']] + for connection, timestamp, rawdata in reader.messages(connections=connections): + msg = reader.deserialize(rawdata, connection.msgtype) + for tf in msg.transforms: + data['topic'].append(connection.topic) + data['sec'].append(tf.header.stamp.sec) + data['nanosec'].append(tf.header.stamp.nanosec) + data['parent'].append(tf.header.frame_id) + data['child'].append(tf.child_frame_id) + data['x'].append(tf.transform.translation.x) + data['y'].append(tf.transform.translation.y) + data['z'].append(tf.transform.translation.z) + data['qw'].append(tf.transform.rotation.w) + data['qx'].append(tf.transform.rotation.x) + data['qy'].append(tf.transform.rotation.y) + data['qz'].append(tf.transform.rotation.z) + df = pd.DataFrame(data) + df['time_s'] = df['sec'] + df['nanosec'] * 10**9 + return df + + +def get_transform_manager_from_bag( + rosbag_path: pathlib.Path) -> transform_manager.TemporalTransformManager: + """Reads a bag and returns a TemporalTransformManager containing all transforms inside.""" + df = _extract_tf_dataframe_from_bag(rosbag_path) + + tf_manager = transform_manager.TemporalTransformManager() + for (parent, child), group in df.groupby(['parent', 'child']): + timestamps = group['time_s'].to_numpy() + pqs = group[['x', 'y', 'z', 'qw', 'qx', 'qy', 'qz']].to_numpy() + tf_series = transform_manager.NumpyTimeseriesTransform(timestamps, pqs) + tf_manager.add_transform(child, parent, tf_series) + + return tf_manager + + +def 
get_static_transform_manager_from_bag( + rosbag_path: pathlib.Path) -> transform_manager.TransformManager: + """Reads a bag and returns a TransformManager containing the static transforms only.""" + df = _extract_tf_dataframe_from_bag(rosbag_path) + # Filter out any non-static tfs. + df_static = df[df['topic'] == '/tf_static'].reset_index() + transforms = trajectories.transforms_from_pqs(df_static[['x', 'y', 'z', 'qw', 'qx', 'qy', + 'qz']]) + tf_manager = transform_manager.TransformManager() + for index, row in df_static.iterrows(): + tf_manager.add_transform(row['child'], row['parent'], transforms[index]) + return tf_manager diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_urdf_extraction.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_urdf_extraction.py new file mode 100644 index 00000000..9cedb10c --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_urdf_extraction.py @@ -0,0 +1,152 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +import pathlib +import xml.etree.ElementTree as ET +from typing import Literal + +import numpy as np +import pydantic +from pytransform3d import transformations +from pytransform3d import rotations +from pytransform3d import transform_manager + +from isaac_ros_rosbag_utils import rosbag_tf_extraction + + +class Translation(pydantic.BaseModel): + """ Representation of a translation. 
""" + translation: list[float] + + def to_urdf(self) -> str: + """Returns the translation element in the URDF format.""" + return " ".join(map(str, self.translation)) + + +class Rotation(pydantic.BaseModel): + """ Representation of a rotation. """ + rotation: list[float] + + def to_euler(self, order: Literal['xyz', 'xzy', 'yxz', 'yzx', 'zxy', 'zyx']) -> np.ndarray: + """Returns the euler representation of the rotation vector.""" + idx_from_axis = {'x': 0, 'y': 1, 'z': 2} + i = idx_from_axis[order[0]] + j = idx_from_axis[order[1]] + k = idx_from_axis[order[2]] + return rotations.euler_from_quaternion(self.rotation, i, j, k, extrinsic=False) + + def to_urdf(self) -> str: + """Returns the rotation element in the URDF format.""" + return " ".join(map(str, self.to_euler('xyz'))) + + +class Transform(pydantic.BaseModel): + """ Representation of a transform. """ + translation: Translation + rotation: Rotation + + @staticmethod + def from_homogenous_matrix(tf: np.ndarray) -> 'Transform': + """Returns the transform from the given tf_name in the given TransformManager.""" + pq = transformations.pq_from_transform(tf) + return Transform(translation=Translation(translation=pq[:3]), + rotation=Rotation(rotation=pq[3:])) + + def to_urdf(self) -> ET.Element: + """Returns the transform element in the URDF format.""" + return ET.Element("origin", xyz=self.translation.to_urdf(), rpy=self.rotation.to_urdf()) + + +class Link(pydantic.BaseModel): + """Representation of a link.""" + name: str + + def to_urdf(self) -> ET.Element: + """Returns the link element in the URDF format.""" + return ET.Element("link", name=self.name) + + +class Joint(pydantic.BaseModel): + """Representation of a joint.""" + name: str + type: Literal['fixed'] + parent: Link + child: Link + transform: Transform + + def to_urdf(self) -> ET.Element: + """Returns the joint element in the URDF format.""" + element = ET.Element("joint", name=self.name, type=self.type) + ET.SubElement(element, "parent", 
link=self.parent.name) + ET.SubElement(element, "child", link=self.child.name) + element.append(self.transform.to_urdf()) + return element + + +class Robot: + """Representation of a robot.""" + + def __init__(self, name: str): + self._name = name + self._links = {} + self._joints = [] + + def add_link(self, link: Link): + """Add a link to the robot.""" + self._links[link.name] = link + + def get_link(self, name: str) -> Link: + """Get a link from the robot.""" + return self._links[name] + + def add_joint(self, joint: Joint): + """Add a joint to the robot.""" + self._joints.append(joint) + + def to_urdf(self) -> ET.Element: + """Returns the robot element in the URDF format.""" + root = ET.Element("robot", name=self._name) + for link in self._links.values(): + root.append(link.to_urdf()) + for joint in self._joints: + root.append(joint.to_urdf()) + return root + + +def get_urdf_from_tf_manager(name: str, tf_manager: transform_manager.TransformManager) -> str: + """Create a URDF (in string format) from a TransformManager.)""" + robot = Robot(name) + + # We potentially add some links twice, but we don't care as long as we haven't made joints yet. + for parent_frame, child_frame in tf_manager.transforms: + robot.add_link(Link(name=parent_frame)) + robot.add_link(Link(name=child_frame)) + + for parent_frame, child_frame in tf_manager.transforms: + tf = tf_manager.get_transform(parent_frame, child_frame) + robot.add_joint( + Joint( + name=f"{parent_frame}_T_{child_frame}", + type='fixed', + parent=robot.get_link(parent_frame), + child=robot.get_link(child_frame), + transform=Transform.from_homogenous_matrix(tf), + )) + + urdf_xml = robot.to_urdf() + # Intent is needed to format the XML nicely. 
+ ET.indent(urdf_xml, 4 * ' ') + urdf_string = ET.tostring(urdf_xml, encoding='unicode') + return urdf_string + + +def extract_urdf(name: str, rosbag_path: pathlib.Path, output_path: pathlib.Path) -> None: + """Extracts the URDF from a rosbag and saves it to the output path.""" + tf_manager = rosbag_tf_extraction.get_static_transform_manager_from_bag(rosbag_path) + urdf_content = get_urdf_from_tf_manager(name, tf_manager) + output_path.write_text(urdf_content) diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_video_extraction.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_video_extraction.py new file mode 100644 index 00000000..b1a4ec3a --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/rosbag_video_extraction.py @@ -0,0 +1,39 @@ +""" Functions to extract videos from rosbags. """ +import logging +import pathlib + +from rosbags import highlevel + + +def get_video_path(base_path: pathlib.Path, topic: str) -> pathlib.Path: + """ Get the path to the video with the given topic. """ + # Remove basename from the topic and remove leading slash. + last_slash_idx = topic.rfind('/') + topic = topic[:last_slash_idx] + topic = topic.lstrip('/') + topic = topic.replace('/', '_') + return base_path / f'{topic}.h264' + + +def extract_video(reader: highlevel.AnyReader, topic: str, video_path: pathlib.Path): + """ Extract an image topic from a rosbag and store as an h264 encoded video. """ + # Store topic as an h264 encoded video to disk. 
+ logging.info(f"Writing h264 video to '{video_path}'.") + video_path.parent.mkdir(parents=True, exist_ok=True) + + connections = [c for c in reader.connections if c.topic == topic] + with video_path.open('wb') as file: + for connection, _, rawdata in reader.messages(connections): + msg = reader.deserialize(rawdata, connection.msgtype) + file.write(msg.data.tobytes()) + + +def extract_videos(reader: highlevel.AnyReader, topics: list[str], + base_video_path: pathlib.Path) -> list[pathlib.Path]: + """ Extract multiple images topics from a rosbag and store as an h264 encoded video. """ + video_paths = [] + for topic in topics: + video_path = get_video_path(base_video_path, topic) + video_paths.append(video_path) + extract_video(reader, topic, video_path) + return video_paths diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_edex.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_edex.py new file mode 100644 index 00000000..512b92c5 --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_edex.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +import argparse +import logging +import os +import pathlib + +import yaml + +from isaac_ros_rosbag_utils import rosbag_edex_extraction + + +def main(): + logging.basicConfig(level=logging.INFO) + + parser = argparse.ArgumentParser() + parser.add_argument( + '-c', + '--config_path', + type=pathlib.Path, + required=True, + help='Path to config file.', + ) + parser.add_argument( + '--rosbag_path', + type=pathlib.Path, + required=True, + help='Path to rosbag file. Can be used to override the rosbag path from the config file.', + ) + parser.add_argument( + '--rosbag_name', + type=str, + required=False, + help='Name of the rosbag. If not provided will be inferred automatically.', + ) + parser.add_argument( + '--edex_path', + type=pathlib.Path, + required=True, + help=('Path where edex is generated. 
Can be used to override the rosbag path from ' + + 'the config file.'), + ) + parser.add_argument( + '--extract_only_video', + action='store_true', + help='If set will only extract the video and not the images.', + ) + + args = parser.parse_args() + + assert args.config_path.exists(), f"Config path '{args.config_path}' does not exist." + yaml_string = args.config_path.read_text() + yaml_dict = yaml.safe_load(yaml_string) + + # We allow to override some arguments from the CLI. + if args.rosbag_path is not None: + yaml_dict['rosbag_path'] = args.rosbag_path.absolute() + if args.rosbag_name is not None: + yaml_dict['rosbag_name'] = args.rosbag_name + if args.edex_path is not None: + yaml_dict['edex_path'] = args.edex_path.absolute() + + config = rosbag_edex_extraction.Config(**yaml_dict) + + # Change working dir s.t. we resolve relative paths relative to the config file directory. + os.chdir(args.config_path.parent) + + if args.extract_only_video: + rosbag_edex_extraction.extract_videos(config) + else: + rosbag_edex_extraction.extract_edex(config) + + +if __name__ == '__main__': + main() diff --git a/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_urdf.py b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_urdf.py new file mode 100644 index 00000000..a47869bd --- /dev/null +++ b/isaac_ros_rosbag_utils/isaac_ros_rosbag_utils/scripts/extract_urdf.py @@ -0,0 +1,22 @@ +""" +Use this script to extract a URDF from the /tf_static topic in a rosbag. + +The generated URDF is minimal and only contains transforms. Physical parameters like mass, inertia +etc. are not contained. 
+""" + +import argparse +import pathlib + +from isaac_ros_rosbag_utils import rosbag_urdf_extraction + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('-n', '--name', type=str, required=True) + parser.add_argument('-r', '--rosbag_path', type=pathlib.Path, required=True) + parser.add_argument('-o', '--output_path', type=pathlib.Path, required=True) + args = parser.parse_args() + rosbag_urdf_extraction.extract_urdf(args.name, args.rosbag_path, args.output_path) + +if __name__ == '__main__': + main() diff --git a/isaac_ros_rosbag_utils/package.xml b/isaac_ros_rosbag_utils/package.xml new file mode 100644 index 00000000..3843f48c --- /dev/null +++ b/isaac_ros_rosbag_utils/package.xml @@ -0,0 +1,29 @@ + + + + isaac_ros_rosbag_utils + 3.2.0 + Utilities for working with ROS bags + Isaac ROS Maintainers + Apache-2.0 + https://developer.nvidia.com/isaac-ros-gems/ + Lionel Gulich + isaac_ros_common + + + + + + + + + + + + + isaac_ros_r2b_galileo + + + ament_python + + diff --git a/isaac_ros_rosbag_utils/requirements.txt b/isaac_ros_rosbag_utils/requirements.txt new file mode 100644 index 00000000..4c3c5103 --- /dev/null +++ b/isaac_ros_rosbag_utils/requirements.txt @@ -0,0 +1,16 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +av +numpy +pandas +pydantic +# We pin the version because newer versions are not backcompatible and do not allow to extrapolate +# tfs in the time domain. 
+pytransform3d==3.8.0 +rosbags diff --git a/isaac_ros_rosbag_utils/resource/isaac_ros_rosbag_utils b/isaac_ros_rosbag_utils/resource/isaac_ros_rosbag_utils new file mode 100644 index 00000000..e69de29b diff --git a/isaac_ros_rosbag_utils/setup.cfg b/isaac_ros_rosbag_utils/setup.cfg new file mode 100644 index 00000000..dd096177 --- /dev/null +++ b/isaac_ros_rosbag_utils/setup.cfg @@ -0,0 +1,4 @@ +[develop] +script_dir=$base/lib/isaac_ros_rosbag_utils +[install] +install_scripts=$base/lib/isaac_ros_rosbag_utils diff --git a/isaac_ros_rosbag_utils/setup.py b/isaac_ros_rosbag_utils/setup.py new file mode 100644 index 00000000..3405b246 --- /dev/null +++ b/isaac_ros_rosbag_utils/setup.py @@ -0,0 +1,81 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +import importlib.util +from pathlib import Path +import sys + +from ament_index_python.packages import get_resource +from setuptools import setup + +ISAAC_ROS_COMMON_PATH = get_resource( + 'isaac_ros_common_scripts_path', + 'isaac_ros_common' +)[0] + +ISAAC_ROS_COMMON_VERSION_INFO = Path(ISAAC_ROS_COMMON_PATH) / 'isaac_ros_common-version-info.py' + +spec = importlib.util.spec_from_file_location( + 'isaac_ros_common_version_info', + ISAAC_ROS_COMMON_VERSION_INFO +) + +isaac_ros_common_version_info = importlib.util.module_from_spec(spec) +sys.modules['isaac_ros_common_version_info'] = isaac_ros_common_version_info +spec.loader.exec_module(isaac_ros_common_version_info) + +from isaac_ros_common_version_info import GenerateVersionInfoCommand # noqa: E402, I100 + +PACKAGE_NAME = 'isaac_ros_rosbag_utils' + +LICENSE = """ +Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +NVIDIA CORPORATION and its licensors retain all intellectual property +and proprietary rights in and to this software, related documentation +and any modifications thereto. Any use, reproduction, disclosure or +distribution of this software and related documentation without an express +license agreement from NVIDIA CORPORATION is strictly prohibited. 
+""" + +setup( + name=PACKAGE_NAME, + version='3.0.1', + packages=[PACKAGE_NAME, f'{PACKAGE_NAME}.scripts'], + data_files=[ + ('share/ament_index/resource_index/packages', ['resource/' + PACKAGE_NAME]), + ('share/' + PACKAGE_NAME, ['package.xml']), + ('share/' + PACKAGE_NAME, ['requirements.txt']), + ('share/' + PACKAGE_NAME + '/config', ['config/edex_extraction_nova.yaml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='Isaac ROS Maintainers', + maintainer_email='isaac-ros-maintainers@nvidia.com', + description='Utilities for working with ROS bags', + license=LICENSE, + # Use scripts/extract_urdf.py main as entrypoint: + entry_points={ + 'console_scripts': [ + 'extract_urdf = isaac_ros_rosbag_utils.scripts.extract_urdf:main', + 'extract_edex = isaac_ros_rosbag_utils.scripts.extract_edex:main', + ], + }, + cmdclass={ + 'build_py': GenerateVersionInfoCommand, + }, + tests_require=['pytest'], +) diff --git a/isaac_ros_rosbag_utils/tests/test_rosbag_edex_extraction.py b/isaac_ros_rosbag_utils/tests/test_rosbag_edex_extraction.py new file mode 100644 index 00000000..e5a6b529 --- /dev/null +++ b/isaac_ros_rosbag_utils/tests/test_rosbag_edex_extraction.py @@ -0,0 +1,71 @@ +import pathlib +import subprocess + +# flake8: noqa + +# This is a WAR until we can specify pip deps in package.xml +REQUIREMENTS_FILE = pathlib.Path(__file__).parent.parent / 'requirements.txt' +subprocess.call(['python3', '-m', 'pip', 'install', '-r', str(REQUIREMENTS_FILE)]) + +import yaml +import ament_index_python.packages + +from isaac_ros_rosbag_utils import rosbag_edex_extraction + +SCRIPT_DIR = pathlib.Path(__file__).parent + + +def count_lines(path: pathlib.Path) -> int: + with path.open('rb') as f: + return sum(1 for _ in f) + + +def get_r2b_galileo() -> pathlib.Path: + return pathlib.Path( + ament_index_python.packages.get_package_share_directory( + 'isaac_ros_r2b_galileo')) / 'data/r2b_galileo' + + +def get_config(path: pathlib.Path, rosbag_path: pathlib.Path, + 
edex_path: pathlib.Path) -> rosbag_edex_extraction.Config: + yaml_string = path.read_text() + yaml_dict = yaml.safe_load(yaml_string) + yaml_dict['rosbag_path'] = rosbag_path + yaml_dict['edex_path'] = edex_path + return rosbag_edex_extraction.Config(**yaml_dict) + + +def test_edex_extraction(tmp_path: pathlib.Path): + edex_path = tmp_path / 'edex' + config = get_config( + SCRIPT_DIR / '../config/edex_extraction_nova.yaml', + get_r2b_galileo(), + edex_path, + ) + rosbag_edex_extraction.extract_edex(config) + + assert edex_path.is_dir() + assert (edex_path / 'robot.urdf').is_file() + assert (edex_path / 'stereo.edex').is_file() + assert (edex_path / 'frame_metadata.jsonl').is_file() + assert count_lines(edex_path / 'frame_metadata.jsonl') == 335 + + +def test_video_extraction(tmp_path: pathlib.Path): + edex_path = tmp_path / 'edex' + config = get_config( + SCRIPT_DIR / '../config/edex_extraction_nova.yaml', + get_r2b_galileo(), + edex_path, + ) + rosbag_edex_extraction.extract_videos(config) + + assert edex_path.is_dir() + assert (edex_path / 'videos/front_stereo_camera_left.h264').is_file() + assert (edex_path / 'videos/front_stereo_camera_right.h264').is_file() + assert (edex_path / 'videos/left_stereo_camera_left.h264').is_file() + assert (edex_path / 'videos/left_stereo_camera_right.h264').is_file() + assert (edex_path / 'videos/right_stereo_camera_left.h264').is_file() + assert (edex_path / 'videos/right_stereo_camera_right.h264').is_file() + assert (edex_path / 'videos/back_stereo_camera_left.h264').is_file() + assert (edex_path / 'videos/back_stereo_camera_right.h264').is_file() diff --git a/isaac_ros_rosbag_utils/tests/test_rosbag_tf_extraction.py b/isaac_ros_rosbag_utils/tests/test_rosbag_tf_extraction.py new file mode 100644 index 00000000..4b237b94 --- /dev/null +++ b/isaac_ros_rosbag_utils/tests/test_rosbag_tf_extraction.py @@ -0,0 +1,259 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +# flake8: noqa + +import pathlib +import shutil +import subprocess + +# This is a WAR until we can specify pip deps in package.xml +REQUIREMENTS_FILE = pathlib.Path(__file__).parent.parent / 'requirements.txt' +subprocess.call(['python3', '-m', 'pip', 'install', '-r', str(REQUIREMENTS_FILE)]) + +import numpy as np +from pytransform3d import rotations +from pytransform3d import transformations +import rosbags.rosbag2 +import rosbags.typesys +from rosbags.typesys.stores.ros2_humble import builtin_interfaces__msg__Time as Time +from rosbags.typesys.stores.ros2_humble import geometry_msgs__msg__Quaternion as Quaternion +from rosbags.typesys.stores.ros2_humble import geometry_msgs__msg__Transform as Transform +from rosbags.typesys.stores.ros2_humble import geometry_msgs__msg__TransformStamped as \ + TransformStamped +from rosbags.typesys.stores.ros2_humble import geometry_msgs__msg__Vector3 as Vector3 +from rosbags.typesys.stores.ros2_humble import std_msgs__msg__Header as Header +from rosbags.typesys.stores.ros2_humble import tf2_msgs__msg__TFMessage as TFMessage + +from isaac_ros_rosbag_utils import rosbag_tf_extraction + + +def sec_to_ns(s: float) -> int: + return int(s * 1e9) + + +def nanoseconds_to_msg(total_ns: int) -> Time: + ns = int(total_ns % 1e9) + s = int(total_ns // 1e9) + return Time(sec=s, nanosec=ns) + + +def tf_message_from_pq(pq: np.ndarray, parent_frame: str, child_frame: str, + stamp_ns: int) -> TFMessage: + msg = TFMessage(transforms=[ + TransformStamped( + header=Header( + stamp=nanoseconds_to_msg(stamp_ns), + frame_id=parent_frame, + ), + child_frame_id=child_frame, + 
transform=Transform( + translation=Vector3(x=pq[0], y=pq[1], z=pq[2]), + rotation=Quaternion(w=pq[3], x=pq[4], y=pq[5], z=pq[6]), + ), + ) + ]) + return msg + + +def write_tf_messages_to_bag(tf_messages: list[TFMessage], stamps_ns: list[int], + path: pathlib.Path): + assert len(tf_messages) == len(stamps_ns) + shutil.rmtree(path, ignore_errors=True) + with rosbags.rosbag2.Writer(path, version=9) as writer: + typestore = rosbags.typesys.get_typestore(rosbags.typesys.Stores.ROS2_FOXY) + connection = writer.add_connection('/tf', TFMessage.__msgtype__, typestore=typestore) + for stamp_ns, msg in zip(stamps_ns, tf_messages): + serialized_msg = typestore.serialize_cdr(msg, connection.msgtype) + writer.write(connection, stamp_ns, serialized_msg) + + +def get_linear_motion(distance: float, + axis: int, + stamp1_ns: int, + stamp2_ns: int, + frame1: str = '1', + frame2: str = '2') -> tuple[list[TFMessage], list[int]]: + assert axis < 3 + # Generate the test motion + p1 = np.array([0, 0, 0]) + q1 = np.array([1, 0, 0, 0]) + pq1 = np.hstack([p1, q1]) + msg1 = tf_message_from_pq(pq1, frame1, frame2, stamp1_ns) + + p2 = np.array([0, 0, 0]) + # Set the requested motion + p2[axis] = distance + q2 = np.array([1, 0, 0, 0]) + pq2 = np.hstack([p2, q2]) + msg2 = tf_message_from_pq(pq2, frame1, frame2, stamp2_ns) + + tf_msgs = [msg1, msg2] + stamps_ns = [stamp1_ns, stamp2_ns] + return tf_msgs, stamps_ns + + +def get_angular_motion(angle_rad: float, + axis: int, + stamp1_ns: int, + stamp2_ns: int, + frame1: str = '1', + frame2: str = '2') -> tuple[list[TFMessage], list[int]]: + assert axis < 3 + # Generate the test motion + p1 = np.array([0, 0, 0]) + q1 = np.array([1, 0, 0, 0]) + pq1 = np.hstack([p1, q1]) + msg1 = tf_message_from_pq(pq1, frame1, frame2, stamp1_ns) + + p2 = np.array([0, 0, 0]) + q2 = rotations.quaternion_from_angle(0, angle_rad) + pq2 = np.hstack([p2, q2]) + msg2 = tf_message_from_pq(pq2, frame1, frame2, stamp2_ns) + + tf_msgs = [msg1, msg2] + stamps_ns = [stamp1_ns, 
stamp2_ns] + return tf_msgs, stamps_ns + + +def assert_quaternion_close(q1: np.ndarray, q2: np.ndarray): + q2_inv = rotations.q_conj(q2) + q_diff = rotations.concatenate_quaternions(q1, q2_inv) + np.testing.assert_allclose(np.abs(q_diff), np.array([1, 0, 0, 0]), rtol=1e-5, atol=1e-8) + + +def test_x_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs, stamps_ns = get_linear_motion( + distance=1.0, + axis=0, + stamp1_ns=0, + stamp2_ns=sec_to_ns(1), + ) + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('2', '1', 0.5) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.5, 0, 0, 1, 0, 0, 0])) + + # NOTE(alexmillane): Turns out the pytransforms3d extrapolates automatically. + transform = temporal_transform_manager.get_transform_at_time('2', '1', 2.0) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([2.0, 0, 0, 1, 0, 0, 0])) + + +def test_xy_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs_1, stamps_ns_1 = get_linear_motion( + distance=1.0, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(4), + frame1='1', + frame2='2', + ) + tf_msgs_2, stamps_ns_2 = get_linear_motion( + distance=1.0, + axis=1, + stamp1_ns=sec_to_ns(1), + stamp2_ns=sec_to_ns(3), + frame1='2', + frame2='3', + ) + + tf_msgs = tf_msgs_1 + tf_msgs_2 + stamps_ns = stamps_ns_1 + stamps_ns_2 + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('3', '1', 2.0) + pq = 
transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.5, 0.5, 0, 1, 0, 0, 0])) + + # NOTE(alexmillane): Turns out the pytransforms3d extrapolates automatically. + transform = temporal_transform_manager.get_transform_at_time('3', '1', 3.0) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.75, 1.0, 0, 1, 0, 0, 0])) + + +def test_x_rot_motion(tmp_path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs, stamps_ns = get_angular_motion( + angle_rad=np.pi, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(1), + frame1='1', + frame2='2', + ) + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('2', '1', 0.5) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, 0.5 * np.pi) + pq_expected = np.hstack([np.array([0, 0, 0]), q_expected]) + np.testing.assert_allclose(pq, pq_expected) + + # Extrapolate + transform = temporal_transform_manager.get_transform_at_time('2', '1', 1.5) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, 1.5 * np.pi) + assert_quaternion_close(pq[3:], q_expected) + + +def test_xy_rot_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs_1, stamps_ns_1 = get_angular_motion( + angle_rad=np.pi, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(4), + frame1='1', + frame2='2', + ) + tf_msgs_2, stamps_ns_2 = get_angular_motion( + angle_rad=np.pi, + axis=1, + stamp1_ns=sec_to_ns(1), + stamp2_ns=sec_to_ns(3), + frame1='2', + frame2='3', + ) + + tf_msgs = tf_msgs_1 + tf_msgs_2 + stamps_ns = stamps_ns_1 + stamps_ns_2 + + # Write test data + write_tf_messages_to_bag(tf_msgs, 
stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('3', '1', 2.0) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, np.pi) + pq_expected = np.hstack([np.array([0, 0, 0]), q_expected]) + np.testing.assert_allclose(pq, pq_expected, atol=1e-6) diff --git a/isaac_ros_rosbag_utils/tests/test_rosbag_tf_utils.py b/isaac_ros_rosbag_utils/tests/test_rosbag_tf_utils.py new file mode 100644 index 00000000..ee721b4d --- /dev/null +++ b/isaac_ros_rosbag_utils/tests/test_rosbag_tf_utils.py @@ -0,0 +1,257 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. 
+ +# flake8: noqa + +import pathlib +import shutil +import subprocess + +# This is a WAR until we can specify pip deps in package.xml +REQUIREMENTS_FILE = pathlib.Path(__file__).parent.parent / 'requirements.txt' +subprocess.call(['python3', '-m', 'pip', 'install', '-r', str(REQUIREMENTS_FILE)]) + +import numpy as np +from pytransform3d import rotations +from pytransform3d import transformations +import rosbags.rosbag2 +import rosbags.serde +from rosbags.typesys.types import builtin_interfaces__msg__Time as Time +from rosbags.typesys.types import geometry_msgs__msg__Quaternion as Quaternion +from rosbags.typesys.types import geometry_msgs__msg__Transform as Transform +from rosbags.typesys.types import geometry_msgs__msg__TransformStamped as TransformStamped +from rosbags.typesys.types import geometry_msgs__msg__Vector3 as Vector3 +from rosbags.typesys.types import std_msgs__msg__Header as Header +from rosbags.typesys.types import tf2_msgs__msg__TFMessage as TFMessage + +from isaac_ros_rosbag_utils import rosbag_tf_extraction + + +def sec_to_ns(s: float) -> int: + return int(s * 1e9) + + +def nanoseconds_to_msg(total_ns: int) -> Time: + ns = int(total_ns % 1e9) + s = int(total_ns // 1e9) + return Time(sec=s, nanosec=ns) + + +def tf_message_from_pq(pq: np.ndarray, parent_frame: str, child_frame: str, + stamp_ns: int) -> TFMessage: + msg = TFMessage(transforms=[ + TransformStamped( + header=Header( + stamp=nanoseconds_to_msg(stamp_ns), + frame_id=parent_frame, + ), + child_frame_id=child_frame, + transform=Transform( + translation=Vector3(x=pq[0], y=pq[1], z=pq[2]), + rotation=Quaternion(w=pq[3], x=pq[4], y=pq[5], z=pq[6]), + ), + ) + ]) + return msg + + +def write_tf_messages_to_bag(tf_messages: list[TFMessage], stamps_ns: list[int], + path: pathlib.Path): + assert len(tf_messages) == len(stamps_ns) + shutil.rmtree(path, ignore_errors=True) + with rosbags.rosbag2.Writer(path) as writer: + connection = writer.add_connection('/tf', 'tf2_msgs/msg/TFMessage') + for 
stamp_ns, msg in zip(stamps_ns, tf_messages): + serialized_msg = rosbags.serde.serialize_cdr(msg, 'tf2_msgs/msg/TFMessage') + writer.write(connection, stamp_ns, serialized_msg) + + +def get_linear_motion(distance: float, + axis: int, + stamp1_ns: int, + stamp2_ns: int, + frame1: str = '1', + frame2: str = '2') -> tuple[list[TFMessage], list[int]]: + assert axis < 3 + # Generate the test motion + p1 = np.array([0, 0, 0]) + q1 = np.array([1, 0, 0, 0]) + pq1 = np.hstack([p1, q1]) + msg1 = tf_message_from_pq(pq1, frame1, frame2, stamp1_ns) + + p2 = np.array([0, 0, 0]) + # Set the requested motion + p2[axis] = distance + q2 = np.array([1, 0, 0, 0]) + pq2 = np.hstack([p2, q2]) + msg2 = tf_message_from_pq(pq2, frame1, frame2, stamp2_ns) + + tf_msgs = [msg1, msg2] + stamps_ns = [stamp1_ns, stamp2_ns] + return tf_msgs, stamps_ns + + +def get_angular_motion(angle_rad: float, + axis: int, + stamp1_ns: int, + stamp2_ns: int, + frame1: str = '1', + frame2: str = '2') -> tuple[list[TFMessage], list[int]]: + assert axis < 3 + # Generate the test motion + p1 = np.array([0, 0, 0]) + q1 = np.array([1, 0, 0, 0]) + pq1 = np.hstack([p1, q1]) + msg1 = tf_message_from_pq(pq1, frame1, frame2, stamp1_ns) + + p2 = np.array([0, 0, 0]) + q2 = rotations.quaternion_from_angle(0, angle_rad) + pq2 = np.hstack([p2, q2]) + msg2 = tf_message_from_pq(pq2, frame1, frame2, stamp2_ns) + + tf_msgs = [msg1, msg2] + stamps_ns = [stamp1_ns, stamp2_ns] + return tf_msgs, stamps_ns + + +def assert_quaternion_close(q1: np.ndarray, q2: np.ndarray): + q2_inv = rotations.q_conj(q2) + q_diff = rotations.concatenate_quaternions(q1, q2_inv) + np.testing.assert_allclose(np.abs(q_diff), np.array([1, 0, 0, 0]), rtol=1e-5, atol=1e-8) + + +def test_x_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs, stamps_ns = get_linear_motion( + distance=1.0, + axis=0, + stamp1_ns=0, + stamp2_ns=sec_to_ns(1), + ) + + # Write test data + write_tf_messages_to_bag(tf_msgs, 
stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('2', '1', 0.5) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.5, 0, 0, 1, 0, 0, 0])) + + # NOTE(alexmillane): Turns out the pytransforms3d extrapolates automatically. + transform = temporal_transform_manager.get_transform_at_time('2', '1', 2.0) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([2.0, 0, 0, 1, 0, 0, 0])) + + +def test_xy_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs_1, stamps_ns_1 = get_linear_motion( + distance=1.0, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(4), + frame1='1', + frame2='2', + ) + tf_msgs_2, stamps_ns_2 = get_linear_motion( + distance=1.0, + axis=1, + stamp1_ns=sec_to_ns(1), + stamp2_ns=sec_to_ns(3), + frame1='2', + frame2='3', + ) + + tf_msgs = tf_msgs_1 + tf_msgs_2 + stamps_ns = stamps_ns_1 + stamps_ns_2 + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('3', '1', 2.0) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.5, 0.5, 0, 1, 0, 0, 0])) + + # NOTE(alexmillane): Turns out the pytransforms3d extrapolates automatically. 
+ transform = temporal_transform_manager.get_transform_at_time('3', '1', 3.0) + pq = transformations.pq_from_transform(transform) + np.testing.assert_allclose(pq, np.array([0.75, 1.0, 0, 1, 0, 0, 0])) + + +def test_x_rot_motion(tmp_path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs, stamps_ns = get_angular_motion( + angle_rad=np.pi, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(1), + frame1='1', + frame2='2', + ) + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('2', '1', 0.5) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, 0.5 * np.pi) + pq_expected = np.hstack([np.array([0, 0, 0]), q_expected]) + np.testing.assert_allclose(pq, pq_expected) + + # Extrapolate + transform = temporal_transform_manager.get_transform_at_time('2', '1', 1.5) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, 1.5 * np.pi) + assert_quaternion_close(pq[3:], q_expected) + + +def test_xy_rot_motion(tmp_path: pathlib.Path): + test_bag_path = tmp_path / 'tf_test_bag' + + # Build the test motion + tf_msgs_1, stamps_ns_1 = get_angular_motion( + angle_rad=np.pi, + axis=0, + stamp1_ns=sec_to_ns(0), + stamp2_ns=sec_to_ns(4), + frame1='1', + frame2='2', + ) + tf_msgs_2, stamps_ns_2 = get_angular_motion( + angle_rad=np.pi, + axis=1, + stamp1_ns=sec_to_ns(1), + stamp2_ns=sec_to_ns(3), + frame1='2', + frame2='3', + ) + + tf_msgs = tf_msgs_1 + tf_msgs_2 + stamps_ns = stamps_ns_1 + stamps_ns_2 + + # Write test data + write_tf_messages_to_bag(tf_msgs, stamps_ns, test_bag_path) + temporal_transform_manager = rosbag_tf_extraction.get_transform_manager_from_bag(test_bag_path) + + # Test + transform = temporal_transform_manager.get_transform_at_time('3', 
'1', 2.0) + pq = transformations.pq_from_transform(transform) + q_expected = rotations.quaternion_from_angle(0, np.pi) + pq_expected = np.hstack([np.array([0, 0, 0]), q_expected]) + np.testing.assert_allclose(pq, pq_expected, atol=1e-6) diff --git a/isaac_ros_tensor_list_interfaces/CMakeLists.txt b/isaac_ros_tensor_list_interfaces/CMakeLists.txt index 06dcf1ac..3201fdba 100644 --- a/isaac_ros_tensor_list_interfaces/CMakeLists.txt +++ b/isaac_ros_tensor_list_interfaces/CMakeLists.txt @@ -45,4 +45,10 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() endif() + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + ament_auto_package() diff --git a/isaac_ros_tensor_list_interfaces/package.xml b/isaac_ros_tensor_list_interfaces/package.xml index 9caa8557..ccffe13e 100644 --- a/isaac_ros_tensor_list_interfaces/package.xml +++ b/isaac_ros_tensor_list_interfaces/package.xml @@ -13,7 +13,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_tensor_list_interfaces - 3.1.0 + 3.2.0 Interfaces for Isaac ROS tensor list Isaac ROS Maintainers @@ -28,6 +28,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. 
ament_cmake_auto rosidl_default_generators + isaac_ros_common ament_lint_auto ament_lint_common diff --git a/isaac_ros_test/build/version_info.yaml b/isaac_ros_test/build/version_info.yaml new file mode 100644 index 00000000..09c7a659 --- /dev/null +++ b/isaac_ros_test/build/version_info.yaml @@ -0,0 +1,7 @@ +build_datetime: '2024-11-12T18:11:11.201185' +git_branch: jaiveers/mock-model-generator +git_commit_date: 2024-11-12 17:53:00 -0800 +git_commit_hash: 8aa2e9f4334d988223e9c11665cd5634f59158e9 +git_commit_message: Working with full support for dynamic axes +git_workspace_dirty: 'Yes' +version: 3.2.0 diff --git a/isaac_ros_test/isaac_ros_test/__init__.py b/isaac_ros_test/isaac_ros_test/__init__.py index 06265ed4..b4ad5d7f 100644 --- a/isaac_ros_test/isaac_ros_test/__init__.py +++ b/isaac_ros_test/isaac_ros_test/__init__.py @@ -20,6 +20,7 @@ from .cpu_profiler import CPUProfiler from .isaac_ros_base_test import IsaacROSBaseTest from .json_conversion import JSONConversion +from .mock_model_generator import MockModelGenerator from .pcd_loader import PCDLoader from .pose_utilities import PoseUtilities from .tegrastats_profiler import TegrastatsProfiler @@ -28,7 +29,8 @@ 'CPUProfiler', 'IsaacROSBaseTest', 'JSONConversion', - 'TegrastatsProfiler', + 'MockModelGenerator', 'PCDLoader', 'PoseUtilities', + 'TegrastatsProfiler', ] diff --git a/isaac_ros_test/isaac_ros_test/isaac_ros_base_test.py b/isaac_ros_test/isaac_ros_test/isaac_ros_base_test.py index 055480d2..bc6b2791 100644 --- a/isaac_ros_test/isaac_ros_test/isaac_ros_base_test.py +++ b/isaac_ros_test/isaac_ros_test/isaac_ros_base_test.py @@ -261,12 +261,21 @@ def callback(msg): return callback - subscriptions = [self.node.create_subscription( - msg_type, - self.namespaces[topic] if use_namespace_lookup else topic, - make_callback(topic), - qos_profile, - ) for topic, msg_type in subscription_requests] + try: + subscriptions = [ + self.node.create_subscription( + msg_type, + self.namespaces[topic] if 
use_namespace_lookup else topic, + make_callback(topic), + qos_profile, + ) for topic, msg_type in subscription_requests + ] + except Exception as e: + # Silent failures have been observed here. We print and raise to make sure that a + # trace ends up at the console. + print('Failed to create subscriptions:') + print(e) + raise return subscriptions diff --git a/isaac_ros_test/isaac_ros_test/json_conversion.py b/isaac_ros_test/isaac_ros_test/json_conversion.py index b4b83131..ad160c0a 100644 --- a/isaac_ros_test/isaac_ros_test/json_conversion.py +++ b/isaac_ros_test/isaac_ros_test/json_conversion.py @@ -100,7 +100,7 @@ def load_camera_info_from_json(json_filepath: Path, camera_info.r = camera_info_json['R'] camera_info.p = camera_info_json['P'] - if(desired_size): + if desired_size: camera_info.width = desired_size[0] camera_info.height = desired_size[1] return camera_info diff --git a/isaac_ros_test/isaac_ros_test/mock_model_generator.py b/isaac_ros_test/isaac_ros_test/mock_model_generator.py new file mode 100644 index 00000000..b38bcf84 --- /dev/null +++ b/isaac_ros_test/isaac_ros_test/mock_model_generator.py @@ -0,0 +1,162 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Generate a mock model with empty weights for testing DNN pipeline structure.""" + +import argparse +import functools +import operator +from pathlib import Path +from typing import List, NamedTuple, Tuple + +import torch + + +class MockModelGenerator: + + class Binding(NamedTuple): + name: str + shape: Tuple[int, ...] + datatype: torch.dtype + + class MockModel(torch.nn.Module): + def __init__( + self, + input_bindings: List['MockModelGenerator.Binding'], + output_bindings: List['MockModelGenerator.Binding'], + intermediate_size: int = 4, + ): + super().__init__() + self.input_bindings = input_bindings + self.output_bindings = output_bindings + + # Precompute the output sizes for reshaping merged layer output + # Handle dynamic batch sizes (dim = -1) by taking the absolute value of the product + self.output_sizes = [ + abs(functools.reduce(operator.mul, b.shape)) for b in self.output_bindings + ] + + total_input_elements = sum( + abs(functools.reduce(operator.mul, b.shape)) for b in self.input_bindings) + total_output_elements = sum(self.output_sizes) + + self.layers = torch.nn.Sequential( + torch.nn.Linear(total_input_elements, intermediate_size), + torch.nn.Linear(intermediate_size, total_output_elements) + ) + + def forward(self, *inputs): + # Cast, flatten, and concatenate the input tensors to a single tensor + merged_input = torch.cat( + [t.float().flatten() for t in inputs], + ) + + # Run the forward pass through the merged layer + merged_output = self.layers(merged_input) + + output_tensors = [] + start_idx = 0 + for binding, size in zip(self.output_bindings, self.output_sizes): + end_idx = start_idx + size + + # Slice the merged output to get the current output binding + output_tensor = merged_output[start_idx:end_idx]\ + .reshape(binding.shape)\ + .to(binding.datatype) + + output_tensors.append(output_tensor) + + # Update the start index for the next output binding + start_idx = end_idx + + return 
output_tensors + + @classmethod + def generate( + cls, + input_bindings: List[Binding], + output_bindings: List[Binding], + output_onnx_path: Path + ): + model = cls.MockModel(input_bindings, output_bindings) + + # Generate dummy input tensors + dummy_input = [ + # Use 1 as the default dimension for dynamic axes + torch.ones([d if d != -1 else 1 for d in b.shape]).to(b.datatype) + for b in input_bindings + ] + + # Identify dynamic axes across bindings + dynamic_axes = {} + for binding in (*input_bindings, *output_bindings): + dynamic_axes[binding.name] = { + i: f'dynamic_{i}' for i, size in enumerate(binding.shape) + if size == -1 + } + + torch.onnx.export( + model, + tuple(dummy_input), + output_onnx_path, + input_names=[binding.name for binding in input_bindings], + output_names=[binding.name for binding in output_bindings], + dynamic_axes=dynamic_axes + ) + + +def parse_bindings(bindings_str): + bindings = [] + for binding_str in bindings_str.split(','): + name, shape_str, datatype_str = binding_str.split(':') + shape = tuple(map(int, shape_str.split('x'))) + datatype = getattr(torch, datatype_str) + bindings.append(MockModelGenerator.Binding(name, shape, datatype)) + return bindings + + +def main(input_bindings_str, output_bindings_str, output_onnx_path): + input_bindings = parse_bindings(input_bindings_str) + output_bindings = parse_bindings(output_bindings_str) + + MockModelGenerator.generate(input_bindings, output_bindings, output_onnx_path) + + +# Example usage: +# images:-1x3x640x640:float32,orig_target_sizes:-1x2:int64 +# labels:-1x300:float32,boxes:-1x300x4:float32,scores:-1x300:float32 + +if __name__ == '__main__': + ap = argparse.ArgumentParser( + description='Generate a mock model with empty weights for testing DNN pipeline structure.') + ap.add_argument( + '--input-bindings', type=str, required=True, + help='Input bindings in the format name:shape:datatype,name:shape:datatype,... ' + 'Use -1 for dynamic axes.' 
+ ) + ap.add_argument( + '--output-bindings', type=str, required=True, + help='Output bindings in the format name:shape:datatype,name:shape:datatype,... ' + 'Use -1 for dynamic axes.' + ) + ap.add_argument( + 'output_onnx_path', type=Path, + help='Path to save the generated ONNX model.' + ) + + args = ap.parse_args() + main(args.input_bindings, args.output_bindings, args.output_onnx_path) diff --git a/isaac_ros_test/package.xml b/isaac_ros_test/package.xml index 504c13b5..0936e68b 100644 --- a/isaac_ros_test/package.xml +++ b/isaac_ros_test/package.xml @@ -13,7 +13,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. isaac_ros_test - 3.1.0 + 3.2.0 Isaac ROS testing utilities Isaac ROS Maintainers @@ -22,6 +22,7 @@ license agreement from NVIDIA CORPORATION is strictly prohibited. Ethan Yu Isaac Chang Jaiveer Singh + isaac_ros_common cv_bridge diff --git a/isaac_ros_test/setup.py b/isaac_ros_test/setup.py index 9c9e8f9e..6b073711 100644 --- a/isaac_ros_test/setup.py +++ b/isaac_ros_test/setup.py @@ -15,8 +15,30 @@ # # SPDX-License-Identifier: Apache-2.0 +import importlib.util +from pathlib import Path +import sys + +from ament_index_python.packages import get_resource from setuptools import setup +ISAAC_ROS_COMMON_PATH = get_resource( + 'isaac_ros_common_scripts_path', + 'isaac_ros_common' +)[0] + +ISAAC_ROS_COMMON_VERSION_INFO = Path(ISAAC_ROS_COMMON_PATH) / 'isaac_ros_common-version-info.py' + +spec = importlib.util.spec_from_file_location( + 'isaac_ros_common_version_info', + ISAAC_ROS_COMMON_VERSION_INFO +) + +isaac_ros_common_version_info = importlib.util.module_from_spec(spec) +sys.modules['isaac_ros_common_version_info'] = isaac_ros_common_version_info +spec.loader.exec_module(isaac_ros_common_version_info) + +from isaac_ros_common_version_info import GenerateVersionInfoCommand # noqa: E402, I100 package_name = 'isaac_ros_test' setup( @@ -39,4 +61,7 @@ 'console_scripts': [ ], }, + cmdclass={ + 'build_py': GenerateVersionInfoCommand, + }, ) 
diff --git a/isaac_ros_test_cmake/CMakeLists.txt b/isaac_ros_test_cmake/CMakeLists.txt new file mode 100644 index 00000000..ecd41dd7 --- /dev/null +++ b/isaac_ros_test_cmake/CMakeLists.txt @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +cmake_minimum_required(VERSION 3.22.1) +project(isaac_ros_test_cmake) + +find_package(ament_cmake REQUIRED) +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +# Install additional cmake functions +ament_index_register_resource(isaac_ros_test_cmake_cmake_path CONTENT + "${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}/cmake") +list(APPEND ${PROJECT_NAME}_CONFIG_EXTRAS + cmake/isaac_ros_test_cmake-extras-graph-test.cmake +) + +# Register the dummy bag as a resource +ament_index_register_resource(dummy_bag CONTENT + "${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}/resources/dummy_bag") + +# Register the test as a resource +set(ISAAC_ROS_GRAPH_STARTUP_TEST_PATH + "${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}/isaac_ros_test_cmake/isaac_ros_graph_startup_test.py") +ament_index_register_resource(graph_startup_test CONTENT "${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}") + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() + + 
include(cmake/isaac_ros_test_cmake-extras-graph-test.cmake) + add_graph_startup_test(test_graph_startup_test + test/test_launchfile.launch.py + TIMEOUT 5 + # NOTE: We have to manually pass this path in this file because the resource + # is not ready at build time. + ISAAC_ROS_GRAPH_STARTUP_TEST_PATH "${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}" + ) +endif() + + +# Embed versioning information into installed files +ament_index_get_resource(ISAAC_ROS_COMMON_CMAKE_PATH isaac_ros_common_cmake_path isaac_ros_common) +include("${ISAAC_ROS_COMMON_CMAKE_PATH}/isaac_ros_common-version-info.cmake") +generate_version_info(${PROJECT_NAME}) + +ament_auto_package(INSTALL_TO_SHARE test resources isaac_ros_test_cmake) diff --git a/isaac_ros_test_cmake/cmake/isaac_ros_test_cmake-extras-graph-test.cmake b/isaac_ros_test_cmake/cmake/isaac_ros_test_cmake-extras-graph-test.cmake new file mode 100644 index 00000000..5cd74fd7 --- /dev/null +++ b/isaac_ros_test_cmake/cmake/isaac_ros_test_cmake-extras-graph-test.cmake @@ -0,0 +1,101 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +# Parse the arguments for add_graph_startup_test() +macro(parse_add_graph_startup_test_arguments namespace) + cmake_parse_arguments(${namespace} + "DONT_CHECK_EXIT_CODE" + "TIMEOUT;ISAAC_ROS_GRAPH_STARTUP_TEST_PATH" # One value keywords + "ARGS" # Multi value keywords + ${ARGN}) + + if(NOT ${namespace}_TIMEOUT) + # The default timeout is 5 seconds. + set(${namespace}_TIMEOUT 5) + endif() + +endmacro() + +# Add a startup test for a launchfile. +# +# :param TARGET_NAME: The tests target name. +# :type TARGET_NAME: string +# :param LAUNCHFILE: The launchfile to test. +# :type LAUNCHFILE: string +# :param TIMEOUT: The test timeout in seconds. +# :type TIMEOUT: integer +# :param ISAAC_ROS_GRAPH_STARTUP_TEST_PATH: The path to the graph startup test, if manual +# specification is desired. If not specified, which is the usual case, this +# variable is determined automatically. +# :type ISAAC_ROS_GRAPH_STARTUP_TEST_PATH: string +# :param ARGS: Launch arguments to be passed to graph under test. +# :type ARGS: string +function(add_graph_startup_test TARGET_NAME LAUNCHFILE) + parse_add_graph_startup_test_arguments(_add_graph_startup_test ${ARGN}) + + # Path to the underlying meta-test. 
+ if(DEFINED _add_graph_startup_test_ISAAC_ROS_GRAPH_STARTUP_TEST_PATH) + set(ISAAC_ROS_GRAPH_STARTUP_TEST_PATH ${_add_graph_startup_test_ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}) + else() + ament_index_has_resource(HAS_GRAPH_STARTUP_TEST_PATH graph_startup_test isaac_ros_test_cmake) + if(NOT HAS_GRAPH_STARTUP_TEST_PATH) + message(FATAL_ERROR "graph_startup_test resource not found.") + endif() + ament_index_get_resource(ISAAC_ROS_GRAPH_STARTUP_TEST_PATH graph_startup_test isaac_ros_test_cmake) + if(EXISTS ${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}) + message(STATUS "Found the launchfile independent test at: ${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}") + else() + message(FATAL_ERROR "Could not find the launchfile independent test at: ${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH}") + endif() + endif() + + + # The package under test is the project name of the caller. + set(PACKAGE ${PROJECT_NAME}) + + # Compose arguments to the underlying test + # Note that the arguments which are intended for the graph under test are grouped as a single string + # and passed through the test as a single ROS argument "launch_file_arguments". + set(ARGUMENTS "package_under_test:=${PACKAGE}" "launch_file_under_test:=${LAUNCHFILE}") + list(APPEND ARGUMENTS "timeout:='${_add_graph_startup_test_TIMEOUT}'") + if(${_add_graph_startup_test_DONT_CHECK_EXIT_CODE}) + list(APPEND ARGUMENTS "check_exit_code:='False'") + endif() + list(APPEND ARGUMENTS "launch_file_arguments:='${_add_graph_startup_test_ARGS}'") + + # Inside the test we trigger the graph to shutdown after the requested timeout. + # However, launch_testing has a timeout at which point the test is killed. + # We (arbitrarily) set this as 10 seconds longer than the requested timeout. 
+ math(EXPR LAUNCH_TESTING_TIMEOUT "${_add_graph_startup_test_TIMEOUT} + 20") + + add_launch_test( + ${ISAAC_ROS_GRAPH_STARTUP_TEST_PATH} + TARGET ${TARGET_NAME} + ARGS ${ARGUMENTS} + TIMEOUT ${LAUNCH_TESTING_TIMEOUT} + ) +endfunction() + + +# Return the installed path of a dummy bag that comes with this package. +# +# :param VAR: The the output variable that will hold the path. +# :type VAR: string +function(get_dummy_bag_path VAR) + ament_index_get_resource(DUMMY_BAG_PATH dummy_bag isaac_ros_test_cmake) + set(${VAR} ${DUMMY_BAG_PATH} PARENT_SCOPE) +endfunction() diff --git a/isaac_ros_test_cmake/isaac_ros_test_cmake/isaac_ros_graph_startup_test.py b/isaac_ros_test_cmake/isaac_ros_test_cmake/isaac_ros_graph_startup_test.py new file mode 100644 index 00000000..e8fa94f1 --- /dev/null +++ b/isaac_ros_test_cmake/isaac_ros_test_cmake/isaac_ros_graph_startup_test.py @@ -0,0 +1,177 @@ +# SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +import time +from typing import List +import unittest + +import isaac_ros_launch_utils as lu +from isaac_ros_launch_utils.all_types import Action +import launch +from launch import LaunchDescription +import launch_testing +from launch_testing import post_shutdown_test +import pytest +import rclpy +from rclpy.node import Node + + +def generate_parameterized_launchfile_test(args: lu.ArgumentContainer) -> List[Action]: + # The launch arguments are passed as a single string, in order to separate them + # from the arguments used by the test infrastructure. So we now split them up + # to be passed to the underlying lauchfile. + launch_arguments = {} + if len(args.launch_file_arguments) > 0: + for arg_string in args.launch_file_arguments.split(' '): + arg_name, arg_value = arg_string.split(':=') + launch_arguments[arg_name] = arg_value + # The launchfile we've been asked to test. + actions = [] + actions.append( + lu.include( + args.package_under_test, + args.launch_file_under_test, + launch_arguments=launch_arguments, + )) + return actions + + +class TimeoutStore: + """This class just provides access to a single shared variable 'timeout_s'.""" + + timeout_s = None + check_exit_code = None + + @staticmethod + def save_timeout(args: lu.ArgumentContainer): + """Store the timeout. Called as isaac_ros_launch_utils opaque function.""" + TimeoutStore.timeout_s = float(args.timeout) + + @staticmethod + def get_timeout() -> int: + """Get the stored value.""" + return TimeoutStore.timeout_s + + @staticmethod + def save_check_exit_code(args: lu.ArgumentContainer): + """Store the flag. 
Called as isaac_ros_launch_utils opaque function.""" + TimeoutStore.check_exit_code = lu.is_true(args.check_exit_code) + + @staticmethod + def get_check_exit_code() -> bool: + """Get the flag.""" + return TimeoutStore.check_exit_code + + +@pytest.mark.rostest +def generate_test_description(): + + args = lu.ArgumentContainer() + args.add_arg('package_under_test', + cli=True, + description='The package containing the launch file to test.') + args.add_arg('launch_file_under_test', + cli=True, + description='The path within the package to the launch file to test.') + args.add_arg('timeout', + cli=True, + description='The time after which we declare a non-crashed graph a test success.') + args.add_arg('check_exit_code', + default=True, + cli=True, + description='Whether or not to check the error code completing the dry-run.') + args.add_arg('launch_file_arguments', + default=None, + cli=True, + description='The arguments to be passed to the launch file under test.') + + # Launch the test launchfile. + actions = args.get_launch_actions() + actions.append(args.add_opaque_function(generate_parameterized_launchfile_test)) + + # Save the timeout during startup + # NOTE(alexmillane): This is the best way I could think of to get an evaluated launch + # parameter into the test fixture. If someone can think of something better, please update. + actions.append(args.add_opaque_function(TimeoutStore.save_timeout)) + actions.append(args.add_opaque_function(TimeoutStore.save_check_exit_code)) + + # NOTE(alexmillane): We trigger the ready-to-test action 1 second after the graph + # starts coming up in order to ensure that the timeout parameter is evaluated. + ready_to_test_time = 1.0 + + # Required for ROS launch testing. 
+ actions.append(launch_testing.util.KeepAliveProc()) + actions.append( + launch.actions.TimerAction(period=ready_to_test_time, + actions=[launch_testing.actions.ReadyToTest()])) + + return LaunchDescription(actions) + + +class DummyTest(unittest.TestCase): + """This test does nothing, except keep the test alive until the timeout is elapsed.""" + + def test_graph_startup_test(self): + rclpy.init() + + # Create a Node for logging + node = Node('test_node') + + # Get the timeout requested. + assert TimeoutStore.get_timeout() is not None, 'Need to increase ready to test time.' + timeout_s = TimeoutStore.get_timeout() + + # Loop until the timeout + loop_period_s = 0.1 + log_period_s = 1.0 + start_time = time.time() + node.get_logger().info(f'Start test. Waiting for {timeout_s} seconds.') + while rclpy.ok() and ((time.time() - start_time) < timeout_s): + already_waited_time_s = time.time() - start_time + node.get_logger().info( + f'Waited for {already_waited_time_s:0.2f} out of' + f' {timeout_s} seconds.', + throttle_duration_sec=log_period_s) + time.sleep(loop_period_s) + # Check if we stopped looping because of an error (fail) or because the test timed + # out (pass). + if rclpy.ok(): + node.get_logger().info('Test success. Shutting down test node.') + else: + elapsed_time_s = time.time() - start_time + node.get_logger().info(f'Test terminated early after {elapsed_time_s}.') + rclpy.shutdown() + + +@post_shutdown_test() +class TestAfterShutdown(unittest.TestCase): + + disallowed_phrases_in_log = [ + # This phrase appears in the log if a launchfile tries to add a Component + # which doesn't exist in the workspace. 
+ 'Failed to find class' + ] + + def test_exit_code(self, proc_info): + if TimeoutStore.get_check_exit_code(): + launch_testing.asserts.assertExitCodes(proc_info) + + def test_error_message(self, proc_output): + for proc in proc_output: + for disallowed_phrase in self.disallowed_phrases_in_log: + assert disallowed_phrase not in str( + proc.text), f'Found disallowed phrase \"{disallowed_phrase}\"' diff --git a/isaac_ros_test_cmake/package.xml b/isaac_ros_test_cmake/package.xml new file mode 100644 index 00000000..5c1fa90e --- /dev/null +++ b/isaac_ros_test_cmake/package.xml @@ -0,0 +1,27 @@ + + + + isaac_ros_test_cmake + 3.2.0 + + cmake integration for Isaac ROS tests. + + + Isaac ROS Maintainers + + NVIDIA Isaac ROS Software License + https://developer.nvidia.com/isaac-ros-gems/ + + ament_cmake + isaac_ros_common + + launch_testing_ament_cmake + + ament_lint_auto + ament_lint_common + isaac_ros_launch_utils + + + ament_cmake + + diff --git a/isaac_ros_test_cmake/resources/dummy_bag/dummy_bag.db3 b/isaac_ros_test_cmake/resources/dummy_bag/dummy_bag.db3 new file mode 100644 index 00000000..9f0b1a8f --- /dev/null +++ b/isaac_ros_test_cmake/resources/dummy_bag/dummy_bag.db3 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:caabcd716bf1bf53aabbe2ffc7773d3a104078cca14a3477e7fd5bcb443eb098 +size 65536 diff --git a/isaac_ros_test_cmake/resources/dummy_bag/metadata.yaml b/isaac_ros_test_cmake/resources/dummy_bag/metadata.yaml new file mode 100644 index 00000000..33e46b30 --- /dev/null +++ b/isaac_ros_test_cmake/resources/dummy_bag/metadata.yaml @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:352273d4faa2d71c3a65691aa494b1cb98f8a0279163e0111efe0d72cf9d4713 +size 760 diff --git a/isaac_ros_test_cmake/test/test_launchfile.launch.py b/isaac_ros_test_cmake/test/test_launchfile.launch.py new file mode 100644 index 00000000..08ab23d4 --- /dev/null +++ b/isaac_ros_test_cmake/test/test_launchfile.launch.py @@ -0,0 +1,30 @@ +# 
SPDX-FileCopyrightText: NVIDIA CORPORATION & AFFILIATES +# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +from launch import LaunchDescription +from launch.actions import ExecuteProcess +from launch.actions import TimerAction + + +def generate_launch_description(): + # This launchfile creates a timer action. The graph terminates after completion. + # The timeout should be chosen to be more than the graph_startup_test test time. + test_time_s = 100.0 + return LaunchDescription([ + TimerAction(period=test_time_s, + actions=[ExecuteProcess(cmd=['echo', '"Hello test"'], output='screen')]) + ]) diff --git a/scripts/build_base_image.sh b/scripts/build_base_image.sh index a8438293..2c0c4a28 100755 --- a/scripts/build_base_image.sh +++ b/scripts/build_base_image.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2022-2024, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation @@ -12,9 +12,56 @@ set -e ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source $ROOT/utils/print_color.sh +# Initialize variables BUILDKIT_DISABLED_STR="" -if [[ ! 
-z "$5" ]]; then - BUILDKIT_DISABLED_STR="--disable_buildkit" +NO_CACHE_STR="" + +# Parse command-line arguments +VALID_ARGS=$(getopt -o '' --long disable-buildkit,no-cache,skip_registry_check -- "$@") +if [[ $? -ne 0 ]]; then + echo "Invalid arguments" + exit 1 +fi + +eval set -- "$VALID_ARGS" + +# Process the arguments +while [ : ]; do + case "$1" in + --disable-buildkit) + BUILDKIT_DISABLED_STR="--disable_buildkit" + shift + ;; + --no-cache) + NO_CACHE_STR="-d --no-cache" + shift + ;; + -r | --skip_registry_check) + SKIP_REGISTRY_CHECK="--skip_registry_check" + shift + ;; + --) + shift + break + ;; + *) + echo "Unexpected option: $1" + exit 1 + ;; + esac +done + +# Ensure there are at least four remaining positional arguments +if [[ $# -lt 4 ]]; then + echo "Usage: $0 [--disable-buildkit] [--no-cache] " + exit 1 fi -$ROOT/build_image_layers.sh --image_key "$1" --image_name "$2" --base_image "$3" --context_dir "$4" $BUILDKIT_DISABLED_STR \ No newline at end of file +# Extract positional arguments +IMAGE_KEY="$1" +IMAGE_NAME="$2" +BASE_IMAGE="$3" +CONTEXT_DIR="$4" + +# Call the build_image_layers.sh script with the appropriate arguments +$ROOT/build_image_layers.sh --image_key "$IMAGE_KEY" --image_name "$IMAGE_NAME" --base_image "$BASE_IMAGE" --context_dir "$CONTEXT_DIR" $BUILDKIT_DISABLED_STR $NO_CACHE_STR $SKIP_REGISTRY_CHECK \ No newline at end of file diff --git a/scripts/build_image_layers.sh b/scripts/build_image_layers.sh index 35cc7e1d..d2b723d7 100755 --- a/scripts/build_image_layers.sh +++ b/scripts/build_image_layers.sh @@ -25,7 +25,7 @@ ADDITIONAL_BUILD_ARGS=() ADDITIONAL_DOCKER_ARGS=() DOCKER_SEARCH_DIRS=(${DOCKER_DIR}) SKIP_REGISTRY_CHECK=0 -BASE_DOCKER_REGISTRY_NAMES=("nvcr.io/isaac/ros") +BASE_DOCKER_REGISTRY_NAMES=("nvcr.io/nvidia/isaac/ros") # Read and parse config file if exists # @@ -276,9 +276,12 @@ fi # Arguments for docker build BUILD_ARGS+=("--build-arg" "USERNAME="admin"") -BUILD_ARGS+=("--build-arg" "USER_UID=`id -u`") 
-BUILD_ARGS+=("--build-arg" "USER_GID=`id -g`") -BUILD_ARGS+=("--build-arg" "PLATFORM=$PLATFORM") + +if [[ $PLATFORM == "x86_64" ]]; then + BUILD_ARGS+=("--build-arg" "PLATFORM=amd64") +else + BUILD_ARGS+=("--build-arg" "PLATFORM=arm64") +fi for BUILD_ARG in ${ADDITIONAL_BUILD_ARGS[@]} do diff --git a/scripts/deploy/_Dockerfile.deploy b/scripts/deploy/_Dockerfile.deploy index 4b3acc90..e10463fb 100644 --- a/scripts/deploy/_Dockerfile.deploy +++ b/scripts/deploy/_Dockerfile.deploy @@ -7,7 +7,7 @@ # license agreement from NVIDIA CORPORATION is strictly prohibited. ARG SET_LAUNCH_CMD -ARG BASE_IMAGE=nvidia/cuda:12.2.2-runtime-ubuntu22.04 +ARG BASE_IMAGE=nvidia/cuda:12.6.1-runtime-ubuntu22.04 FROM ${BASE_IMAGE} as deploy_base # Setup entrypoint @@ -25,9 +25,6 @@ RUN --mount=type=cache,target=/var/cache/apt \ # Copy built products into container COPY staging/ / -# Switch to non-root user -USER ${USERNAME:?} - # ----------- FROM deploy_base as deploy_has_launch_0 diff --git a/scripts/deploy/_Dockerfile.deploy_ws b/scripts/deploy/_Dockerfile.deploy_ws index 135fe3b1..17cc6960 100644 --- a/scripts/deploy/_Dockerfile.deploy_ws +++ b/scripts/deploy/_Dockerfile.deploy_ws @@ -6,7 +6,7 @@ # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. -ARG BASE_IMAGE=nvidia/cuda:12.2.2-runtime-ubuntu22.04 +ARG BASE_IMAGE=nvidia/cuda:12.6.1-runtime-ubuntu22.04 FROM ${BASE_IMAGE} as deploy_ros_ws ARG ROS_WS diff --git a/scripts/docker_deploy.sh b/scripts/docker_deploy.sh index 25980f7b..0dfa8d5a 100755 --- a/scripts/docker_deploy.sh +++ b/scripts/docker_deploy.sh @@ -139,12 +139,6 @@ if [[ ! 
-z "${ROS_WS}" ]]; then INCLUDE_DIRS+=( "$ROS_WS/install:${ROS_WS_DEST}/install" ) fi -# Always suffix .user to base image -# If the configured key does not have .user, append it last -if [[ $BASE_IMAGE_KEY != *".user"* ]]; then - BASE_IMAGE_KEY="${BASE_IMAGE_KEY}.user" -fi - # Summarize final arguments for script print_info "Building deployable image ${DEPLOY_IMAGE_NAME}" print_info "Base image key: |${BASE_IMAGE_KEY}| / suffix image_key: |${SUFFIX_IMAGE_KEY}|" @@ -262,7 +256,7 @@ if [[ ! -z "${SUFFIX_IMAGE_KEY}" ]]; then print_info "Building suffix deploy image for key ${SUFFIX_IMAGE_KEY}" PREVIOUS_STAGE="${INSTALLED_DEPLOY_IMAGE_NAME}" INSTALLED_DEPLOY_IMAGE_NAME="${DEPLOY_IMAGE_NAME}-suffix" - $ROOT/build_image_layers.sh --image_key "SUFFIX_IMAGE_KEY" --image_name "${INSTALLED_DEPLOY_IMAGE_NAME}" --base_image "${PREVIOUS_STAGE}" --build_arg "MODE=deploy" ${ADDITIONAL_DOCKER_ARGS[@]} + $ROOT/build_image_layers.sh --image_key "${SUFFIX_IMAGE_KEY}" --image_name "${INSTALLED_DEPLOY_IMAGE_NAME}" --base_image "${PREVIOUS_STAGE}" --build_arg "MODE=deploy" ${ADDITIONAL_DOCKER_ARGS[@]} fi # Retag last image diff --git a/scripts/run_dev.sh b/scripts/run_dev.sh index 9f1c849b..86f08e54 100755 --- a/scripts/run_dev.sh +++ b/scripts/run_dev.sh @@ -44,7 +44,7 @@ fi ISAAC_ROS_DEV_DIR="${ISAAC_ROS_WS}" SKIP_IMAGE_BUILD=0 VERBOSE=0 -VALID_ARGS=$(getopt -o hvd:i:ba: --long help,verbose,isaac_ros_dev_dir:,image_key_suffix:,skip_image_build,docker_arg: -- "$@") +VALID_ARGS=$(getopt -o hvd:i:ba: --long help,verbose,isaac_ros_dev_dir:,image_key:,skip_image_build,docker_arg: -- "$@") eval set -- "$VALID_ARGS" while [ : ]; do case "$1" in @@ -94,7 +94,7 @@ ON_EXIT+=("popd") # Fall back if isaac_ros_dev_dir not specified if [[ -z "$ISAAC_ROS_DEV_DIR" ]]; then - ISAAC_ROS_DEV_DIR_DEFAULTS=("$HOME/workspaces/isaac_ros-dev" "/workspaces/isaac_ros-dev" "/mnt/nova_ssd/workspaces/isaac_ros-dev") + ISAAC_ROS_DEV_DIR_DEFAULTS=("$HOME/workspaces/isaac" "/workspaces/isaac" 
"/mnt/nova_ssd/workspaces/isaac") for ISAAC_ROS_DEV_DIR in "${ISAAC_ROS_DEV_DIR_DEFAULTS[@]}" do if [[ -d "$ISAAC_ROS_DEV_DIR" ]]; then @@ -105,12 +105,12 @@ if [[ -z "$ISAAC_ROS_DEV_DIR" ]]; then if [[ ! -d "$ISAAC_ROS_DEV_DIR" ]]; then ISAAC_ROS_DEV_DIR=$(realpath "$ROOT/../") fi - print_warning "isaac_ros_dev not specified, assuming $ISAAC_ROS_DEV_DIR" + print_warning "isaac not specified, assuming $ISAAC_ROS_DEV_DIR" fi # Validate isaac_ros_dev_dir if [[ ! -d "$ISAAC_ROS_DEV_DIR" ]]; then - print_error "Specified isaac_ros_dev does not exist: $ISAAC_ROS_DEV_DIR" + print_error "Specified isaac does not exist: $ISAAC_ROS_DEV_DIR" exit 1 fi @@ -153,6 +153,7 @@ if [[ $? -eq 0 ]]; then f="${LFS_FILES_STATUS:$i:1}" if [[ "$f" == "-" ]]; then print_error "LFS files are missing. Please re-clone repos after installing git-lfs." + git lfs ls-files exit 1 fi done @@ -160,14 +161,9 @@ fi # Determine base image key PLATFORM="$(uname -m)" -BASE_IMAGE_KEY=$PLATFORM.user +BASE_IMAGE_KEY=$PLATFORM if [[ ! -z "${IMAGE_KEY}" ]]; then - BASE_IMAGE_KEY=$PLATFORM.$IMAGE_KEY - - # If the configured key does not have .user, append it last - if [[ $IMAGE_KEY != *".user"* ]]; then - BASE_IMAGE_KEY=$BASE_IMAGE_KEY.user - fi + BASE_IMAGE_KEY=$BASE_IMAGE_KEY.$IMAGE_KEY fi # Check skip image build from env @@ -194,14 +190,16 @@ fi # Re-use existing container. 
if [ "$(docker ps -a --quiet --filter status=running --filter name=$CONTAINER_NAME)" ]; then print_info "Attaching to running container: $CONTAINER_NAME" - docker exec -i -t -u admin --workdir /workspaces/isaac_ros-dev $CONTAINER_NAME /bin/bash $@ + ISAAC_ROS_WS=$(docker exec $CONTAINER_NAME printenv ISAAC_ROS_WS) + print_info "Docker workspace: $ISAAC_ROS_WS" + docker exec -i -t -u admin --workdir $ISAAC_ROS_WS $CONTAINER_NAME /bin/bash $@ exit 0 fi # Summarize launch print_info "Launching Isaac ROS Dev container with image key ${BASE_IMAGE_KEY}: ${ISAAC_ROS_DEV_DIR}" -# Build imag to launch +# Build image to launch if [[ $SKIP_IMAGE_BUILD -ne 1 ]]; then print_info "Building $BASE_IMAGE_KEY base as image: $BASE_NAME" $ROOT/build_image_layers.sh --image_key "$BASE_IMAGE_KEY" --image_name "$BASE_NAME" @@ -229,12 +227,20 @@ DOCKER_ARGS+=("-v $HOME/.Xauthority:/home/admin/.Xauthority:rw") DOCKER_ARGS+=("-e DISPLAY") DOCKER_ARGS+=("-e NVIDIA_VISIBLE_DEVICES=all") DOCKER_ARGS+=("-e NVIDIA_DRIVER_CAPABILITIES=all") -DOCKER_ARGS+=("-e FASTRTPS_DEFAULT_PROFILES_FILE=/usr/local/share/middleware_profiles/rtps_udp_profile.xml") DOCKER_ARGS+=("-e ROS_DOMAIN_ID") DOCKER_ARGS+=("-e USER") DOCKER_ARGS+=("-e ISAAC_ROS_WS=/workspaces/isaac_ros-dev") +DOCKER_ARGS+=("-e HOST_USER_UID=`id -u`") +DOCKER_ARGS+=("-e HOST_USER_GID=`id -g`") + +# Forward SSH Agent to container if the ssh agent is active. 
+if [[ -n $SSH_AUTH_SOCK ]]; then + DOCKER_ARGS+=("-v $SSH_AUTH_SOCK:/ssh-agent") + DOCKER_ARGS+=("-e SSH_AUTH_SOCK=/ssh-agent") +fi if [[ $PLATFORM == "aarch64" ]]; then + DOCKER_ARGS+=("-e NVIDIA_VISIBLE_DEVICES=nvidia.com/gpu=all,nvidia.com/pva=all") DOCKER_ARGS+=("-v /usr/bin/tegrastats:/usr/bin/tegrastats") DOCKER_ARGS+=("-v /tmp/:/tmp/") DOCKER_ARGS+=("-v /usr/lib/aarch64-linux-gnu/tegra:/usr/lib/aarch64-linux-gnu/tegra") @@ -246,8 +252,6 @@ if [[ $PLATFORM == "aarch64" ]]; then # If jtop present, give the container access if [[ $(getent group jtop) ]]; then DOCKER_ARGS+=("-v /run/jtop.sock:/run/jtop.sock:ro") - JETSON_STATS_GID="$(getent group jtop | cut -d: -f3)" - DOCKER_ARGS+=("--group-add $JETSON_STATS_GID") fi fi @@ -279,12 +283,12 @@ fi docker run -it --rm \ --privileged \ --network host \ + --ipc=host \ ${DOCKER_ARGS[@]} \ -v $ISAAC_ROS_DEV_DIR:/workspaces/isaac_ros-dev \ -v /etc/localtime:/etc/localtime:ro \ --name "$CONTAINER_NAME" \ --runtime nvidia \ - --user="admin" \ --entrypoint /usr/local/bin/scripts/workspace-entrypoint.sh \ --workdir /workspaces/isaac_ros-dev \ $BASE_NAME \