diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..4428940 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,15 @@ +[run] +branch = False +source = robotdataprocess +concurrency = multiprocessing +disable_warnings = + module-not-measured + no-data-collected + +[paths] +source = + src/robotdataprocess + */site-packages/robotdataprocess + */robotdataprocess/src/robotdataprocess + */src/robotdataprocess + /usr/local/lib/python3.10/dist-packages/robotdataprocess \ No newline at end of file diff --git a/.github/workflows/python_test.yml b/.github/workflows/python_test.yml deleted file mode 100644 index c18dcca..0000000 --- a/.github/workflows/python_test.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Python Unit Tests - -on: - push: - branches: [ "**" ] - pull_request: - branches: [ "**" ] - -permissions: - contents: read - -jobs: - build-and-test: - name: "Build & Test Python" - runs-on: ubuntu-latest - timeout-minutes: 180 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.8 - uses: actions/setup-python@v3 - with: - python-version: "3.8" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Install the package - run: | - python -m pip install -e . - - name: Import external message types - uses: snickerbockers/submodules-init@v4 - - name: Test with unittest & evaluate test coverage - run: | - coverage run --source robotdataprocess -m unittest discover tests/ -v - - name: Coveralls GitHub Action (Upload Coverage Report) - uses: coverallsapp/github-action@v2.3.6 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - \ No newline at end of file diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..7235f52 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,143 @@ +name: Unit Tests + +on: + push: + branches: [ "**" ] + +permissions: + contents: read + +jobs: + python3_8-tests: + name: "Build & Test Python3.8" + runs-on: ubuntu-latest + timeout-minutes: 10 + env: + SKIP_ROS2_TESTS: "True" + + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: "3.8" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install the package + run: | + python -m pip install -e . + - name: Import external message types + uses: snickerbockers/submodules-init@v4 + - name: Test with unittest + run: | + coverage run --source robotdataprocess -m unittest discover tests/ -v + - name: Save coverage data + uses: actions/upload-artifact@v4 + with: + name: coverage-${{ github.job }} + path: .coverage.* + include-hidden-files: true + + python3_10-tests: + name: "Build & Test Python3.10" + runs-on: ubuntu-latest + timeout-minutes: 10 + env: + SKIP_ROS2_TESTS: "True" + + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install the package + run: | + python3 -m pip install -e . 
"numpy<1.25" + pip install --upgrade pydantic typeguard + - name: Import external message types + uses: snickerbockers/submodules-init@v4 + - name: Test with unittest + run: | + coverage run --source robotdataprocess -m unittest discover tests/ -v + - name: Save coverage data + uses: actions/upload-artifact@v4 + with: + name: coverage-${{ github.job }} + path: .coverage.* + include-hidden-files: true + + ros2-tests: + name: "Build & Test ROS2 Python Code" + runs-on: ubuntu-latest + timeout-minutes: 5 + container: + image: dlittleman/robotdataprocess_ros2_humble:0.2 + options: --user root # optional, helps avoid permission issues + env: + SKIP_PURE_PYTHON_TESTS: "True" + + steps: + - uses: actions/checkout@v3 + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install the package + run: | + python3 -m pip install -e . "numpy<1.25" + pip install --upgrade pydantic typeguard + - name: Import external message types + uses: snickerbockers/submodules-init@v4 + - name: Test with unittest & evaluate test coverage + shell: bash + run: | + export PATH="$HOME/.local/bin:$PATH" + source /opt/ros/humble/setup.bash + coverage run --source robotdataprocess -m unittest discover tests/ -v + - name: Save coverage data + uses: actions/upload-artifact@v4 + with: + name: coverage-${{ github.job }} + path: .coverage.* + include-hidden-files: true + + coverage: + name: "Merge & Upload Coverage Reports" + needs: [python3_8-tests, python3_10-tests, ros2-tests] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/download-artifact@v4 + with: + path: coverage-files + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install the package + run: | + python3 -m pip install -e . 
+ - name: Merge coverage + run: | + pip install coverage + ls -al + pwd + coverage combine coverage-files/coverage-python3_8-tests/.coverage.* coverage-files/coverage-python3_10-tests/.coverage.* coverage-files/coverage-ros2-tests/.coverage.* + coverage xml + - name: Coveralls GitHub Action (Upload Coverage Report) + uses: coverallsapp/github-action@v2.3.6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Merged Coverage File + uses: actions/upload-artifact@v4 + with: + name: coverage-final + path: .coverage + include-hidden-files: true diff --git a/.gitignore b/.gitignore index 0eca8d9..937bc09 100644 --- a/.gitignore +++ b/.gitignore @@ -4,10 +4,17 @@ outputs/ # Ignore coverage files .coverage +.coverage.* htmlcov/ # Ignore profile output profile.out #ignore pyenv -venv/ \ No newline at end of file +venv/ + +# Ignore Python cache files +*/__pycache__/ +__pycache__/ + +.vscode/ \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 15f040f..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "cmake.sourceDirectory": "/home/dbutterfield3/Research/robotdataprocess/external_msgs_ros1/geometry2/tf2_geometry_msgs" -} \ No newline at end of file diff --git a/README.md b/README.md index af98d68..c47f0fc 100644 --- a/README.md +++ b/README.md @@ -2,56 +2,26 @@ [![Python Unit Tests](https://github.com/lunarlab-gatech/robotdataprocess/actions/workflows/python_test.yml/badge.svg?branch=master)](https://github.com/lunarlab-gatech/robotdataprocess/actions/workflows/python_test.yml) [![Coverage Status](https://coveralls.io/repos/github/lunarlab-gatech/robotdataprocess/badge.svg?branch=master)](https://coveralls.io/github/lunarlab-gatech/robotdataprocess?branch=master) -A library for loading, saving, converting, and manipulating robotic datasets. - -Currently, for operations with rosbags, the library inputs and outputs ROS2 Humble bags. For converting to ROS1 bags, the output bag will be compatible with ROS1 Noetic. +A library for loading, saving, converting, publishing, and manipulating robotic datasets. Most notably, it can load data in a variety of formats and then publish them live over ROS1 or ROS2. This circumvents the need to convert datasets into rosbags first, saving disk space. **WARNING:** Currently, this repository is in active development and functionality isn't guaranteed to work. If you will depend on this repository for important tasks, perhaps write test cases for the corresponding functionality before deployment/use. ## Installation -You will need Python 3.8+ to run this code. Run the following commands to install the repository: +This repository is officially supported with: +- Python 3.8 (for use with ROS1 Noetic & ROS2 Foxy/Galactic) +- Python 3.10 (for use with ROS2 Humble and later) + +Run the following commands to install the repository: ``` git submodule init git submodule update pip install . ``` -## Example Use - -### Command Line Examples - -See use cases for this repository below. For more information on specific operations, see the corresponding config file. All of these operations can be launched by running the following command below (or a similar one): ``` robotdataprocess /config/> ~/.bashrc + +# Default command +CMD ["bash"] + +# Set user arguments (being username, echo $UID, and id -g) +# NOTE: These should be overwritten when running docker build!
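The `Merge coverage` step above combines the `.coverage.*` data files that the three test jobs upload as artifacts; the `[paths]` aliases in `.coveragerc` are what let measurements recorded against `site-packages`/`dist-packages` installs be remapped onto `src/robotdataprocess` during the merge. A minimal sketch of the same merge using coverage.py's Python API (the workflow itself uses the `coverage combine` / `coverage xml` CLI; the artifact directory names are the download paths used in the workflow above):

```
# Illustrative sketch only -- the workflow performs this with the coverage CLI.
from coverage import Coverage

cov = Coverage()  # reads .coveragerc, including the [paths] aliases
cov.combine([
    "coverage-files/coverage-python3_8-tests",
    "coverage-files/coverage-python3_10-tests",
    "coverage-files/coverage-ros2-tests",
])
cov.save()
cov.xml_report(outfile="coverage.xml")  # equivalent of `coverage xml`
```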
+ARG USERNAME=user +ARG USER_UID=1000 +ARG USER_GID=1000 + +# Create a non-root user with matching UID/GID +RUN groupadd --gid ${USER_GID} ${USERNAME} && \ + useradd --uid ${USER_UID} --gid ${USER_GID} -m ${USERNAME} && \ + usermod -aG sudo ${USERNAME} && \ + echo "${USERNAME} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + +# Add a custom .bashrc with warm colors and fixed hostname +RUN echo 'export PS1="\\[\\e[1;31m\\]\\u@robotdataprocess_container\\[\\e[0m\\]:\\[\\e[1;33m\\]\\w\\[\\e[0m\\]\\$ "' > /home/${USERNAME}/.bashrc && \ + chown ${USERNAME}:${USERNAME} /home/${USERNAME}/.bashrc + +# Set default user +USER ${USERNAME} +WORKDIR /home/${USERNAME} \ No newline at end of file diff --git a/docker/ROS2_Humble/build_image.sh b/docker/ROS2_Humble/build_image.sh new file mode 100755 index 0000000..844fdd8 --- /dev/null +++ b/docker/ROS2_Humble/build_image.sh @@ -0,0 +1,5 @@ +docker build \ + --build-arg USERNAME=$(id -un) \ + --build-arg USER_UID=$(id -u) \ + --build-arg USER_GID=$(id -g) \ + -t robotdataprocess_ros2_humble . \ No newline at end of file diff --git a/docker/ROS2_Humble/run_container.sh b/docker/ROS2_Humble/run_container.sh new file mode 100755 index 0000000..85da0f9 --- /dev/null +++ b/docker/ROS2_Humble/run_container.sh @@ -0,0 +1,4 @@ +docker run -it --rm \ + -v $(pwd):/workspace \ + -w /workspace \ + robotdataprocess_ros2_humble diff --git a/examples/Hercules/extract/extract_GT_Maplab.py b/examples/Hercules/extract/extract_GT_Maplab.py new file mode 100644 index 0000000..b660249 --- /dev/null +++ b/examples/Hercules/extract/extract_GT_Maplab.py @@ -0,0 +1,39 @@ +from decimal import Decimal +import getpass +import os +from pathlib import Path +from robotdataprocess import OdometryData, CoordinateFrame +from typing import Union + + +def data_extraction(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None]): + input_path = Path(input_dir).absolute() + output_path = input_path.parent / 'extract' / 'files_for_maplab_baseline' + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', robot_name + '/odom', robot_name + '/ground_truth/base_link', CoordinateFrame.NED, False) + + # Convert to the FLU coordinate frame & crop + pose_data.to_FLU_frame() + if crop_data: + assert end_time is not None, "end_time must be provided when crop_data is True" + pose_data.crop_data(Decimal('0.0'), end_time) + + # Save back to a csv file + if os.path.exists(output_path / robot_name / 'poseGT.csv'): + print("Deleting CSV file at this location previously...") + os.remove(output_path / robot_name / 'poseGT.csv') + os.makedirs(output_path / robot_name, exist_ok=True) + pose_data.to_csv(output_path / robot_name / 'poseGT.csv') + + +if __name__ == "__main__": + dataset_num = "V2.1.0" + user = getpass.getuser() + input_dir = '/media/' + user + '/T7/GT/SLAM/Hercules_datasets/' + dataset_num + '/data' + robot_name = "Drone1" + + data_extraction( + input_dir=input_dir, + robot_name=robot_name, + crop_data=False, + end_time=None, + ) diff --git a/examples/Hercules/extract/extract_data_LIO-SAM.py b/examples/Hercules/extract/extract_data_LIO-SAM.py new file mode 100644 index 0000000..33d64f5 --- /dev/null +++ b/examples/Hercules/extract/extract_data_LIO-SAM.py @@ -0,0 +1,61 @@ +from decimal import Decimal +import getpass +from pathlib import Path +from robotdataprocess import ImuData, OdometryData, CoordinateFrame, LiDARData +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper +from typing import Union + +def to_bag(input_dir: str, robot_name: 
str, crop_data: bool, end_time: Union[Decimal, None]): + # Check parameters + if crop_data and end_time is None: + raise ValueError("end_time required if crop_data is True!") + + # Make directory paths + input_path = Path(input_dir).absolute() + output_path = input_path.parent / 'extract' / 'bags_for_LIO-SAM' + + # Extract RGB and IMU from Hercules v1.5 + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu_9axis_500Hz.txt', 'base_link', CoordinateFrame.NED, nine_axis=True) + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'map', 'base_link', CoordinateFrame.NED, False) + lidar_data = LiDARData.from_npy_files(input_path / robot_name / "lidar", "lidar_link", CoordinateFrame.NED) + + # Shift GT data to start at Identity to be roughly close to odometry output + pose_data.shift_to_start_at_identity() + + # Crop the data + if crop_data: + imu_data.crop_data(Decimal('0.0'), end_time) + pose_data.crop_data(Decimal('0.0'), end_time) + lidar_data.crop_data(Decimal('0.0'), end_time) + + # Save it into a ROS2 Humble bag + Ros2BagWrapper.write_data_to_rosbag(output_path / robot_name, + [ imu_data, lidar_data, pose_data], + ['/imu_raw', '/points_raw', '/odom_gt'], + [ None, None, "Path"], + None) + +def main(): + # Enter desired configuration here + dataset_num = "V2.3.C" + user = getpass.getuser() + input_dir = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/data' + robot_names = ["Husky1"] + robot_crop_end_times = [None] + + # Check validity of inputs + assert len(robot_names) == len(robot_crop_end_times) + num_robots = len(robot_names) + + # Run extraction for each robot + for i in range(num_robots): + if robot_crop_end_times[i] == None: crop_data = False + else: crop_data = True + + to_bag(input_dir=input_dir, + robot_name=robot_names[i], + crop_data=crop_data, + end_time=robot_crop_end_times[i]) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/extract_data_Maplab.py b/examples/Hercules/extract/extract_data_Maplab.py similarity index 59% rename from examples/Hercules/extract_data_Maplab.py rename to examples/Hercules/extract/extract_data_Maplab.py index 61ac91f..69b2903 100644 --- a/examples/Hercules/extract_data_Maplab.py +++ b/examples/Hercules/extract/extract_data_Maplab.py @@ -8,20 +8,22 @@ Configure the paths in the main() function below. """ +import getpass from pathlib import Path import shutil -from robotdataprocess import ImageData, ImuData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess import ImageDataInMemory, ImuData, LiDARData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper def extract_to_bag(input_dir: str, output_bag: str, robot_name: str): """ - Extract images and IMU data from a folder and save to a ROS1 bag. + Extract images, IMU data, and LiDAR data from a folder and save to a ROS1 bag. 
Args: input_dir (str): Path to the input directory containing: - rgb/ folder with PNG/JPG images (filenames are timestamps) - synthetic_imu.txt file (format: timestamp lin_acc_x lin_acc_y lin_acc_z ang_vel_x ang_vel_y ang_vel_z) + - lidar/ folder with .npy files (filenames are timestamps) output_bag (str): Path where the output ROS1 bag will be saved (should end in .bag) robot_name (str): Name of the robot (used for frame IDs) """ @@ -46,28 +48,37 @@ def extract_to_bag(input_dir: str, output_bag: str, robot_name: str): print(f" Loaded {imu_data.len()} IMU measurements") # Load images from folder - print("\n2. Loading images...") - image_data = ImageData.from_image_files( - input_path / 'rgb', - f'{robot_name}/cam0' + print("\n2. Loading left images...") + left_image_data = ImageDataInMemory.from_image_files( + input_path / 'rgb_stereo_left', + f'camera_0.0' ) - print(f" Loaded {image_data.len()} images") + print(f" Loaded {left_image_data.len()} images") - # Set IMU frame to FLU (ROS standard) without transforming the data - # Note: The IMU data remains in NED frame, but we label it as FLU - # to allow writing to ROS bag without errors. Many applications - # like VINS-Mono handle the coordinate frame conversion internally. - # FIXME: this is not implemented yet! - # imu_data.to_FLU_frame() - imu_data.frame = CoordinateFrame.FLU + # Loading right images + print("\n2.b Loading right images...") + right_image_data = ImageDataInMemory.from_image_files( + input_path / 'rgb_stereo_right', + f'camera_1.0' + ) + print(f" Loaded {right_image_data.len()} images") + + # Load LiDAR data from .npy files + print("\n2.c Loading LiDAR data...") + lidar_data = LiDARData.from_npy_files( + input_path / 'lidar', + 'lidar_sensor_0', #change this for more robots + CoordinateFrame.FLU + ) + print(f" Loaded {lidar_data.len()} point clouds") print("\n3. 
Writing to temporary ROS2 bag...") # Write data to temporary ROS2 bag (required intermediate step) Ros2BagWrapper.write_data_to_rosbag( temp_ros2_bag, - [imu_data, image_data], # Data to write - ['/imu0', '/cam0/image_raw'], # Topic names - [None, None], # Use default message types + [imu_data, left_image_data, right_image_data, lidar_data], # Data to write + ['/imu0', '/cam0/image_raw', '/cam1/image_raw', '/lidar'], # Topic names + [None, None, None, None], # Use default message types None # No external message definitions ) @@ -87,21 +98,25 @@ def extract_to_bag(input_dir: str, output_bag: str, robot_name: str): print(f"\n✓ Successfully created ROS1 bag at: {output_path}") print(f" - IMU topic: /imu0 ({imu_data.len()} messages)") - print(f" - Image topic: /cam0/image_raw ({image_data.len()} messages)") - + print(f" - Image topic: /cam0/image_raw ({left_image_data.len()} messages)") + print(f" - Image topic: /cam1/image_raw ({right_image_data.len()} messages)") + print(f" - LiDAR topic: /lidar ({lidar_data.len()} messages)") def main(): # ========== CONFIGURE THESE PATHS ========== # Robot name (used for frame IDs in the bag) - robot_name = 'Drone1' - dataset_num = "V1.5" + robot_name = 'Husky1' + dataset_num = "V2.1.0" # Input directory containing your data - input_dir = '/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/data/' + robot_name + user = getpass.getuser() + + # # Output bag path + input_dir = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/data/' + robot_name # Output bag path - output_bag = '/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/extract/bags_for_maplab/' + robot_name + '.bag' + output_bag = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/extract/bags_for_maplab/' + robot_name + '.bag' # ========================================== diff --git a/examples/Hercules/extract_data_ROMAN.py b/examples/Hercules/extract/extract_data_ROMAN.py similarity index 61% rename from examples/Hercules/extract_data_ROMAN.py rename to examples/Hercules/extract/extract_data_ROMAN.py index aa57232..09e84cc 100644 --- a/examples/Hercules/extract_data_ROMAN.py +++ b/examples/Hercules/extract/extract_data_ROMAN.py @@ -1,9 +1,12 @@ from decimal import Decimal +import getpass +import os from pathlib import Path -from robotdataprocess import ImageData, ImuData, OdometryData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess import ImageDataInMemory, ImuData, OdometryData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper +from typing import Union -def data_extraction(input_dir: str, robot_name: str, crop_data: bool, end_time: Decimal | None, skip_depth: bool = False, skip_rgb: bool = False): +def data_extraction(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None], skip_depth: bool = False, skip_rgb: bool = False): # Check paramters if crop_data and end_time is None: raise ValueError("end_time required if crop_data is True!") @@ -14,20 +17,20 @@ def data_extraction(input_dir: str, robot_name: str, crop_data: bool, end_time: # Extract depth data from Hercules V1.5 from individual .npy files to a single .npy file if not skip_depth: - depth_data = ImageData.from_npy_files(input_path / robot_name / 'depth', 'front_center_DepthPerspective') + depth_data = ImageDataInMemory.from_npy_files(input_path / robot_name / 'depth', 'front_center_DepthPerspective') if crop_data: depth_data.crop_data(Decimal('0.0'), end_time) 
depth_data.to_npy(output_path / robot_name / 'depth') # Extract image data from Hercules V1.5 to .npy if not skip_rgb: - rgb_data = ImageData.from_image_files(input_path / robot_name / 'rgb', 'front_center_Scene') + rgb_data = ImageDataInMemory.from_image_files(input_path / robot_name / 'rgb_stereo_left', 'front_center_Scene') if crop_data: rgb_data.crop_data(Decimal('0.0'), end_time) rgb_data.to_npy(output_path / robot_name / 'rgb') # Load the odometry data - pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', robot_name + '/odom', robot_name + '/ground_truth/base_link', CoordinateFrame.NED) + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', robot_name + '/odom', robot_name + '/ground_truth/base_link', CoordinateFrame.NED, False) # Convert to the FLU coordinate frame & crop pose_data.to_FLU_frame() @@ -35,14 +38,19 @@ def data_extraction(input_dir: str, robot_name: str, crop_data: bool, end_time: pose_data.crop_data(Decimal('0.0'), end_time) # Save back to a csv file - pose_data.to_csv(output_path / robot_name / 'poseGT.csv') + if os.path.exists(output_path / robot_name / 'poseGT.csv'): + print("Deleting CSV file at this location previously...") + os.remove(output_path / robot_name / 'poseGT.csv') + os.makedirs(output_path / robot_name, exist_ok=True) + pose_data.to_csv(output_path / robot_name / 'poseGT.csv', write_header=True) def main(): # Enter desired configuration here - dataset_num = "V1.6" - input_dir = '/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/data' - robot_names = ["Drone1"] - robot_crop_end_times = [None] + dataset_num = "V2.2.0" + user = getpass.getuser() + input_dir = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/data' + robot_names = ["Husky1", "Husky2", "Drone1", "Drone2"] + robot_crop_end_times = [None, None, None, None] # Check validity of inputs assert len(robot_names) == len(robot_crop_end_times) diff --git a/examples/Hercules/extract/extract_data_SlideSLAM.py b/examples/Hercules/extract/extract_data_SlideSLAM.py new file mode 100644 index 0000000..238be8b --- /dev/null +++ b/examples/Hercules/extract/extract_data_SlideSLAM.py @@ -0,0 +1,63 @@ +from decimal import Decimal +import getpass +from pathlib import Path +import shutil +from robotdataprocess import ImageDataInMemory, OdometryData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper +from typing import Union + +def extract_to_bag(input_dir: str, output_bag: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None]): + + # Convert to Path objects + input_path = Path(input_dir).absolute() + output_path = Path(output_bag).absolute() + + # Create temporary ROS2 bag path (ROS2 bags are directories) + temp_ros2_bag = output_path.parent / (output_path.stem + "_temp_ros2") + + # Extract RGB and IMU from Hercules v1.5 + odom_data = OdometryData.from_csv(input_path.parent.parent / 'extract' / 'files_for_roman_baseline' + / robot_name / 'vins_result_no_loop_reformatted.csv', + 'world', robot_name+"/odom", + CoordinateFrame.FLU, True) + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', + 'world', robot_name + '/ground_truth/odom', CoordinateFrame.NED) + seg_data = ImageDataInMemory.from_image_files(input_path / 'seg', '' + robot_name + '/cam0') + depth_data = ImageDataInMemory.from_npy_files(input_path / 'depth', '' + robot_name + '/cam0') + + # Convert pose data to FLU frame + pose_data.to_FLU_frame() + + # Crop the data + if 
crop_data: + odom_data.crop_data(Decimal('0.0'), end_time) + pose_data.crop_data(Decimal('0.0'), end_time) + seg_data.crop_data(Decimal('0.0'), end_time) + depth_data.crop_data(Decimal('0.0'), end_time) + + # Write data to temporary ROS2 bag (required intermediate step) + Ros2BagWrapper.write_data_to_rosbag( + temp_ros2_bag, + [odom_data, pose_data, seg_data, depth_data], + ['/odom', '/odom_gt/path', '/cam0/seg', '/cam0/depth'], + [None, "Path", None, None], + None) + + # Inform the user how to finish + print("To finish, use the rosbags-convert command line tool to convert from a ROS2 bag to a ROS1 bag.") + print("Ex: rosbags-convert --src _temp_ros2/ --dst .bag") + +def main(): + robot_name = 'Drone2' + dataset_num = "V2.1.0" + crop_data = False + end_time = None + + user = getpass.getuser() + input_dir = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/data/' + robot_name + output_bag = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/extract/bags_for_slideslam/' + robot_name + '.bag' + + extract_to_bag(input_dir, output_bag, robot_name, crop_data, end_time) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/extract_data_VINS-Mono.py b/examples/Hercules/extract/extract_data_VINS-Mono.py similarity index 76% rename from examples/Hercules/extract_data_VINS-Mono.py rename to examples/Hercules/extract/extract_data_VINS-Mono.py index c634baa..d2052af 100644 --- a/examples/Hercules/extract_data_VINS-Mono.py +++ b/examples/Hercules/extract/extract_data_VINS-Mono.py @@ -1,9 +1,11 @@ from decimal import Decimal +import getpass from pathlib import Path -from robotdataprocess import ImageData, ImuData, OdometryData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess import ImageDataInMemory, ImuData, OdometryData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper +from typing import Union -def to_bag(input_dir: str, robot_name: str, crop_data: bool, end_time: Decimal | None): +def to_bag(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None]): # Check parameters if crop_data and end_time is None: raise ValueError("end_time required if crop_data is True!") @@ -15,15 +17,12 @@ def to_bag(input_dir: str, robot_name: str, crop_data: bool, end_time: Decimal | # Extract RGB and IMU from Hercules v1.5 imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu.txt', '' + robot_name + '/base_link', CoordinateFrame.NED) pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'world', 'body', CoordinateFrame.NED) - image_data = ImageData.from_image_files(input_path / robot_name / 'rgb', '' + robot_name + '/front_center_Scene') + image_data = ImageDataInMemory.from_image_files(input_path / robot_name / 'rgb_stereo_left', '' + robot_name + '/front_center_Scene') # Convert data from NED frame to ROS frame (and make sure it is at the identity) pose_data.to_FLU_frame() pose_data.shift_to_start_at_identity() - # Leave the IMU data in the NED frame (I believe that VINS-Mono actually adjusts internally) - imu_data.frame = CoordinateFrame.FLU # This lets us write it into a ROS bag without an error, without actually changing data - # Crop the data if crop_data: imu_data.crop_data(Decimal('0.0'), end_time) @@ -39,8 +38,9 @@ def to_bag(input_dir: str, robot_name: str, crop_data: bool, end_time: Decimal | def main(): # Enter desired configuration here - dataset_num = "V1.6" - input_dir = 
'/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/data' + dataset_num = "V2.1.2" + user = getpass.getuser() + input_dir = '/media/' + user + '/T73/Hercules_datasets/' + dataset_num + '/data' robot_names = ["Drone1"] robot_crop_end_times = [None] diff --git a/examples/Hercules/extract_data_SlideSLAM.py b/examples/Hercules/extract_data_SlideSLAM.py deleted file mode 100644 index 5ef7dc6..0000000 --- a/examples/Hercules/extract_data_SlideSLAM.py +++ /dev/null @@ -1,65 +0,0 @@ -from decimal import Decimal -from pathlib import Path -import shutil -from robotdataprocess import ImageData, OdometryData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper - - -def extract_to_bag(input_dir: str, output_bag: str, robot_name: str, crop_data: bool, end_time: Decimal | None): - - # Convert to Path objects - input_path = Path(input_dir).absolute() - output_path = Path(output_bag).absolute() - - # Create temporary ROS2 bag path (ROS2 bags are directories) - temp_ros2_bag = output_path.parent / (output_path.stem + "_temp_ros2") - - # Extract RGB and IMU from Hercules v1.5 - odom_data = OdometryData.from_txt_file(input_path / 'pose_world_frame.txt', 'world', 'body', CoordinateFrame.NED) - seg_data = ImageData.from_image_files(input_path / 'seg', '' + robot_name + '/cam0') - depth_data = ImageData.from_npy_files(input_path / 'depth', '' + robot_name + '/cam0') - - # Convert data from NED frame to FLU frame - odom_data.to_FLU_frame() - - # Crop the data - if crop_data: - odom_data.crop_data(Decimal('0.0'), end_time) - seg_data.crop_data(Decimal('0.0'), end_time) - depth_data.crop_data(Decimal('0.0'), end_time) - - # Write data to temporary ROS2 bag (required intermediate step) - Ros2BagWrapper.write_data_to_rosbag( - temp_ros2_bag, - [odom_data, seg_data, depth_data], - ['/odom', '/cam0/seg', '/cam0/depth'], - [None, None, None], - None) - - # Get the repository root path (where external_msgs_ros1 is located) - repo_root = Path(__file__).parent.parent.parent - external_msgs_ros1_path = repo_root / "external_msgs_ros1" - - # Create wrapper for the temp ROS2 bag and convert to ROS1 - bag_wrapper = Ros2BagWrapper(temp_ros2_bag, None) - bag_wrapper.export_as_ros1(output_path, external_msgs_ros1_path) - - # Remove the temporary ROS2 bag directory - if temp_ros2_bag.exists(): - shutil.rmtree(temp_ros2_bag) - - print(f"\n✓ Successfully created ROS1 bag at: {output_path}") - -def main(): - robot_name = 'Drone1' - dataset_num = "V1.6" - crop_data = False - end_time = None - - input_dir = '/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/data/' + robot_name - output_bag = '/media/dbutterfield3/T731/Hercules_datasets/' + dataset_num + '/extract/bags_for_slideslam/' + robot_name + '.bag' - - extract_to_bag(input_dir, output_bag, robot_name, crop_data, end_time) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/examples/Hercules/maplab_results.py b/examples/Hercules/maplab_results.py deleted file mode 100644 index 1f82529..0000000 --- a/examples/Hercules/maplab_results.py +++ /dev/null @@ -1,51 +0,0 @@ -from decimal import Decimal -import numpy as np -import os -from robotdataprocess import ImageData, ImuData, OdometryData, CoordinateFrame, PathData -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper -from scipy.spatial.transform import Rotation as R - -def main(): - dataset_names = ["V1.5"] - - # Do it for all files, robots, and datasets - for dataset_name in dataset_names: - - # Load the odometry data - 
est_data_husky1 = OdometryData.from_csv('/media/dbutterfield3/T731/Hercules_datasets/' + dataset_name + '/results/maplab_results/maplab/merged_map/vertex_poses_velocities_biases.csv', "odom", 'base_link', CoordinateFrame.NED, True, [0,3,4,5,6,7,8,9], filter=(' mission-id', ' 9d9572d8194a7f180900000000000000')) - est_data_husky2 = OdometryData.from_csv('/media/dbutterfield3/T731/Hercules_datasets/' + dataset_name + '/results/maplab_results/maplab/merged_map/vertex_poses_velocities_biases.csv', "odom", 'base_link', CoordinateFrame.NED, True, [0,3,4,5,6,7,8,9], filter=(' mission-id', ' 38a88adc194a7f180900000000000000')) - est_data_lst: list[OdometryData] = [est_data_husky1, est_data_husky2] - for est_data in est_data_lst: - est_data.timestamps = est_data.timestamps / Decimal('1e9') # Convert from ns to s - est_data_husky1.visualize([est_data_husky2], ["Husky1 Maplab Results","Husky2 Maplab Results"], 10, 40) - - # Load the ground truth data - gt_data_husky1 = OdometryData.from_csv('/media/dbutterfield3/T731/Hercules_datasets/' + dataset_name + '/extract/files_for_roman_baseline/Husky1/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) - gt_data_husky2 = OdometryData.from_csv('/media/dbutterfield3/T731/Hercules_datasets/' + dataset_name + '/extract/files_for_roman_baseline/Husky2/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) - gt_data_lst: list[OdometryData] = [gt_data_husky1, gt_data_husky2] - - # Make the timestamps match and then concatenate - est_data_lst, gt_data_lst = PathData.make_start_and_end_times_match(est_data_lst, gt_data_lst) - est_data: OdometryData = PathData.concatenate_PathData(est_data_lst).to_OdometryData('odom', 'base_link') - gt_data: PathData = PathData.concatenate_PathData(gt_data_lst) - - # Since positions are in FLU but orientations are in NED rotated to FLU, lets fix that - R_NED = np.array([[1, 0, 0], - [0, -1, 0], - [0, 0, -1]]) - R_NED_Q = R.from_matrix(R_NED) - est_data._ori_apply_rotation(R_NED_Q.inv()) - est_data._ori_change_of_basis(R_NED_Q) - est_data.frame = CoordinateFrame.FLU - est_data.visualize([gt_data], ["Husky1+Husky2 Maplab Results", "Ground Truth"], [10, 10], [40, 1000]) - - # Calculate RMS ATE, among other metrics - metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) - print("RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) - print("RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) - - print("RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) - print("RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/examples/Hercules/publish/publish_data_LIO-SAM.py b/examples/Hercules/publish/publish_data_LIO-SAM.py new file mode 100644 index 0000000..8cd1ca7 --- /dev/null +++ b/examples/Hercules/publish/publish_data_LIO-SAM.py @@ -0,0 +1,75 @@ +import argparse +from decimal import Decimal +import getpass +from pathlib import Path +from robotdataprocess import ImuData, OdometryData, CoordinateFrame +from robotdataprocess.data_types.Data import ROSMsgLibType +from robotdataprocess.data_types.LiDARData import LiDARData +from robotdataprocess.data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk +from robotdataprocess.ros.RosPublisher import publish_data_ROS_multiprocess +from typing import Union + +def publish_data(input_dir: str, robot_name: str, crop_data: bool, end_time: 
Union[Decimal, None]): + # Check parameters + if crop_data and end_time is None: + raise ValueError("end_time required if crop_data is True!") + + # Extract data from Hercules + input_path = Path(input_dir).absolute() + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu_9axis_500Hz.txt', 'base_link', CoordinateFrame.NED, nine_axis=True) + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'map', 'base_link', CoordinateFrame.NED, False) + lidar_with_channels_path = input_path.parent / "extract" / "files_for_lio_sam" / robot_name / "lidar" + if lidar_with_channels_path.exists(): + lidar_data = LiDARData.from_npy_files(lidar_with_channels_path, "lidar_link", CoordinateFrame.NED) + else: + lidar_data = LiDARData.from_npy_files(input_path / robot_name / "lidar", "lidar_link", CoordinateFrame.NED) + + # Calculate point channels if we don't have them + #lidar_data.visualize() + if lidar_data.channels is None: + lidar_data.calculate_point_channels(16, -20, 20) + print("Point Channel Calculation Completed!") + #lidar_data.to_npy_files(lidar_with_channels_path) + + # Convert GT Pose to Identity for visualization + pose_data.shift_to_start_at_identity() + + # Crop the data + if crop_data: + imu_data.crop_data(Decimal('0.0'), end_time) + pose_data.crop_data(Decimal('0.0'), end_time) + lidar_data.crop_data(Decimal('0.0'), end_time) + + # Publish the data via ROS topics + publish_data_ROS_multiprocess([imu_data, pose_data, lidar_data], + ['/imu_raw', '/odom_gt', '/points_raw'], + [None, "Path", None], + [500, 20, 20], + [1, 1, 4], + ROSMsgLibType.RCLPY, True, verbose=True) + +def main(dataset_num: str, robot_name: str, crop_end_time: Union[float, None]): + + # Do bookkeeping for cropping + if crop_end_time == None: + crop_data = False + crop_end_time = None + else: + crop_data = True + crop_end_time = Decimal(crop_end_time) + + # Publish the data for the specified robot + user = getpass.getuser() + publish_data(input_dir='/home/' + user + '/data/Hercules_datasets/' + dataset_num + '/data', + robot_name=robot_name, + crop_data=crop_data, + end_time=crop_end_time) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Publish Hercules data via ROS topics for LIO-SAM.") + parser.add_argument('--dataset_num', type=str, default=None, required=True, help="Dataset version number (e.g., V1.6).") + parser.add_argument('--robot_name', type=str, default=None, required=True, help="Name of the robot (e.g., Drone1).") + parser.add_argument('--crop_end_time', type=float, default=None, help="Optional end time (in seconds) to crop the data.") + args = parser.parse_args() + + main(dataset_num=args.dataset_num, robot_name=args.robot_name, crop_end_time=args.crop_end_time) \ No newline at end of file diff --git a/examples/Hercules/publish/publish_data_Maplab.py b/examples/Hercules/publish/publish_data_Maplab.py new file mode 100644 index 0000000..cd162dc --- /dev/null +++ b/examples/Hercules/publish/publish_data_Maplab.py @@ -0,0 +1,76 @@ +import argparse +from decimal import Decimal +import getpass +from pathlib import Path +from robotdataprocess import ImuData, OdometryData, CoordinateFrame, LiDARData +from robotdataprocess.data_types.Data import ROSMsgLibType +from robotdataprocess.data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk +from robotdataprocess.ros.RosPublisher import publish_data_ROS_multiprocess +from typing import Union + +def publish_data(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, 
None]): + # Check parameters + if crop_data and end_time is None: + raise ValueError("end_time required if crop_data is True!") + + # Load IMU, ground-truth odometry, stereo images, and LiDAR from Hercules + input_path = Path(input_dir).absolute() + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu.txt', '' + robot_name + '/base_link', CoordinateFrame.NED) + #odom_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'map', '' + robot_name + '/base_link', CoordinateFrame.NED, False) + odom_data = OdometryData.from_csv(input_path.parent / 'extract' / 'files_for_roman_baseline' / robot_name / 'poseGT.csv', 'map', robot_name + '/base_link', CoordinateFrame.FLU, True, None) + left_image_data = ImageDataOnDisk.from_image_files(input_path / robot_name / 'rgb_stereo_left', '' + robot_name + '/front_center_Scene') + right_image_data = ImageDataOnDisk.from_image_files(input_path / robot_name / 'rgb_stereo_right', '' + robot_name + '/front_right_Scene') + lidar_data = LiDARData.from_npy_files(input_path / robot_name / "lidar", "lidar_link", CoordinateFrame.NED) + + # Crop the data + if crop_data: + imu_data.crop_data(Decimal('0.0'), end_time) + odom_data.crop_data(Decimal('0.0'), end_time) + left_image_data.crop_data(Decimal('0.0'), end_time) + right_image_data.crop_data(Decimal('0.0'), end_time) + raise NotImplementedError("Crop Data not implemented for lidar_data!") + + # Publish the data via ROS2 topics + publish_data_ROS_multiprocess([imu_data, left_image_data, right_image_data, odom_data, odom_data, lidar_data], + ['/'+robot_name+'/imu0', + '/'+robot_name+'/cam0/image_raw', + '/'+robot_name+'/cam1/image_raw', + '/'+robot_name+'/odom_gt', + '/'+robot_name+'/rovioli/maplab_odom_T_M_I', + '/'+robot_name+'/lidar'], + [None, None, None, "Path", "maplab_msg/OdometryWithImuBiases", None], + [200, 20, 20, 20, 20, 20], + [1, 1, 1, 1, 1, 1], ROSMsgLibType.ROSPY, True, verbose=True) + # publish_data_ROS_multiprocess([imu_data, left_image_data, right_image_data, odom_data], + # ['/'+robot_name+'/imu0', + # '/'+robot_name+'/cam0/image_raw', + # '/'+robot_name+'/cam1/image_raw', + # '/'+robot_name+'/rovioli/maplab_odom_T_M_I', + # '/'+robot_name+'/odom_gt'], + # [None, None, None, "maplab_msg/OdometryWithImuBiases", "Path"], + # [1, 3, 3, 1, 1], ROSMsgLibType.ROSPY, True, verbose=True) + +def main(dataset_num: str, robot_name: str, crop_end_time: Union[float, None]): + + # Publish data for each robot + if crop_end_time == None: + crop_data = False + crop_end_time = None + else: + crop_data = True + crop_end_time = Decimal(crop_end_time) + + user = getpass.getuser() + publish_data(input_dir='/home/' + user + '/data/Hercules_datasets/' + dataset_num + '/data', + robot_name=robot_name, + crop_data=crop_data, + end_time=crop_end_time) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Publish Hercules data via ROS2 topics for Maplab.") + parser.add_argument('--dataset_num', type=str, default=None, help="Dataset version number (e.g., V1.6).") + parser.add_argument('--robot_name', type=str, default=None, help="Name of the robot (e.g., Husky1).") + parser.add_argument('--crop_end_time', type=float, default=None, help="Optional end time (in seconds) to crop the data.") + args = parser.parse_args() + + main(dataset_num=args.dataset_num, robot_name=args.robot_name, crop_end_time=args.crop_end_time) \ No newline at end of file diff --git a/examples/Hercules/publish/publish_data_OpenVINS.py b/examples/Hercules/publish/publish_data_OpenVINS.py new file mode 100644 index
0000000..1379df7 --- /dev/null +++ b/examples/Hercules/publish/publish_data_OpenVINS.py @@ -0,0 +1,65 @@ +import argparse +from decimal import Decimal +import getpass +from pathlib import Path +from robotdataprocess import ImuData, OdometryData, CoordinateFrame +from robotdataprocess.data_types.Data import ROSMsgLibType +from robotdataprocess.data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk +from robotdataprocess.ros.RosPublisher import publish_data_ROS_multiprocess +from typing import Union + +def publish_data(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None]): + # Check parameters + if crop_data and end_time is None: + raise ValueError("end_time required if crop_data is True!") + + # Extract data from Hercules + input_path = Path(input_dir).absolute() + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu.txt', '' + robot_name + '/base_link', CoordinateFrame.NED) + pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'global', 'body', CoordinateFrame.NED, False) + left_image_data = ImageDataOnDisk.from_image_files(input_path / robot_name / 'rgb_stereo_left', '' + robot_name + '/front_center_Scene') + right_image_data = ImageDataOnDisk.from_image_files(input_path / robot_name / 'rgb_stereo_right', '' + robot_name + '/front_center_Scene') + + # Convert data from NED frame to ROS frame (and make sure it is at the identity) + pose_data.to_FLU_frame() + pose_data.shift_to_start_at_identity() + + # Crop the data + if crop_data: + imu_data.crop_data(Decimal('0.0'), end_time) + pose_data.crop_data(Decimal('0.0'), end_time) + left_image_data.crop_data(Decimal('0.0'), end_time) + right_image_data.crop_data(Decimal('0.0'), end_time) + + # Publish the data via ROS2 topics + publish_data_ROS_multiprocess([imu_data, pose_data, left_image_data, right_image_data], + ['/imu0', '/odom_gt', '/cam0/image_raw', '/cam1/image_raw'], + [None, "Path", None, None], + [1, 1, 3, 3], + ROSMsgLibType.RCLPY, True, verbose=True) + +def main(dataset_num: str, robot_name: str, crop_end_time: Union[float, None]): + + # Do bookkeeping for cropping + if crop_end_time == None: + crop_data = False + crop_end_time = None + else: + crop_data = True + crop_end_time = Decimal(crop_end_time) + + # Publish the data for the specified robot + user = getpass.getuser() + publish_data(input_dir='/home/' + user + '/data/Hercules_datasets/' + dataset_num + '/data', + robot_name=robot_name, + crop_data=crop_data, + end_time=crop_end_time) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Publish Hercules data via ROS2 topics for VINS-Mono.") + parser.add_argument('--dataset_num', type=str, default=None, help="Dataset version number (e.g., V1.6).") + parser.add_argument('--robot_name', type=str, default=None, help="Name of the robot (e.g., Drone1).") + parser.add_argument('--crop_end_time', type=float, default=None, help="Optional end time (in seconds) to crop the data.") + args = parser.parse_args() + + main(dataset_num=args.dataset_num, robot_name=args.robot_name, crop_end_time=args.crop_end_time) \ No newline at end of file diff --git a/examples/Hercules/publish/publish_data_VINS-Mono.py b/examples/Hercules/publish/publish_data_VINS-Mono.py new file mode 100644 index 0000000..30d7bf1 --- /dev/null +++ b/examples/Hercules/publish/publish_data_VINS-Mono.py @@ -0,0 +1,58 @@ +import argparse +from decimal import Decimal +import getpass +from pathlib import Path +from robotdataprocess import ImuData, OdometryData, 
CoordinateFrame +from robotdataprocess.data_types.Data import ROSMsgLibType +from robotdataprocess.data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk +from robotdataprocess.ros.RosPublisher import publish_data_ROS_multiprocess +from typing import Union + +def publish_data(input_dir: str, robot_name: str, crop_data: bool, end_time: Union[Decimal, None]): + # Check parameters + if crop_data and end_time is None: + raise ValueError("end_time required if crop_data is True!") + + # Extract RGB and IMU from Hercules + input_path = Path(input_dir).absolute() + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu.txt', '' + robot_name + '/base_link', CoordinateFrame.NED) + #pose_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'world', 'body', CoordinateFrame.NED) + image_data = ImageDataOnDisk.from_image_files(input_path / robot_name / 'rgb', '' + robot_name + '/front_center_Scene') + + # Convert data from NED frame to ROS frame (and make sure it is at the identity) + # pose_data.to_FLU_frame() + # pose_data.shift_to_start_at_identity() + + # Crop the data + if crop_data: + imu_data.crop_data(Decimal('0.0'), end_time) + #pose_data.crop_data(Decimal('0.0'), end_time) + image_data.crop_data(Decimal('0.0'), end_time) + + # Publish the data via ROS2 topics + publish_data_ROS_multiprocess([imu_data, image_data], ['/imu0', '/cam0/image_raw'], ROSMsgLibType.RCLPY) + +def main(dataset_num: str, robot_name: str, crop_end_time: Union[float, None]): + + # Publish data for each robot + if crop_end_time == None: + crop_data = False + crop_end_time = None + else: + crop_data = True + crop_end_time = Decimal(crop_end_time) + + user = getpass.getuser() + publish_data(input_dir='/home/' + user + '/data/Hercules_datasets/' + dataset_num + '/data', + robot_name=robot_name, + crop_data=crop_data, + end_time=crop_end_time) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Publish Hercules data via ROS2 topics for VINS-Mono.") + parser.add_argument('--dataset_num', type=str, default=None, help="Dataset version number (e.g., V1.6).") + parser.add_argument('--robot_name', type=str, default=None, help="Name of the robot (e.g., Husky1).") + parser.add_argument('--crop_end_time', type=float, default=None, help="Optional end time (in seconds) to crop the data.") + args = parser.parse_args() + + main(dataset_num=args.dataset_num, robot_name=args.robot_name, crop_end_time=args.crop_end_time) \ No newline at end of file diff --git a/examples/Hercules/vins_mono_reformatting.py b/examples/Hercules/reformat/reformat_VINS_Mono.py similarity index 69% rename from examples/Hercules/vins_mono_reformatting.py rename to examples/Hercules/reformat/reformat_VINS_Mono.py index 7b657fa..4c53517 100644 --- a/examples/Hercules/vins_mono_reformatting.py +++ b/examples/Hercules/reformat/reformat_VINS_Mono.py @@ -1,13 +1,14 @@ from decimal import Decimal +import getpass import numpy as np import os -from robotdataprocess import ImageData, ImuData, OdometryData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess import ImageDataInMemory, ImuData, OdometryData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper from scipy.spatial.transform import Rotation as R def main(): robot_names = ["Husky1", "Husky2", "Drone1", "Drone2"] - dataset_names = ["V1.6"] + dataset_names = ["V2.1.2"] file_names = ['vins_result_loop.csv', 'vins_result_no_loop.csv'] # Do it for all files, 
robots, and datasets @@ -16,7 +17,8 @@ def main(): for file_name in file_names: # Load the odometry data - odom_data = OdometryData.from_csv('/home/dbutterfield3/Research/ros_workspaces/vins_mono_ws/src/VINS-MONO-ROS2/output/hercules_'+dataset_name+'_LC/'+robot_name+'/'+file_name, "odom", 'base_link', CoordinateFrame.FLU, False, None) + user = getpass.getuser() + odom_data = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/'+dataset_name+'/results/vins_mono/'+robot_name+'/'+file_name, "odom", 'base_link', CoordinateFrame.FLU, False, None) # Since positions are in FLU but orientations are in NED rotated to FLU, lets fix that R_NED = np.array([[1, 0, 0], @@ -31,11 +33,13 @@ def main(): odom_data.timestamps = odom_data.timestamps / Decimal('1e9') # Save the csv in a ROMAN friendly format - output_path = '/media/dbutterfield3/T731/Hercules_datasets/'+dataset_name+'/extract/files_for_roman_baseline/' \ + output_path = '/media/' + user + '/T73/Hercules_datasets/'+dataset_name+'/extract/files_for_roman_baseline/' \ + robot_name + '/' + file_name.replace('.csv', '_reformatted.csv') if os.path.exists(output_path): print("Deleting CSV file at this location previously...") os.remove(output_path) + else: + os.makedirs(os.path.dirname(output_path), exist_ok=True) odom_data.to_csv(output_path) if __name__ == "__main__": diff --git a/examples/Hercules/results/results_LIO-SAM.py b/examples/Hercules/results/results_LIO-SAM.py new file mode 100644 index 0000000..6b3ede7 --- /dev/null +++ b/examples/Hercules/results/results_LIO-SAM.py @@ -0,0 +1,25 @@ +import getpass +from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame + +def main(): + # Load the GT and estimated path data (for GRaCo dataset) + robot_name = "ground-01" + file_name = "odometry.csv" + print("\n=== Processing results for robot:", robot_name) + user = getpass.getuser() + est_data = OdometryData.from_csv('/media/' + user + '/T73/GRaCo_dataset/results/LIO-SAM/ground/' + + robot_name + '/' + file_name, "world", "robot", CoordinateFrame.FLU, True, None) + gt_data = OdometryData.from_ros2_bag('/media/' + user + '/T73/GRaCo_dataset/data/ground/' + + robot_name + '/', '/gnss/ground_truth', CoordinateFrame.FLU) + + # Calculate RMS ATE, among other metrics + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True, axes_length=3) + print("\nMetrics for file: ", file_name) + print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("Robot: ", robot_name, "RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + + print("Robot: ", robot_name, "RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("Robot: ", robot_name, "RMS RPE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/results/results_OpenVINS.py b/examples/Hercules/results/results_OpenVINS.py new file mode 100644 index 0000000..6587631 --- /dev/null +++ b/examples/Hercules/results/results_OpenVINS.py @@ -0,0 +1,42 @@ +import getpass +import numpy as np +from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame +from scipy.spatial.transform import Rotation as R + +def main(): + # Load the GT and estimated path data + robot_names = ["Husky1", "Husky2", "Drone1", "Drone2"] + dataset_version = "V2.1.0" + file_name = 'ov_estimate.txt' + + for 
robot_name in robot_names: + # Load Estimated data from OpenVINS + user = getpass.getuser() + est_data = OdometryData.from_txt_file('/media/' + user + '/T73/Hercules_datasets/' + dataset_version + + "/results/openvins/" + robot_name+ '/' + file_name, "world", "robot", + CoordinateFrame.FLU, True, [0, 5, 6, 7, 4, 1, 2, 3]) + + # Orientations are in NED rotated to FLU, lets fix that, but should be change of basis instead + R_NED = np.array([[1, 0, 0], + [0, -1, 0], + [0, 0, -1]]) + R_NED_Q = R.from_matrix(R_NED) + est_data._ori_apply_rotation(R_NED_Q.inv()) + est_data._ori_change_of_basis(R_NED_Q) + + # Load the GT Data + gt_data = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/' + dataset_version + \ + "/extract/files_for_roman_baseline/" + robot_name +'/poseGT.csv', + "world", "robot", CoordinateFrame.FLU, True, None) + + # Calculate RMS ATE, among other metrics + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) + print("\nMetrics for file: ", file_name) + print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("Robot: ", robot_name, "RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + + print("Robot: ", robot_name, "RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("Robot: ", robot_name, "RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/results/results_ROVIOLI.py b/examples/Hercules/results/results_ROVIOLI.py new file mode 100644 index 0000000..c54b2a3 --- /dev/null +++ b/examples/Hercules/results/results_ROVIOLI.py @@ -0,0 +1,40 @@ +import getpass +import numpy as np +from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame +from scipy.spatial.transform import Rotation as R + +def main(): + # Load the GT and estimated path data + robot_names = ["Husky1"] + + for robot_name in robot_names: + dataset_version = "V2.0.1" + + user = getpass.getuser() + gt_csv = '/media/' + user + '/T73/Hercules_datasets/' + dataset_version + \ + "/extract/files_for_roman_baseline/" + robot_name + '/poseGT.csv' + rovioli_csv = '/media/' + user + '/T73/Hercules_datasets/' + dataset_version + \ + "/results/maplab_results/rovioli/" + robot_name + '/estimated_poses.csv' + + est_data = OdometryData.from_csv(rovioli_csv, "world", "robot", CoordinateFrame.FLU, True, [0,1,2,3,7,4,5,6], separator=r'\s') + gt_data = OdometryData.from_csv(gt_csv, "world", "robot", CoordinateFrame.FLU, True, None) + + # Update frame of ROVIOLI output from NED to FLU + R_NED = np.array([[1, 0, 0], + [0, -1, 0], + [0, 0, -1]]) + R_NED_Q = R.from_matrix(R_NED) + est_data._ori_apply_rotation(R_NED_Q.inv()) + est_data._ori_change_of_basis(R_NED_Q) + + # Calculate RMS ATE, among other metrics + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True, axes_length=1, axes_interval=100) + print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("Robot: ", robot_name, "RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + + print("Robot: ", robot_name, "RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("Robot: ", robot_name, "RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + + +if 
__name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/results/results_SlideSLAM.py b/examples/Hercules/results/results_SlideSLAM.py new file mode 100644 index 0000000..a9221d0 --- /dev/null +++ b/examples/Hercules/results/results_SlideSLAM.py @@ -0,0 +1,46 @@ +import getpass +from robotdataprocess import OdometryData, CoordinateFrame, PathData +from scipy.spatial.transform import Rotation as R + +def main(): + dataset_names = ["V2.1.0"] + robot_names = ["Husky1", "Husky2"] # NOTE: [0] must be hostRobot_ID==0, and [1] must be hostRobot_ID==1 (from SlideSLAM) + robot_names_merged = robot_names[0] + "_" + robot_names[1] + + # Do it for all datasets and robots + for dataset_name in dataset_names: + # Since SlideSLAM is Decentralized, each robot has an estimated trajectory for all robots + for robot_name in robot_names: + print("=== Processing results as estimated by robot", robot_name, "===") + + # Get robot0 name and robot1 name + robot0_name = robot_names[0] + robot1_name = robot_names[1] + + # Load the odometry data + user = getpass.getuser() + est_data_robot0 = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/' + dataset_name + '/results/slideslam/' + robot_names_merged + '/' + robot_name + '/trajectory_0.csv', "odom", 'base_link', CoordinateFrame.NED, True, [7, 0, 1, 2, 6, 3, 4, 5], reorder_data=True) + est_data_robot1 = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/' + dataset_name + '/results/slideslam/' + robot_names_merged + '/' + robot_name + '/trajectory_1.csv', "odom", 'base_link', CoordinateFrame.NED, True, [7, 0, 1, 2, 6, 3, 4, 5], reorder_data=True) + est_data_lst: list[OdometryData] = [est_data_robot0, est_data_robot1] + est_data_robot0.visualize([est_data_robot1], [robot0_name + " Results", robot1_name + " Results"], 10, 40) + + # Load the ground truth data + gt_data_robot0 = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/' + dataset_name + '/extract/files_for_roman_baseline/' + robot0_name + '/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) + gt_data_robot1 = OdometryData.from_csv('/media/' + user + '/T73/Hercules_datasets/' + dataset_name + '/extract/files_for_roman_baseline/' + robot1_name + '/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) + gt_data_lst: list[OdometryData] = [gt_data_robot0, gt_data_robot1] + + # # Make the timestamps match and then concatenate + est_data_lst, gt_data_lst = PathData.make_start_and_end_times_match(est_data_lst, gt_data_lst) + est_data: OdometryData = PathData.concatenate_PathData(est_data_lst).to_OdometryData('odom', 'base_link') + gt_data: PathData = PathData.concatenate_PathData(gt_data_lst) + + # # Calculate RMS ATE, among other metrics + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) + print("RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + + print("RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/results/results_VINS-Mono.py b/examples/Hercules/results/results_VINS-Mono.py new file mode 100644 index 0000000..7c04697 --- /dev/null +++ b/examples/Hercules/results/results_VINS-Mono.py @@ -0,0 +1,31 @@ +import getpass 
+from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame + +def main(): + # Load the GT and estimated path data + robot_names = ["Husky1", "Husky2", "Drone1", "Drone2"] + dataset_version = "V2.1.2" + file_names = ['vins_result_no_loop_reformatted.csv', 'vins_result_loop_reformatted.csv'] + + for robot_name in robot_names: + for file_name in file_names: + print("\n=== Processing results for robot:", robot_name) + user = getpass.getuser() + robot_folder = '/media/' + user + '/T73/Hercules_datasets/' + dataset_version + \ + "/extract/files_for_roman_baseline/" + robot_name + est_data = OdometryData.from_csv(robot_folder + '/' + file_name, + "world", "robot", CoordinateFrame.FLU, True, None) + gt_data = OdometryData.from_csv(robot_folder +'/poseGT.csv', + "world", "robot", CoordinateFrame.FLU, True, None) + + # Calculate RMS ATE, among other metrics + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) + print("\nMetrics for file: ", file_name) + print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("Robot: ", robot_name, "RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + + print("Robot: ", robot_name, "RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("Robot: ", robot_name, "RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/results/results_maplab.py b/examples/Hercules/results/results_maplab.py new file mode 100644 index 0000000..8e688bc --- /dev/null +++ b/examples/Hercules/results/results_maplab.py @@ -0,0 +1,80 @@ +from decimal import Decimal +import getpass +import numpy as np +import os +from robotdataprocess import ImageDataInMemory, ImuData, OdometryData, CoordinateFrame, PathData +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper +from scipy.spatial.transform import Rotation as R + +def main(): + dataset_names = ["V2.1.0"] + + # Do it for all files, robots, and datasets + for dataset_name in dataset_names: + + # Load the odometry data + user = getpass.getuser() + est_data_husky1 = OdometryData.from_csv('/media/' + user + '/T731/Hercules_datasets/' + dataset_name + '/results/maplab_results/maplab/merged_map/vertex_poses_velocities_biases.csv', "odom", 'base_link', CoordinateFrame.NED, True, [0,3,4,5,6,7,8,9], filter=(' mission-id', ' 9d9572d8194a7f180900000000000000')) + est_data_husky2 = OdometryData.from_csv('/media/' + user + '/T731/Hercules_datasets/' + dataset_name + '/results/maplab_results/maplab/merged_map/vertex_poses_velocities_biases.csv', "odom", 'base_link', CoordinateFrame.NED, True, [0,3,4,5,6,7,8,9], filter=(' mission-id', ' 38a88adc194a7f180900000000000000')) + est_data_lst: list[OdometryData] = [est_data_husky1, est_data_husky2] + for est_data in est_data_lst: + est_data.timestamps = est_data.timestamps / Decimal('1e9') # Convert from ns to s + est_data_husky2.visualize([est_data_husky1], ["Drone1 Maplab Results","Husky1 Maplab Results"], 10, 40) + + # Load the ground truth data + gt_data_husky1 = OdometryData.from_csv('/media/' + user + '/T731/Hercules_datasets/' + dataset_name + '/extract/files_for_roman_baseline/Husky1/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) + gt_data_husky2 = OdometryData.from_csv('/media/' + user + '/T731/Hercules_datasets/' + dataset_name + 
'/extract/files_for_roman_baseline/Husky2/poseGT.csv', "world", "robot", CoordinateFrame.FLU, True, None) + gt_data_lst: list[OdometryData] = [gt_data_husky1, gt_data_husky2] + + # Make the timestamps match and then concatenate + est_data_lst, gt_data_lst = PathData.make_start_and_end_times_match(est_data_lst, gt_data_lst) + est_data: OdometryData = PathData.concatenate_PathData(est_data_lst).to_OdometryData('odom', 'base_link') + gt_data: PathData = PathData.concatenate_PathData(gt_data_lst) + + # Since positions are in FLU but orientations are in NED rotated to FLU, lets fix that + R_NED = np.array([[1, 0, 0], + [0, -1, 0], + [0, 0, -1]]) + R_NED_Q = R.from_matrix(R_NED) + + # Apply coordinate transformations to individual estimated trajectories + for est_data in est_data_lst: + est_data._ori_apply_rotation(R_NED_Q.inv()) + est_data._ori_change_of_basis(R_NED_Q) + est_data.frame = CoordinateFrame.FLU + + # 1. Compare individual robots with their ground truths + robot_names = ["Drone1", "Husky1"] + for i, (est_data, gt_data, name) in enumerate(zip(est_data_lst, gt_data_lst, robot_names)): + print(f"\n{'='*50}") + print(f"Individual Results for {name}") + print(f"{'='*50}") + + est_data.visualize([gt_data], [f"{name} Maplab Results", f"{name} Ground Truth"], [10, 10], [40, 1000]) + + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) + print(f"{name} RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print(f"{name} RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + print(f"{name} RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print(f"{name} RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + + # 2. 
Compare combined results + print(f"\n{'='*50}") + print("Combined Results (Drone1 + Husky1)") + print(f"{'='*50}") + + # Make the timestamps match and then concatenate + est_data_lst, gt_data_lst = PathData.make_start_and_end_times_match(est_data_lst, gt_data_lst) + est_data_combined: OdometryData = PathData.concatenate_PathData(est_data_lst).to_OdometryData('odom', 'base_link') + gt_data_combined: PathData = PathData.concatenate_PathData(gt_data_lst) + + est_data_combined.visualize([gt_data_combined], ["Drone1+Husky1 Maplab Results", "Ground Truth"], [10, 10], [40, 1000]) + + metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data_combined, est_data_combined, max_diff=0.1, visualize=True) + print("Combined RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) + print("Combined RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) + print("Combined RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) + print("Combined RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/Hercules/rovioli_results.py b/examples/Hercules/rovioli_results.py deleted file mode 100644 index 03ed7e3..0000000 --- a/examples/Hercules/rovioli_results.py +++ /dev/null @@ -1,28 +0,0 @@ -from decimal import Decimal -from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame -from pprint import pprint - -def main(): - # Load the GT and estimated path data - robot_names = ["Husky1", "Husky2"] - - for robot_name in robot_names: - dataset_version = "V1.5" - - gt_csv = "/media/dbutterfield3/T731/Hercules_datasets/" + dataset_version + \ - "/extract/files_for_roman_baseline/" + robot_name + '/poseGT.csv' - rovioli_csv = "/media/dbutterfield3/T731/Hercules_datasets/" + dataset_version + \ - "/results/maplab_results/rovioli/" + robot_name + '/estimated_poses.csv' - - est_data = OdometryData.from_csv(rovioli_csv, "world", "robot", CoordinateFrame.FLU, True, [0,1,2,3,7,4,5,6], separator=r'\s') - gt_data = OdometryData.from_csv(gt_csv, "world", "robot", CoordinateFrame.FLU, True, None) - - # TODO: Update frame of ROVIOLI output from NED to FLU - - # Calculate RMS ATE, among other metrics - metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True, axes_length=1, axes_interval=100) - print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) - - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/examples/Hercules/vins_mono_results.py b/examples/Hercules/vins_mono_results.py deleted file mode 100644 index 101ac29..0000000 --- a/examples/Hercules/vins_mono_results.py +++ /dev/null @@ -1,27 +0,0 @@ -from decimal import Decimal -from robotdataprocess.data_types.OdometryData import OdometryData, CoordinateFrame -from pprint import pprint - -def main(): - # Load the GT and estimated path data - robot_names = ["Husky1", "Husky2", "Drone1", "Drone2"] - - for robot_name in robot_names: - dataset_version = "V1.4.1" - robot_folder = "/media/dbutterfield3/T731/Hercules_datasets/" + dataset_version + \ - "/extract/files_for_roman_baseline/" + robot_name - est_data = OdometryData.from_csv(robot_folder + '/vins_result_loop_reformatted.csv', - "world", "robot", CoordinateFrame.FLU, True, None) - gt_data = OdometryData.from_csv(robot_folder +'/poseGT.csv', - "world", "robot", CoordinateFrame.FLU, 
True, None) - - # Calculate RMS ATE, among other metrics - metrics_dictionary: dict = OdometryData.calculate_trajectory_errors(gt_data, est_data, max_diff=0.1, visualize=True) - print("Robot: ", robot_name, "RMS ATE: ", metrics_dictionary['APE']['translation_part']['rmse']) - print("Robot: ", robot_name, "RMS RTE: ", metrics_dictionary['RPE']['translation_part']['rmse']) - - print("Robot: ", robot_name, "RMS APE Rotation Angle (Deg): ", metrics_dictionary['APE']['rotation_angle_deg']['rmse']) - print("Robot: ", robot_name, "RMS RTE Rotation Angle (Deg): ", metrics_dictionary['RPE']['rotation_angle_deg']['rmse']) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/examples/LiDAR_debug.py b/examples/LiDAR_debug.py new file mode 100644 index 0000000..189e628 --- /dev/null +++ b/examples/LiDAR_debug.py @@ -0,0 +1,18 @@ +import getpass +from robotdataprocess.data_types.LiDARData import LiDARData + +# Example usage of LiDARData class +if __name__ == "__main__": + + # Path to the folder containing .npy files + user = getpass.getuser() + npy_folder_path = '/media/' + user + '/hercules-collect/raw_data_hercules/ausenv_stereo3center_2ugvuav_calib_752x480/Drone1/lidar' + frame_id = "lidar_frame" + + # Load LiDAR data from .npy files + lidar_data = LiDARData.from_npy_files(npy_folder_path, frame_id) + + # Print some information about the loaded data + print(f"Loaded LiDAR data with frame ID: {lidar_data.frame_id}") + print(f"Number of LiDAR scans: {lidar_data.len()}") + print(f"Timestamps: {lidar_data.timestamps}") \ No newline at end of file diff --git a/examples/WIP/FoundStereo_to_npy.py b/examples/WIP/FoundStereo_to_npy.py index f6fe22a..5163e66 100644 --- a/examples/WIP/FoundStereo_to_npy.py +++ b/examples/WIP/FoundStereo_to_npy.py @@ -1,8 +1,8 @@ -from robotdataprocess import ImageData +from robotdataprocess import ImageDataInMemory def main(): # Load Estimated Depth from FoundationStereo (of GRaCo Dataset) - depth_data = ImageData.from_npy_files("/home/dbutterfield3/Research/FoundationStereo/outputs/GRaCo/ground_05/depth", "camera_21239776") + depth_data = ImageDataInMemory.from_npy_files("/home/dbutterfield3/Research/FoundationStereo/outputs/GRaCo/ground_05/depth", "camera_21239776") # Save it as a single .npy file for use with ROMAN depth_data.to_npy("/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/depth") diff --git a/examples/WIP/LeftRectified_to_npy.py b/examples/WIP/LeftRectified_to_npy.py index f8426cb..773ccfb 100644 --- a/examples/WIP/LeftRectified_to_npy.py +++ b/examples/WIP/LeftRectified_to_npy.py @@ -1,8 +1,8 @@ -from robotdataprocess import ImageData +from robotdataprocess import ImageDataInMemory def main(): # Load Left rectified from GRaCo Dataset - left_img_rectified = ImageData.from_image_files("/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/left_rectified", "camera_21239776") + left_img_rectified = ImageDataInMemory.from_image_files("/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/left_rectified", "camera_21239776") # Save it as a single .npy file for use with ROMAN left_img_rectified.to_npy("/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left_rectified") diff --git a/examples/WIP/rescale_images.py b/examples/WIP/rescale_images.py index a68aeef..61ca107 100644 --- a/examples/WIP/rescale_images.py +++ b/examples/WIP/rescale_images.py @@ -1,7 +1,7 @@ -from robotdataprocess.data_types.ImageData import ImageData +from 
robotdataprocess.data_types.ImageData.ImageDataInMemory import ImageDataInMemory def main(): - images = ImageData.from_npy('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left_rectified') + images = ImageDataInMemory.from_npy('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left_rectified') images.downscale_by_factor(4) images.to_npy('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left_rectified_small') diff --git a/examples/WIP/stereo_rectify_undistort.py b/examples/WIP/stereo_rectify_undistort.py index 2dd6213..1e437be 100644 --- a/examples/WIP/stereo_rectify_undistort.py +++ b/examples/WIP/stereo_rectify_undistort.py @@ -1,11 +1,11 @@ import numpy as np -from robotdataprocess import ImageData, ImuData, OdometryData, CoordinateFrame -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess import ImageDataInMemory, ImuData, OdometryData, CoordinateFrame +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper def main(): # Extract stereo imagery from GRaCo dataset - left_data = ImageData.from_ros2_bag('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05', '/camera_left/image_raw', '/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left') - right_data = ImageData.from_ros2_bag('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05', '/camera_right/image_raw', '/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/right') + left_data = ImageDataInMemory.from_ros2_bag('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05', '/camera_left/image_raw', '/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/left') + right_data = ImageDataInMemory.from_ros2_bag('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05', '/camera_right/image_raw', '/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-05-extract/npy/right') # left_data = ImageData.from_npy('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-04-extract/npy/left') # right_data = ImageData.from_npy('/media/dbutterfield3/T75/Graco_Datasets/ground/ros2/ground-04-extract/npy/right') diff --git a/examples/WIP/tartanair_to_bag.py b/examples/WIP/tartanair_to_bag.py index ff8444a..fdd6283 100644 --- a/examples/WIP/tartanair_to_bag.py +++ b/examples/WIP/tartanair_to_bag.py @@ -1,11 +1,11 @@ -from robotdataprocess.data_types.ImageData import ImageData +from robotdataprocess.data_types.ImageData.ImageDataInMemory import ImageDataInMemory from robotdataprocess.data_types.ImuData import ImuData -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper def main(): # Extract data from TartanGround Dataset imu_data = ImuData.from_TartanAir('/home/dbutterfield3/Research/TartanAir/dataset/TartanGround/OldTownSummer/Data_ground/Pose2000/imu', 'imu') - image_data = ImageData.from_TartanAir('/home/dbutterfield3/Research/TartanAir/dataset/TartanGround/OldTownSummer/Data_ground/Pose2000', 'image_lcam_front', 'imu', 'image_lcam_front') + image_data = ImageDataInMemory.from_TartanAir('/home/dbutterfield3/Research/TartanAir/dataset/TartanGround/OldTownSummer/Data_ground/Pose2000', 'image_lcam_front', 'imu', 'image_lcam_front') # Save it into a ROS2 Humble bag Ros2BagWrapper.write_data_to_rosbag('/home/dbutterfield3/Research/rosbag_manip/outputs/tartanGround', [imu_data, image_data], ['/imu', '/image_lcam_front'], None) diff --git a/examples/imu_integration.py 
b/examples/imu_integration.py index 42cc2b4..3ebffa7 100644 --- a/examples/imu_integration.py +++ b/examples/imu_integration.py @@ -1,29 +1,31 @@ -from decimal import Decimal -import numpy as np +import getpass from pathlib import Path from robotdataprocess.data_types.ImuData import ImuData, CoordinateFrame from robotdataprocess.data_types.OdometryData import OdometryData def main(): # Enter desired configuration here - dataset_num = "V1.6" - input_dir = '/home/dbutterfield3/Desktop/data/Hercules_datasets/' + dataset_num + '/data' - robot_name = "Drone2" + dataset_num = "V2.3.C.Temp" + user = getpass.getuser() + input_dir = '/media/' + user + '/T73/Hercules_Datasets/' + dataset_num + '/data' + robot_name = "Husky1" # Make directory paths input_path = Path(input_dir).absolute() # Extract IMU data and GT Pose data - imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu.txt', robot_name + '/base_link', CoordinateFrame.NED) - gt_odom_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'world', robot_name + '/base_link', CoordinateFrame.NED) - + imu_data = ImuData.from_txt_file(input_path / robot_name / 'synthetic_imu_9axis_500Hz.txt', robot_name + '/base_link', CoordinateFrame.NED, nine_axis=True) + gt_odom_data = OdometryData.from_txt_file(input_path / robot_name / 'pose_world_frame.txt', 'world', robot_name + '/base_link', CoordinateFrame.NED, False) # Convert imu data to odometry via integration and visualize compared to GT - initial_pos = np.array([5.0, 5.0, 0.912486], dtype=float) - initial_vel = np.array([0.0, 0.0, 0.0], dtype=float) - initial_ori = np.array([0.0, 0.0, 0.0, 1.0], dtype=float) + initial_pos = gt_odom_data.positions[0] + initial_vel = (gt_odom_data.positions[1] - gt_odom_data.positions[0]) / (gt_odom_data.timestamps[1] - gt_odom_data.timestamps[0]) + initial_ori = gt_odom_data.orientations[0] + print("Initial Position: ", initial_pos) + print("Initial Velocity: ", initial_vel) + print("Initial Orientation (quat xyzw): ", initial_ori) - odom_data: OdometryData = imu_data.to_PathData(initial_pos, initial_vel, initial_ori, use_ang_vel=True).to_OdometryData('world') + odom_data: OdometryData = imu_data.to_PathData(initial_pos, initial_vel, None, use_ang_vel=False).to_OdometryData('world', robot_name + '/base_link') odom_data.visualize([gt_odom_data], ["IMU Derived Odometry", "Ground Truth Odometry"], axes_interval=5000, axes_length=10.0) if __name__ == "__main__": diff --git a/examples/Maplab/visualize_pose.py b/examples/maplab_visualize_pose.py similarity index 87% rename from examples/Maplab/visualize_pose.py rename to examples/maplab_visualize_pose.py index 0b521df..52778a7 100644 --- a/examples/Maplab/visualize_pose.py +++ b/examples/maplab_visualize_pose.py @@ -1,4 +1,5 @@ from decimal import Decimal +import getpass import numpy as np from robotdataprocess.data_types.OdometryData import OdometryData from robotdataprocess import CoordinateFrame @@ -6,9 +7,10 @@ def main(): - maplab = OdometryData.from_csv('/home/dbutterfield3/Desktop/data/vertex_poses_velocities_biases.csv', + user = getpass.getuser() + maplab = OdometryData.from_csv('/home/' + user + '/Desktop/data/vertex_poses_velocities_biases.csv', "world", "robot", CoordinateFrame.ENU, True, [0, 3, 4, 5, 6, 7, 8, 9]) - vicon = OdometryData.from_csv('/home/dbutterfield3/Desktop/data/vicon_trajectory.csv', + vicon = OdometryData.from_csv('/home/' + user + '/Desktop/data/vicon_trajectory.csv', "odom", 'base_link', CoordinateFrame.FLU, True, [0, 1, 2, 3, 7, 4, 5, 6]) 
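The result scripts above convert orientations reported in NED into FLU using the axis flip diag(1, -1, -1). The underlying identity is the standard change of basis: if FLU coordinates are obtained from NED ones as v_flu = C @ v_ned, then a body rotation R_ned is re-expressed as C @ R_ned @ C.T. A minimal standalone sketch, assuming only NumPy and SciPy; the helper name and single-quaternion signature are illustrative and not part of the library, and this is not necessarily equivalent to the _ori_apply_rotation/_ori_change_of_basis calls:

import numpy as np
from scipy.spatial.transform import Rotation as R

# Same axis mapping used by the scripts above: x stays forward, y and z flip sign.
C = np.array([[1, 0, 0],
              [0, -1, 0],
              [0, 0, -1]], dtype=float)

def ned_orientation_to_flu(q_xyzw):
    """Re-express one NED body orientation in FLU via a change of basis."""
    R_ned = R.from_quat(q_xyzw).as_matrix()
    R_flu = C @ R_ned @ C.T   # C is orthogonal, so C.T == C^-1
    return R.from_matrix(R_flu).as_quat()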
vicon.shift_to_start_at_identity() diff --git a/examples/sandilya_imu_integration.py b/examples/sandilya_imu_integration.py new file mode 100644 index 0000000..cc2c9e1 --- /dev/null +++ b/examples/sandilya_imu_integration.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python3 +""" +recover_odom_from_imu.py + +Recover odometry (t, x y z qw qx qy qz) from IMU: + t, ax, ay, az, gx, gy, gz + +Critical: you MUST set the initial attitude (and realistically initial velocity). +For your synthetic pipeline, the best option is to initialize from the first +pose in the ground-truth odom file that you used to generate the synthetic IMU. + +This script assumes your IMU is: + - acc_body is specific force in body frame (a_world - g, rotated into body) + - omega_body is body angular velocity (rad/s) in body frame +and that you want a world frame consistent with your odom output. + +No fancy alignment is done beyond the provided initialization. +""" + +import numpy as np +from pathlib import Path +from scipy.spatial.transform import Rotation + +# ------------------------- +# USER SETTINGS (EDIT ME) +# ------------------------- + +IMU_PATH = "/media/dbutterfield3/T73/Hercules_datasets/V2.1.2/data/Drone1/synthetic_imu.txt" +OUT_ODOM_PATH = "/media/dbutterfield3/T73/Hercules_datasets/V2.1.2/data/Drone1/int_odom_DELETE_ME.txt" + +# If True, initialize from the first TWO poses in this odom file (for pos0, vel0, quat0) +INIT_FROM_GT_ODOM = True +GT_ODOM_INIT_PATH = "/media/dbutterfield3/T73/Hercules_datasets/V2.1.2/data/Drone1/pose_world_frame.txt" + +# If INIT_FROM_GT_ODOM is False, use these: +INIT_POS = np.array([0.0, 0.0, 0.0]) # x y z +INIT_VEL = np.array([0.0, 0.0, 0.0]) # vx vy vz +INIT_QUAT_WXYZ = np.array([1.0, 0.0, 0.0, 0.0]) # qw qx qy qz + +# Gravity in your chosen world frame. +# IMPORTANT: This must match the frame used in the IMU synthesis. +# If your world Z is down (NED-like): [0,0,+9.80665] +# If your world Z is up (ENU/UE): [0,0,-9.80665] +GRAVITY_WORLD = np.array([0.0, 0.0, 9.80665], dtype=float) + +# Safety: drop / clamp weird time steps +MAX_DT = 0.1 # seconds (for IMU 200 Hz, dt ~ 0.005) + +# ------------------------- +# IO +# ------------------------- + +def load_imu(path: str): + data = np.loadtxt(path) + t = data[:, 0] + acc_body = data[:, 1:4] + omega_body = data[:, 4:7] + return t, acc_body, omega_body + + +def _looks_like_int_only_line(line: str) -> bool: + s = line.strip() + if not s: + return False + if any(c.isspace() for c in s): + return False + if "." 
in s or "e" in s.lower(): + return False + try: + int(s) + return True + except ValueError: + return False + + +def _read_odom_lines(odom_path: str): + p = Path(odom_path) + lines = p.read_text(errors="ignore").splitlines() + lines = [ln.strip() for ln in lines if ln.strip()] + if not lines: + raise ValueError(f"Empty odom file: {odom_path}") + + if _looks_like_int_only_line(lines[0]): + lines = lines[1:] + + if len(lines) < 2: + raise ValueError(f"Need at least 2 pose lines in odom file: {odom_path}") + + return lines + + +def _parse_odom_pose(line: str): + # format: t x y z qw qx qy qz + parts = line.split() + if len(parts) < 8: + raise ValueError(f"Odom line has too few columns: {line}") + + t = float(parts[0]) + x, y, z = map(float, parts[1:4]) + qw, qx, qy, qz = map(float, parts[4:8]) + return t, np.array([x, y, z], dtype=float), np.array([qw, qx, qy, qz], dtype=float) + + +def load_first_pose_and_velocity_from_odom(odom_path: str): + """ + Odom format: + optional first line: integer count + then: t x y z qw qx qy qz + + Returns: + pos0 : (3,) + vel0 : (3,) estimated from first two poses + quat0: (4,) wxyz from first pose + """ + lines = _read_odom_lines(odom_path) + + t0, p0, q0 = _parse_odom_pose(lines[0]) + t1, p1, _ = _parse_odom_pose(lines[1]) + + dt = t1 - t0 + if not np.isfinite(dt) or dt <= 0.0: + raise ValueError(f"Bad initial dt from odom: t0={t0}, t1={t1}") + + v0 = (p1 - p0) / dt + return p0, v0, q0 + + +# ------------------------- +# INTEGRATION +# ------------------------- + +def integrate_trapezoid(t, acc_body, omega_body, pos0, vel0, quat_wxyz0): + n = len(t) + if n < 2: + raise ValueError("IMU file too short") + + pos = np.zeros((n, 3), dtype=float) + vel = np.zeros((n, 3), dtype=float) + quats = np.zeros((n, 4), dtype=float) + + pos[0] = pos0 + vel[0] = vel0 + + # Initialize orientation from given quaternion (wxyz) + qw, qx, qy, qz = quat_wxyz0 + rot = Rotation.from_quat([qx, qy, qz, qw]) # SciPy expects [x y z w] + q0_xyzw = rot.as_quat() + q0_xyzw /= np.linalg.norm(q0_xyzw) + quats[0] = [q0_xyzw[3], q0_xyzw[0], q0_xyzw[1], q0_xyzw[2]] + + dt = np.diff(t, prepend=t[0]) + + for i in range(1, n): + dti = float(dt[i]) + + # Handle bad timestamps + if not np.isfinite(dti) or dti <= 0.0: + pos[i] = pos[i - 1] + vel[i] = vel[i - 1] + quats[i] = quats[i - 1] + continue + + if dti > MAX_DT: + dti = MAX_DT + + # 1) orientation update: omega_body is in BODY frame + delta_q = Rotation.from_rotvec(omega_body[i - 1] * dti) + rot_new = rot * delta_q + + # 2) accel: body specific force -> world accel by rotating and adding gravity + a_prev = rot.apply(acc_body[i - 1]) + GRAVITY_WORLD + a_curr = rot_new.apply(acc_body[i]) + GRAVITY_WORLD + + # 3) trapezoidal integrate velocity + vel[i] = vel[i - 1] + 0.5 * (a_prev + a_curr) * dti + + # 4) trapezoidal integrate position + pos[i] = pos[i - 1] + 0.5 * (vel[i - 1] + vel[i]) * dti + + # 5) record quaternion [w x y z] + q_xyzw = rot_new.as_quat() + q_xyzw /= np.linalg.norm(q_xyzw) + quats[i] = [q_xyzw[3], q_xyzw[0], q_xyzw[1], q_xyzw[2]] + + rot = rot_new + + return pos, quats + + +def save_odom(t, pos, quats, path: str): + with open(path, "w") as f: + for ti, p, q in zip(t, pos, quats): + f.write( + f"{ti:.6f} {p[0]:.6f} {p[1]:.6f} {p[2]:.6f} " + f"{q[0]:.6f} {q[1]:.6f} {q[2]:.6f} {q[3]:.6f}\n" + ) + + +def main(): + t, acc_body, omega_body = load_imu(IMU_PATH) + + if INIT_FROM_GT_ODOM: + pos0, vel0, quat0 = load_first_pose_and_velocity_from_odom(GT_ODOM_INIT_PATH) + else: + pos0 = INIT_POS.astype(float) + vel0 = INIT_VEL.astype(float) 
+ quat0 = INIT_QUAT_WXYZ.astype(float) + + pos, quats = integrate_trapezoid(t, acc_body, omega_body, pos0, vel0, quat0) + save_odom(t, pos, quats, OUT_ODOM_PATH) + print(f"Wrote recovered odom to: {OUT_ODOM_PATH}") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/visualize_odom.py b/examples/visualize_odom.py index be42ac9..c1bd647 100644 --- a/examples/visualize_odom.py +++ b/examples/visualize_odom.py @@ -1,10 +1,12 @@ +import getpass from robotdataprocess.data_types.OdometryData import OdometryData from robotdataprocess import CoordinateFrame def main(): robot_names = ["Drone2"] dataset_name = "V1.4.1" - file_path = '/home/dbutterfield3/Desktop/data/Hercules_datasets/'+dataset_name+'/extract/files_for_roman_baseline/' + user = getpass.getuser() + file_path = '/home/' + user + '/Desktop/data/Hercules_datasets/'+dataset_name+'/extract/files_for_roman_baseline/' # Load csv files data: list[OdometryData] = [] diff --git a/pyproject.toml b/pyproject.toml index c2e793d..a858aac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,10 @@ [project] name = "robotdataprocess" -version = "0.1.2" +version = "0.2" authors = [ { name="Daniel Butterfield", email="danielcbutter@gmail.com" }, + { name="Hanzhi Li", email="danielcbutter@gmail.com" }, + { name="Sandilya Sai Garimella", email="sgarimella34@gatech.edu"} ] description = "A library for loading, saving, converting, and manipulating robotic datasets." readme = "README.md" @@ -14,13 +16,21 @@ classifiers = [ dependencies = ["coverage", "evo", "matplotlib", - "numpy", + + # NumPy pin for successful build with ROS2 + "numpy<1.25; python_version >= '3.10'", + "numpy; python_version <= '3.9'", + "opencv-python", "rosbags", "scipy", "snakeviz", "tqdm", - "typeguard", + + # Typeguard pin for successful build with ROS2 + "typeguard>=4.4.4; python_version >= '3.10'", + "typeguard; python_version <= '3.9'", + "typer", "pandas", "pillow", diff --git a/src/robotdataprocess/CmdLineInterface.py b/src/robotdataprocess/CmdLineInterface.py index 4d23880..f468d4d 100644 --- a/src/robotdataprocess/CmdLineInterface.py +++ b/src/robotdataprocess/CmdLineInterface.py @@ -1,12 +1,12 @@ from collections import defaultdict from .data_types.Data import CoordinateFrame -from .data_types.ImageData import ImageData +from .data_types.ImageData.ImageDataInMemory import ImageDataInMemory from .data_types.OdometryData import OdometryData from decimal import Decimal import matplotlib.pyplot as plt import numpy as np from pathlib import Path -from .rosbag.Ros2BagWrapper import Ros2BagWrapper +from .ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag2 import Reader as Reader2 from rosbags.rosbag2 import Writer as Writer2 from scipy.spatial.transform import Rotation as R @@ -127,11 +127,11 @@ def extract_images_to_npy(self): topic: str = self.operation_params['extract_images_to_npy']['topic'] output_folder: str = self.operation_params['extract_images_to_npy']['output_folder'] - ImageData.from_ros2_bag(self.input_bag, topic, output_folder) + ImageDataInMemory.from_ros2_bag(self.input_bag, topic, output_folder) def compare_timestamps_two_image_data(self): """ Compare timestamps between two ImageData instances. 
""" - data0 = ImageData.from_npy(self.operation_params['compare_timestamps_two_image_data']['folder_0']) - data1 = ImageData.from_npy(self.operation_params['compare_timestamps_two_image_data']['folder_1']) + data0 = ImageDataInMemory.from_npy(self.operation_params['compare_timestamps_two_image_data']['folder_0']) + data1 = ImageDataInMemory.from_npy(self.operation_params['compare_timestamps_two_image_data']['folder_1']) data0.compare_timestamps(data1) diff --git a/src/robotdataprocess/ModuleImporter.py b/src/robotdataprocess/ModuleImporter.py new file mode 100644 index 0000000..8c5787c --- /dev/null +++ b/src/robotdataprocess/ModuleImporter.py @@ -0,0 +1,34 @@ +import importlib + +class ModuleImporter: + """ This class allows us to Lazily import ROS modules to make execution in non-ROS workspaces possible. + Additionally caches the imports to support real-time publishing when using RosPublisher.py """ + + _module_cache: dict = {} + _attr_cache: dict = {} + + @classmethod + def get_module_attribute(cls, module_path: str, attr_name: str): + """ Dynamically loads and caches a ROS attribute (like a Class). """ + + key = f"{module_path}.{attr_name}" + if key not in cls._attr_cache: + module = cls.get_module(module_path) + try: + cls._attr_cache[key] = getattr(module, attr_name) + except (ImportError, AttributeError) as e: + raise ImportError(f"Module '{module_path}' was found, but attribute '{attr_name}' does not exist. Ensure you have the correct ROS message packages installed.") from e + return cls._attr_cache[key] + + @classmethod + def get_module(cls, name: str): + """ Returns a cached reference to a top-level module or sub-module (e.g., 'rospy' or 'sensor_msgs.msg'). """ + + if name not in cls._module_cache: + try: + cls._module_cache[name] = importlib.import_module(name) + except ImportError as e: + raise ImportError(f"Module {name} is required but not installed.") from e + return cls._module_cache[name] + + \ No newline at end of file diff --git a/src/robotdataprocess/__init__.py b/src/robotdataprocess/__init__.py index 593aabc..d4a6f95 100644 --- a/src/robotdataprocess/__init__.py +++ b/src/robotdataprocess/__init__.py @@ -1,7 +1,17 @@ -from .CmdLineInterface import CmdLineInterface -from .data_types.ImageData import ImageData +# Data Enumerations +from .data_types.Data import CoordinateFrame, ROSMsgLibType + +# Data Types +from .data_types.ImageData.ImageDataInMemory import ImageDataInMemory +from .data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk from .data_types.ImuData import ImuData +from .data_types.LiDARData import LiDARData from .data_types.OdometryData import OdometryData -from .data_types.Data import CoordinateFrame from .data_types.PathData import PathData -from .rosbag.Ros2BagWrapper import Ros2BagWrapper + +# ROS Classes & Functions +from .ros.Ros2BagWrapper import Ros2BagWrapper +from .ros.RosPublisher import publish_data_ROS_multiprocess + +# Other Classes +from .CmdLineInterface import CmdLineInterface \ No newline at end of file diff --git a/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-310.pyc b/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-310.pyc deleted file mode 100644 index 2a8e178..0000000 Binary files a/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-38.pyc b/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-38.pyc deleted file mode 100644 index 44b4ab8..0000000 Binary files 
a/src/robotdataprocess/__pycache__/CmdLineInterface.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/__init__.cpython-310.pyc b/src/robotdataprocess/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 7906b5c..0000000 Binary files a/src/robotdataprocess/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/__init__.cpython-312.pyc b/src/robotdataprocess/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7934c9f..0000000 Binary files a/src/robotdataprocess/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/__init__.cpython-38.pyc b/src/robotdataprocess/__pycache__/__init__.cpython-38.pyc deleted file mode 100644 index 12b66fd..0000000 Binary files a/src/robotdataprocess/__pycache__/__init__.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/command_line.cpython-312.pyc b/src/robotdataprocess/__pycache__/command_line.cpython-312.pyc deleted file mode 100644 index a7406ff..0000000 Binary files a/src/robotdataprocess/__pycache__/command_line.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/conversion_utils.cpython-310.pyc b/src/robotdataprocess/__pycache__/conversion_utils.cpython-310.pyc deleted file mode 100644 index 7b1535f..0000000 Binary files a/src/robotdataprocess/__pycache__/conversion_utils.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/conversion_utils.cpython-312.pyc b/src/robotdataprocess/__pycache__/conversion_utils.cpython-312.pyc deleted file mode 100644 index cbff740..0000000 Binary files a/src/robotdataprocess/__pycache__/conversion_utils.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/__pycache__/conversion_utils.cpython-38.pyc b/src/robotdataprocess/__pycache__/conversion_utils.cpython-38.pyc deleted file mode 100644 index 75e1b5a..0000000 Binary files a/src/robotdataprocess/__pycache__/conversion_utils.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/Data.py b/src/robotdataprocess/data_types/Data.py index a722199..54ae078 100644 --- a/src/robotdataprocess/data_types/Data.py +++ b/src/robotdataprocess/data_types/Data.py @@ -5,6 +5,7 @@ from enum import Enum import numpy as np from typeguard import typechecked +from typing import Any class CoordinateFrame(Enum): """ @@ -22,6 +23,19 @@ class CoordinateFrame(Enum): ENU = 2 NONE = 3 +class ROSMsgLibType(Enum): + """ + Enum for different ROS message library types. + + Attributes: + ROSBAGS: Use ROS messages from the rosbags library (Pure Python library). + RCLPY: Use ROS messages from the rclpy library (ROS2 Python client library). + ROSPY: Use ROS messages from the rospy library (ROS1 Python client library). + """ + + ROSBAGS = 0 + RCLPY = 1 + ROSPY = 2 class Data: """ @@ -34,7 +48,7 @@ class Data: timestamps: np.ndarray[Decimal] @typechecked - def __init__(self, frame_id: str, timestamps: np.ndarray | list, ): + def __init__(self, frame_id: str, timestamps: np.ndarray | list): # Copy initial values into attributes self.frame_id = frame_id @@ -50,12 +64,12 @@ def len(self): return len(self.timestamps) @staticmethod - def get_ros_msg_type(): - """ Will return the msgtype needed to add a connetion to a rosbag writer. """ + def get_ros_msg_type(libtype: ROSMsgLibType): + """ Will return the msgtype for the ROS message for this Data object. 
""" raise NotImplementedError("This method needs to be overwritten by the child Data class!") - def get_ros_msg(self, i: int): - """ Will return a ROS message object ready to be written into a ROS2 Humble bag. """ + def get_ros_msg(self, libtype: ROSMsgLibType, i: int): + """ Will create and return a ROS message object. """ raise NotImplementedError("This method needs to be overwritten by the child Data class!") def crop_data(self, start: Decimal, end: Decimal): diff --git a/src/robotdataprocess/data_types/ImageData/ImageData.py b/src/robotdataprocess/data_types/ImageData/ImageData.py new file mode 100644 index 0000000..a01b562 --- /dev/null +++ b/src/robotdataprocess/data_types/ImageData/ImageData.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +from ..Data import Data, ROSMsgLibType +import decimal +from decimal import Decimal +from enum import Enum +from ...ModuleImporter import ModuleImporter +import numpy as np +from pathlib import Path +from typeguard import typechecked +from typing import Union, Any +from rosbags.typesys import Stores, get_typestore + +@typechecked +class ImageData(Data): + """ Generic ImageData class that should be overwritten by children """ + + # Define image encodings enumeration + class ImageEncoding(Enum): + Mono8 = 0 + RGB8 = 1 + _32FC1 = 2 + + # ================ Class Methods ================ + @classmethod + def from_str(cls, encoding_str: str): + if encoding_str == "ImageEncoding.Mono8": + return cls.Mono8 + elif encoding_str == "ImageEncoding.RGB8": + return cls.RGB8 + elif encoding_str == "ImageEncoding._32FC1": + return cls._32FC1 + else: + raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") + + @classmethod + def from_ros_str(cls, encoding_str: str): + encoding_str = encoding_str.lower() + if encoding_str == 'mono8': + return cls.Mono8 + elif encoding_str == 'rgb8': + return cls.RGB8 + elif encoding_str == "32fc1": + return cls._32FC1 + else: + raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") + + @classmethod + def from_dtype_and_channels(cls, dtype: np.dtype, channels: int): + if dtype == np.uint8 and channels == 1: + return cls.Mono8 + elif dtype == np.uint8 and channels == 3: + return cls.RGB8 + elif dtype == np.float32 and channels == 1: + return cls._32FC1 + else: + raise NotImplementedError(f"dtype {dtype} w/ {channels} channel(s) has no corresponding encoding!") + + @classmethod + def from_pillow_str(cls, encoding_str: str): + if encoding_str == "RGB": + return cls.RGB8 + elif encoding_str == "L": + return cls.Mono8 + else: + raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") + + # ================ Export Methods ================ + @staticmethod + def to_ros_str(encoding: ImageData.ImageEncoding): + if encoding == ImageData.ImageEncoding.Mono8: + return 'mono8' + elif encoding == ImageData.ImageEncoding.RGB8: + return 'rgb8' + elif encoding == ImageData.ImageEncoding._32FC1: + return '32FC1' + else: + raise NotImplementedError(f"This ImageData.ImageEncoding.{encoding} is not yet implemented (or it doesn't exist)!") + + @staticmethod + def to_dtype_and_channels(encoding): + if encoding == ImageData.ImageEncoding.Mono8: + return (np.uint8, 1) + elif encoding == ImageData.ImageEncoding.RGB8: + return (np.uint8, 3) + elif encoding == ImageData.ImageEncoding._32FC1: + return (np.float32, 1) + else: + raise NotImplementedError(f"This encoding ({encoding}) is missing a 
mapping to dtype/channels!") + + height: int + width: int + encoding: ImageData.ImageEncoding + images: Union[np.ndarray, Any] # With Any being a LazyImageArray + + def __init__(self, frame_id: str, timestamps: Union[np.ndarray, list], height: int, + width: int, encoding: ImageData.ImageEncoding, images: Union[np.ndarray, Any]): + super().__init__(frame_id, timestamps) + self.height = height + self.width = width + self.encoding = encoding + self.images = images + + # ========================================================================= + # ============================ Class Methods ============================== + # ========================================================================= + + @classmethod + def from_image_files(cls, image_folder_path: Union[Path, str], frame_id: str) -> ImageData: + """ Creates a class structure from a folder with .png files. """ + NotImplementedError("This method needs to be overwritten by the child Data class!") + + # ========================================================================= + # ========================= Manipulation Methods ========================== + # ========================================================================= + + def crop_data(self, start: Decimal, end: Decimal): + """ Will crop the data so only values within [start, end] inclusive are kept. """ + + # Create boolean mask of data to keep + mask = (self.timestamps >= start) & (self.timestamps <= end) + + # Apply mask + self.timestamps = self.timestamps[mask] + self.images = self.images[mask] + + # ========================================================================= + # ======================= Multi ImageData Methods ========================= + # ========================================================================= + + def compare_timestamps(self, other: ImageDataInMemory): + """ + This method compares two ImageData objects based on the timestamps of their + images. + """ + + # Find the locations in other where self timestamps would fit + idxs = np.searchsorted(other.timestamps, self.timestamps, side='right') + + # Get the left indices and right indices + idxs_right = np.clip(idxs, 0, len(other.timestamps)-1) + idxs_left = np.clip(idxs - 1, 0, len(other.timestamps)-1) + + # Get distances to nearest on either side + dists = np.minimum(np.abs(self.timestamps - other.timestamps[idxs_left]), + np.abs(self.timestamps - other.timestamps[idxs_right])) + + # Print the mean and std of the distances + print(f"Mean distance (left): {np.mean(np.abs(self.timestamps - other.timestamps[idxs_left]))}") + print(f"Mean distance (right): {np.mean(np.abs(self.timestamps - other.timestamps[idxs_right]))}") + print(f"Mean distance: {np.mean(dists)}") + print(f"Std distance: {np.std(dists)}") + + # ========================================================================= + # =========================== Conversion to ROS =========================== + # ========================================================================= + + @staticmethod + def get_ros_msg_type(lib_type: ROSMsgLibType) -> Any: + """ Return the __msgtype__ for an Image msg. 
""" + + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + return typestore.types['sensor_msgs/msg/Image'].__msgtype__ + elif lib_type == ROSMsgLibType.RCLPY or lib_type == ROSMsgLibType.ROSPY: + return ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Image') + else: + raise NotImplementedError(f"Unsupported ROS_MSG_LIBRARY_TYPE {lib_type} for ImageData.get_ros_msg_type!") + + def get_ros_msg(self, lib_type: ROSMsgLibType, i: int): + """ + Gets an Image ROS2 Humble message corresponding to the image represented by index i. + + Args: + lib_type (ROSMsgLibType): The type of ROS message to return (e.g., ROSBAGS, RCLPY). + i (int): The index of the image message to convert. + Raises: + ValueError: If i is outside the data bounds. + """ + + # Check to make sure index is within data bounds + if i < 0 or i >= self.len(): + raise ValueError(f"Index {i} is out of bounds!") + + # Calculate the step + if self.encoding == ImageData.ImageEncoding.RGB8: + step = 3 * self.width + elif self.encoding == ImageData.ImageEncoding._32FC1: + step = 4 * self.width + else: + raise NotImplementedError(f"Unsupported encoding {self.encoding} for rosbag_get_ros_msg!") + + # Get the seconds and nanoseconds + seconds = int(self.timestamps[i]) + nanoseconds = int((self.timestamps[i] - self.timestamps[i].to_integral_value(rounding=decimal.ROUND_DOWN)) * Decimal("1e9").to_integral_value(decimal.ROUND_HALF_EVEN)) + + # Calculate the ROS2 Image data + if self.encoding == ImageData.ImageEncoding.RGB8: + data = self.images[i].flatten() + elif self.encoding == ImageData.ImageEncoding._32FC1: + data = self.images[i].flatten().view(np.uint8) + # TODO: Check endianness for _32FC1 + else: + raise NotImplementedError(f"Unsupported encoding {self.encoding} for rosbag_get_ros_msg!") + + # Write the data into the new class + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + Image, Header, Time = typestore.types['sensor_msgs/msg/Image'], typestore.types['std_msgs/msg/Header'], typestore.types['builtin_interfaces/msg/Time'] + + return Image(Header(stamp=Time(sec=int(seconds), + nanosec=int(nanoseconds)), + frame_id=self.frame_id), + height=self.height, + width=self.width, + encoding=ImageData.ImageEncoding.to_ros_str(self.encoding), + is_bigendian=0, + step=step, + data=data) + + elif lib_type == ROSMsgLibType.RCLPY or lib_type == ROSMsgLibType.ROSPY: + Header = ModuleImporter.get_module_attribute('std_msgs.msg', 'Header') + Image = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Image') + + # Create the messages + img_msg = Image() + img_msg.header = Header() + if lib_type == ROSMsgLibType.RCLPY: + Time = ModuleImporter.get_module_attribute('rclpy.time', 'Time') + img_msg.header.stamp = Time(seconds=seconds, nanoseconds=int(nanoseconds)).to_msg() + else: + rospy = ModuleImporter.get_module('rospy') + img_msg.header.stamp = rospy.Time(secs=int(seconds), nsecs=int(nanoseconds)) + + # Populate the rest of the data + img_msg.header.frame_id = self.frame_id + img_msg.height = self.height + img_msg.width = self.width + img_msg.encoding = ImageData.ImageEncoding.to_ros_str(self.encoding) + img_msg.is_bigendian = 0 + img_msg.step = step + img_msg.data = data.tolist() + return img_msg + + else: + raise NotImplementedError(f"Unsupported ROS_MSG_LIBRARY_TYPE {lib_type} for ImageData.get_ros_msg()!") \ No newline at end of file diff --git a/src/robotdataprocess/data_types/ImageData.py b/src/robotdataprocess/data_types/ImageData/ImageDataInMemory.py similarity 
index 57% rename from src/robotdataprocess/data_types/ImageData.py rename to src/robotdataprocess/data_types/ImageData/ImageDataInMemory.py index bad750e..037c2e4 100644 --- a/src/robotdataprocess/data_types/ImageData.py +++ b/src/robotdataprocess/data_types/ImageData/ImageDataInMemory.py @@ -1,124 +1,35 @@ from __future__ import annotations -from ..conversion_utils import col_to_dec_arr +from ...conversion_utils import col_to_dec_arr import cv2 -from .Data import Data -import decimal + from decimal import Decimal from enum import Enum +from .ImageData import ImageData import numpy as np from numpy.lib.format import open_memmap import os from pathlib import Path from PIL import Image -from ..rosbag.Ros2BagWrapper import Ros2BagWrapper +from ...ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag2 import Reader as Reader2 -from rosbags.typesys import Stores, get_typestore from rosbags.typesys.store import Typestore from typeguard import typechecked from typing import Tuple, Union import tqdm -class ImageData(Data): - - # Define image encodings enum - class ImageEncoding(Enum): - Mono8 = 0 - RGB8 = 1 - _32FC1 = 2 - - # ================ Class Methods ================ - @classmethod - def from_str(cls, encoding_str: str): - if encoding_str == "ImageEncoding.Mono8": - return cls.Mono8 - elif encoding_str == "ImageEncoding.RGB8": - return cls.RGB8 - elif encoding_str == "ImageEncoding._32FC1": - return cls._32FC1 - else: - raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") - - @classmethod - def from_ros_str(cls, encoding_str: str): - encoding_str = encoding_str.lower() - if encoding_str == 'mono8': - return cls.Mono8 - elif encoding_str == 'rgb8': - return cls.RGB8 - elif encoding_str == "32fc1": - return cls._32FC1 - else: - raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") - - @classmethod - def from_dtype_and_channels(cls, dtype: np.dtype, channels: int): - if dtype == np.uint8 and channels == 1: - return cls.Mono8 - elif dtype == np.uint8 and channels == 3: - return cls.RGB8 - elif dtype == np.float32 and channels == 1: - return cls._32FC1 - else: - raise NotImplementedError(f"dtype {dtype} w/ {channels} channel(s) has no corresponding encoding!") - - @classmethod - def from_pillow_str(cls, encoding_str: str): - if encoding_str == "RGB": - return cls.RGB8 - elif encoding_str == "L": - return cls.Mono8 - else: - raise NotImplementedError(f"This encoding ({encoding_str}) is not yet implemented (or it doesn't exist)!") - - # ================ Export Methods ================ - @staticmethod - def to_ros_str(encoding: ImageData.ImageEncoding): - if encoding == ImageData.ImageEncoding.Mono8: - return 'mono8' - elif encoding == ImageData.ImageEncoding.RGB8: - return 'rgb8' - elif encoding == ImageData.ImageEncoding._32FC1: - return '32FC1' - else: - raise NotImplementedError(f"This ImageData.ImageEncoding.{encoding} is not yet implemented (or it doesn't exist)!") - - @staticmethod - def to_dtype_and_channels(encoding): - if encoding == ImageData.ImageEncoding.Mono8: - return (np.uint8, 1) - elif encoding == ImageData.ImageEncoding.RGB8: - return (np.uint8, 3) - elif encoding == ImageData.ImageEncoding._32FC1: - return (np.float32, 1) - else: - raise NotImplementedError(f"This encoding ({encoding}) is missing a mapping to dtype/channels!") - - - - # Define image-specific data attributes - height: int - width: int - encoding: ImageEncoding - images: np.ndarray - - @typechecked 
+@typechecked +class ImageDataInMemory(ImageData): + def __init__(self, frame_id: str, timestamps: Union[np.ndarray, list], height: int, width: int, encoding: ImageData.ImageEncoding, images: np.ndarray): - - # Copy initial values into attributes - super().__init__(frame_id, timestamps) - self.height = height - self.width = width - self.encoding = encoding - self.images = images + super().__init__(frame_id, timestamps, height, width, encoding, images) # ========================================================================= # ============================ Class Methods ============================== # ========================================================================= @classmethod - @typechecked def from_ros2_bag(cls, bag_path: Union[Path, str], img_topic: str, save_folder: Union[Path, str]): """ Creates a class structure from a ROS2 bag file with an Image topic. Will @@ -148,7 +59,7 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], img_topic: str, save_folder: height = msg.height width = msg.width encoding = ImageData.ImageEncoding.from_ros_str(msg.encoding) - img = ImageData._decode_image_msg(msg, encoding, height, width) + img = ImageDataInMemory._decode_image_msg(msg, encoding, height, width) image_shape = img.shape break @@ -171,7 +82,7 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], img_topic: str, save_folder: # Extract images (skipping malformed ones) img = None try: - img = ImageData._decode_image_msg(msg, encoding, height, width) + img = ImageDataInMemory._decode_image_msg(msg, encoding, height, width) except Exception as e: print("Failure decoding image msg: ", e) if img is not None and img.shape == image_shape: @@ -199,7 +110,6 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], img_topic: str, save_folder: return cls(frame_id, timestamps_np, height, width, encoding, np.load(imgs_path, mmap_mode='r+')) @classmethod - @typechecked def from_npy(cls, folder_path: Union[Path, str]): """ Creates a class structure from .npy and .txt files (the ones written by from_ros2_bag()). @@ -235,7 +145,6 @@ def from_npy(cls, folder_path: Union[Path, str]): return cls(frame_id, np.load(ts_path), height, width, encoding, np.load(imgs_path, mmap_mode='r+')) @classmethod - @typechecked def from_npy_files(cls, npy_folder_path: Union[Path, str], frame_id: str): """ Creates a class structure from .npy files, where each individual image @@ -248,7 +157,7 @@ def from_npy_files(cls, npy_folder_path: Union[Path, str], frame_id: str): ImageData: Instance of this class. """ - # Get all png files in the designated folder (sorted) + # Get all npy files in the designated folder (sorted) all_image_files = [str(p) for p in Path(npy_folder_path).glob("*.npy")] # Extract the timestamps and sort them @@ -286,7 +195,6 @@ def from_npy_files(cls, npy_folder_path: Union[Path, str], frame_id: str): return cls(frame_id, timestamps_sorted, height, width, encoding, images) @classmethod - @typechecked def from_image_files(cls, image_folder_path: Union[Path, str], frame_id: str): """ Creates a class structure from a folder with .png files, using the file names @@ -341,7 +249,6 @@ def from_image_files(cls, image_folder_path: Union[Path, str], frame_id: str): # ========================= Manipulation Methods ========================== # ========================================================================= - @typechecked def downscale_by_factor(self, scale: int): """ Scales down all images by the provided factor. 
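For context on the in-memory variant: ImageDataInMemory keeps the image stack as a NumPy array that is typically memory-mapped from the .npy file written during extraction (see the open_memmap import and the np.load(..., mmap_mode='r+') calls above). A minimal sketch of that pattern, with a made-up file name and sizes:

import numpy as np
from numpy.lib.format import open_memmap

N, H, W = 100, 480, 752                      # example sizes only
imgs = open_memmap('images.npy', mode='w+',  # pre-allocate on disk, not in RAM
                   dtype=np.uint8, shape=(N, H, W, 3))
for i in range(N):
    imgs[i] = np.zeros((H, W, 3), dtype=np.uint8)   # stand-in for a decoded frame
imgs.flush()
stack = np.load('images.npy', mmap_mode='r+')       # reload lazily, as from_npy does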
@@ -376,21 +283,10 @@ def downscale_by_factor(self, scale: int): rescaled_images[i] = cv2.resize(self.images[i], (self.width, self.height), interpolation=cv2.INTER_LINEAR) self.images = rescaled_images - def crop_data(self, start: Decimal, end: Decimal): - """ Will crop the data so only values within [start, end] inclusive are kept. """ - - # Create boolean mask of data to keep - mask = (self.timestamps >= start) & (self.timestamps <= end) - - # Apply mask - self.timestamps = self.timestamps[mask] - self.images = self.images[mask] - # ========================================================================= # ============================ Export Methods ============================= # ========================================================================= - @typechecked def to_npy(self, output_folder_path: Union[Path, str]): """ Saves each image in this ImageData into three files: @@ -434,8 +330,6 @@ def to_npy(self, output_folder_path: Union[Path, str]): f.write(f"width: {self.width}\n") f.write(f"encoding: {self.encoding}\n") - - @typechecked def to_image_files(self, output_folder_path: Union[Path, str]): """ Saves each image in this ImageData instance to the specified folder, @@ -474,7 +368,6 @@ def to_image_files(self, output_folder_path: Union[Path, str]): # ========================================================================= @staticmethod - @typechecked def _decode_image_msg(msg: object, encoding: ImageData.ImageEncoding, height: int, width: int): """ Helper method that decodes image data from a ROS2 Image message. @@ -489,157 +382,4 @@ def _decode_image_msg(msg: object, encoding: ImageData.ImageEncoding, height: in if channels > 1: return np.frombuffer(msg.data, dtype=dtype).reshape((height, width, channels)) else: - return np.frombuffer(msg.data, dtype=dtype).reshape((height, width)) - - # ========================================================================= - # ======================= Multi ImageData Methods ========================= - # ========================================================================= - - def compare_timestamps(self, other: ImageData): - """ - This method compares two ImageData objects based on the timestamps of their - images. - """ - - # Find the locations in other where self timestamps would fit - idxs = np.searchsorted(other.timestamps, self.timestamps, side='right') - - # Get the left indices and right indices - idxs_right = np.clip(idxs, 0, len(other.timestamps)-1) - idxs_left = np.clip(idxs - 1, 0, len(other.timestamps)-1) - - # Get distances to nearest on either side - dists = np.minimum(np.abs(self.timestamps - other.timestamps[idxs_left]), - np.abs(self.timestamps - other.timestamps[idxs_right])) - - # Print the mean and std of the distances - print(f"Mean distance (left): {np.mean(np.abs(self.timestamps - other.timestamps[idxs_left]))}") - print(f"Mean distance (right): {np.mean(np.abs(self.timestamps - other.timestamps[idxs_right]))}") - print(f"Mean distance: {np.mean(dists)}") - print(f"Std distance: {np.std(dists)}") - - @typechecked - def stereo_undistort_and_rectify(self: ImageData, other: ImageData, - K1: np.ndarray, D1: np.ndarray, K2: np.ndarray, D2: np.ndarray, - R: np.ndarray, T: np.ndarray) -> Tuple[ImageData, ImageData, np.ndarray, np.ndarray]: - """ - Undistort and rectify stereo images using stereo calibration parameters. - Note that self NEEDS to be the left stereo image sequence. - - Args: - other (ImageData): The right stereo image sequence. - K1, D1: Intrinsics and distortion for left camera. 
- K2, D2: Intrinsics and distortion for right camera. - R, T: Rotation and translation from left to right camera. - - Returns: - Tuple[ImageData, ImageData, np.ndarray, np.ndarray]: - Rectified left and right ImageData, and new Instrinsics matrices for the left and right cameras. - """ - - # Make sure the ImageData sequences are compatible - assert self.width == other.width and self.height == other.height and self.encoding == other.encoding, \ - "Left and right images must have the same resolution and encoding." - - # Find matching timestamps between self and other - set_self = set(self.timestamps) - set_other = set(other.timestamps) - common_timestamps = sorted(set_self.intersection(set_other)) - if len(common_timestamps) == 0: - raise ValueError("No matching timestamps between left and right images.") - - # Find indices of matching timestamps in each ImageData - left_indices = [np.where(self.timestamps == ts)[0][0] for ts in common_timestamps] - right_indices = [np.where(other.timestamps == ts)[0][0] for ts in common_timestamps] - - # Image size - image_size = (self.width, self.height) - - # Compute rectification transforms - R1, R2, P1, P2, Q, _, _ = cv2.stereoRectify(K1, D1, K2, D2, image_size, R, T, flags=cv2.CALIB_ZERO_DISPARITY, alpha=0) - - # Compute intrinsics of rectified imagery - K1_new = P1[:, :3] - K2_new = P2[:, :3] - print("New left camera intrinsics after rectification:\n", K1_new) - print("New right camera intrinsics after rectification:\n", K2_new) - print("Distortion coefficients after rectification are zero.") - - # Compute rectification maps - map1_x, map1_y = cv2.initUndistortRectifyMap(K1, D1, R1, P1, image_size, cv2.CV_32FC1) - map2_x, map2_y = cv2.initUndistortRectifyMap(K2, D2, R2, P2, image_size, cv2.CV_32FC1) - - # Allocate arrays for rectified images (only matching pairs) - left_rectified = np.zeros((len(common_timestamps), self.height, self.width, *self.images.shape[3:]), dtype=self.images.dtype) - right_rectified = np.zeros((len(common_timestamps), other.height, other.width, *other.images.shape[3:]), dtype=other.images.dtype) - - # Rectify/Undistort each image pair - for i, (li, ri) in enumerate(tqdm.tqdm(zip(left_indices, right_indices), total=len(common_timestamps), desc="Rectifying stereo pairs")): - left_rectified[i] = cv2.remap(self.images[li], map1_x, map1_y, interpolation=cv2.INTER_LINEAR) - right_rectified[i] = cv2.remap(other.images[ri], map2_x, map2_y, interpolation=cv2.INTER_LINEAR) - - # Return new ImageData instances with rectified images and matched timestamps - left = ImageData(self.frame_id, np.array(common_timestamps), self.height, self.width, self.encoding, left_rectified) - right = ImageData(other.frame_id, np.array(common_timestamps), other.height, other.width, other.encoding, right_rectified) - return left, right, K1_new, K2_new - - - # ========================================================================= - # =========================== Conversion to ROS =========================== - # ========================================================================= - - @typechecked - @staticmethod - def get_ros_msg_type() -> str: - """ Return the __msgtype__ for an Image msg. """ - typestore = get_typestore(Stores.ROS2_HUMBLE) - return typestore.types['sensor_msgs/msg/Image'].__msgtype__ - - @typechecked - def get_ros_msg(self, i: int): - """ - Gets an Image ROS2 Humble message corresponding to the image represented by index i. - - Args: - i (int): The index of the image message to convert. 
- Raises: - ValueError: If i is outside the data bounds. - """ - - # Check to make sure index is within data bounds - if i < 0 or i >= self.len(): - raise ValueError(f"Index {i} is out of bounds!") - - # Get ROS2 message classes - typestore = get_typestore(Stores.ROS2_HUMBLE) - Image, Header, Time = typestore.types['sensor_msgs/msg/Image'], typestore.types['std_msgs/msg/Header'], typestore.types['builtin_interfaces/msg/Time'] - - # Calculate the step - if self.encoding == ImageData.ImageEncoding.RGB8: - step = 3 * self.width - elif self.encoding == ImageData.ImageEncoding._32FC1: - step = 4 * self.width - else: - raise NotImplementedError(f"Only RGB8 and 32FC1 encodings are currently supported for export, not {self.encoding}!") - - # Get the seconds and nanoseconds - seconds = int(self.timestamps[i]) - nanoseconds = (self.timestamps[i] - self.timestamps[i].to_integral_value(rounding=decimal.ROUND_DOWN)) * Decimal("1e9").to_integral_value(decimal.ROUND_HALF_EVEN) - - # Calculate the ROS2 Image data - if self.encoding == ImageData.ImageEncoding.RGB8: - data = self.images[i].flatten() - elif self.encoding == ImageData.ImageEncoding._32FC1: - data = self.images[i].flatten().view(np.uint8) - - # Write the data into the new class - return Image(Header(stamp=Time(sec=int(seconds), - nanosec=int(nanoseconds)), - frame_id=self.frame_id), - height=self.height, - width=self.width, - encoding=ImageData.ImageEncoding.to_ros_str(self.encoding), - is_bigendian=0, - step=step, - data=data) - \ No newline at end of file + return np.frombuffer(msg.data, dtype=dtype).reshape((height, width)) \ No newline at end of file diff --git a/src/robotdataprocess/data_types/ImageData/ImageDataOnDisk.py b/src/robotdataprocess/data_types/ImageData/ImageDataOnDisk.py new file mode 100644 index 0000000..8938143 --- /dev/null +++ b/src/robotdataprocess/data_types/ImageData/ImageDataOnDisk.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +from ...conversion_utils import col_to_dec_arr +from .ImageData import ImageData +import numpy as np +from pathlib import Path +from PIL import Image +from typeguard import typechecked +from typing import Union, List + +@typechecked +class ImageDataOnDisk(ImageData): + + class LazyImageArray: + """A read-only array-like interface that loads PNG images from disk on demand.""" + + height: int + width: int + encoding: ImageData.ImageEncoding + image_paths: List[Path] + + def __init__(self, height: int, width: int, encoding: ImageData.ImageEncoding, image_paths: List[Path]): + self.height = height + self.width = width + self.encoding = encoding + self.image_paths = image_paths + + def __getitem__(self, idx) -> np.ndarray: + # Handle boolean masking (used by your crop_data function) + if isinstance(idx, np.ndarray) and idx.dtype == bool: + new_paths = [p for p, keep in zip(self.image_paths, idx) if keep] + return self.__class__(self.height, self.width, self.encoding, new_paths) + + # Handle slicing (e.g., images[0:10]) + if isinstance(idx, slice): + return self.__class__(self.height, self.width, self.encoding, self.image_paths[idx]) + + # Handle single integer indexing (loading the actual image) + path: Path = self.image_paths[idx] + return np.array(Image.open(str(path)), dtype=self.dtype) + + def __setitem__(self, idx, value): + raise RuntimeError("This LazyPNGArray is read-only; writes are forbidden.") + + def __len__(self): + return len(self.image_paths) + + @property + def shape(self): + _, channels = ImageData.ImageEncoding.to_dtype_and_channels(self.encoding) + if channels == 1: 
+ return (len(self), self.height, self.width) + else: + return (len(self), self.height, self.width, 3) + + @property + def dtype(self): + dtype_val, _ = ImageData.ImageEncoding.to_dtype_and_channels(self.encoding) + return dtype_val + + images: ImageDataOnDisk.LazyImageArray # Not initalized here, but put here for visual code highlighting + + def __init__(self, frame_id: str, timestamps: Union[np.ndarray, list], images: ImageDataOnDisk.LazyImageArray): + super().__init__(frame_id, timestamps, images.height, images.width, images.encoding, images) + + # ========================================================================= + # ============================ Class Methods ============================== + # ========================================================================= + + @classmethod + def from_image_files(cls, image_folder_path: Union[Path, str], frame_id: str) -> ImageDataOnDisk: + """ + Creates a class structure from a folder with .png files, using the file names + as the timestamps. + + Args: + image_folder_path (Path | str): Path to the folder with the images. + frame_id (str): The frame where this image data was collected. + Returns: + ImageDataOnDisk: Instance of this class. + """ + + # Get all png files in the designated folder (sorted) + all_image_files = [str(p) for p in Path(image_folder_path).glob("*.png")] + + # Extract the timestamps and sort them + timestamps = col_to_dec_arr([s.split('/')[-1][:-4] for s in all_image_files]) + sorted_indices = np.argsort(timestamps) + timestamps_sorted = timestamps[sorted_indices] + + # Use sorted_indices to sort all_image_files in the same way + all_image_files_sorted: List[Path] = [Path(all_image_files[i]) for i in sorted_indices] + + # Make sure the mode is what we expect + with Image.open(str(all_image_files_sorted[0])) as first_image: + encoding = ImageData.ImageEncoding.from_pillow_str(first_image.mode) + if encoding != ImageData.ImageEncoding.RGB8 and encoding != ImageData.ImageEncoding.Mono8: + raise NotImplementedError(f"Unsupported encoding {encoding} for 'from_image_files' method!") + + # Return an ImageDataOnDisk class + return cls(frame_id, timestamps_sorted, cls.LazyImageArray(first_image.height, first_image.width, encoding, all_image_files_sorted)) \ No newline at end of file diff --git a/src/robotdataprocess/data_types/ImuData.py b/src/robotdataprocess/data_types/ImuData.py index 9bc55fa..0a96563 100644 --- a/src/robotdataprocess/data_types/ImuData.py +++ b/src/robotdataprocess/data_types/ImuData.py @@ -1,43 +1,48 @@ from ..conversion_utils import col_to_dec_arr, dec_arr_to_float_arr -from .Data import Data, CoordinateFrame +from .Data import Data, CoordinateFrame, ROSMsgLibType import decimal from decimal import Decimal +from ..ModuleImporter import ModuleImporter import numpy as np from numpy.typing import NDArray from pathlib import Path from robotdataprocess.data_types.PathData import PathData -from ..rosbag.Ros2BagWrapper import Ros2BagWrapper +from ..ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag2 import Reader as Reader2 from rosbags.typesys import Stores, get_typestore from rosbags.typesys.store import Typestore from scipy.spatial.transform import Rotation as R from typeguard import typechecked -from typing import Union +from typing import Union, Any, Optional import tqdm +@typechecked class ImuData(Data): # Define IMU-specific data attributes - lin_acc: NDArray[Decimal] - ang_vel: NDArray[Decimal] - orientations: NDArray[Decimal] # quaternions (x, y, z, w) + lin_acc: NDArray + ang_vel: NDArray 
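For reference, a minimal usage sketch of the new on-disk image loading added above. The folder path and frame id are hypothetical, and the import path assumes the package layout shown in the diff paths; nothing here is part of the patch itself.

from robotdataprocess.data_types.ImageData.ImageDataOnDisk import ImageDataOnDisk

# Build the container from timestamp-named .png files; no image is decoded yet.
images = ImageDataOnDisk.from_image_files("/data/run_01/cam0", frame_id="cam0")

print(len(images.images), images.images.shape, images.images.dtype)

first_frame = images.images[0]      # only this PNG is opened and decoded
first_ten = images.images[0:10]     # slicing returns another LazyImageArray, still lazy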
+    orientations: Optional[NDArray] # quaternions (x, y, z, w)
     frame: CoordinateFrame
 
     @typechecked
     def __init__(self, frame_id: str, frame: CoordinateFrame, timestamps: Union[np.ndarray, list],
                  lin_acc: Union[np.ndarray, list], ang_vel: Union[np.ndarray, list],
-                 orientations: Union[np.ndarray, list]):
+                 orientations: Optional[NDArray]):
 
         # Copy initial values into attributes
         super().__init__(frame_id, timestamps)
         self.frame = frame
         self.lin_acc = col_to_dec_arr(lin_acc)
         self.ang_vel = col_to_dec_arr(ang_vel)
-        self.orientations = col_to_dec_arr(orientations)
+        if orientations is not None:
+            self.orientations = col_to_dec_arr(orientations)
+        else:
+            self.orientations = None
 
         # Check to ensure that all arrays have same length
         if len(self.timestamps) != len(self.lin_acc) or len(self.lin_acc) != len(self.ang_vel) \
-            or len(self.ang_vel) != len(self.orientations):
+            or (self.orientations is not None and len(self.ang_vel) != len(self.orientations)):
             raise ValueError("Lengths of timestamp, lin_acc, ang_vel, and orientation arrays are not equal!")
 
     # =========================================================================
@@ -58,6 +63,8 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], imu_topic: str, frame_id: str
             ImageData: Instance of this class.
         """
 
+        print("WARNING: This code does not check the orientation covariance to determine if the orientation is valid; may use invalid orientations!")
+
         # Get topic message count and typestore
         bag_wrapper = Ros2BagWrapper(bag_path, None)
         typestore: Typestore = bag_wrapper.get_typestore()
@@ -101,20 +108,20 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], imu_topic: str, frame_id: str
 
     @classmethod
     @typechecked
-    def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, frame: CoordinateFrame):
+    def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, frame: CoordinateFrame, nine_axis: bool = False):
         """
         Creates a class structure from the TartanAir dataset format, which includes
-        various .txt files with IMU data. It expects the timestamp, the linear
-        acceleration, and the angular velocity, seperated by spaced in that order.
+        various .txt files with IMU data. It expects (in order) the timestamp, the linear
+        acceleration, and the angular velocity, separated by spaces. It will also expect
+        an orientation (xyzw) at the end of each line if nine_axis is set to True.
 
         Args:
             file_path (Path | str): Path to the file containing the IMU data.
             frame_id (str): The frame where this IMU data was collected.
             frame (CoordinateFrame): The coordinate system convention of this data.
+            nine_axis (bool): If True, will also load orientations from the txt file.
         Returns:
             ImuData: Instance of this class.
-
-        NOTE: Sets orientation to identity!
""" # Count the number of lines in the file @@ -127,6 +134,10 @@ def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, frame: Coordi timestamps = np.zeros((line_count), dtype=object) lin_acc = np.zeros((line_count, 3), dtype=object) ang_vel = np.zeros((line_count, 3), dtype=object) + if nine_axis: + orientations = np.zeros((line_count, 4), dtype=object) + else: + orientations = None # Open the txt file and read in the data with open(str(file_path), 'r') as file: @@ -135,13 +146,11 @@ def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, frame: Coordi timestamps[i] = line_split[0] lin_acc[i] = line_split[1:4] ang_vel[i] = line_split[4:7] + if nine_axis: + orientations[i] = line_split[7:11] - # Set orientation to identity, as it is assumed this text file doesn't have it - orientation = np.zeros((lin_acc.shape[0], 4), dtype=int) - orientation[:,3] = np.ones((lin_acc.shape[0]), dtype=int) - # Create the ImuData class - return cls(frame_id, frame, timestamps, lin_acc, ang_vel, orientation) + return cls(frame_id, frame, timestamps, lin_acc, ang_vel, orientations) # ========================================================================= # ========================= Manipulation Methods ========================== @@ -157,7 +166,8 @@ def crop_data(self, start: Decimal, end: Decimal): self.timestamps = self.timestamps[mask] self.lin_acc = self.lin_acc[mask] self.ang_vel = self.ang_vel[mask] - self.orientations = self.orientations[mask] + if self.orientations is not None: + self.orientations = self.orientations[mask] # ========================================================================= # ============================ Export Methods ============================= @@ -172,7 +182,8 @@ def to_PathData(self, initial_pos: NDArray[float], initial_vel: NDArray[float], Parameters: initial_pos: The initial position as a numpy array. initial_vel: The initial velocity as a numpy array. - initial_ori: The initial orientation as a numpy array (quaternion x, y, z, w). + initial_ori: The initial orientation as a numpy array (quaternion x, y, z, w), + only used if use_ang_vel is True. use_ang_vel: If True, will use angular velocity data to calculate orientation. If False, will use orientation data directly from the IMUData class. 
@@ -192,9 +203,12 @@ def to_PathData(self, initial_pos: NDArray[float], initial_vel: NDArray[float],
         if use_ang_vel:
             ori = np.zeros((self.len(), 4), dtype=float)
             ori[:, 3] = np.ones((self.len()), dtype=float)
+            ori[0] = initial_ori
         else:
-            ori = dec_arr_to_float_arr(self.orientations)
-            ori[0] = initial_ori
+            if self.orientations is not None:
+                ori = dec_arr_to_float_arr(self.orientations)
+            else:
+                raise ValueError("use_ang_vel is False, but this ImuData has no orientation data to use!")
 
         # Setup a tqdm progress bar
         pbar = tqdm.tqdm(total=self.len()-1, desc="Integrating IMU Data", unit="steps")
@@ -202,7 +216,7 @@ def to_PathData(self, initial_pos: NDArray[float], initial_vel: NDArray[float],
         # Integrate the IMU data
         for i in range(1, self.len()):
             # Get time difference
-            dt: float = dec_arr_to_float_arr(self.timestamps[i] - self.timestamps[i-1])
+            dt: float = dec_arr_to_float_arr(self.timestamps[i] - self.timestamps[i-1]).item()
 
             # Calculate orientation
             if use_ang_vel:
@@ -239,15 +253,19 @@ def to_PathData(self, initial_pos: NDArray[float], initial_vel: NDArray[float],
     # =========================== Conversion to ROS ===========================
     # =========================================================================
 
-    @typechecked
     @staticmethod
-    def get_ros_msg_type() -> str:
+    def get_ros_msg_type(lib_type: ROSMsgLibType) -> Any:
         """ Return the __msgtype__ for an Imu msg. """
-        typestore = get_typestore(Stores.ROS2_HUMBLE)
-        return typestore.types['sensor_msgs/msg/Imu'].__msgtype__
 
-    @typechecked
-    def get_ros_msg(self, i: int):
+        if lib_type == ROSMsgLibType.ROSBAGS:
+            typestore = get_typestore(Stores.ROS2_HUMBLE)
+            return typestore.types['sensor_msgs/msg/Imu'].__msgtype__
+        elif lib_type == ROSMsgLibType.RCLPY or lib_type == ROSMsgLibType.ROSPY:
+            return ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Imu')
+        else:
+            raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for ImuData.get_ros_msg_type()!")
+
+    def get_ros_msg(self, lib_type: ROSMsgLibType, i: int):
         """
         Gets an Image ROS2 Humble message corresponding to the image represented by index i.
 
@@ -255,39 +273,96 @@ def get_ros_msg(self, i: int):
             i (int): The index of the image message to convert.
         Raises:
             ValueError: If i is outside the data bounds.
+
+        # NOTE: Assumes covariances of 0.
""" # Check to make sure index is within data bounds if i < 0 or i >= self.len(): raise ValueError(f"Index {i} is out of bounds!") - # Get ROS2 message classes - typestore = get_typestore(Stores.ROS2_HUMBLE) - Imu = typestore.types['sensor_msgs/msg/Imu'] - Header = typestore.types['std_msgs/msg/Header'] - Time = typestore.types['builtin_interfaces/msg/Time'] - Quaternion = typestore.types['geometry_msgs/msg/Quaternion'] - Vector3 = typestore.types['geometry_msgs/msg/Vector3'] - # Get the seconds and nanoseconds seconds = int(self.timestamps[i]) nanoseconds = (self.timestamps[i] - self.timestamps[i].to_integral_value(rounding=decimal.ROUND_DOWN)) * Decimal("1e9").to_integral_value(decimal.ROUND_HALF_EVEN) # Write the data into the new msg - return Imu(Header(stamp=Time(sec=int(seconds), - nanosec=int(nanoseconds)), - frame_id=self.frame_id), - orientation=Quaternion(x=0, - y=0, - z=0, - w=1), # Currently ignores data in orientation - orientation_covariance=np.zeros(9), - angular_velocity=Vector3(x=self.ang_vel[i][0], - y=self.ang_vel[i][1], - z=self.ang_vel[i][2]), - angular_velocity_covariance=np.zeros(9), - linear_acceleration=Vector3(x=self.lin_acc[i][0], - y=self.lin_acc[i][1], - z=self.lin_acc[i][2]), - linear_acceleration_covariance=np.zeros(9)) - \ No newline at end of file + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + Imu = typestore.types['sensor_msgs/msg/Imu'] + Header = typestore.types['std_msgs/msg/Header'] + Time = typestore.types['builtin_interfaces/msg/Time'] + Quaternion = typestore.types['geometry_msgs/msg/Quaternion'] + Vector3 = typestore.types['geometry_msgs/msg/Vector3'] + + if self.orientations is not None: + ori = Quaternion(x=self.orientations[i][0], y=self.orientations[i][1], + z=self.orientations[i][2], w=self.orientations[i][3]) + ori_cov = np.zeros(9, dtype=np.float64) + else: + ori = Quaternion(x=0, y=0, z=0, w=1) + ori_cov = np.zeros(9, dtype=np.float64) + ori_cov[0] = -1 + + return Imu(Header(stamp=Time(sec=int(seconds), + nanosec=int(nanoseconds)), + frame_id=self.frame_id), + orientation=ori, + orientation_covariance=ori_cov, + angular_velocity=Vector3(x=self.ang_vel[i][0], + y=self.ang_vel[i][1], + z=self.ang_vel[i][2]), + angular_velocity_covariance=np.zeros(9), + linear_acceleration=Vector3(x=self.lin_acc[i][0], + y=self.lin_acc[i][1], + z=self.lin_acc[i][2]), + linear_acceleration_covariance=np.zeros(9)) + + elif lib_type == ROSMsgLibType.RCLPY or lib_type == ROSMsgLibType.ROSPY: + Header = ModuleImporter.get_module_attribute('std_msgs.msg', 'Header') + Imu = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Imu') + Quaternion = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Quaternion') + Vector3 = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Vector3') + + # Create the message objects + imu_msg = Imu() + imu_msg.header = Header() + if lib_type == ROSMsgLibType.RCLPY: + Time = ModuleImporter.get_module_attribute('rclpy.time', 'Time') + imu_msg.header.stamp = Time(seconds=seconds, nanoseconds=int(nanoseconds)).to_msg() + else: + rospy = ModuleImporter.get_module('rospy') + imu_msg.header.stamp = rospy.Time(secs=seconds, nsecs=int(nanoseconds)) + + # Populate the rest of the data + imu_msg.header.frame_id = self.frame_id + imu_msg.orientation = Quaternion() + if self.orientations is not None: + imu_msg.orientation.x = float(self.orientations[i][0]) + imu_msg.orientation.y = float(self.orientations[i][1]) + imu_msg.orientation.z = float(self.orientations[i][2]) + imu_msg.orientation.w 
= float(self.orientations[i][3]) + imu_msg.orientation_covariance = np.zeros(9, dtype=np.float64) + else: + imu_msg.orientation.x = 0.0 + imu_msg.orientation.y = 0.0 + imu_msg.orientation.z = 0.0 + imu_msg.orientation.w = 1.0 + covariance = np.zeros(9, dtype=np.float64) + covariance[0] = -1 + imu_msg.orientation_covariance = covariance + + imu_msg.angular_velocity = Vector3() + imu_msg.angular_velocity.x = float(self.ang_vel[i][0]) + imu_msg.angular_velocity.y = float(self.ang_vel[i][1]) + imu_msg.angular_velocity.z = float(self.ang_vel[i][2]) + imu_msg.angular_velocity_covariance = np.zeros(9) + imu_msg.linear_acceleration = Vector3() + imu_msg.linear_acceleration.x = float(self.lin_acc[i][0]) + imu_msg.linear_acceleration.y = float(self.lin_acc[i][1]) + imu_msg.linear_acceleration.z = float(self.lin_acc[i][2]) + imu_msg.linear_acceleration_covariance = np.zeros(9) + return imu_msg + + else: + raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for ImuData.get_ros_msg()!") + \ No newline at end of file diff --git a/src/robotdataprocess/data_types/LiDARData.py b/src/robotdataprocess/data_types/LiDARData.py new file mode 100644 index 0000000..7737e80 --- /dev/null +++ b/src/robotdataprocess/data_types/LiDARData.py @@ -0,0 +1,371 @@ +from __future__ import annotations + +import decimal +from decimal import Decimal +from matplotlib.animation import FuncAnimation +import matplotlib.pyplot as plt +import numpy as np +from numpy.typing import NDArray +from rosbags.typesys import Stores, get_typestore +import struct +import sys +from pathlib import Path +import tqdm +from typeguard import typechecked +from typing import Union, List, Tuple, Optional, Any + +from ..conversion_utils import col_to_dec_arr +from ..ModuleImporter import ModuleImporter +from .Data import Data, CoordinateFrame, ROSMsgLibType + +@typechecked +class LiDARData(Data): + """ + LiDAR Data class that contains LiDAR-specific attributes and methods. + Inherits from the generic Data class. + + NOTE: Assumes points at [0,0,0] are invalid and will set them to NaNs when necessary. + Thus, all other points are assumed to be valid. + """ + + point_clouds: List[np.ndarray] # List of length T of (N, 3) arrays of point clouds, with assumed (x, y, z) ordering + channels: Optional[NDArray] # List of length T of (N) arrays with channel number for each point + frame: CoordinateFrame + + def __init__(self, frame_id: str, timestamps: np.ndarray | list, point_clouds: List[np.ndarray], + channels: Optional[NDArray], frame: CoordinateFrame) -> None: + super().__init__(frame_id, timestamps) + self.point_clouds = point_clouds + self.channels = channels + self.frame = frame + + # ========================================================================= + # ============================ Class Methods ============================== + # ========================================================================= + + @classmethod + def from_npy_files(cls, npy_folder_path: Union[Path, str], frame_id: str, frame: CoordinateFrame) -> LiDARData: + """ + Load LiDAR data from a series of .npy files in a specified folder, + where file names correspond to timestamps. Each point is expected to + contain either [x, y, z] or [x, y, z, channel]. + + Args: + npy_folder_path: Path to the folder. + frame_id: The frame ID for the LiDAR data. + frame: The coordinate frame of the LiDAR data. + Returns: + LiDARData: An instance of LiDARData populated with the loaded data. 
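A small sketch of the two message back-ends introduced for ImuData above. The values are dummies; the ROSBAGS path only needs the rosbags package, while the RCLPY path assumes a sourced ROS 2 environment so ModuleImporter can locate sensor_msgs.

import numpy as np
from robotdataprocess.data_types.Data import CoordinateFrame, ROSMsgLibType
from robotdataprocess.data_types.ImuData import ImuData

imu = ImuData("imu_link", CoordinateFrame.FLU, timestamps=["100.00", "100.01"],
              lin_acc=np.zeros((2, 3)), ang_vel=np.zeros((2, 3)), orientations=None)

# rosbags back-end: a typestore message suitable for writing into a ROS2 bag.
bag_msg = imu.get_ros_msg(ROSMsgLibType.ROSBAGS, 0)

# rclpy back-end: a sensor_msgs.msg.Imu instance suitable for publishing. With no stored
# orientations, orientation_covariance[0] is set to -1 ("orientation not available").
live_msg = imu.get_ros_msg(ROSMsgLibType.RCLPY, 0)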
+ """ + + # Get all npy files in the designated folder (sorted) + all_npy_files: List[str] = [str(p) for p in Path(npy_folder_path).glob("*.npy")] + print(f"Found {len(all_npy_files)} .npy files in folder {npy_folder_path}") + + # Extract the timestamps and sort the npy files + timestamps = col_to_dec_arr([s.split('/')[-1][:-4] for s in all_npy_files]) + sorted_indices = np.argsort(timestamps) + timestamps_sorted = timestamps[sorted_indices] + all_npy_files_sorted = [all_npy_files[i] for i in sorted_indices] + + # Check shape + first_pc = np.load(all_npy_files_sorted[0], mmap_mode='r') + assert len(first_pc.shape) == 2 + assert first_pc.shape[1] in (3, 4) + has_channels = first_pc.shape[1] == 4 + + # Setup arrays to hold data + point_clouds_memmap: List[np.ndarray] = [] + channels_memmap: Optional[List[np.ndarray]] = [] if has_channels else None + + # Load all the point clouds memory mapped (so they are kept on disk) + pbar = tqdm.tqdm(total=len(all_npy_files_sorted), desc="Extracting Point Clouds...", unit=" files") + for i, path in enumerate(all_npy_files_sorted): + pc = np.load(path, mmap_mode="r") + assert pc.shape == first_pc.shape + + # Separate XYZ and optional channel + point_clouds_memmap.append(pc[:, :3]) + if has_channels: + channels_memmap.append(pc[:, 3]) + pbar.update() + + # Return an LiDARData class + return cls(frame_id, timestamps_sorted, point_clouds_memmap, channels_memmap, frame) + + # def to_npy_files(self, output_folder_path: Union[Path, str]) -> None: + # """ + # Save LiDAR point clouds to a series of .npy files in a specified folder, + # where file names correspond to timestamps. + + # Args: + # output_folder_path: Path to the output folder. + # """ + + # # Create the output folder + # output_folder_path = Path(output_folder_path) + # output_folder_path.mkdir(parents=True, exist_ok=True) + + # # Save each .npy file + # num_frames = self.point_clouds.shape[0] + # pbar = tqdm.tqdm(total=num_frames, desc="Saving Point Clouds...", unit=" files") + # for i in range(num_frames): + # if self.channels is not None: + # pc = np.concatenate([self.point_clouds[i], self.channels[i][:, None]], axis=1) + # else: + # pc = self.point_clouds[i] + # np.save(output_folder_path / f"{self.timestamps[i]}.npy", pc) + # pbar.update() + # pbar.close() + + # ========================================================================= + # ========================= Reproducible Loading ========================== + # ========================================================================= + def get_point_cloud_at_index(self, index: int): + """ + Gets the point cloud at index T and ensures all necessary transformations are applied. + This is a safe copy of the memmapped array, meaning it can be transformed and changed. + """ + + pc = self.point_clouds[index].astype(np.float32, copy=True) + channels = None + if self.channels is not None: + channels = self.channels[index].astype(np.int16, copy=True) + + # Mask invalid points (all zeros) and set to NaNs + mask_invalid = (pc == 0.0).all(axis=1) | np.isnan(pc).all(axis=1) + pc[mask_invalid] = np.nan + + return pc, channels + + # ========================================================================= + # ========================= Manipulation Methods ========================== + # ========================================================================= + def calculate_point_channels(self, num_channels: int, v_min_angle: float, v_max_angle: float) -> None: + """ + Calculate channel numbers for each point. 
+
+        NOTE: This assumes that lasers are evenly spaced within the angular range and that
+        the first laser fires at v_min_angle and the last laser fires at v_max_angle.
+        NOTE: Invalid points (NaNs) get a channel of -1.
+        """
+
+        if self.channels is not None:
+            raise RuntimeError("Attempted to calculate channel numbers, but they are already calculated!")
+
+        # Calculate laser line angles
+        laser_angles = np.linspace(v_min_angle, v_max_angle, num_channels)
+
+        # Compute channels
+        channels = []
+        pbar = tqdm.tqdm(total=self.len(), desc="Calculating Point Channels...", unit=" frames")
+        for i in range(self.len()):
+            # Get point cloud
+            pc, _ = self.get_point_cloud_at_index(i)
+
+            # Extract dimensions
+            x = pc[:, 0]
+            y = pc[:, 1]
+            z = pc[:, 2]
+
+            # Compute vertical angle per point in degrees
+            horiz_dist = np.sqrt(x**2 + y**2)
+            vertical_angle = np.arctan2(z, horiz_dist) * 180.0 / np.pi
+
+            # Assign points to laser line that is closest to its angle
+            angle_diff = np.abs(vertical_angle[..., None] - laser_angles)
+            chan = np.argmin(angle_diff, axis=-1)
+
+            # Any point where x, y, or z is NaN gets a channel of -1
+            mask_invalid = np.isnan(pc).any(axis=1)
+            chan[mask_invalid] = -1
+            channels.append(chan.astype(np.int16))
+            pbar.update()
+
+        self.channels = channels
+        pbar.close()
+
+    # =========================================================================
+    # ============================ Visualization ==============================
+    # =========================================================================
+
+    @typechecked
+    def visualize(self, interval_ms: int = 100, plot_lims: Tuple[float, float] = (-50.0, 50.0), testing: bool = False):
+        """
+        Visualizes the raw LiDAR data over time using Matplotlib FuncAnimation.
+
+        Parameters:
+            interval_ms: The time between plotted frames.
+            plot_lims: The axes limits of the 3D plot.
+            testing: Only used for test cases, disables blocking in plt.show()
+        """
+
+        # Create the plot
+        fig = plt.figure(figsize=(16, 16))
+        ax = fig.add_subplot(111, projection="3d")
+        ax.set_xlim(*plot_lims)
+        ax.set_ylim(*plot_lims)
+        ax.set_zlim(*plot_lims)
+        ax.set_xlabel("X", fontsize=20)
+        ax.set_ylabel("Y", fontsize=20)
+        ax.set_zlabel("Z", fontsize=20)
+        title = ax.set_title(f"", fontsize=24)
+
+        # Create the update function
+        scatter = ax.scatter([], [], [], s=4, cmap='viridis')
+        def update(frame: int):
+            # Load safe copy of memmap array for plotting
+            pts, channels = self.get_point_cloud_at_index(frame)
+
+            # Mask invalid points
+            nan_mask = np.isnan(pts).any(axis=1)
+            pts = pts[~nan_mask]
+            channels = channels[~nan_mask] if channels is not None else None
+
+            # Update the plot
+            x, y, z = pts[:, 0], pts[:, 1], pts[:, 2]
+            scatter._offsets3d = (x, y, z) # update the plotted points
+            if channels is not None:
+                scatter.set_array(channels)
+            else:
+                scatter.set_array(z) # Set color based on Z
+            title.set_text(f"LiDAR Frame {frame+1}/{self.len()-1}")
+            return scatter, title
+
+        # Start the animation
+        ani = FuncAnimation(fig, update, frames=self.len(), interval=interval_ms, blit=False, repeat=False)
+        if not testing:
+            plt.show()
+
+    # =========================================================================
+    # =================== ROS Message Conversion Methods =====================
+    # =========================================================================
+
+    @staticmethod
+    def get_ros_msg_type(lib_type: ROSMsgLibType) -> Any:
+        """ Return the __msgtype__ for a PointCloud2 msg. 
""" + + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + return typestore.types['sensor_msgs/msg/PointCloud2'].__msgtype__ + elif lib_type == ROSMsgLibType.RCLPY: + return ModuleImporter.get_module_attribute('sensor_msgs.msg', 'PointCloud2') + else: + raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for LiDARData.get_ros_msg_type()!") + + def get_ros_msg(self, lib_type: ROSMsgLibType, i: int): + """ + Gets a PointCloud2 message corresponding to the point cloud at index i. + + Args: + lib_type (ROSMsgLibType): The type of ROS message to return (e.g., ROSBAGS, RCLPY). + i (int): The index of the point cloud to convert. + Raises: + ValueError: If i is outside the data bounds. + + NOTE: Currently publishes an unordered point cloud. + NOTE: Assumes all points are collected at same time (likely false in the real-world). + NOTE: Assumes channels data is provided. + NOTE: Assumes intensity of 255 for all points. + """ + # Check to make sure index is within data bounds + if i < 0 or i >= self.len(): + raise ValueError(f"Index {i} is out of bounds!") + + # Get the seconds and nanoseconds + seconds = int(self.timestamps[i]) + nanoseconds = int((self.timestamps[i] - self.timestamps[i].to_integral_value(rounding=decimal.ROUND_DOWN)) * Decimal("1e9")) + + # Get point cloud with NaN masking applied + pts, channels = self.get_point_cloud_at_index(i) + num_points = pts.shape[0] + + # Calculate time and intensity (NOTE: Time is assumed to be zero & intensity assumed to be 255 for all points) + time = np.zeros((num_points, 1), dtype=np.float32) + intensity = np.ones((num_points, 1), dtype=np.float32) * 255 + + # Append channel and time onto our point cloud to get (N, 6) + if channels is not None: + pc_aug = np.concatenate([pts, channels[:, np.newaxis], time, intensity], axis=-1) + else: + raise RuntimeError("ROS2 PointCloud2 message expects channels data, but it has not been provided or calculated via calculate_point_channels()!") + + # Create PointCloud2 message + if lib_type == ROSMsgLibType.ROSBAGS: + + typestore = get_typestore(Stores.ROS2_HUMBLE) + PointCloud2 = typestore.types['sensor_msgs/msg/PointCloud2'] + Header = typestore.types['std_msgs/msg/Header'] + Time = typestore.types['builtin_interfaces/msg/Time'] + PointField = typestore.types['sensor_msgs/msg/PointField'] + + # Define point fields for x, y, z + fields = [ + PointField(name='x', offset=0, datatype=7, count=1), # FLOAT32 = 7 + PointField(name='y', offset=4, datatype=7, count=1), + PointField(name='z', offset=8, datatype=7, count=1), + PointField(name="ring", offset=12, datatype=7, count=1), + PointField(name="time", offset=16, datatype=7, count=1), + PointField(name="intensity", offset=20, datatype=7, count=1) + ] + + return PointCloud2( + header=Header( + stamp=Time(sec=int(seconds), nanosec=int(nanoseconds)), + frame_id=self.frame_id + ), + height=1, + width=num_points, + fields=fields, + is_bigendian= (sys.byteorder == "big"), + point_step=24, # 3 floats * 4 bytes + row_step=24 * num_points, + data=np.frombuffer(pc_aug.astype(np.float32).tobytes(), dtype=np.uint8), + is_dense=not np.isnan(pts).any() + ) + + elif lib_type == ROSMsgLibType.RCLPY or lib_type == ROSMsgLibType.ROSPY: + + Header = ModuleImporter.get_module_attribute('std_msgs.msg', 'Header') + PointCloud2 = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'PointCloud2') + PointField = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'PointField') + + # Create the message object + pc_msg = PointCloud2() + 
pc_msg.header = Header() + if lib_type == ROSMsgLibType.RCLPY: + Time = ModuleImporter.get_module_attribute('rclpy.time', 'Time') + pc_msg.header.stamp = Time(seconds=seconds, nanoseconds=int(nanoseconds)).to_msg() + else: + rospy = ModuleImporter.get_module('rospy') + pc_msg.header.stamp = rospy.Time(secs=seconds, nsecs=int(nanoseconds)) + pc_msg.header.frame_id = self.frame_id + + # Set the height and width assuming an unordered point cloud + pc_msg.height = 1 + pc_msg.width = num_points + + # Set the point fields + pc_msg.fields = [ + PointField(name="x", offset=0, datatype=PointField.FLOAT32, count=1), + PointField(name="y", offset=4, datatype=PointField.FLOAT32, count=1), + PointField(name="z", offset=8, datatype=PointField.FLOAT32, count=1), + PointField(name="ring", offset=12, datatype=PointField.FLOAT32, count=1), + PointField(name="time", offset=16, datatype=PointField.FLOAT32, count=1), + PointField(name="intensity", offset=20, datatype=PointField.FLOAT32, count=1) + ] + pc_msg.point_step = 24 + pc_msg.row_step = pc_msg.point_step * num_points + + # Fill in the remaining data + pc_msg.is_bigendian = sys.byteorder == "big" + pc_msg.is_dense = not np.isnan(pts).any() + + # Pack points into binary + fmt = "ffffff" + pc_msg.data = b"".join(struct.pack(fmt, *p) for p in pc_aug) + return pc_msg + + else: + raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for LiDARData.get_ros_msg()!") + \ No newline at end of file diff --git a/src/robotdataprocess/data_types/OdometryData.py b/src/robotdataprocess/data_types/OdometryData.py index 69c6840..f41ede8 100644 --- a/src/robotdataprocess/data_types/OdometryData.py +++ b/src/robotdataprocess/data_types/OdometryData.py @@ -2,31 +2,36 @@ from ..conversion_utils import col_to_dec_arr import csv -from .Data import CoordinateFrame, Data +from .Data import CoordinateFrame, Data, ROSMsgLibType import decimal from decimal import Decimal from evo.core import geometry import matplotlib.pyplot as plt +from ..ModuleImporter import ModuleImporter import numpy as np from numpy.typing import NDArray import os import pandas as pd from .PathData import PathData from pathlib import Path -from ..rosbag.Ros2BagWrapper import Ros2BagWrapper +from ..ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag2 import Reader as Reader2 from rosbags.typesys import Stores, get_typestore from rosbags.typesys.store import Typestore from scipy.spatial.transform import Rotation as R from typeguard import typechecked -from typing import Union, List, Tuple +from typing import Union, List, Tuple, Any import tqdm +PATH_SLICE_STEP = 40 + +@typechecked class OdometryData(PathData): # Define odometry-specific data attributes child_frame_id: str - poses: list # Saved nav_msgs/msg/Pose + poses: list # Saved nav_msgs/msg/Pose for rosbags + poses_rclpy: list # Saved nav_msgs/msg/Pose for rclpy @typechecked def __init__(self, frame_id: str, child_frame_id: str, timestamps: Union[np.ndarray, list], @@ -36,6 +41,7 @@ def __init__(self, frame_id: str, child_frame_id: str, timestamps: Union[np.ndar super().__init__(frame_id, timestamps, positions, orientations, frame) self.child_frame_id: str = child_frame_id self.poses: list = [] + self.poses_rclpy: list = [] # Check to ensure that all arrays have same length if len(self.timestamps) != len(self.positions) or len(self.positions) != len(self.orientations): @@ -109,7 +115,7 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], odom_topic: str, frame: Coord def from_csv(cls, csv_path: Union[Path, str], frame_id: str, 
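In the PointCloud2 packing above, each point occupies point_step = 24 bytes: six float32 fields (x, y, z, ring, time, intensity). A consumer-side sketch of that layout, using a hypothetical packed buffer rather than a real message:

import numpy as np

point_dtype = np.dtype([("x", np.float32), ("y", np.float32), ("z", np.float32),
                        ("ring", np.float32), ("time", np.float32), ("intensity", np.float32)])
assert point_dtype.itemsize == 24   # matches point_step above

# Stand-in for the (N, 6) float32 array packed into msg.data by get_ros_msg.
pc_aug = np.random.rand(5, 6).astype(np.float32)
raw = pc_aug.tobytes()

points = np.frombuffer(raw, dtype=point_dtype)
print(points["x"], points["ring"], points["intensity"])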
child_frame_id: str, frame: CoordinateFrame, header_included: bool, column_to_data: Union[List[int], None] = None, separator: Union[str, None] = None, filter: Union[Tuple[str, str], None] = None, - ts_in_ns: bool = False): + ts_in_ns: bool = False, reorder_data: bool = False): """ Creates a class structure from a csv file. @@ -128,6 +134,8 @@ def from_csv(cls, csv_path: Union[Path, str], frame_id: str, child_frame_id: str csv file has no headers, then `column_name` should be the index of the column as a string. ts_in_ns (bool): If True, assumes timestamps are in nanoseconds and converts to seconds. Otherwise, assumes timestamps are already in seconds. + reorder_data (bool): If True, reorders the data to be in order of timestamps. If False, + assumes data is already ordered by timestamp. Returns: OdometryData: Instance of this class. @@ -167,12 +175,21 @@ def from_csv(cls, csv_path: Union[Path, str], frame_id: str, child_frame_id: str if ts_in_ns: timestamps_np = timestamps_np / Decimal('1e9') + # Reorder the data if needed + if reorder_data: + print("Warning: This code is not tested yet!") + sort_indices = np.argsort(timestamps_np) + timestamps_np = timestamps_np[sort_indices] + positions_np = positions_np[sort_indices] + orientations_np = orientations_np[sort_indices] + # Create an OdometryData class return cls(frame_id, child_frame_id, timestamps_np, positions_np, orientations_np, frame) @classmethod @typechecked - def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, child_frame_id: str, frame: CoordinateFrame): + def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, child_frame_id: str, frame: CoordinateFrame, + header_included: bool, column_to_data: Union[List[int], None] = None): """ Creates a class structure from a text file, where the order of values in the files follows ['timestamp', 'x', 'y', 'z', 'qw', 'qx', 'qy', 'qz']. @@ -182,10 +199,23 @@ def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, child_frame_i frame_id (str): The frame where this odometry is relative to. child_frame_id (str): The frame whose pose is represented by this odometry. frame (CoordinateFrame): The coordinate system convention of this data. + header_included (bool): If this text file has a header, so we can remove it. + column_to_data (list[int]): Tells the algorithms which columns in the text file contain which + of the following data: ['timestamp', 'x', 'y', 'z', 'qw', 'qx', 'qy', 'qz']. Thus, + index 0 of column_to_data should be the column that timestamp data is found in the + text file. Set to None to use [0,1,2,3,4,5,6,7]. Returns: OdometryData: Instance of this class. 
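To illustrate the column_to_data argument documented above: the list always holds, in this fixed order, the column indices of ['timestamp', 'x', 'y', 'z', 'qw', 'qx', 'qy', 'qz'] within the text file. The file name and frame ids below are hypothetical.

from robotdataprocess.data_types.Data import CoordinateFrame
from robotdataprocess.data_types.OdometryData import OdometryData

# A file whose lines read "timestamp x y z qx qy qz qw" (qw last) maps like this:
odom = OdometryData.from_txt_file("poses.txt", frame_id="map", child_frame_id="base_link",
                                  frame=CoordinateFrame.FLU, header_included=False,
                                  column_to_data=[0, 1, 2, 3, 7, 4, 5, 6])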
""" + # If column_to_data is None, assume default + if column_to_data is None: + column_to_data = [0,1,2,3,4,5,6,7] + else: + # Check column_to_data values are valid + assert np.all(np.array(column_to_data) >= 0) + assert len(column_to_data) == 8 + # Count the number of lines in the file line_count = 0 with open(str(file_path), 'r') as file: @@ -201,9 +231,15 @@ def from_txt_file(cls, file_path: Union[Path, str], frame_id: str, child_frame_i with open(str(file_path), 'r') as file: for i, line in enumerate(file): line_split = line.split(' ') - timestamps_np[i] = line_split[0] - positions_np[i] = line_split[1:4] - orientations_np[i] = line_split[5:8] + [line_split[4]] + timestamps_np[i] = line_split[column_to_data[0]] + positions_np[i] = np.array([line_split[column_to_data[1]], line_split[column_to_data[2]], line_split[column_to_data[3]]]) + orientations_np[i] = np.array([line_split[column_to_data[5]], line_split[column_to_data[6]], line_split[column_to_data[7]], line_split[column_to_data[4]]]) + + # Remove the header + if header_included: + timestamps_np = timestamps_np[1:] + positions_np = positions_np[1:] + orientations_np = orientations_np[1:] # Create an OdometryData class return cls(frame_id, child_frame_id, timestamps_np, positions_np, orientations_np, frame) @@ -294,13 +330,14 @@ def crop_data(self, start: Decimal, end: Decimal): # Empty poses as they might need to be recalculated self.poses = [] + self.poses_rclpy = [] # ========================================================================= # ============================ Export Methods ============================= # ========================================================================= @typechecked - def to_csv(self, csv_path: Union[Path, str]): + def to_csv(self, csv_path: Union[Path, str], write_header: bool = True): """ Writes the odometry data to a .csv file. Note that data will be saved in the following order: timestamp, pos.x, pos.y, pos.z, @@ -309,6 +346,7 @@ def to_csv(self, csv_path: Union[Path, str]): Args: csv_path (Path | str): Path to the output csv file. odom_topic (str): Topic of the Odometry messages. + write_header (bool): If false, skip the header row. Returns: OdometryData: Instance of this class. """ @@ -326,7 +364,8 @@ def to_csv(self, csv_path: Union[Path, str]): writer = csv.writer(csvfile, delimiter=',') # Write the first row - writer.writerow(['timestamp', 'x', 'y', 'z', 'qw', 'qx', 'qy', 'qz']) + if write_header: + writer.writerow(['timestamp', 'x', 'y', 'z', 'qw', 'qx', 'qy', 'qz']) # Write message data to the csv file for i in range(len(self.timestamps)): @@ -385,54 +424,39 @@ def _ori_change_of_basis(self, R_i: R): # =========================== Conversion to ROS =========================== # ========================================================================= - @typechecked @staticmethod - def get_ros_msg_type(msg_type: str = "Odometry") -> str: - """ Return the __msgtype__ for an Imu msg. """ - typestore = get_typestore(Stores.ROS2_HUMBLE) - if msg_type == "Odometry": - return typestore.types['nav_msgs/msg/Odometry'].__msgtype__ - elif msg_type == "Path": - return typestore.types['nav_msgs/msg/Path'].__msgtype__ + def get_ros_msg_type(lib_type: ROSMsgLibType, msg_type: str = "Odometry") -> Any: + """ Return the __msgtype__ for an Odometry msg. 
""" + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + if msg_type == "Odometry": + return typestore.types['nav_msgs/msg/Odometry'].__msgtype__ + elif msg_type == "Path": + return typestore.types['nav_msgs/msg/Path'].__msgtype__ + else: + raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type}") + elif lib_type == ROSMsgLibType.RCLPY: + if msg_type == "Path": + return ModuleImporter.get_module_attribute('nav_msgs.msg', 'Path') + else: + raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type}") + elif lib_type == ROSMsgLibType.ROSPY: + if msg_type == "maplab_msg/OdometryWithImuBiases": + return ModuleImporter.get_module_attribute('maplab_msgs.msg', 'OdometryWithImuBiases') + elif msg_type == "Path": + return ModuleImporter.get_module_attribute('nav_msgs.msg', 'Path') + else: + raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type}") else: - raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type}") + raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for OdometryData.get_ros_msg_type()!") - @typechecked - def extract_seconds_and_nanoseconds(self, i: int): + def _extract_seconds_and_nanoseconds(self, i: int): seconds = int(self.timestamps[i]) nanoseconds = (self.timestamps[i] - self.timestamps[i].to_integral_value(rounding=decimal.ROUND_DOWN)) \ * Decimal("1e9").to_integral_value(decimal.ROUND_HALF_EVEN) return seconds, nanoseconds - def calculate_stamped_poses(self): - """ Pre-calculates all stamped poses if they aren't calculated yet.""" - - if len(self.poses) != self.len(): - # Get ROS2 message types - typestore = get_typestore(Stores.ROS2_HUMBLE) - Header = typestore.types['std_msgs/msg/Header'] - Time = typestore.types['builtin_interfaces/msg/Time'] - PoseStamped = typestore.types['geometry_msgs/msg/PoseStamped'] - Pose = typestore.types['geometry_msgs/msg/Pose'] - Point = typestore.types['geometry_msgs/msg/Point'] - Quaternion = typestore.types['geometry_msgs/msg/Quaternion'] - - # Pre-calculate all the poses - for i in range(self.len()): - seconds, nanoseconds = self.extract_seconds_and_nanoseconds(i) - self.poses.append(PoseStamped(Header(stamp=Time(sec=int(seconds), - nanosec=int(nanoseconds)), - frame_id=self.frame_id), - pose=Pose(position=Point(x=self.positions[i][0], - y=self.positions[i][1], - z=self.positions[i][2]), - orientation=Quaternion(x=self.orientations[i][0], - y=self.orientations[i][1], - z=self.orientations[i][2], - w=self.orientations[i][3])))) - - @typechecked - def get_ros_msg(self, i: int, msg_type: str = "Odometry"): + def get_ros_msg(self, lib_type: ROSMsgLibType, i: int, msg_type: str = "Odometry"): """ Gets an Image ROS2 Humble message corresponding to the odometry in index i. @@ -442,58 +466,177 @@ def get_ros_msg(self, i: int, msg_type: str = "Odometry"): ValueError: If i is outside the data bounds. 
""" - # Calculate the Stamped Poses - self.calculate_stamped_poses() - - # Make sure our data is in the FLU frame, otherwise throw an error - if self.frame != CoordinateFrame.FLU: - raise RuntimeError("Convert this Odometry Data to a FLU frame before writing to a ROS2 bag!") - # Check to make sure index is within data bounds if i < 0 or i >= self.len(): raise IndexError(f"Index {i} is out of bounds!") - - # Get ROS2 message classes - typestore = get_typestore(Stores.ROS2_HUMBLE) - Odometry = typestore.types['nav_msgs/msg/Odometry'] - Header = typestore.types['std_msgs/msg/Header'] - Time = typestore.types['builtin_interfaces/msg/Time'] - PoseWithCovariance = typestore.types['geometry_msgs/msg/PoseWithCovariance'] - TwistWithCovariance = typestore.types['geometry_msgs/msg/TwistWithCovariance'] - Twist = typestore.types['geometry_msgs/msg/Twist'] - Vector3 = typestore.types['geometry_msgs/msg/Vector3'] - Path = typestore.types['nav_msgs/msg/Path'] - Pose = typestore.types['geometry_msgs/msg/Pose'] - Point = typestore.types['geometry_msgs/msg/Point'] - Quaternion = typestore.types['geometry_msgs/msg/Quaternion'] + + # Extract seconds and nanoseconds + seconds, nanoseconds = self._extract_seconds_and_nanoseconds(i) # Write the data into the new msg - if msg_type == "Odometry": - seconds, nanoseconds = self.extract_seconds_and_nanoseconds(i) - return Odometry(Header(stamp=Time(sec=int(seconds), - nanosec=int(nanoseconds)), - frame_id=self.frame_id), - child_frame_id=self.child_frame_id, - pose=PoseWithCovariance(pose=Pose(position=Point(x=self.positions[i][0], - y=self.positions[i][1], - z=self.positions[i][2]), - orientation=Quaternion(x=self.orientations[i][0], - y=self.orientations[i][1], - z=self.orientations[i][2], - w=self.orientations[i][3])), - covariance=np.zeros(36)), - twist=TwistWithCovariance(twist=Twist(linear=Vector3(x=0, # Currently doesn't support Twist - y=0, - z=0,), - angular=Vector3(x=0, - y=0, - z=0,)), - covariance=np.zeros(36))) - elif msg_type == "Path": - seconds, nanoseconds = self.extract_seconds_and_nanoseconds(i) - return Path(Header(stamp=Time(sec=int(seconds), - nanosec=int(nanoseconds)), - frame_id=self.frame_id), - poses=self.poses[0:i+1:10]) + if lib_type == ROSMsgLibType.ROSBAGS: + typestore = get_typestore(Stores.ROS2_HUMBLE) + + Odometry = typestore.types['nav_msgs/msg/Odometry'] + Header = typestore.types['std_msgs/msg/Header'] + Time = typestore.types['builtin_interfaces/msg/Time'] + PoseWithCovariance = typestore.types['geometry_msgs/msg/PoseWithCovariance'] + TwistWithCovariance = typestore.types['geometry_msgs/msg/TwistWithCovariance'] + Twist = typestore.types['geometry_msgs/msg/Twist'] + Vector3 = typestore.types['geometry_msgs/msg/Vector3'] + Path = typestore.types['nav_msgs/msg/Path'] + Pose = typestore.types['geometry_msgs/msg/Pose'] + Point = typestore.types['geometry_msgs/msg/Point'] + Quaternion = typestore.types['geometry_msgs/msg/Quaternion'] + + if msg_type == "Odometry": + + return Odometry(Header(stamp=Time(sec=int(seconds), + nanosec=int(nanoseconds)), + frame_id=self.frame_id), + child_frame_id=self.child_frame_id, + pose=PoseWithCovariance(pose=Pose(position=Point(x=self.positions[i][0], + y=self.positions[i][1], + z=self.positions[i][2]), + orientation=Quaternion(x=self.orientations[i][0], + y=self.orientations[i][1], + z=self.orientations[i][2], + w=self.orientations[i][3])), + covariance=np.zeros(36)), + twist=TwistWithCovariance(twist=Twist(linear=Vector3(x=0, # Currently doesn't support Twist + y=0, + z=0,), + angular=Vector3(x=0, 
+ y=0, + z=0,)), + covariance=np.zeros(36))) + elif msg_type == "Path": + + # Pre-calculate all the poses + if len(self.poses) != self.len(): + PoseStamped = typestore.types['geometry_msgs/msg/PoseStamped'] + + for j in range(self.len()): + sec, nanosec = self._extract_seconds_and_nanoseconds(j) + self.poses.append(PoseStamped(Header(stamp=Time(sec=int(sec), + nanosec=int(nanosec)), + frame_id=self.frame_id), + pose=Pose(position=Point(x=self.positions[j][0], + y=self.positions[j][1], + z=self.positions[j][2]), + orientation=Quaternion(x=self.orientations[j][0], + y=self.orientations[j][1], + z=self.orientations[j][2], + w=self.orientations[j][3])))) + + return Path(Header(stamp=Time(sec=int(seconds), + nanosec=int(nanoseconds)), + frame_id=self.frame_id), + poses=self.poses[0:i+1:PATH_SLICE_STEP]) + else: + raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type} with ROSMsgLibType.ROSBAGS") + + elif lib_type == ROSMsgLibType.ROSPY or lib_type == ROSMsgLibType.RCLPY: + if msg_type == "maplab_msg/OdometryWithImuBiases": + + if lib_type == ROSMsgLibType.RCLPY: + raise ValueError("maplab_msg/OdometryWithImuBiases is not supported for RCLPY!") + + rospy = ModuleImporter.get_module('rospy') + OdometryWithImuBiases = ModuleImporter.get_module_attribute('maplab_msgs.msg', 'OdometryWithImuBiases') + PoseWithCovariance = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'PoseWithCovariance') + TwistWithCovariance = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'TwistWithCovariance') + Point = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Point') + Quaternion = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Quaternion') + Vector3 = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Vector3') + Header = ModuleImporter.get_module_attribute('std_msgs.msg', 'Header') + + msg = OdometryWithImuBiases() + msg.header = Header() + msg.header.stamp = rospy.Time(secs=seconds, nsecs=int(nanoseconds)) + msg.header.frame_id = self.frame_id + msg.child_frame_id = self.child_frame_id + msg.pose = PoseWithCovariance() + msg.pose.pose.position = Point() + msg.pose.pose.position.x = float(self.positions[i][0]) + msg.pose.pose.position.y = float(self.positions[i][1]) + msg.pose.pose.position.z = float(self.positions[i][2]) + msg.pose.pose.orientation = Quaternion() + msg.pose.pose.orientation.x = float(self.orientations[i][0]) + msg.pose.pose.orientation.y = float(self.orientations[i][1]) + msg.pose.pose.orientation.z = float(self.orientations[i][2]) + msg.pose.pose.orientation.w = float(self.orientations[i][3]) + msg.pose.covariance = np.zeros(36) # NOTE: Assumes covariance of zero. 
+                msg.twist = TwistWithCovariance()
+                msg.twist.twist.linear = Vector3()
+                msg.twist.twist.linear.x = 0.0 # NOTE: Currently doesn't support Twist
+                msg.twist.twist.linear.y = 0.0
+                msg.twist.twist.linear.z = 0.0
+                msg.twist.twist.angular = Vector3()
+                msg.twist.twist.angular.x = 0.0
+                msg.twist.twist.angular.y = 0.0
+                msg.twist.twist.angular.z = 0.0
+                msg.twist.covariance = np.zeros(36)
+                msg.accel_bias = Vector3() # NOTE: Assumes IMU biases are zero
+                msg.accel_bias.x = 0.0
+                msg.accel_bias.y = 0.0
+                msg.accel_bias.z = 0.0
+                msg.gyro_bias = Vector3()
+                msg.gyro_bias.x = 0.0
+                msg.gyro_bias.y = 0.0
+                msg.gyro_bias.z = 0.0
+                msg.odometry_state = 0 # NOTE: Assumes default state
+                return msg
+
+            elif msg_type == "Path":
+
+                Path = ModuleImporter.get_module_attribute('nav_msgs.msg', 'Path')
+                Header = ModuleImporter.get_module_attribute('std_msgs.msg', 'Header')
+
+                # Pre-calculate all the poses
+                if len(self.poses_rclpy) != self.len():
+
+                    PoseStamped = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'PoseStamped')
+                    Pose = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Pose')
+                    Point = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Point')
+                    Quaternion = ModuleImporter.get_module_attribute('geometry_msgs.msg', 'Quaternion')
+
+                    for j in range(self.len()):
+                        sec, nanosec = self._extract_seconds_and_nanoseconds(j)
+                        pose_msg = PoseStamped()
+                        pose_msg.header = Header()
+                        if lib_type == ROSMsgLibType.RCLPY:
+                            Time = ModuleImporter.get_module_attribute('rclpy.time', 'Time')
+                            pose_msg.header.stamp = Time(seconds=sec, nanoseconds=int(nanosec)).to_msg()
+                        else:
+                            rospy = ModuleImporter.get_module('rospy')
+                            pose_msg.header.stamp = rospy.Time(secs=int(sec), nsecs=int(nanosec))
+                        pose_msg.header.frame_id = self.frame_id
+                        pose_msg.pose = Pose()
+                        pose_msg.pose.position = Point()
+                        pose_msg.pose.position.x = float(self.positions[j][0])
+                        pose_msg.pose.position.y = float(self.positions[j][1])
+                        pose_msg.pose.position.z = float(self.positions[j][2])
+                        pose_msg.pose.orientation = Quaternion()
+                        pose_msg.pose.orientation.x = float(self.orientations[j][0])
+                        pose_msg.pose.orientation.y = float(self.orientations[j][1])
+                        pose_msg.pose.orientation.z = float(self.orientations[j][2])
+                        pose_msg.pose.orientation.w = float(self.orientations[j][3])
+                        self.poses_rclpy.append(pose_msg)
+
+                msg = Path()
+                msg.header = Header()
+                if lib_type == ROSMsgLibType.RCLPY:
+                    Time = ModuleImporter.get_module_attribute('rclpy.time', 'Time')
+                    msg.header.stamp = Time(seconds=seconds, nanoseconds=int(nanoseconds)).to_msg()
+                else:
+                    rospy = ModuleImporter.get_module('rospy')
+                    msg.header.stamp = rospy.Time(secs=int(seconds), nsecs=int(nanoseconds))
+                msg.header.frame_id = self.frame_id
+                msg.poses = self.poses_rclpy[0:i+1:PATH_SLICE_STEP]
+                return msg
+
+            else:
+                raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type} with ROSMsgLibType.ROSPY or ROSMsgLibType.RCLPY.")
         else:
-            raise ValueError(f"Unsupported msg_type for OdometryData: {msg_type}")
\ No newline at end of file
+            raise NotImplementedError(f"Unsupported ROSMsgLibType {lib_type} for OdometryData.get_ros_msg()!")
\ No newline at end of file
diff --git a/src/robotdataprocess/data_types/PathData.py b/src/robotdataprocess/data_types/PathData.py
index 7c63081..e0433a5 100644
--- a/src/robotdataprocess/data_types/PathData.py
+++ b/src/robotdataprocess/data_types/PathData.py
@@ -10,7 +10,7 @@ import numpy as np
 from numpy.typing import NDArray
 from pathlib import Path
-from ..rosbag.Ros2BagWrapper import 
Ros2BagWrapper +from ..ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag2 import Reader as Reader2 from rosbags.typesys.store import Typestore from scipy.spatial.transform import Rotation as R @@ -46,6 +46,7 @@ def from_ros2_bag(cls, bag_path: Union[Path, str], odom_topic: str, frame: Coord Args: bag_path (Union[Path, str]): Path to the ROS2 bag file. odom_topic (str): Topic of the Path messages. + frame: The coordinate frame of this data. Returns: PathData: Instance of this class. """ diff --git a/src/robotdataprocess/data_types/__pycache__/Data.cpython-310.pyc b/src/robotdataprocess/data_types/__pycache__/Data.cpython-310.pyc deleted file mode 100644 index 29942a7..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/Data.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/Data.cpython-312.pyc b/src/robotdataprocess/data_types/__pycache__/Data.cpython-312.pyc deleted file mode 100644 index 1107ada..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/Data.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/Data.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/Data.cpython-38.pyc deleted file mode 100644 index 20384a8..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/Data.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-310.pyc b/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-310.pyc deleted file mode 100644 index f05f86a..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-312.pyc b/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-312.pyc deleted file mode 100644 index c67cef2..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-38.pyc deleted file mode 100644 index 9fd8037..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImageData.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-310.pyc b/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-310.pyc deleted file mode 100644 index 1bc131e..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-312.pyc b/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-312.pyc deleted file mode 100644 index e5d5942..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-38.pyc deleted file mode 100644 index fd366b6..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/ImuData.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-310.pyc b/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-310.pyc deleted file mode 100644 index 7db7385..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-310.pyc and /dev/null differ diff --git 
a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-312.pyc b/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-312.pyc deleted file mode 100644 index 8b7449e..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-38.pyc deleted file mode 100644 index ef41eda..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/OdometryData.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/PathData.cpython-310.pyc b/src/robotdataprocess/data_types/__pycache__/PathData.cpython-310.pyc deleted file mode 100644 index 5fffe6c..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/PathData.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/PathData.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/PathData.cpython-38.pyc deleted file mode 100644 index d00fa25..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/PathData.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/data_types/__pycache__/__init__.cpython-38.pyc b/src/robotdataprocess/data_types/__pycache__/__init__.cpython-38.pyc deleted file mode 100644 index 9c20e1c..0000000 Binary files a/src/robotdataprocess/data_types/__pycache__/__init__.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/Ros2BagWrapper.py b/src/robotdataprocess/ros/Ros2BagWrapper.py similarity index 94% rename from src/robotdataprocess/rosbag/Ros2BagWrapper.py rename to src/robotdataprocess/ros/Ros2BagWrapper.py index 6aad67b..e2b7bf2 100644 --- a/src/robotdataprocess/rosbag/Ros2BagWrapper.py +++ b/src/robotdataprocess/ros/Ros2BagWrapper.py @@ -1,4 +1,4 @@ -from ..data_types.Data import Data +from ..data_types.Data import Data, ROSMsgLibType from collections import defaultdict from decimal import Decimal import glob @@ -11,6 +11,7 @@ from rosbags.rosbag2 import Reader as Reader2 from rosbags.rosbag2 import Writer as Writer2 from rosbags.typesys import Stores, get_typestore, get_types_from_msg +import sys from rosbags.typesys.store import Typestore import tqdm from typeguard import typechecked @@ -103,6 +104,23 @@ def get_typestore(self) -> Typestore: """ Return the ROS2 typestore """ return self.typestore2 + @staticmethod + def _create_writer2(path: Union[Path, str]) -> Writer2: + """ + Create a Writer2 instance with compatibility for different rosbags library versions. + Older versions (used with Python 3.8) don't support the 'version' parameter. + + Args: + path (Path | str): Path to the output ROS2 bag file. + Returns: + Writer2: A Writer2 instance configured appropriately. 
+ """ + sig = inspect.signature(Writer2.__init__) + if 'version' in sig.parameters: + return Writer2(path, version=9) + else: + return Writer2(path) + # ========================================================================= # ============================ Hertz Analysis ============================= # ========================================================================= @@ -399,7 +417,7 @@ def downsample(self, output_bag: Union[Path, str], topic_downsample_ratios: dict connections = reader.connections # Create a writer with only the specified topics - with Writer2(output_bag) as writer: + with Ros2BagWrapper._create_writer2(output_bag) as writer: conn_map = {} for conn in connections: if (conn.topic in topic_downsample_ratios) or include_unmentioned_topics: @@ -533,7 +551,7 @@ def crop(self, output_path: Union[Path, str], start_ts: float, end_ts: float) -> connections = reader.connections # Setup the bag writer - with Writer2(output_bag) as writer: + with Ros2BagWrapper._create_writer2(output_bag) as writer: conn_map = {} for conn in connections: conn_map[conn.topic] = writer.add_connection( @@ -581,7 +599,7 @@ def write_data_to_rosbag(bag_path: Union[Path, str], data_list: List[Data], typestore = Ros2BagWrapper._create_typestore_with_external_msgs(Stores.ROS2_HUMBLE, external_msgs_path) # Open a ROS2 Humble bag for writing - with Writer2(str(bag_path)) as writer: + with Ros2BagWrapper._create_writer2(str(bag_path)) as writer: # For each data class for i in range(0, len(data_list)): @@ -589,8 +607,8 @@ def write_data_to_rosbag(bag_path: Union[Path, str], data_list: List[Data], topic = data_topics[i] # Add the new connection - if data_msg_type[i] is not None: msgtype = data.get_ros_msg_type(data_msg_type[i]) - else: msgtype = data.get_ros_msg_type() + if data_msg_type[i] is not None: msgtype = data.get_ros_msg_type(ROSMsgLibType.ROSBAGS, data_msg_type[i]) + else: msgtype = data.get_ros_msg_type(ROSMsgLibType.ROSBAGS) connection = writer.add_connection(topic, msgtype, typestore=typestore) # Setup a tqdm bar @@ -598,8 +616,8 @@ def write_data_to_rosbag(bag_path: Union[Path, str], data_list: List[Data], # Write each of the data entries for j in range(0, data.len()): - if data_msg_type[i] is not None: msg = data.get_ros_msg(j, data_msg_type[i]) - else: msg = data.get_ros_msg(j) + if data_msg_type[i] is not None: msg = data.get_ros_msg(ROSMsgLibType.ROSBAGS, j, data_msg_type[i]) + else: msg = data.get_ros_msg(ROSMsgLibType.ROSBAGS,j) timestamp = int(Ros2BagWrapper.extract_timestamp(msg) * Decimal('1e9')) writer.write(connection, timestamp, typestore.serialize_cdr(msg, msgtype)) pbar.update(1) @@ -613,6 +631,7 @@ def write_data_to_rosbag(bag_path: Union[Path, str], data_list: List[Data], "builtin_interfaces/msg/Time": "builtin_interfaces/msg/Time", "geometry_msgs/msg/Point": "geometry_msgs/msg/Point", "geometry_msgs/msg/Pose": "geometry_msgs/msg/Pose", + "geometry_msgs/msg/PoseStamped": "geometry_msgs/msg/PoseStamped", "geometry_msgs/msg/PoseWithCovariance": "geometry_msgs/msg/PoseWithCovariance", "geometry_msgs/msg/Quaternion": "geometry_msgs/msg/Quaternion", "geometry_msgs/msg/Transform": "geometry_msgs/msg/Transform", @@ -621,10 +640,13 @@ def write_data_to_rosbag(bag_path: Union[Path, str], data_list: List[Data], "geometry_msgs/msg/TwistWithCovariance": "geometry_msgs/msg/TwistWithCovariance", "geometry_msgs/msg/Vector3": "geometry_msgs/msg/Vector3", "nav_msgs/msg/Odometry": "nav_msgs/msg/Odometry", + "nav_msgs/msg/Path": "nav_msgs/msg/Path", "rosgraph_msgs/msg/Clock": 
"rosgraph_msgs/msg/Clock", "sensor_msgs/msg/Image": "sensor_msgs/msg/Image", "sensor_msgs/msg/Imu": "sensor_msgs/msg/Imu", "sensor_msgs/msg/CameraInfo": "sensor_msgs/msg/CameraInfo", + "sensor_msgs/msg/PointCloud2": "sensor_msgs/msg/PointCloud2", + "sensor_msgs/msg/PointField": "sensor_msgs/msg/PointField", "sensor_msgs/msg/RegionOfInterest": "sensor_msgs/msg/RegionOfInterest", "std_msgs/msg/Header": "std_msgs/msg/Header", "tf2_msgs/msg/TFMessage": "tf2_msgs/msg/TFMessage" @@ -668,6 +690,11 @@ def _get_mapping_2to1(self, msg_type: str) -> dict: "position": ("position", self._get_mapping_2to1("geometry_msgs/msg/Point")), "orientation": ("orientation", self._get_mapping_2to1("geometry_msgs/msg/Quaternion")) } + elif msg_type == "geometry_msgs/msg/PoseStamped": + map = { + "header": ("header", self._get_mapping_2to1("std_msgs/msg/Header")), + "pose": ("pose", self._get_mapping_2to1("geometry_msgs/msg/Pose")) + } elif msg_type == "geometry_msgs/msg/PoseWithCovariance": map = { "pose": ("pose", self._get_mapping_2to1("geometry_msgs/msg/Pose")), @@ -714,6 +741,11 @@ def _get_mapping_2to1(self, msg_type: str) -> dict: "pose": ("pose", self._get_mapping_2to1("geometry_msgs/msg/PoseWithCovariance")), "twist": ("twist", self._get_mapping_2to1("geometry_msgs/msg/TwistWithCovariance")) } + elif msg_type == "nav_msgs/msg/Path": + map = { # TODO: This is broken, as it can't handle if non-final mapping is a list + "header": ("header", self._get_mapping_2to1("std_msgs/msg/Header")), + "poses": ("poses", self._get_mapping_2to1("geometry_msgs/msg/PoseStamped")) + } elif msg_type == "rosgraph_msgs/msg/Clock": map = { "clock": ("clock", self._get_mapping_2to1("builtin_interfaces/msg/Time")) @@ -752,6 +784,25 @@ def _get_mapping_2to1(self, msg_type: str) -> dict: "binning_y": "binning_y", "roi": ("roi", self._get_mapping_2to1("sensor_msgs/msg/RegionOfInterest")) } + elif msg_type == "sensor_msgs/msg/PointCloud2": + map = { + "header": ("header", self._get_mapping_2to1("std_msgs/msg/Header")), + "height": "height", + "width": "width", + "fields": ("fields", self._get_mapping_2to1("sensor_msgs/msg/PointField")), + "is_bigendian": "is_bigendian", + "point_step": "point_step", + "row_step": "row_step", + "data": "data", + "is_dense": "is_dense" + } + elif msg_type == "sensor_msgs/msg/PointField": + map = { + "name": "name", + "offset": "offset", + "datatype": "datatype", + "count": "count" + } elif msg_type == "sensor_msgs/msg/RegionOfInterest": map = { "x_offset": "x_offset", @@ -768,7 +819,7 @@ def _get_mapping_2to1(self, msg_type: str) -> dict: # Thus, default value in self._get_ros1_msg_instance is used. 
} elif msg_type == "tf2_msgs/msg/TFMessage": - map = { + map = { # TODO: This is broken, as it can't handle if non-final mapping is a list "transforms": ("transforms", self._get_mapping_2to1("geometry_msgs/msg/TransformStamped")) } else: @@ -950,6 +1001,13 @@ def _get_ros1_msg_instance(self, msg_type: str, ros2_msg: Union[object, None]) - if name == 'self': continue + # Get the type of this parameter + type_str = str(param.annotation) + + # Skip ClassVar parameters as they are class-level attributes, not instance parameters + if 'ClassVar' in type_str: + continue + # Get the ros2 object for this parameter try: name_ros2 = map1to2[name] @@ -959,9 +1017,6 @@ def _get_ros1_msg_instance(self, msg_type: str, ros2_msg: Union[object, None]) - except: # No matching ros2 mapping ros2_msg_attr = None - # Get the type of this parameter - type_str = str(param.annotation) - # If it is a list, add as many default values to list as needed if "list" in type_str: @@ -1038,6 +1093,7 @@ def _iterate_and_assign(self, ros2_attr_list: List[str], ros1_attr_list: List[st # If a list was found, see if we can still assign if list_found_2 or list_found_1: + # Double check both objects are currently lists if type(ros1_obj) != list or type(ros2_obj) != list: raise RuntimeError("Attempting to assign list object to single object (or visa versa). Double check mappings in _get_mapping_2to1().") @@ -1082,7 +1138,7 @@ def _map_ros2_to_ros1(self, ros2_msg: object, msg_type: str) -> object: # Map each message to temporary objects ros2_obj = ros2_msg ros1_obj = ros1_msg - + # Iterate through this attribute and any child messages and assign values self._iterate_and_assign(ros2_attr_list, ros1_attr_list, ros2_obj, ros1_obj, msg_type) @@ -1096,6 +1152,10 @@ def export_as_ros1(self, output_path: Union[Path, str], external_msg_path_ros1: output_path (Path | str): Path to the output ROS1 bag file. external_msgs_path_ros2 (Path | str): Path to the directory containing external message definitions. + + Warning: + Known bug that nav_msgs/Path messages are not properly written to the ROS1 bag, as + well as tf2_msgs/TFMessage messages. """ # Convert to pathlib paths diff --git a/src/robotdataprocess/ros/RosPublisher.py b/src/robotdataprocess/ros/RosPublisher.py new file mode 100644 index 0000000..9360d9b --- /dev/null +++ b/src/robotdataprocess/ros/RosPublisher.py @@ -0,0 +1,612 @@ +from ..data_types.Data import Data, ROSMsgLibType +from ..data_types.ImageData.ImageData import ImageData +from ..ModuleImporter import ModuleImporter +import multiprocessing +from multiprocessing import Process, Manager, Event, resource_tracker +from multiprocessing.managers import DictProxy +import multiprocessing.shared_memory +from multiprocessing.shared_memory import SharedMemory +import numpy as np +import queue as queue_module +import os +from rich.live import Live +from rich.table import Table +from rich.console import Console +from rich.markup import escape +import signal +import time +import traceback +from typeguard import typechecked +from typing import List, Union, Any, Tuple + +ROS_PUB_QUEUE_SIZE = 10 +MSG_QUEUE_MAX_SIZE = 20 # entries +RESTART_JUMP_MSGS = 5 # msgs + +def neutralize_resource_tracker(): + # Create a dummy tracker class that won't crash + class DummyTracker: + def register(self, *args, **kwargs): pass + def unregister(self, *args, **kwargs): pass + def ensure_running(self, *args, **kwargs): pass + + # 2. 
Globally neutralize the actual resource_tracker + resource_tracker.register = lambda *args, **kwargs: None + resource_tracker.unregister = lambda *args, **kwargs: None + + # 3. Specifically for Python 3.10+, replace the module reference + # instead of setting it to None + multiprocessing.shared_memory.resource_tracker = DummyTracker() + +neutralize_resource_tracker() + +class _SingleDataPublisher(): + """ + Single Data Publisher that is used for both ROS1 and ROS2 implementations. + Publishes messages from a single Data object, honoring their timestamps, using a high-frequency timer. + + Args: + libtype: ROSMsgLibType indicating whether to use rospy (ROS1) or rclpy (ROS2). + ros2_node_class: If ROS2, the rclpy Node instance used to create the publisher and timer. + data: The Data object to publish. + topic_name: The ROS topic to publish on. + type: The ROS message type for data that can be published as multiple types. + hertz: The expected frequency to publish at (used to set the publisher timer frequency). + stop_event: Allow sub-processes to be stopped by KeyboardInterrupts. + wait_for_sub: Whether to wait for a subscriber before we start publishing (used for tests). + num_workers: Number of worker processes to pre-build messages. + verbose: Whether to print topic publishing status to the console. + stats_dict: Dictionary for processes to put publishing statistics for printing. + """ + + def __init__(self, libtype: ROSMsgLibType, ros2_node_class: Union[Any, None], data: Data, topic_name: str, + type: Union[str, None], hertz: float, stop_event, wait_for_sub: bool = False, num_workers: int = 1, + verbose: bool = True, stats_dict: Union[DictProxy, None] = None): + + # Save parameters + self.libtype = libtype + self.ros2_node_class = ros2_node_class + self.data = data + self.topic = topic_name + self.type = type + self.hertz = hertz + self.timer_freq = 2 * self.hertz + self.stop_event = stop_event + self.num_workers = num_workers + self.verbose = verbose + self.stats_dict = stats_dict + self.skipped_msgs = 0 + self._is_finished = False + self.restart_when_behind = False + + # Timing setup + self.start_time = time.monotonic() + 1.0 + self.first_ts = float(self.data.timestamps[0]) + self.prev_time = self.start_time + self.prev_console_update_time = self.start_time + + # Message queue to hold built messages from workers + self.worker_queue = multiprocessing.Queue(maxsize=MSG_QUEUE_MAX_SIZE) + self.local_buf: dict = {} + self.manager = Manager() + self.index = self.manager.Value("i", 0) + self.next_msg = None + self._last_pub = None + + # Start worker processes to pre-build messages + self.stop_event_workers = Event() + self.workers: List[Process] = [] + for worker_id in range(self.num_workers): + p = Process(target=self._message_worker, args=(worker_id, self.stop_event_workers, self.stats_dict)) + p.start() + self.workers.append(p) + + # Get data type + if self.type is not None: + self._pub_type = self.data.get_ros_msg_type(self.libtype, self.type) + else: + self._pub_type = self.data.get_ros_msg_type(self.libtype) + + # Create publisher (TODO: Look into QoS settings) + if self.libtype == ROSMsgLibType.ROSPY: + # For ROS1, we have to initialize rospy AFTER forking processes + import rospy + rospy.init_node(f"robotdataprocess_publisher_{topic_name.replace('/', '_')}", anonymous=True) + self.publisher = rospy.Publisher(self.topic, self._pub_type, queue_size=ROS_PUB_QUEUE_SIZE) + + elif self.libtype == ROSMsgLibType.RCLPY: + self.publisher = self.ros2_node_class.create_publisher(self._pub_type, self.topic, ROS_PUB_QUEUE_SIZE) + + # If desired, 
wait until we have at least one subscriber to start publishing + if wait_for_sub: + if self.libtype == ROSMsgLibType.ROSPY: + rospy = ModuleImporter.get_module('rospy') + while self.publisher.get_num_connections() == 0 and not rospy.is_shutdown(): + rospy.sleep(1.0 / self.timer_freq) + elif self.libtype == ROSMsgLibType.RCLPY: + while self.publisher.get_subscription_count() == 0: + rclpy = ModuleImporter.get_module('rclpy') + rclpy.spin_once(self.ros2_node_class, timeout_sec=1.0 / self.timer_freq) + + # Redo timing setup once we have our subscriber + self.start_time = time.monotonic() + 1.0 + self.prev_time = self.start_time + self.prev_console_update_time = self.start_time + + # High-resolution timer for triggering publishes + if self.libtype == ROSMsgLibType.ROSPY: + rospy = ModuleImporter.get_module('rospy') + rospy.Timer(rospy.Duration(1.0 / float(self.timer_freq)), self._timer_callback) + elif self.libtype == ROSMsgLibType.RCLPY: + self.timer = self.ros2_node_class.create_timer(1.0 / float(self.timer_freq), self._timer_callback) + + self.log_msg_to_ros(f"robotdataprocess_publisher_{topic_name.replace('/', '_')} initialized!", stats_dict) + + def log_msg_to_ros(self, msg: str, stats_dict: Union[DictProxy, None] = None): + """ Method to log data to ROS regardless of ROS version (1 or 2) """ + + if self.libtype == ROSMsgLibType.ROSPY: + rospy = ModuleImporter.get_module('rospy') + rospy.loginfo(msg) + elif self.libtype == ROSMsgLibType.RCLPY: + self.ros2_node_class.get_logger().info(msg) + + # Additionally log it to the console if enabled + if stats_dict: + topic_info = self.stats_dict[self.topic] + topic_info['log_queue'].append(msg) + self.stats_dict[self.topic] = topic_info + + def _message_worker(self, worker_id: int, stop_event, stats_dict: Union[DictProxy, None] = None, use_shared_mem: bool = True): + """ + Worker process: prebuild ROS messages and put them into the queue. + Each worker handles every nth message starting at worker_id. + """ + neutralize_resource_tracker() + + # For ROS1, initialize this worker's rospy node + if self.libtype == ROSMsgLibType.ROSPY: + import rospy + rospy.init_node(f"robotdataprocess_worker_{self.topic.replace('/', '_')}_{worker_id}", anonymous=True) + + try: + # Iterate through entries assigned to this worker + for idx in range(worker_id, self.data.len(), self.num_workers): + # Stop working if we receive a shutdown signal + if stop_event.is_set(): + break + + # Skip messages that the main thread has skipped + if idx < self.index.value: + continue + + # Load the message + if self.type is not None: + msg = self.data.get_ros_msg(self.libtype, idx, self.type) + else: + msg = self.data.get_ros_msg(self.libtype, idx) + timestamp = float(self.data.timestamps[idx]) + + # If the message has a data field, use shared memory to transfer it to the publishing process efficiently + shm_name = None + if use_shared_mem and hasattr(msg, 'data') and len(msg.data) > 0: + + # Write a handler to disable SharedMemory usage if we run out + def handler(signum, frame): + nonlocal use_shared_mem + use_shared_mem = False + raise TimeoutError("Calls to SharedMemory freezing! Switching to Process-to-Process Serialization, which will run slower. 
If in a Docker container, this can be avoided by increasing the shared memory (docker run --shm-size=2gb).") + + # Set an alarm for 0.2 seconds + signal.signal(signal.SIGALRM, handler) + signal.setitimer(signal.ITIMER_REAL, 0.2) + + # Attempt to allocate shared memory + if isinstance(msg.data, list): data_to_copy = bytes(msg.data) + else: data_to_copy = msg.data + pixel_bytes = np.frombuffer(data_to_copy, dtype=np.uint8) + success = False + try: + shm = SharedMemory(create=True, size=pixel_bytes.nbytes) + signal.setitimer(signal.ITIMER_REAL, 0.0) + success = True + except TimeoutError as e: + self.log_msg_to_ros(f"{e}", stats_dict) + + # If successful, save the data to shared memory + if success: + shared_array = np.ndarray(pixel_bytes.shape, dtype=pixel_bytes.dtype, buffer=shm.buf) + shared_array[:] = pixel_bytes[:] + + # Strip the data from transport + msg.data = [] if isinstance(msg.data, list) else b"" + shm_name = shm.name + shm.close() + + # Put the message into the buffer + while True: + if stop_event.is_set() or idx < self.index.value: + if shm_name: + temp_shm = SharedMemory(name=shm_name) + temp_shm.close() + temp_shm.unlink() + break + else: + try: + payload = (idx, float(timestamp), msg, shm_name) + self.worker_queue.put(payload, timeout=1.0 / float(self.timer_freq)) + break + except queue_module.Full: + time.sleep(1.0 / float(self.timer_freq)) + + + except BrokenPipeError: + # Manager must have died from KeyboardInterrupt + pass + + def _pop_msg_with_index(self, target_index: int) -> Union[Tuple, None]: + """ + Search queue for (idx, ts, msg) where idx == target_index. + Returns the item or None if not found. Additionally clears stale messages. + """ + + neutralize_resource_tracker() + + # Drain the Queue into the local reorder buffer if we don't have the msg we want + if not target_index in self.local_buf: + while not self.worker_queue.empty(): + try: + idx, ts, msg, shm_name = self.worker_queue.get(timeout=1.0 / float(self.timer_freq)) + if idx < target_index: + if shm_name: + try: + temp_shm = SharedMemory(name=shm_name) + temp_shm.close() + temp_shm.unlink() + except (FileNotFoundError, BufferError): + pass + else: + self.local_buf[idx] = (ts, msg, shm_name) + except queue_module.Empty: + break + + # Clear out any stale messages that were skipped + keys_to_delete = [k for k in self.local_buf.keys() if k < target_index] + for k in keys_to_delete: + _, _, stale_shm_name = self.local_buf.pop(k) + if stale_shm_name: + try: + temp_shm = SharedMemory(name=stale_shm_name) + temp_shm.close() + temp_shm.unlink() + except (FileNotFoundError, BufferError): + pass + + # Get the actual target we're looking for + if target_index in self.local_buf: + ts, msg, shm_name = self.local_buf.pop(target_index) + + # Load data from shared memory if shm_name exists + if shm_name: + try: + actual_shm = SharedMemory(name=shm_name) + msg.data = actual_shm.buf.tobytes() + actual_shm.close() + actual_shm.unlink() + except FileNotFoundError: + self.log_msg_to_ros(f"Warning: SHM {shm_name} vanished before pop.", self.stats_dict) + except Exception as e: + self.log_msg_to_ros(f"Failed to reconstruct SHM: {e}", self.stats_dict) + return None + + return (target_index, ts, msg) + + # Otherwise, we don't have the target yet... 
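# --- Editorial sketch (not part of the diff): the shared-memory hand-off used by the worker and
# publisher above, in isolation. Workers strip the heavy `data` payload out of a message, copy it
# into a SharedMemory block, and send only the block name through the queue; the publisher
# re-attaches by name, copies the bytes back into the message, then closes and unlinks the block.
# A minimal, hypothetical round-trip (illustrative names, assuming the payload length is passed
# alongside the block name):
import numpy as np
from multiprocessing.shared_memory import SharedMemory

def send_payload(payload: bytes):
    """Producer side: copy bytes into shared memory, return (block_name, length)."""
    arr = np.frombuffer(payload, dtype=np.uint8)
    shm = SharedMemory(create=True, size=arr.nbytes)
    np.ndarray(arr.shape, dtype=np.uint8, buffer=shm.buf)[:] = arr
    shm.close()                      # the block persists until it is unlinked
    return shm.name, arr.nbytes

def receive_payload(name: str, nbytes: int) -> bytes:
    """Consumer side: re-attach by name, copy the bytes out, then free the block."""
    shm = SharedMemory(name=name)
    data = bytes(shm.buf[:nbytes])   # slice, since the mapping may be page-aligned and larger
    shm.close()
    shm.unlink()                     # the last reader is responsible for unlinking
    return data

# Example: receive_payload(*send_payload(b"\x00" * 1024)) round-trips 1 KiB of zeros.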
+ return None + + def _timer_callback(self, event=None): + # Check if we have no more messages to publish or we get a StopEvent: + if self.index.value >= self.data.len() or self.stop_event.is_set(): + + # Tell workers to finish (if they haven't already) + self.stop_event_workers.set() + + # Shut down cleanly + if self.libtype == ROSMsgLibType.ROSPY: + import rospy + self.log_msg_to_ros("Finished publishing.", self.stats_dict) + self.log_msg_to_ros("Waiting for worker processes to finish...", self.stats_dict) + for w in self.workers: + w.join() + rospy.signal_shutdown("Node shutdown...") + self._is_finished = True + self.log_msg_to_ros("Node destroyed.", self.stats_dict) + return + elif self.libtype == ROSMsgLibType.RCLPY: + self.log_msg_to_ros("Finished publishing.", self.stats_dict) + self.timer.cancel() + self.log_msg_to_ros("Waiting for worker processes to finish...", self.stats_dict) + for w in self.workers: + w.join() + self.ros2_node_class.destroy_node() + self._is_finished = True + self.log_msg_to_ros("Node destroyed.", self.stats_dict) + return + + # Get the next message from the queue if we don't have one ready + if self.next_msg is None: + result = self._pop_msg_with_index(self.index.value) + if result is None: return + self.next_msg: Tuple[int, float, Any] = result + + # Calculate target publish time for the current message + now = time.monotonic() + target: float = float(self.data.timestamps[self.index.value]) - self.first_ts + self.start_time + + # Publish when time has arrived + if now >= target: + + # Check if we are behind + behind: bool = False + if self.index.value < self.data.len() - 1: + next_target = float(self.data.timestamps[self.index.value + 1]) - self.first_ts + self.start_time + if now > next_target: + behind = True + + # If behind... 
+ if behind: + # We either plan a restart + if self.restart_when_behind: + + # Calculate the next message to publish (skipping some) + prev_skipped_msgs = self.skipped_msgs + while self.index.value < self.data.len() - 1 and self.skipped_msgs - prev_skipped_msgs < RESTART_JUMP_MSGS: + self.index.value += 1 + self.skipped_msgs += 1 + + # We don't want to publish before its time, so empty next_msg and return + self.next_msg = None + return + + # Or just skip to the next message that isn't behind + else: + # Find the next index + while self.index.value < self.data.len() - 1: + next_target = float(self.data.timestamps[self.index.value + 1]) - self.first_ts + self.start_time + if now < next_target: + break + self.index.value += 1 + self.skipped_msgs += 1 + + # Load the next message for it (if available) + result = self._pop_msg_with_index(self.index.value) + if result is None: return + self.next_msg: Tuple[int, float, Any] = result + + # Publish the message + self.publisher.publish(self.next_msg[2]) + self.index.value += 1 + + # Check if we've published timestamps out of order + if self._last_pub is not None and self._last_pub[1] >= self.next_msg[1]: + raise RuntimeError("Published timestamps out of order!") + self._last_pub = self.next_msg + + # Stats calculation + elapsed = now - self.start_time + msgs_published = self.index.value - self.skipped_msgs + interval = float(now - self.prev_time) if self.index.value > 0 else 0.0 + avg_hz = msgs_published / float(elapsed) if elapsed > 0 else 0.0 + inst_hz = 1.0 / interval if interval > 0 else 0.0 + self.prev_time = now + + # Update statistics (at most 10Hz) + elapsed_console_update = now - self.prev_console_update_time + if self.verbose and self.stats_dict is not None and elapsed_console_update > 0.1: + self.stats_dict[self.topic] = { + "last_update_time": now, + "progress": self.index.value, + "total": self.data.len(), + "avg_hz": avg_hz, + "inst_hz": inst_hz, + "skipped": self.skipped_msgs, + "log_queue": self.stats_dict[self.topic]['log_queue'], + 'local_buf_size': len(self.local_buf), + 'worker_queue_size': self.worker_queue.qsize() + } + self.prev_console_update_time = now + + # Prepare the next message + if self.index.value < self.data.len(): + result = self._pop_msg_with_index(self.index.value) + if result is None: + self.next_msg = None + return + self.next_msg = result + else: + self.next_msg = None + +@typechecked +def _run_ROS_publisher_process(data: Data, topic_name: str, type: Union[str, None], hertz: float, stop_event, wait_for_sub: bool = False, + num_workers: int = 1, verbose: bool = True, stats_dict: Union[DictProxy, None] = None) -> None: + """ + Entry point for each ROS1 publishing multiprocessing worker. + NOTE: This function feels useless, but follows similar structure to the ROS2 version, which is more complex. 
+ """ + + try: + import rospy + class SingleDataPublisherROS1(): + def __init__(self, data: Data, topic_name: str, type: Union[str, None], hertz: float,stop_event, wait_for_sub: bool = False, num_workers: int = 1, + verbose: bool = True, stats_dict: Union[DictProxy, None] = None): + self._pub = _SingleDataPublisher(ROSMsgLibType.ROSPY, None, data, topic_name, type, hertz, stop_event, wait_for_sub, num_workers, verbose, stats_dict) + node = SingleDataPublisherROS1(data, topic_name, type, hertz, stop_event, wait_for_sub, num_workers, verbose, stats_dict) + while not rospy.is_shutdown() and not node._pub._is_finished: + time.sleep(0.1) + + except Exception: + print(f"Exception in publisher for topic '{topic_name}':") + print(traceback.format_exc()) + +@typechecked +def _run_ROS2_publisher_process(data: Data, topic_name: str, type: Union[str, None], hertz: float, stop_event, wait_for_sub: bool = False, + num_workers: int = 1, verbose: bool = True, stats_dict: Union[DictProxy, None] = None) -> None: + """ + Entry point for each ROS2 publishing multiprocessing worker. + """ + + try: + # Lazy import rclpy ONLY here + import rclpy + from rclpy.node import Node + + # Wrapper class that is also a ROS2 Node, so that we are in compliance with rclpy design. + class SingleDataPublisherROS2(Node): + def __init__(self, data: Data, topic_name: str, type: Union[str, None], hertz: float, stop_event, wait_for_sub: bool = False, num_workers: int = 1, + verbose: bool = True, stats_dict: Union[DictProxy, None] = None): + super().__init__(f"robotdataprocess_publisher_{topic_name.replace('/', '_')}") + self._pub = _SingleDataPublisher(ROSMsgLibType.RCLPY, self, data, topic_name, type, hertz, stop_event, wait_for_sub, num_workers, verbose, stats_dict) + + def is_finished(self) -> bool: + return self._pub._is_finished + + # Start ROS2 node + if not rclpy.ok(): + rclpy.init() + node = SingleDataPublisherROS2(data, topic_name, type, hertz, stop_event, wait_for_sub, num_workers, verbose, stats_dict) + while rclpy.ok(): + rclpy.spin_once(node, timeout_sec=2.0) + if node.is_finished(): + break + + except Exception: + print(f"Exception in publisher for topic '{topic_name}':") + print(traceback.format_exc()) + +@typechecked +def publish_data_ROS_multiprocess(data_list: List[Data], data_topics: List[str], data_msg_type: List[Union[str, None]], + data_hz: List[float], data_workers: List[int], libtype: ROSMsgLibType, shutdown_ros: bool, + verbose: bool = True, delay_seconds: float = 0.0, wait_for_sub: bool = False) -> None: + """ + Launches one publisher process per Data stream, either for ROS1 or ROS2. + + Args: + data_list: list of Data objects + data_topics: list of ROS topic names + data_msg_type: list of ROS message types for data that can be published as multiple types. + data_hz: The expected hertz of the data stream (used to set hertz speed of publishing process). + data_workers: Each topic will have one publisher process and X number of worker processes to pull data. + libtype: ROSMsgLibType indicating whether to use rospy (ROS1) or rclpy (ROS2). + shutdown_ros: Whether to shutdown ROS after publishing is complete. + verbose: Whether to print topic publishing status to the console. + delay_seconds: Delay in seconds before doing anything (for testing purposes). + wait_for_sub: Whether to wait for a subscriber before we start publishing (for testing purposes). 
+ """ + + # Optional delay before starting (for testing purposes) + if delay_seconds > 0.0: + time.sleep(delay_seconds) + + # Ensure we have matching data and topics + assert len(data_list) == len(data_topics), "First four arguments need to have the same length!" + assert len(data_list) == len(data_msg_type), "First four arguments need to have the same length!" + assert len(data_list) == len(data_workers), "First four arguments need to have the same length!" + + # Create a shared dictionary across processes for statistics + manager = Manager() + stats_dict = manager.dict() # Shared across processes + + # Initialize stats_dict for each topic + for topic in data_topics: + stats_dict[topic] = {"last_update_time": 0, "progress": 0, "total": 0, "avg_hz": 0, + "inst_hz": 0, "skipped": 0, "log_queue": manager.list(), + 'local_buf_size': 0, 'worker_queue_size': 0} + + # Launch the appropriate publisher processes + processes: List[Process] = [] + topic_to_proc: dict = {} + stop_event = Event() + for data, topic, type, hertz, workers in zip(data_list, data_topics, data_msg_type, data_hz, data_workers): + if libtype == ROSMsgLibType.RCLPY: + pub_proc_func = _run_ROS2_publisher_process + elif libtype == ROSMsgLibType.ROSPY: + pub_proc_func = _run_ROS_publisher_process + + p = Process(target=pub_proc_func, args=(data, topic, type, hertz, stop_event, wait_for_sub, workers, verbose, stats_dict)) + p.start() + processes.append(p) + topic_to_proc[topic] = p + + # Helper to build the table with a Status column + def generate_table() -> Table: + table = Table(title="ROS Publisher Dashboard", show_header=True, header_style="bold magenta") + table.add_column("Status", justify="center", min_width=14) + table.add_column("Topic", style="cyan") + table.add_column("Progress", justify="right", min_width=13) + table.add_column("Avg Hz", justify="right") + table.add_column("Inst Hz", justify="right") + table.add_column("Skipped Msgs", justify="right") + table.add_column("Buffer Sizes", justify="right") + + for topic, s in stats_dict.items(): + proc = topic_to_proc[topic] + row_style = None + + if proc.is_alive(): + if time.monotonic() - float(s["last_update_time"]) < 0.3: + status = "[bold green]RUNNING[/bold green]" + else: + status = "[bold yellow]UNRESPONSIVE[/bold yellow]" + row_style = "grey50" + elif s['progress'] >= s['total'] and s['total'] > 0: + status = "[bold blue]FINISHED[/bold blue]" + else: + status = "[bold red]STOPPED[/bold red]" + + table.add_row( + status, + topic, + f"{s['progress']}/{s['total']}", + f"{s['avg_hz']:.1f}", + f"{s['inst_hz']:.1f}", + f"{s['skipped']}", + f"{s['local_buf_size']}/∞ | {s['worker_queue_size']}/{MSG_QUEUE_MAX_SIZE}", + style=row_style + ) + return table + + # Wait for all publishers to finish (and provide updates if requested) + try: + if verbose: + with Live(generate_table(), refresh_per_second=10, screen=False) as live: + while any(p.is_alive() for p in processes): + for topic in data_topics: + logs = stats_dict[topic]["log_queue"] + while len(logs) > 0: + msg = logs.pop(0) + live.console.print(f"[{topic}] {msg}", markup=False, style="") + live.update(generate_table()) + time.sleep(0.1) + live.update(generate_table()) + for p in processes: + p.join() + else: + for p in processes: + p.join() + except KeyboardInterrupt: + stop_event.set() + for p in processes: + p.join() + manager.shutdown() + + # Shutdown if requested + if libtype == ROSMsgLibType.RCLPY and shutdown_ros: + import rclpy + if rclpy.ok(): + rclpy.shutdown() + print("✓ ROS2 shutdown complete.") + elif 
libtype == ROSMsgLibType.ROSPY and shutdown_ros: + # ROS1 shutdown handled in each publisher node + pass + elif shutdown_ros: + raise NotImplementedError(f"Unsupported ROSMsgLibType {libtype} for shutdown_ros parameter!") \ No newline at end of file diff --git a/src/robotdataprocess/rosbag/__init__.py b/src/robotdataprocess/ros/__init__.py similarity index 100% rename from src/robotdataprocess/rosbag/__init__.py rename to src/robotdataprocess/ros/__init__.py diff --git a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-310.pyc b/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-310.pyc deleted file mode 100644 index 62b2eb8..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-312.pyc b/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-312.pyc deleted file mode 100644 index 4810378..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-38.pyc b/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-38.pyc deleted file mode 100644 index 251eff7..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/Ros2BagWrapper.cpython-38.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-310.pyc b/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 4f3e912..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-312.pyc b/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index dc6adce..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-38.pyc b/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-38.pyc deleted file mode 100644 index f02d8e3..0000000 Binary files a/src/robotdataprocess/rosbag/__pycache__/__init__.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_CmdLineInterface.cpython-310.pyc b/tests/__pycache__/test_CmdLineInterface.cpython-310.pyc deleted file mode 100644 index c8f929c..0000000 Binary files a/tests/__pycache__/test_CmdLineInterface.cpython-310.pyc and /dev/null differ diff --git a/tests/__pycache__/test_CmdLineInterface.cpython-38.pyc b/tests/__pycache__/test_CmdLineInterface.cpython-38.pyc deleted file mode 100644 index 2f911a1..0000000 Binary files a/tests/__pycache__/test_CmdLineInterface.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_ImageData.cpython-310.pyc b/tests/__pycache__/test_ImageData.cpython-310.pyc deleted file mode 100644 index 48f3a23..0000000 Binary files a/tests/__pycache__/test_ImageData.cpython-310.pyc and /dev/null differ diff --git a/tests/__pycache__/test_ImageData.cpython-38.pyc b/tests/__pycache__/test_ImageData.cpython-38.pyc deleted file mode 100644 index 1f8055a..0000000 Binary files a/tests/__pycache__/test_ImageData.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_ImuData.cpython-310.pyc b/tests/__pycache__/test_ImuData.cpython-310.pyc deleted file mode 100644 index 9284a23..0000000 Binary files a/tests/__pycache__/test_ImuData.cpython-310.pyc and /dev/null differ diff --git 
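For context on how the new RosPublisher entry point above is meant to be driven, a hypothetical end-to-end call is sketched below. Only publish_data_ROS_multiprocess, ROSMsgLibType, and their module paths come from this diff; the ImuData import, its loader signature, the file path, and the topic settings are illustrative assumptions.

from robotdataprocess.data_types.Data import ROSMsgLibType
from robotdataprocess.data_types.ImuData import ImuData              # assumed module path
from robotdataprocess.ros.RosPublisher import publish_data_ROS_multiprocess

# Load one data stream (hypothetical loader arguments).
imu = ImuData.from_txt_file("path/to/imu.txt")

# Publish it over ROS2 with two prefetch workers, honoring the recorded timestamps.
publish_data_ROS_multiprocess(
    data_list=[imu],
    data_topics=["/imu0"],
    data_msg_type=[None],          # None -> use the data type's default ROS message type
    data_hz=[200.0],               # expected stream rate, used to size the publish timer
    data_workers=[2],
    libtype=ROSMsgLibType.RCLPY,   # rclpy (ROS2); ROSMsgLibType.ROSPY would target ROS1
    shutdown_ros=True,
)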
a/tests/__pycache__/test_ImuData.cpython-38.pyc b/tests/__pycache__/test_ImuData.cpython-38.pyc deleted file mode 100644 index d9f01c9..0000000 Binary files a/tests/__pycache__/test_ImuData.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_OdometryData.cpython-310.pyc b/tests/__pycache__/test_OdometryData.cpython-310.pyc deleted file mode 100644 index 655e3a8..0000000 Binary files a/tests/__pycache__/test_OdometryData.cpython-310.pyc and /dev/null differ diff --git a/tests/__pycache__/test_OdometryData.cpython-38.pyc b/tests/__pycache__/test_OdometryData.cpython-38.pyc deleted file mode 100644 index 778d6a8..0000000 Binary files a/tests/__pycache__/test_OdometryData.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_PathData.cpython-310.pyc b/tests/__pycache__/test_PathData.cpython-310.pyc deleted file mode 100644 index 67295c1..0000000 Binary files a/tests/__pycache__/test_PathData.cpython-310.pyc and /dev/null differ diff --git a/tests/__pycache__/test_PathData.cpython-38.pyc b/tests/__pycache__/test_PathData.cpython-38.pyc deleted file mode 100644 index f57a18c..0000000 Binary files a/tests/__pycache__/test_PathData.cpython-38.pyc and /dev/null differ diff --git a/tests/__pycache__/test_utils.cpython-310.pyc b/tests/__pycache__/test_utils.cpython-310.pyc deleted file mode 100644 index 557af66..0000000 Binary files a/tests/__pycache__/test_utils.cpython-310.pyc and /dev/null differ diff --git a/tests/__pycache__/test_utils.cpython-38.pyc b/tests/__pycache__/test_utils.cpython-38.pyc deleted file mode 100644 index be85f65..0000000 Binary files a/tests/__pycache__/test_utils.cpython-38.pyc and /dev/null differ diff --git a/tests/files/test_ImageDataOnDisk/test_from_image_files/0.050000.png b/tests/files/test_ImageDataOnDisk/test_from_image_files/0.050000.png new file mode 100755 index 0000000..4661f7b Binary files /dev/null and b/tests/files/test_ImageDataOnDisk/test_from_image_files/0.050000.png differ diff --git a/tests/files/test_ImageDataOnDisk/test_from_image_files/141.750000.png b/tests/files/test_ImageDataOnDisk/test_from_image_files/141.750000.png new file mode 100755 index 0000000..53739f7 Binary files /dev/null and b/tests/files/test_ImageDataOnDisk/test_from_image_files/141.750000.png differ diff --git a/tests/files/test_ImageDataOnDisk/test_from_image_files/333.050000.png b/tests/files/test_ImageDataOnDisk/test_from_image_files/333.050000.png new file mode 100755 index 0000000..01500a6 Binary files /dev/null and b/tests/files/test_ImageDataOnDisk/test_from_image_files/333.050000.png differ diff --git a/tests/files/test_ImuData/test_from_txt_file/synthetic_imu_9axis.txt b/tests/files/test_ImuData/test_from_txt_file/synthetic_imu_9axis.txt new file mode 100755 index 0000000..1844204 --- /dev/null +++ b/tests/files/test_ImuData/test_from_txt_file/synthetic_imu_9axis.txt @@ -0,0 +1,101 @@ +331.370000 9.643094 -0.011990 -10.020356 -0.000121 0.003536 -0.012490 0.001867 -0.000792 0.928042 0.372469 +331.375000 0.790649 -0.004057 -9.900053 -0.000131 0.004022 -0.012510 0.001859 -0.000789 0.928031 0.372498 +331.380000 -8.061777 0.002769 -9.780083 -0.000131 0.004192 -0.012247 0.001849 -0.000786 0.928019 0.372527 +331.385000 -16.914201 0.008510 -9.660483 -0.000120 0.004048 -0.011699 0.001839 -0.000782 0.928008 0.372556 +331.390000 -25.766638 0.013236 -9.541236 -0.000100 0.003588 -0.010867 0.001830 -0.000779 0.927997 0.372583 +331.395000 -34.619102 0.017070 -9.422267 -0.000070 0.002813 -0.009751 0.001822 -0.000776 0.927987 0.372608 +331.400000 
-43.471606 0.020183 -9.303448 -0.000033 0.001814 -0.008445 0.001815 -0.000773 0.927978 0.372631 +331.405000 -34.775094 0.010147 -9.412920 -0.000001 0.000956 -0.007323 0.001811 -0.000771 0.927970 0.372650 +331.410000 -26.078621 0.000991 -9.522123 0.000020 0.000329 -0.006479 0.001809 -0.000770 0.927963 0.372667 +331.415000 -17.382176 -0.007457 -9.631188 0.000032 -0.000065 -0.005912 0.001808 -0.000770 0.927957 0.372682 +331.420000 -8.685750 -0.015317 -9.740208 0.000035 -0.000227 -0.005624 0.001808 -0.000770 0.927951 0.372696 +331.425000 0.010669 -0.022663 -9.849233 0.000028 -0.000157 -0.005613 0.001809 -0.000770 0.927946 0.372709 +331.430000 8.707092 -0.029520 -9.958275 0.000011 0.000144 -0.005880 0.001809 -0.000770 0.927941 0.372722 +331.435000 17.403530 -0.035865 -10.067305 -0.000015 0.000678 -0.006425 0.001809 -0.000770 0.927935 0.372736 +331.440000 26.099995 -0.041626 -10.176252 -0.000050 0.001443 -0.007248 0.001807 -0.000769 0.927929 0.372751 +331.445000 34.796502 -0.046684 -10.285007 -0.000095 0.002441 -0.008348 0.001804 -0.000768 0.927923 0.372768 +331.450000 43.493062 -0.050869 -10.393420 -0.000146 0.003580 -0.009635 0.001798 -0.000766 0.927915 0.372787 +331.455000 34.814016 -0.039643 -10.260977 -0.000188 0.004496 -0.010743 0.001790 -0.000763 0.927906 0.372809 +331.460000 26.135011 -0.029106 -10.128725 -0.000218 0.005099 -0.011581 0.001779 -0.000759 0.927896 0.372834 +331.465000 17.456029 -0.019428 -9.996810 -0.000235 0.005389 -0.012149 0.001767 -0.000755 0.927885 0.372861 +331.470000 8.777054 -0.010729 -9.865325 -0.000239 0.005366 -0.012446 0.001754 -0.000750 0.927874 0.372889 +331.475000 0.098070 -0.003085 -9.734308 -0.000230 0.005029 -0.012473 0.001742 -0.000746 0.927862 0.372918 +331.480000 -8.580938 0.003478 -9.603742 -0.000209 0.004379 -0.012230 0.001730 -0.000742 0.927851 0.372947 +331.485000 -17.259985 0.008977 -9.473556 -0.000175 0.003416 -0.011717 0.001719 -0.000738 0.927839 0.372976 +331.490000 -25.939083 0.013480 -9.343626 -0.000129 0.002140 -0.010934 0.001711 -0.000735 0.927828 0.373003 +331.495000 -34.618244 0.017099 -9.213771 -0.000070 0.000551 -0.009880 0.001706 -0.000733 0.927818 0.373028 +331.500000 -43.297481 0.019994 -9.083758 -0.000002 -0.001246 -0.008642 0.001705 -0.000733 0.927809 0.373051 +331.505000 -34.970942 0.011318 -9.216151 0.000056 -0.002823 -0.007566 0.001708 -0.000734 0.927801 0.373071 +331.510000 -26.644478 0.003503 -9.348438 0.000100 -0.004075 -0.006737 0.001715 -0.000737 0.927794 0.373089 +331.515000 -18.318077 -0.003609 -9.480846 0.000130 -0.005002 -0.006155 0.001724 -0.000740 0.927788 0.373104 +331.520000 -9.991725 -0.010132 -9.613546 0.000146 -0.005604 -0.005821 0.001736 -0.000744 0.927782 0.373118 +331.525000 -1.665408 -0.016141 -9.746658 0.000149 -0.005880 -0.005734 0.001749 -0.000749 0.927776 0.373132 +331.530000 6.660887 -0.021669 -9.880246 0.000138 -0.005832 -0.005895 0.001763 -0.000754 0.927771 0.373145 +331.535000 14.987178 -0.026706 -10.014319 0.000112 -0.005458 -0.006303 0.001776 -0.000760 0.927765 0.373159 +331.540000 23.313480 -0.031204 -10.148835 0.000073 -0.004759 -0.006958 0.001789 -0.000764 0.927760 0.373174 +331.545000 31.639811 -0.035072 -10.283695 0.000020 -0.003735 -0.007861 0.001800 -0.000769 0.927753 0.373190 +331.550000 39.966189 -0.038178 -10.418746 -0.000043 -0.002468 -0.008935 0.001809 -0.000772 0.927746 0.373208 +331.555000 33.477485 -0.030884 -10.319172 -0.000098 -0.001293 -0.009878 0.001815 -0.000774 0.927737 0.373229 +331.560000 26.988843 -0.024040 -10.219279 -0.000140 -0.000292 -0.010615 0.001818 -0.000776 0.927728 
0.373251 +331.565000 20.500252 -0.017760 -10.119200 -0.000171 0.000534 -0.011144 0.001818 -0.000776 0.927718 0.373276 +331.570000 14.011703 -0.012130 -10.019044 -0.000188 0.001186 -0.011466 0.001817 -0.000776 0.927708 0.373302 +331.575000 7.523185 -0.007210 -9.918899 -0.000194 0.001664 -0.011581 0.001814 -0.000775 0.927697 0.373329 +331.580000 1.034688 -0.003033 -9.818828 -0.000187 0.001968 -0.011490 0.001810 -0.000774 0.927686 0.373355 +331.585000 -5.453796 0.000394 -9.718873 -0.000167 0.002097 -0.011191 0.001805 -0.000773 0.927676 0.373382 +331.590000 -11.942275 0.003092 -9.619053 -0.000136 0.002052 -0.010685 0.001800 -0.000771 0.927665 0.373408 +331.595000 -18.430760 0.005109 -9.519365 -0.000091 0.001833 -0.009973 0.001795 -0.000770 0.927655 0.373433 +331.600000 -24.919257 0.006520 -9.419783 -0.000039 0.001476 -0.009109 0.001791 -0.000768 0.927646 0.373456 +331.605000 -20.418448 0.002764 -9.490349 0.000008 0.001130 -0.008323 0.001788 -0.000767 0.927637 0.373477 +331.610000 -15.917654 -0.000522 -9.560821 0.000043 0.000833 -0.007668 0.001785 -0.000766 0.927630 0.373496 +331.615000 -11.416873 -0.003398 -9.631224 0.000069 0.000583 -0.007147 0.001783 -0.000765 0.927622 0.373514 +331.620000 -6.916104 -0.005912 -9.701582 0.000084 0.000382 -0.006758 0.001782 -0.000764 0.927616 0.373531 +331.625000 -2.415343 -0.008100 -9.771911 0.000088 0.000229 -0.006502 0.001781 -0.000763 0.927609 0.373546 +331.630000 2.085410 -0.009987 -9.842224 0.000082 0.000124 -0.006378 0.001781 -0.000763 0.927603 0.373561 +331.635000 6.586158 -0.011585 -9.912531 0.000066 0.000067 -0.006387 0.001780 -0.000763 0.927597 0.373576 +331.640000 11.086903 -0.012895 -9.982835 0.000040 0.000059 -0.006529 0.001780 -0.000762 0.927591 0.373591 +331.645000 15.587648 -0.013905 -10.053137 0.000002 0.000098 -0.006804 0.001780 -0.000762 0.927585 0.373606 +331.650000 20.088395 -0.014592 -10.123432 -0.000042 0.000171 -0.007175 0.001780 -0.000762 0.927579 0.373622 +331.655000 16.458902 -0.012094 -10.065444 -0.000082 0.000222 -0.007494 0.001780 -0.000762 0.927572 0.373639 +331.660000 12.829410 -0.009834 -10.007459 -0.000113 0.000236 -0.007725 0.001779 -0.000762 0.927565 0.373656 +331.665000 9.199919 -0.007834 -9.949481 -0.000137 0.000212 -0.007868 0.001778 -0.000762 0.927558 0.373674 +331.670000 5.570427 -0.006109 -9.891513 -0.000153 0.000152 -0.007923 0.001778 -0.000762 0.927551 0.373692 +331.675000 1.940931 -0.004668 -9.833553 -0.000161 0.000055 -0.007890 0.001777 -0.000762 0.927543 0.373711 +331.680000 -1.688569 -0.003515 -9.775598 -0.000162 -0.000080 -0.007769 0.001777 -0.000762 0.927536 0.373729 +331.685000 -5.318077 -0.002644 -9.717641 -0.000154 -0.000251 -0.007559 0.001777 -0.000763 0.927529 0.373747 +331.690000 -8.947592 -0.002047 -9.659673 -0.000139 -0.000459 -0.007262 0.001778 -0.000763 0.927522 0.373764 +331.695000 -12.577118 -0.001706 -9.601684 -0.000115 -0.000704 -0.006876 0.001778 -0.000764 0.927515 0.373781 +331.700000 -16.206654 -0.001599 -9.543658 -0.000086 -0.000979 -0.006430 0.001780 -0.000765 0.927508 0.373797 +331.705000 -13.190945 -0.003287 -9.576564 -0.000058 -0.001250 -0.006033 0.001782 -0.000766 0.927502 0.373812 +331.710000 -10.175249 -0.004762 -9.609486 -0.000033 -0.001510 -0.005713 0.001785 -0.000767 0.927497 0.373826 +331.715000 -7.159566 -0.006045 -9.642437 -0.000011 -0.001758 -0.005469 0.001789 -0.000769 0.927491 0.373839 +331.720000 -4.143896 -0.007151 -9.675427 0.000007 -0.001996 -0.005302 0.001793 -0.000770 0.927486 0.373852 +331.725000 -1.128238 -0.008093 -9.708470 0.000023 -0.002222 -0.005212 0.001797 
-0.000772 0.927481 0.373864 +331.730000 1.887409 -0.008876 -9.741575 0.000035 -0.002438 -0.005198 0.001802 -0.000774 0.927476 0.373876 +331.735000 4.903045 -0.009504 -9.774752 0.000043 -0.002642 -0.005261 0.001808 -0.000776 0.927472 0.373888 +331.740000 7.918669 -0.009972 -9.808011 0.000049 -0.002835 -0.005401 0.001814 -0.000779 0.927467 0.373901 +331.745000 10.934284 -0.010274 -9.841360 0.000051 -0.003017 -0.005617 0.001821 -0.000781 0.927462 0.373913 +331.750000 13.949888 -0.010399 -9.874807 0.000051 -0.003163 -0.005887 0.001828 -0.000784 0.927456 0.373926 +331.755000 11.106001 -0.009092 -9.881722 0.000050 -0.003175 -0.006116 0.001835 -0.000787 0.927451 0.373940 +331.760000 8.262113 -0.007942 -9.888548 0.000049 -0.003029 -0.006282 0.001843 -0.000790 0.927445 0.373954 +331.765000 5.418232 -0.006963 -9.895279 0.000047 -0.002724 -0.006385 0.001850 -0.000792 0.927439 0.373969 +331.770000 2.574366 -0.006160 -9.901921 0.000045 -0.002260 -0.006424 0.001856 -0.000795 0.927433 0.373983 +331.775000 -0.269477 -0.005539 -9.908485 0.000043 -0.001638 -0.006399 0.001862 -0.000797 0.927427 0.373998 +331.780000 -3.113290 -0.005100 -9.914995 0.000041 -0.000857 -0.006312 0.001865 -0.000798 0.927421 0.374013 +331.785000 -5.957064 -0.004841 -9.921482 0.000038 0.000083 -0.006160 0.001867 -0.000799 0.927415 0.374028 +331.790000 -8.800791 -0.004754 -9.927985 0.000035 0.001181 -0.005946 0.001867 -0.000799 0.927410 0.374042 +331.795000 -11.644464 -0.004830 -9.934555 0.000032 0.002438 -0.005667 0.001865 -0.000797 0.927404 0.374056 +331.800000 -14.488075 -0.005054 -9.941250 0.000029 0.003776 -0.005349 0.001859 -0.000795 0.927399 0.374069 +331.805000 -11.027280 -0.006925 -9.914763 0.000025 0.004886 -0.005084 0.001850 -0.000791 0.927394 0.374081 +331.810000 -7.566431 -0.008600 -9.888188 0.000020 0.005689 -0.004896 0.001839 -0.000787 0.927389 0.374093 +331.815000 -4.105543 -0.010096 -9.861460 0.000015 0.006187 -0.004785 0.001826 -0.000781 0.927384 0.374104 +331.820000 -0.644633 -0.011421 -9.834537 0.000010 0.006378 -0.004751 0.001812 -0.000775 0.927380 0.374116 +331.825000 2.816285 -0.012581 -9.807397 0.000004 0.006263 -0.004794 0.001797 -0.000769 0.927376 0.374127 +331.830000 6.277195 -0.013575 -9.780040 -0.000003 0.005842 -0.004914 0.001782 -0.000764 0.927371 0.374138 +331.835000 9.738084 -0.014397 -9.752487 -0.000010 0.005114 -0.005111 0.001769 -0.000758 0.927367 0.374149 +331.840000 13.198936 -0.015035 -9.724779 -0.000018 0.004081 -0.005384 0.001757 -0.000753 0.927362 0.374161 +331.845000 16.659736 -0.015474 -9.696981 -0.000026 0.002742 -0.005734 0.001747 -0.000750 0.927357 0.374173 +331.850000 20.120470 -0.015690 -9.669176 -0.000034 0.001188 -0.006133 0.001741 -0.000747 0.927351 0.374187 +331.855000 15.347024 -0.013409 -9.687215 -0.000042 -0.000216 -0.006465 0.001738 -0.000746 0.927346 0.374201 +331.860000 10.573511 -0.011403 -9.705384 -0.000050 -0.001377 -0.006700 0.001739 -0.000746 0.927340 0.374216 +331.865000 5.799940 -0.009699 -9.723577 -0.000057 -0.002296 -0.006841 0.001742 -0.000748 0.927333 0.374232 +331.870000 1.026325 -0.008313 -9.741709 -0.000064 -0.002972 -0.006885 0.001747 -0.000750 0.927327 0.374247 \ No newline at end of file diff --git a/tests/files/test_LiDARData/test_from_npy_files/0.100000.npy b/tests/files/test_LiDARData/test_from_npy_files/0.100000.npy new file mode 100755 index 0000000..bf04cd4 Binary files /dev/null and b/tests/files/test_LiDARData/test_from_npy_files/0.100000.npy differ diff --git a/tests/files/test_LiDARData/test_from_npy_files/0.600000.npy 
b/tests/files/test_LiDARData/test_from_npy_files/0.600000.npy new file mode 100755 index 0000000..bf04cd4 Binary files /dev/null and b/tests/files/test_LiDARData/test_from_npy_files/0.600000.npy differ diff --git a/tests/files/test_LiDARData/test_from_npy_files/1.100000.npy b/tests/files/test_LiDARData/test_from_npy_files/1.100000.npy new file mode 100755 index 0000000..bf04cd4 Binary files /dev/null and b/tests/files/test_LiDARData/test_from_npy_files/1.100000.npy differ diff --git a/tests/files/test_OdometryData/test_from_txt_file_AND_get_ros_msg_AND_from_ros2_bag/odom_openvins.txt b/tests/files/test_OdometryData/test_from_txt_file_AND_get_ros_msg_AND_from_ros2_bag/odom_openvins.txt new file mode 100644 index 0000000..a9ab9ec --- /dev/null +++ b/tests/files/test_OdometryData/test_from_txt_file_AND_get_ros_msg_AND_from_ros2_bag/odom_openvins.txt @@ -0,0 +1,4 @@ +# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc +1403715278.86375 0.807497 -0.008140 0.589812 0.002098 0.010333 0.001012 0.067899 0.096015 0.041991 0.370462 0.001098 0.016673 0.077103 -0.028092 -0.000786 0.012117 0.0016021 2 458.694354 457.251920 367.211936 248.414992 -0.283686 0.072937 0.000104 0.000328 -0.007688 0.010796 0.701806 0.712244 0.063095 -0.020743 -0.007551 457.555003 456.189760 380.023012 255.211928 -0.283581 0.074565 0.000234 -0.000175 -0.002579 0.015013 0.702415 0.711605 -0.042659 -0.020457 -0.009289 0 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 1.00000000 +1403715278.91222 0.809683 -0.007289 0.586820 0.001250 0.047673 0.008781 0.078044 0.147596 0.044660 0.352876 -0.000963 0.022577 0.077152 -0.019884 -0.000632 0.008755 0.0000808 2 458.825081 457.490316 367.751691 248.107344 -0.279185 0.066723 0.001332 -0.001803 -0.008539 0.011244 0.701502 0.712527 0.063026 -0.021364 -0.006420 457.917337 456.110594 379.236106 255.503715 -0.282909 0.067575 0.001190 -0.001329 -0.002728 0.014343 0.702393 0.711640 -0.043404 -0.020891 -0.008765 0 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 1.00000000 +1403715278.96200 0.810142 -0.007347 0.586184 0.001775 0.053992 0.011239 0.099576 0.142725 0.050730 0.311138 -0.000753 0.025858 0.076775 -0.028162 -0.000472 0.011265 -0.0001463 2 458.919497 457.401261 367.728234 248.058404 -0.279443 0.067074 0.000865 -0.001890 -0.008977 0.011167 0.701614 0.712413 0.061646 -0.020473 -0.001371 457.862967 456.128439 379.220482 255.458118 -0.281457 0.066705 0.000639 -0.001646 -0.002850 0.013949 0.702475 0.711566 -0.045878 -0.019852 -0.002700 0 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 1.00000000 0.00000000 0.00000000 1.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 1.00000000 \ No newline at end of file diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.050000.png 
b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.050000.png new file mode 100755 index 0000000..21e0fc5 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.050000.png differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.100000.png b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.100000.png new file mode 100755 index 0000000..d792356 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.100000.png differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.150000.png b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.150000.png new file mode 100755 index 0000000..41c51a0 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.150000.png differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.200000.png b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.200000.png new file mode 100755 index 0000000..2a255fb Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/0.200000.png differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/1.000000.png b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/1.000000.png new file mode 100755 index 0000000..5e582a5 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImageData/1.000000.png differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImuData/imu.txt b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImuData/imu.txt new file mode 100644 index 0000000..42d8362 --- /dev/null +++ b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImuData/imu.txt @@ -0,0 +1,107 @@ +104.112000 40.709373 -0.010616 -9.709079 0.000035 -0.003331 -0.004083 0.000131 0.001952 0.045553 -0.998960 +104.114000 37.115732 -0.010991 -9.724263 0.000043 -0.003711 -0.004118 0.000132 0.001956 0.045557 -0.998960 +104.116000 33.522082 -0.011423 -9.739424 0.000050 -0.004040 -0.004150 0.000132 0.001959 0.045561 -0.998960 +104.118000 29.928426 -0.011911 -9.754552 0.000056 -0.004320 -0.004180 0.000132 0.001963 0.045565 -0.998959 +104.120000 26.334764 -0.012458 -9.769636 0.000060 -0.004550 -0.004209 0.000132 0.001968 0.045569 -0.998959 +104.122000 22.741098 -0.013064 -9.784669 0.000064 -0.004729 -0.004235 0.000132 0.001972 0.045573 -0.998959 +104.124000 19.147427 -0.013730 -9.799643 0.000066 -0.004859 -0.004259 0.000132 0.001977 0.045578 -0.998959 +104.126000 15.553754 -0.014455 -9.814553 0.000066 -0.004939 -0.004281 0.000132 0.001982 0.045582 -0.998959 +104.128000 11.960079 -0.015242 -9.829395 0.000066 -0.004969 -0.004301 0.000132 0.001987 0.045586 -0.998958 +104.130000 8.366403 -0.016089 -9.844167 0.000064 -0.004949 -0.004319 0.000133 0.001992 0.045590 -0.998958 +104.132000 4.772728 -0.016998 -9.858867 0.000062 -0.004880 -0.004335 0.000133 0.001997 0.045595 -0.998958 +104.134000 1.179053 -0.017969 -9.873495 0.000058 -0.004760 -0.004348 0.000133 0.002001 0.045599 -0.998958 +104.136000 -2.414619 -0.019003 -9.888054 0.000052 -0.004590 -0.004360 0.000133 0.002006 0.045603 -0.998958 +104.138000 -6.008289 -0.020099 -9.902547 0.000046 -0.004371 -0.004370 0.000133 0.002011 0.045608 -0.998957 +104.140000 -9.601954 -0.021258 -9.916978 
0.000038 -0.004102 -0.004377 0.000133 0.002015 0.045612 -0.998957 +104.142000 -13.195614 -0.022479 -9.931354 0.000030 -0.003782 -0.004383 0.000134 0.002019 0.045616 -0.998957 +104.144000 -16.789268 -0.023764 -9.945681 0.000019 -0.003413 -0.004386 0.000134 0.002023 0.045621 -0.998957 +104.146000 -20.382915 -0.025111 -9.959968 0.000008 -0.002994 -0.004387 0.000134 0.002026 0.045625 -0.998957 +104.148000 -23.976554 -0.026522 -9.974227 -0.000004 -0.002525 -0.004386 0.000134 0.002029 0.045630 -0.998956 +104.150000 -27.570183 -0.027995 -9.988469 -0.000018 -0.002019 -0.004384 0.000134 0.002032 0.045634 -0.998956 +104.152000 -25.262928 -0.028231 -9.979430 -0.000030 -0.001528 -0.004385 0.000134 0.002034 0.045638 -0.998956 +104.154000 -22.955663 -0.028428 -9.970432 -0.000042 -0.001065 -0.004388 0.000134 0.002036 0.045643 -0.998956 +104.156000 -20.648389 -0.028583 -9.961467 -0.000052 -0.000629 -0.004395 0.000134 0.002037 0.045647 -0.998956 +104.158000 -18.341106 -0.028698 -9.952528 -0.000062 -0.000221 -0.004405 0.000134 0.002037 0.045652 -0.998955 +104.160000 -16.033815 -0.028773 -9.943608 -0.000070 0.000159 -0.004418 0.000134 0.002037 0.045656 -0.998955 +104.162000 -13.726517 -0.028807 -9.934701 -0.000077 0.000511 -0.004434 0.000134 0.002037 0.045660 -0.998955 +104.164000 -11.419211 -0.028801 -9.925800 -0.000083 0.000836 -0.004454 0.000135 0.002037 0.045665 -0.998955 +104.166000 -9.111899 -0.028754 -9.916900 -0.000089 0.001132 -0.004477 0.000135 0.002036 0.045669 -0.998955 +104.168000 -6.804581 -0.028666 -9.907996 -0.000093 0.001401 -0.004503 0.000135 0.002035 0.045674 -0.998954 +104.170000 -4.497258 -0.028537 -9.899085 -0.000095 0.001642 -0.004532 0.000135 0.002033 0.045678 -0.998954 +104.172000 -2.189930 -0.028366 -9.890162 -0.000097 0.001856 -0.004565 0.000135 0.002032 0.045683 -0.998954 +104.174000 0.117402 -0.028154 -9.881223 -0.000098 0.002041 -0.004600 0.000135 0.002030 0.045687 -0.998954 +104.176000 2.424737 -0.027899 -9.872267 -0.000098 0.002199 -0.004639 0.000135 0.002028 0.045692 -0.998954 +104.178000 4.732076 -0.027602 -9.863290 -0.000096 0.002329 -0.004681 0.000135 0.002026 0.045697 -0.998953 +104.180000 7.039417 -0.027261 -9.854291 -0.000094 0.002431 -0.004727 0.000135 0.002023 0.045701 -0.998953 +104.182000 9.346760 -0.026876 -9.845268 -0.000090 0.002506 -0.004775 0.000135 0.002021 0.045706 -0.998953 +104.184000 11.654104 -0.026446 -9.836222 -0.000085 0.002552 -0.004827 0.000135 0.002018 0.045711 -0.998953 +104.186000 13.961450 -0.025971 -9.827151 -0.000080 0.002571 -0.004882 0.000134 0.002016 0.045716 -0.998952 +104.188000 16.268795 -0.025450 -9.818055 -0.000073 0.002562 -0.004941 0.000134 0.002013 0.045720 -0.998952 +104.190000 18.576140 -0.024881 -9.808937 -0.000065 0.002526 -0.005002 0.000134 0.002011 0.045725 -0.998952 +104.192000 20.883485 -0.024265 -9.799796 -0.000056 0.002461 -0.005067 0.000134 0.002008 0.045730 -0.998952 +104.194000 23.190828 -0.023600 -9.790635 -0.000046 0.002369 -0.005135 0.000134 0.002006 0.045735 -0.998952 +104.196000 25.498169 -0.022884 -9.781456 -0.000035 0.002249 -0.005206 0.000134 0.002003 0.045741 -0.998951 +104.198000 27.805508 -0.022118 -9.772262 -0.000022 0.002101 -0.005280 0.000134 0.002001 0.045746 -0.998951 +104.200000 30.112843 -0.021299 -9.763056 -0.000009 0.001931 -0.005356 0.000134 0.001999 0.045751 -0.998951 +104.202000 27.248126 -0.020894 -9.760480 0.000003 0.001760 -0.005428 0.000134 0.001997 0.045756 -0.998951 +104.204000 24.383405 -0.020546 -9.757934 0.000015 0.001593 -0.005493 0.000134 0.001995 0.045762 -0.998950 +104.206000 21.518681 
-0.020258 -9.755416 0.000025 0.001431 -0.005552 0.000134 0.001994 0.045767 -0.998950 +104.208000 18.653954 -0.020032 -9.752922 0.000035 0.001273 -0.005605 0.000134 0.001992 0.045773 -0.998950 +104.210000 15.789223 -0.019867 -9.750450 0.000044 0.001119 -0.005652 0.000134 0.001991 0.045778 -0.998950 +104.212000 12.924490 -0.019766 -9.747996 0.000051 0.000970 -0.005692 0.000133 0.001990 0.045784 -0.998949 +104.214000 10.059754 -0.019728 -9.745558 0.000058 0.000825 -0.005727 0.000133 0.001989 0.045790 -0.998949 +104.216000 7.195015 -0.019756 -9.743133 0.000065 0.000685 -0.005755 0.000133 0.001988 0.045795 -0.998949 +104.218000 4.330273 -0.019849 -9.740719 0.000070 0.000549 -0.005777 0.000133 0.001987 0.045801 -0.998949 +104.220000 1.465528 -0.020008 -9.738313 0.000074 0.000418 -0.005793 0.000133 0.001987 0.045807 -0.998948 +104.222000 -1.399219 -0.020233 -9.735913 0.000077 0.000291 -0.005803 0.000133 0.001986 0.045813 -0.998948 +104.224000 -4.263968 -0.020525 -9.733517 0.000080 0.000169 -0.005807 0.000133 0.001986 0.045819 -0.998948 +104.226000 -7.128720 -0.020883 -9.731122 0.000081 0.000051 -0.005805 0.000133 0.001986 0.045824 -0.998948 +104.228000 -9.993474 -0.021308 -9.728727 0.000082 -0.000063 -0.005796 0.000133 0.001986 0.045830 -0.998947 +104.230000 -12.858231 -0.021799 -9.726330 0.000081 -0.000172 -0.005781 0.000132 0.001986 0.045836 -0.998947 +104.232000 -15.722989 -0.022357 -9.723928 0.000080 -0.000277 -0.005760 0.000132 0.001986 0.045842 -0.998947 +104.234000 -18.587750 -0.022979 -9.721521 0.000078 -0.000378 -0.005733 0.000132 0.001987 0.045847 -0.998946 +104.236000 -21.452512 -0.023667 -9.719106 0.000075 -0.000474 -0.005700 0.000132 0.001987 0.045853 -0.998946 +104.238000 -24.317276 -0.024418 -9.716682 0.000071 -0.000565 -0.005661 0.000132 0.001987 0.045859 -0.998946 +104.240000 -27.182042 -0.025233 -9.714247 0.000066 -0.000652 -0.005615 0.000132 0.001988 0.045865 -0.998946 +104.242000 -30.046810 -0.026109 -9.711801 0.000060 -0.000735 -0.005564 0.000132 0.001989 0.045870 -0.998945 +104.244000 -32.911580 -0.027047 -9.709342 0.000053 -0.000814 -0.005506 0.000132 0.001989 0.045876 -0.998945 +104.246000 -35.776350 -0.028044 -9.706869 0.000046 -0.000887 -0.005442 0.000132 0.001990 0.045881 -0.998945 +104.248000 -38.641123 -0.029099 -9.704381 0.000037 -0.000957 -0.005372 0.000132 0.001991 0.045887 -0.998945 +104.250000 -41.505896 -0.030210 -9.701877 0.000028 -0.001020 -0.005298 0.000132 0.001992 0.045892 -0.998944 +104.252000 -38.139374 -0.030867 -9.716172 0.000019 -0.001070 -0.005229 0.000132 0.001993 0.045897 -0.998944 +104.254000 -34.772854 -0.031448 -9.730476 0.000010 -0.001104 -0.005168 0.000132 0.001994 0.045903 -0.998944 +104.256000 -31.406334 -0.031954 -9.744794 0.000002 -0.001123 -0.005114 0.000132 0.001995 0.045908 -0.998944 +104.258000 -28.039814 -0.032388 -9.759124 -0.000006 -0.001127 -0.005068 0.000132 0.001996 0.045913 -0.998943 +104.260000 -24.673295 -0.032750 -9.773470 -0.000013 -0.001115 -0.005029 0.000132 0.001997 0.045918 -0.998943 +104.262000 -21.306775 -0.033042 -9.787832 -0.000020 -0.001088 -0.004998 0.000132 0.001999 0.045923 -0.998943 +104.264000 -17.940255 -0.033266 -9.802209 -0.000026 -0.001045 -0.004975 0.000132 0.002000 0.045928 -0.998943 +104.266000 -14.573734 -0.033421 -9.816603 -0.000032 -0.000987 -0.004959 0.000132 0.002001 0.045933 -0.998943 +104.268000 -11.207212 -0.033508 -9.831012 -0.000038 -0.000914 -0.004950 0.000132 0.002002 0.045938 -0.998942 +104.270000 -7.840689 -0.033529 -9.845435 -0.000043 -0.000825 -0.004949 0.000132 0.002003 0.045943 -0.998942 
+104.272000 -4.474164 -0.033483 -9.859871 -0.000048 -0.000721 -0.004956 0.000133 0.002003 0.045948 -0.998942 +104.274000 -1.107637 -0.033370 -9.874319 -0.000052 -0.000601 -0.004970 0.000133 0.002004 0.045953 -0.998942 +104.276000 2.258893 -0.033190 -9.888776 -0.000056 -0.000466 -0.004991 0.000133 0.002005 0.045958 -0.998941 +104.278000 5.625425 -0.032944 -9.903239 -0.000060 -0.000316 -0.005021 0.000133 0.002005 0.045963 -0.998941 +104.280000 8.991960 -0.032629 -9.917706 -0.000063 -0.000151 -0.005057 0.000133 0.002006 0.045968 -0.998941 +104.282000 12.358498 -0.032245 -9.932173 -0.000065 0.000030 -0.005101 0.000133 0.002006 0.045973 -0.998941 +104.284000 15.725040 -0.031792 -9.946637 -0.000068 0.000227 -0.005153 0.000133 0.002006 0.045978 -0.998940 +104.286000 19.091586 -0.031269 -9.961092 -0.000069 0.000439 -0.005212 0.000133 0.002005 0.045983 -0.998940 +104.288000 22.458135 -0.030673 -9.975535 -0.000071 0.000666 -0.005279 0.000133 0.002005 0.045988 -0.998940 +104.290000 25.824690 -0.030004 -9.989961 -0.000072 0.000908 -0.005353 0.000133 0.002004 0.045993 -0.998940 +104.292000 29.191249 -0.029259 -10.004363 -0.000072 0.001166 -0.005435 0.000133 0.002003 0.045999 -0.998939 +104.294000 32.557814 -0.028437 -10.018736 -0.000072 0.001440 -0.005524 0.000133 0.002002 0.046004 -0.998939 +104.296000 35.924384 -0.027535 -10.033074 -0.000072 0.001728 -0.005621 0.000133 0.002001 0.046010 -0.998939 +104.298000 39.290960 -0.026551 -10.047370 -0.000071 0.002032 -0.005726 0.000133 0.001999 0.046015 -0.998939 +104.300000 42.657542 -0.025482 -10.061617 -0.000070 0.002345 -0.005836 0.000133 0.001997 0.046021 -0.998938 +104.302000 40.168611 -0.025482 -10.044776 -0.000069 0.002640 -0.005942 0.000133 0.001995 0.046027 -0.998938 +104.304000 37.679686 -0.025531 -10.027937 -0.000068 0.002910 -0.006043 0.000133 0.001992 0.046033 -0.998938 +104.306000 35.190766 -0.025633 -10.011105 -0.000067 0.003155 -0.006138 0.000133 0.001989 0.046039 -0.998938 +104.308000 32.701851 -0.025788 -9.994286 -0.000066 0.003375 -0.006228 0.000133 0.001986 0.046045 -0.998937 +104.310000 30.212940 -0.026000 -9.977485 -0.000065 0.003571 -0.006312 0.000133 0.001983 0.046051 -0.998937 +104.312000 27.724032 -0.026268 -9.960707 -0.000064 0.003742 -0.006391 0.000133 0.001979 0.046057 -0.998937 +104.314000 25.235128 -0.026596 -9.943955 -0.000063 0.003888 -0.006465 0.000132 0.001975 0.046064 -0.998937 +104.316000 22.746227 -0.026983 -9.927235 -0.000062 0.004009 -0.006533 0.000132 0.001971 0.046070 -0.998936 +104.318000 20.257327 -0.027433 -9.910548 -0.000061 0.004105 -0.006595 0.000132 0.001967 0.046077 -0.998936 +104.320000 17.768429 -0.027945 -9.893897 -0.000060 0.004177 -0.006652 0.000132 0.001963 0.046083 -0.998936 +104.322000 15.279533 -0.028521 -9.877286 -0.000059 0.004224 -0.006703 0.000132 0.001959 0.046090 -0.998935 +104.324000 12.790637 -0.029162 -9.860714 -0.000058 0.004246 -0.006749 0.000132 0.001955 0.046097 -0.998935 \ No newline at end of file diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.100000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.100000.npy new file mode 100755 index 0000000..5924472 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.100000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.200000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.200000.npy new file mode 100755 index 0000000..5924472 Binary files 
/dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.200000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.300000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.300000.npy new file mode 100755 index 0000000..aef1f35 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.300000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.400000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.400000.npy new file mode 100755 index 0000000..612db8c Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.400000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.500000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.500000.npy new file mode 100755 index 0000000..9b90761 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.500000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.600000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.600000.npy new file mode 100755 index 0000000..864aa88 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.600000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.700000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.700000.npy new file mode 100755 index 0000000..b8ace30 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.700000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.800000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.800000.npy new file mode 100755 index 0000000..b88d6b9 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.800000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.900000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.900000.npy new file mode 100755 index 0000000..c964bf8 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/0.900000.npy differ diff --git a/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/1.000000.npy b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/1.000000.npy new file mode 100755 index 0000000..46854d1 Binary files /dev/null and b/tests/files/test_RosPublisher/test__run_ROS2_publisher_process/LiDARData/1.000000.npy differ diff --git a/tests/test_CmdLineInterface.py b/tests/test_CmdLineInterface.py index bfe97d9..c2f5ea9 100644 --- a/tests/test_CmdLineInterface.py +++ b/tests/test_CmdLineInterface.py @@ -3,13 +3,14 @@ import pandas as pd from pathlib import Path from robotdataprocess import CmdLineInterface -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper from rosbags.rosbag1 import Reader as Reader1 from rosbags.rosbag2 import Reader as Reader2 from rosbags.typesys import Stores from test_utils import safe_urlretrieve import unittest 
+@unittest.skipIf(os.getenv("SKIP_PURE_PYTHON_TESTS") == "True", "Skipping pure python tests") class TestCmdLineInterface(unittest.TestCase): """ Test the functionality of the command line interface. @@ -31,7 +32,7 @@ def setUp(self): if not os.path.isfile(path_hercules_bag_db3): safe_urlretrieve("https://www.dropbox.com/scl/fi/0ydrblh1uai1lhbrrk6c6/hercules_test_bag_pruned_3_FINAL.db3?rlkey=n27tgr0vuxcyrsyafavlh0aw9&st=i5qixbjo&dl=1", path_hercules_bag_db3) if not os.path.isfile(path_hercules_bag_yaml): - safe_urlretrieve("https://www.dropbox.com/scl/fi/vsi1tpihpar87459upyiw/metadata.yaml?rlkey=ozi4h0i4wp0kr7ckvaybz70uq&st=vxs10bqt&dl=1", path_hercules_bag_yaml) + safe_urlretrieve("https://www.dropbox.com/scl/fi/2iu1djmhedy1j4qci53a4/metadata.yaml?rlkey=x0kb00pruubxtbaojht4ui5yl&st=9b41wq07&dl=1", path_hercules_bag_yaml) @staticmethod def count_msgs_in_ros2_bag(bag_path: Path) -> dict: diff --git a/tests/test_ImageData.py b/tests/test_ImageDataInMemory.py similarity index 84% rename from tests/test_ImageData.py rename to tests/test_ImageDataInMemory.py index 6b494a6..64bc3c9 100644 --- a/tests/test_ImageData.py +++ b/tests/test_ImageDataInMemory.py @@ -4,13 +4,14 @@ import numpy as np import os from pathlib import Path -from robotdataprocess.data_types.ImageData import ImageData -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess.data_types.ImageData.ImageDataInMemory import ImageDataInMemory +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper import shutil from test_utils import safe_urlretrieve import unittest -class TestImageData(unittest.TestCase): +@unittest.skipIf(os.getenv("SKIP_PURE_PYTHON_TESTS") == "True", "Skipping pure python tests") +class TestImageDataInMemory(unittest.TestCase): def setUp(self): """ @@ -31,12 +32,12 @@ def setUp(self): if not os.path.isfile(path_hercules_bag_db3): safe_urlretrieve("https://www.dropbox.com/scl/fi/0ydrblh1uai1lhbrrk6c6/hercules_test_bag_pruned_3_FINAL.db3?rlkey=n27tgr0vuxcyrsyafavlh0aw9&st=i5qixbjo&dl=1", path_hercules_bag_db3) if not os.path.isfile(path_hercules_bag_yaml): - safe_urlretrieve("https://www.dropbox.com/scl/fi/vsi1tpihpar87459upyiw/metadata.yaml?rlkey=ozi4h0i4wp0kr7ckvaybz70uq&st=vxs10bqt&dl=1", path_hercules_bag_yaml) + safe_urlretrieve("https://www.dropbox.com/scl/fi/2iu1djmhedy1j4qci53a4/metadata.yaml?rlkey=x0kb00pruubxtbaojht4ui5yl&st=9b41wq07&dl=1", path_hercules_bag_yaml) def test_from_ros_str(self): """ Make sure that an exception is thrown with a non-valid ROS encoding str""" with np.testing.assert_raises(NotImplementedError): - ImageData.ImageEncoding.from_ros_str("fake_name") + ImageDataInMemory.ImageEncoding.from_ros_str("fake_name") def test_from_npy(self): """ @@ -46,10 +47,10 @@ def test_from_npy(self): # Convert the data into .npy (byproduct of loading ImageData from rosbag) save_folder = Path(Path('.'), 'tests', 'test_outputs', 'test_from_npy').absolute() - rosData = ImageData.from_ros2_bag(self.path_hercules_bag, '/hercules_node/Husky2/front_center_Scene/image', save_folder) + rosData = ImageDataInMemory.from_ros2_bag(self.path_hercules_bag, '/hercules_node/Husky2/front_center_Scene/image', save_folder) # Load the .npy file - npyData = ImageData.from_npy(save_folder) + npyData = ImageDataInMemory.from_npy(save_folder) # Make sure the two classes are equivalent np.testing.assert_equal(rosData.frame_id, npyData.frame_id) @@ -70,7 +71,7 @@ def test_from_npy_files(self): # === Test with 32FC1 Images === # Load the npy files files_folder = Path(Path('.'), 'tests', 
'files', 'test_ImageData', 'test_from_npy_files', '32fc1').absolute() - image_data = ImageData.from_npy_files(files_folder, 'Husky1/front_center_DepthPlanar') + image_data = ImageDataInMemory.from_npy_files(files_folder, 'Husky1/front_center_DepthPlanar') # Make sure it matches what is loaded directly using NumPy np.testing.assert_array_equal(image_data.images[2], np.load(files_folder / '0.150000.npy', 'r')) @@ -81,7 +82,7 @@ def test_from_npy_files(self): np.testing.assert_equal(image_data.frame_id, 'Husky1/front_center_DepthPlanar') np.testing.assert_equal(image_data.height, 480) np.testing.assert_equal(image_data.width, 752) - np.testing.assert_equal(image_data.encoding, ImageData.ImageEncoding._32FC1) + np.testing.assert_equal(image_data.encoding, ImageDataInMemory.ImageEncoding._32FC1) def test_from_image_files(self): """ @@ -92,7 +93,7 @@ def test_from_image_files(self): # === Test with RGB8 Images === # Load the image data with the class and save to a ROS2 bag files_folder = Path(Path('.'), 'tests', 'test_outputs', 'test_from_image_files', 'rgb').absolute() - image_data = ImageData.from_image_files(files_folder, 'Husky1/front_center_Scene') + image_data = ImageDataInMemory.from_image_files(files_folder, 'Husky1/front_center_Scene') bag_path = Path(Path('.'), 'tests', 'test_bags', 'test_from_image_files', 'rgb_data_bag').absolute() if os.path.isdir(bag_path): os.remove(bag_path / 'rgb_data_bag.db3') @@ -102,14 +103,14 @@ def test_from_image_files(self): # Load that data directly from the rosbag npy_folder = Path(Path('.'), 'tests', 'test_outputs', 'test_from_image_files', 'rgb', 'npy').absolute() - image_data_after = ImageData.from_ros2_bag(bag_path, '/cam0', npy_folder) + image_data_after = ImageDataInMemory.from_ros2_bag(bag_path, '/cam0', npy_folder) # Make sure that the data is what we expect np.testing.assert_array_almost_equal(np.arange(0.05, 10.05, 0.05), image_data_after.timestamps.astype(np.float128), 14) np.testing.assert_equal('Husky1/front_center_Scene', image_data_after.frame_id) np.testing.assert_equal(480, image_data_after.height) np.testing.assert_equal(752, image_data_after.width) - np.testing.assert_equal(ImageData.ImageEncoding.RGB8, image_data_after.encoding) + np.testing.assert_equal(ImageDataInMemory.ImageEncoding.RGB8, image_data_after.encoding) # Manually load a couple images to compare image data def manual_cv2_load(path, cvt=True): @@ -129,7 +130,7 @@ def manual_cv2_load(path, cvt=True): # === Test with Mono8 Images === path = Path('.')/'tests'/'files'/'test_ImageData'/'test_from_image_files'/'mono8' - image_data = ImageData.from_image_files(path.absolute(), 'callie') + image_data = ImageDataInMemory.from_image_files(path.absolute(), 'callie') images = np.zeros((1, 652, 1196), dtype=np.uint8) images[0] = manual_cv2_load(path / "0.0.png", False) @@ -139,13 +140,13 @@ def manual_cv2_load(path, cvt=True): np.testing.assert_equal('callie', image_data.frame_id) np.testing.assert_equal(652, image_data.height) np.testing.assert_equal(1196, image_data.width) - np.testing.assert_equal(ImageData.ImageEncoding.Mono8, image_data.encoding) + np.testing.assert_equal(ImageDataInMemory.ImageEncoding.Mono8, image_data.encoding) # === Unsupported Formats === # Test that it properly detects an unsupported format path = Path('.')/'tests'/'files'/'test_ImageData'/'test_from_image_files'/'rgba' with np.testing.assert_raises(NotImplementedError): - _ = ImageData.from_image_files(path.absolute(), 'N/A') + _ = ImageDataInMemory.from_image_files(path.absolute(), 'N/A') def 
test_to_npy(self): """ Make sure the data isn't changed after saving to an .npy file. """ @@ -153,7 +154,7 @@ def test_to_npy(self): # === Test with RGB8 Images === # Load the images files_folder = Path(Path('.'), 'tests', 'files', 'test_ImageData', 'test_to_npy', 'images').absolute() - image_data = ImageData.from_image_files(files_folder, 'Husky1/front_center_Scene') + image_data = ImageDataInMemory.from_image_files(files_folder, 'Husky1/front_center_Scene') # Save it to an .npy file save_path = Path(Path('.'), 'tests', 'temporary_files', 'test_ImageData', 'test_to_npy', 'npy').absolute() @@ -161,7 +162,7 @@ def test_to_npy(self): image_data.to_npy(save_path) # Load it back from the .npy file - npy_data = ImageData.from_npy(save_path) + npy_data = ImageDataInMemory.from_npy(save_path) # Ensure the data hasn't changed np.testing.assert_array_almost_equal(image_data.images, npy_data.images, 16) @@ -174,13 +175,13 @@ def test_to_npy(self): # === Test with 32FC1 Images === # Load the npy files files_folder = Path(Path('.'), 'tests', 'files', 'test_ImageData', 'test_from_npy_files', '32fc1').absolute() - image_data = ImageData.from_npy_files(files_folder, 'Husky1/front_center_DepthPlanar') + image_data = ImageDataInMemory.from_npy_files(files_folder, 'Husky1/front_center_DepthPlanar') # Save to .npy file and reload save_path = Path(Path('.'), 'tests', 'temporary_files', 'test_ImageData', 'test_to_npy', 'npy_depth').absolute() save_path.mkdir(parents=True, exist_ok=True) image_data.to_npy(save_path) - npy_data = ImageData.from_npy(save_path) + npy_data = ImageDataInMemory.from_npy(save_path) # Ensure the data hasn't changed np.testing.assert_array_almost_equal(image_data.images, npy_data.images, 16) @@ -195,7 +196,7 @@ def test_crop_data(self): # Load the images files_folder = Path(Path('.'), 'tests', 'files', 'test_ImageData', 'test_crop_data', 'images').absolute() - image_data = ImageData.from_image_files(files_folder, 'camera') + image_data = ImageDataInMemory.from_image_files(files_folder, 'camera') # Ensure cropping works correctly image_data_cropped = deepcopy(image_data) @@ -214,7 +215,7 @@ def test_get_ros_msg(self): # Load the images files_folder = Path(Path('.'), 'tests', 'files', 'test_ImageData', 'test_get_ros_msg', 'images').absolute() - image_data = ImageData.from_npy_files(files_folder, 'cam0') + image_data = ImageDataInMemory.from_npy_files(files_folder, 'cam0') # Write the image data to a ROS message bag_path = Path(Path('.'), 'tests', 'temporary_files', 'test_ImageData', 'test_get_ros_msg', 'depth.bag').absolute() @@ -228,7 +229,7 @@ def test_get_ros_msg(self): None) # Load that data back from the rosbag - image_data_after = ImageData.from_ros2_bag(bag_path, '/cam0/depth', bag_path.parent / 'npy') + image_data_after = ImageDataInMemory.from_ros2_bag(bag_path, '/cam0/depth', bag_path.parent / 'npy') # Ensure that the data is the same np.testing.assert_array_equal(image_data.images, image_data_after.images) diff --git a/tests/test_ImageDataOnDisk.py b/tests/test_ImageDataOnDisk.py new file mode 100644 index 0000000..c7d5a52 --- /dev/null +++ b/tests/test_ImageDataOnDisk.py @@ -0,0 +1,27 @@ +import numpy as np +import os +from pathlib import Path +from robotdataprocess import ImageDataInMemory, ImageDataOnDisk +import unittest + +@unittest.skipIf(os.getenv("SKIP_PURE_PYTHON_TESTS") == "True", "Skipping pure python tests") +class TestImageDataOnDisk(unittest.TestCase): + + def test_from_image_files(self): + """ Assert the functionality matches that of ImageDataInMemory """ + + # 
Load the image data using both classes + folder_path = Path(Path('.'), 'tests', 'files', 'test_ImageDataOnDisk', 'test_from_image_files').absolute() + image_data_mem = ImageDataOnDisk.from_image_files(folder_path, 'optical') + image_data_disk = ImageDataInMemory.from_image_files(folder_path, 'optical') + + # Assert that their data matches + np.testing.assert_equal(image_data_mem.frame_id, image_data_disk.frame_id) + np.testing.assert_array_equal(image_data_mem.timestamps, image_data_disk.timestamps) + np.testing.assert_equal(image_data_mem.height, image_data_disk.height) + np.testing.assert_equal(image_data_mem.width, image_data_disk.width) + np.testing.assert_equal(image_data_mem.encoding, image_data_disk.encoding) + np.testing.assert_array_equal(image_data_mem.images, image_data_disk.images) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/tests/test_ImuData.py b/tests/test_ImuData.py index bbc078d..2177106 100644 --- a/tests/test_ImuData.py +++ b/tests/test_ImuData.py @@ -5,9 +5,10 @@ from pathlib import Path from robotdataprocess import CoordinateFrame from robotdataprocess.data_types.ImuData import ImuData -from robotdataprocess.rosbag.Ros2BagWrapper import Ros2BagWrapper +from robotdataprocess.ros.Ros2BagWrapper import Ros2BagWrapper import unittest +@unittest.skipIf(os.getenv("SKIP_PURE_PYTHON_TESTS") == "True", "Skipping pure python tests") class TestImuData(unittest.TestCase): # TODO: Write test cases for: @@ -35,12 +36,21 @@ def test_from_txt_file(self): ros_data = ImuData.from_ros2_bag(bag_path, '/imu', '/Husky1/base_link') # Make sure this data matches what we expect - np.testing.assert_equal(float(ros_data.timestamps[85]), 29.800000) + np.testing.assert_equal(float(ros_data.timestamps[85]), 29.8) np.testing.assert_array_equal(ros_data.lin_acc[85].astype(np.float128), [-4.311637, 0.022841, -9.319456]) np.testing.assert_array_equal(ros_data.ang_vel[85].astype(np.float128), [0.001708, -0.065869, -0.002687]) np.testing.assert_array_equal(ros_data.orientations[85].astype(np.float128), [0, 0, 0, 1]) np.testing.assert_equal(ros_data.frame_id, '/Husky1/base_link') + # ======== Additionaly test when Orientation data is provided + file_path = Path(Path('.'), 'tests', 'files', 'test_ImuData', 'test_from_txt_file', 'synthetic_imu_9axis.txt').absolute() + imu_data = ImuData.from_txt_file(file_path, '/Husky2/robot', CoordinateFrame.NED, nine_axis=True) + np.testing.assert_equal(float(imu_data.timestamps[84]), 331.79) + np.testing.assert_array_equal(imu_data.lin_acc[84].astype(np.float128), [-8.800791, -0.004754, -9.927985]) + np.testing.assert_array_equal(imu_data.ang_vel[84].astype(np.float128), [ 0.000035, 0.001181, -0.005946]) + np.testing.assert_array_equal(imu_data.orientations[84].astype(np.float128), [0.001867, -0.000799, 0.927410, 0.374042]) + np.testing.assert_equal(imu_data.frame_id, '/Husky2/robot') + def test_crop_data(self): """ Make sure data is successfully cropped. 
""" @@ -54,7 +64,7 @@ def test_crop_data(self): np.testing.assert_array_equal(imu_data_cropped.timestamps, imu_data.timestamps[13:75]) np.testing.assert_array_equal(imu_data_cropped.lin_acc, imu_data.lin_acc[13:75]) np.testing.assert_array_equal(imu_data_cropped.ang_vel, imu_data.ang_vel[13:75]) - np.testing.assert_array_equal(imu_data_cropped.orientations, imu_data.orientations[13:75]) + np.testing.assert_equal(imu_data_cropped.orientations, None) # def test_to_FLU_frame(self): diff --git a/tests/test_LiDARData.py b/tests/test_LiDARData.py new file mode 100644 index 0000000..3368d31 --- /dev/null +++ b/tests/test_LiDARData.py @@ -0,0 +1,152 @@ +from decimal import Decimal +import numpy as np +import os +from pathlib import Path +from robotdataprocess import CoordinateFrame, LiDARData, ROSMsgLibType +import struct +import unittest + +@unittest.skipIf(os.getenv("SKIP_PURE_PYTHON_TESTS") == "True", "Skipping pure python tests") +class TestLiDARData(unittest.TestCase): + + def test_from_npy_files(self): + """ Ensure we load a point cloud from a folder with npy files correctly. """ + + folder_path = Path(Path('.'), 'tests', 'files', 'test_LiDARData', 'test_from_npy_files').absolute() + print(folder_path) + lidar_data = LiDARData.from_npy_files(folder_path, "robot", CoordinateFrame.NED) + + # Check that the values are what we expect + np.testing.assert_array_equal(lidar_data.timestamps.astype(float), [0.1, 0.6, 1.1]) + np.testing.assert_equal(lidar_data.frame, CoordinateFrame.NED) + np.testing.assert_equal(lidar_data.frame_id, "robot") + np.testing.assert_array_equal(lidar_data.get_point_cloud_at_index(0)[0][3], [np.nan, np.nan, np.nan]) + np.testing.assert_array_equal(lidar_data.get_point_cloud_at_index(0)[0][35], [26.67838478088379, 0.3280501961708069, -9.796014785766602]) + np.testing.assert_array_equal(lidar_data.get_point_cloud_at_index(2)[0][-1], [5.671111583709717, 9.91832280305971e-7, 2.6444828510284424]) + np.testing.assert_equal(len(lidar_data.point_clouds), 3) + + # def test_to_FLU_frame(self): + # """ Ensure our transformations are correct """ + + # point_cloud_simple = [[ 0, 1, 2], [-1, -3, 4]] + # lidar_data = LiDARData("robot", [0], point_cloud_simple, None, CoordinateFrame.NED) + # lidar_data.to_FLU_frame() + + # # Assert that the transformation completed successfully + # expected_pc = np.array([[[ 0, -1, -2], [-1, 3, -4]]]) + # np.testing.assert_array_equal(lidar_data.point_clouds, expected_pc) + + # # Make sure it doesn't change when we call it again + # lidar_data.to_FLU_frame() + # expected_pc = np.array([[[ 0, -1, -2], [-1, 3, -4]]]) + # np.testing.assert_array_equal(lidar_data.point_clouds, expected_pc) + + def test_visualize(self): + """ Just ensure that this code doesn't crash. 
""" + + folder_path = Path(Path('.'), 'tests', 'files', 'test_LiDARData', 'test_from_npy_files').absolute() + print(folder_path) + lidar_data = LiDARData.from_npy_files(folder_path, "robot", CoordinateFrame.NED) + lidar_data.visualize(testing=True) + + def test_calculate_point_channels(self): + """ Ensure the scan line (channels) are calculated correctly for each point""" + point_cloud_simple = [np.array([[ 0, 1, 2], + [-1, -3, 4], + [ 4, 1, 2], + [ 4, 10, 2], + [ 4, 10, 1], + [ 4, 10, 0]]), + np.array([[ 4, 10, -2], + [ 4, 10, -1], + [ 0, 1, -2], + [10, 10, -1], + [10, 10, -3], + [np.nan, np.nan, np.nan]])] + lidar_data = LiDARData("robot", [0, 1], point_cloud_simple, None, CoordinateFrame.FLU) + lidar_data.calculate_point_channels(41, -20, 20) + + np.testing.assert_array_equal(lidar_data.channels, [[40, 40, 40, 31, 25, 20], + [ 9, 15, 0, 16, 8, -1]]) + + # NOTE: Only testing ROSBAGS right now + def test_get_ros_msg_type(self): + """ Ensure we get the correct ROS message type. """ + + folder_path = Path(Path('.'), 'tests', 'files', 'test_LiDARData', 'test_from_npy_files').absolute() + print(folder_path) + lidar_data = LiDARData.from_npy_files(folder_path, "robot", CoordinateFrame.NED) + ros_msg_type = lidar_data.get_ros_msg_type(ROSMsgLibType.ROSBAGS) + self.assertEqual(ros_msg_type, 'sensor_msgs/msg/PointCloud2') + + def test_get_ros_msg(self): + """ Ensure we can create a ROS PointCloud2 message correctly. """ + + folder_path = Path(Path('.'), 'tests', 'files', 'test_LiDARData', 'test_from_npy_files').absolute() + print(folder_path) + lidar_data = LiDARData.from_npy_files(folder_path, "robot", CoordinateFrame.NED) + lidar_data.calculate_point_channels(51, -25, 25) + + # Get a ROS message for the first point cloud (index 0) + ros_msg = lidar_data.get_ros_msg(ROSMsgLibType.ROSBAGS, 0) + + # Validate header + self.assertEqual(ros_msg.header.frame_id, "robot") + self.assertEqual(ros_msg.header.stamp.sec, 0) + self.assertEqual(ros_msg.header.stamp.nanosec, 100000000) # 0.1 seconds = 100000000 nanoseconds + + # Validate point cloud structure + self.assertEqual(ros_msg.height, 1) + self.assertEqual(ros_msg.width, lidar_data.point_clouds[0].shape[0]) # Number of points + self.assertEqual(ros_msg.is_bigendian, False) + self.assertEqual(ros_msg.is_dense, False) + self.assertEqual(ros_msg.point_step, 24) + self.assertEqual(ros_msg.row_step, 24 * lidar_data.point_clouds[0].shape[0]) + + # Validate fields (x, y, z) + self.assertEqual(len(ros_msg.fields), 6) + self.assertEqual(ros_msg.fields[0].name, 'x') + self.assertEqual(ros_msg.fields[0].offset, 0) + self.assertEqual(ros_msg.fields[0].datatype, 7) # FLOAT32 + self.assertEqual(ros_msg.fields[0].count, 1) + self.assertEqual(ros_msg.fields[1].name, 'y') + self.assertEqual(ros_msg.fields[1].offset, 4) + self.assertEqual(ros_msg.fields[2].name, 'z') + self.assertEqual(ros_msg.fields[2].offset, 8) + self.assertEqual(ros_msg.fields[3].name, 'ring') + self.assertEqual(ros_msg.fields[3].offset, 12) + self.assertEqual(ros_msg.fields[4].name, 'time') + self.assertEqual(ros_msg.fields[4].offset, 16) + self.assertEqual(ros_msg.fields[5].name, 'intensity') + self.assertEqual(ros_msg.fields[5].offset, 20) + + # Validate data array length + expected_data_length = lidar_data.point_clouds[0].shape[0] * 24 # num_points * point_step + self.assertEqual(len(ros_msg.data), expected_data_length) + + # Validate actual point cloud values by decoding the binary data + binary_data = bytes(ros_msg.data) + num_points = ros_msg.width + unpacked_points = [] + for i in 
range(num_points): + offset = i * 24 # 24 bytes per point (x, y, z, ring, time, intensity) + x, y, z, r, t, i = struct.unpack(' 0) + + # Make sure data is correct for each message we received + data_ts_floats = data.timestamps.astype(float) + for j in range(len(node.received)): + stamp = node.received[j]['stamp'] + matches = np.where(np.isclose(data_ts_floats, stamp, atol=1e-6))[0] + if matches.size > 0: + # Ensure that data matches the received msg dictionary + matched_index = int(matches[0]) + assert_data_dict_equal(data, matched_index, node.received[j]) + else: + self.fail("Received ROS Message has a timestamp that doesn't match any of the sent messages!") + + finally: + # ---------- Guaranteed cleanup ---------- + node.destroy_node() + rclpy.shutdown() + + + def test__run_ROS2_publisher_process(self): + """ Test that we can publish to ROS2 without losing data.""" + + # ================== Test ImageData ================== + # Create an ImageDataInMemory object + file_path = Path(Path('.'), 'tests', 'files', 'test_RosPublisher', + 'test__run_ROS2_publisher_process', 'ImageData').absolute() + image_data = ImageDataInMemory.from_image_files(file_path, '/cam0') + + # Lazily import ROS2 libraries + Image = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Image') + + # Write msg_to_dict function + CvBridge = ModuleImporter.get_module_attribute('cv_bridge', 'CvBridge') + bridge = CvBridge() + def msg_to_dict_fn(msg: Image) -> dict: + image: NDArray = bridge.imgmsg_to_cv2(msg, desired_encoding='passthrough') + return { + "image": image, + "height": msg.height, + "width": msg.width, + "encoding": msg.encoding, + "frame_id": msg.header.frame_id, + "stamp": msg.header.stamp.sec + msg.header.stamp.nanosec * 1e-9 + } + + # Write assert_data_dict_equal function + def assert_data_dict_equal(data_sent: ImageData, matched_index: int, msg_recieved: dict): + np.testing.assert_array_equal(data_sent.images[matched_index], msg_recieved["image"]) + np.testing.assert_equal(data_sent.height, msg_recieved["height"]) + np.testing.assert_equal(data_sent.width, msg_recieved["width"]) + np.testing.assert_equal(data_sent.encoding, ImageData.ImageEncoding.from_ros_str(msg_recieved["encoding"])) + np.testing.assert_equal(data_sent.frame_id, msg_recieved["frame_id"]) + np.testing.assert_almost_equal(float(data_sent.timestamps[matched_index]), msg_recieved["stamp"]) + + # Test that we can send data over ROS2 and get it back successfully + p = Process(target=self.util_ROS2_test, args=(image_data, Image, + '/cam0/image_raw', msg_to_dict_fn, assert_data_dict_equal)) + p.start() + p.join() + self.assertEqual(p.exitcode, 0, f"Child process failed with exit code {p.exitcode}") + + # ================== Test LiDARData ================== + # Create a LiDARData object + file_path = Path(Path('.'), 'tests', 'files', 'test_RosPublisher', + 'test__run_ROS2_publisher_process', 'LiDARData').absolute() + lidar_data = LiDARData.from_npy_files(file_path, "lidar_link", CoordinateFrame.NED) + lidar_data.calculate_point_channels(32, -25, 25) + + # Lazily import ROS2 libraries + PointCloud2 = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'PointCloud2') + + # Write msg_to_dict function + CvBridge = ModuleImporter.get_module_attribute('cv_bridge', 'CvBridge') + bridge = CvBridge() + def msg_to_dict_fn(msg: PointCloud2) -> dict: + + # Map ROS datatype to struct format + datatype_to_struct = { + 7: 'f', # FLOAT32 + 4: 'I', # UINT32 + 2: 'B', # UINT8 + } + + # Build struct format for all fields in the message + struct_endian = '>' if
msg.is_bigendian else '<' + struct_fmt = struct_endian + for f in msg.fields: + if f.datatype not in datatype_to_struct: + raise NotImplementedError(f"Datatype {f.datatype} for field '{f.name}' not supported") + struct_fmt += datatype_to_struct[f.datatype] + + # Extract all points + points = [] + for i in range(0, len(msg.data), msg.point_step): + values = struct.unpack(struct_fmt, msg.data[i:i+msg.point_step]) + points.append(values) + points_np = np.array(points, dtype=np.float32) + + # Create a dict mapping field names -> column indices + field_indices = {f.name: idx for idx, f in enumerate(msg.fields)} + + # Now we can index by name + xyz = points_np[:, [field_indices['x'], field_indices['y'], field_indices['z']]] + ring = points_np[:, field_indices['ring']].astype(np.int32) + time = points_np[:, field_indices['time']] + intensity = points_np[:, field_indices['intensity']] + + # Return important data in a dictionary + return { + "point_clouds": xyz, + "channels": ring, + "time": time, + "intensity": intensity, + "frame_id": msg.header.frame_id, + "height": msg.height, + "width": msg.width, + "stamp": msg.header.stamp.sec + msg.header.stamp.nanosec * 1e-9 + } + + # Write assert_data_dict_equal function + def assert_data_dict_equal(data_sent: LiDARData, matched_index: int, msg_recieved: dict): + exp_num_points = data_sent.point_clouds[matched_index].shape[0] + np.testing.assert_array_equal(data_sent.get_point_cloud_at_index(matched_index)[0], msg_recieved["point_clouds"]) + np.testing.assert_array_equal(data_sent.get_point_cloud_at_index(matched_index)[1], msg_recieved["channels"]) + np.testing.assert_array_equal(np.zeros(exp_num_points), msg_recieved["time"]) + np.testing.assert_array_equal(255 * np.ones(exp_num_points), msg_recieved["intensity"]) + np.testing.assert_equal(data_sent.frame_id, msg_recieved["frame_id"]) + np.testing.assert_equal(1, msg_recieved["height"]) + np.testing.assert_equal(exp_num_points, msg_recieved["width"]) + np.testing.assert_almost_equal(float(data_sent.timestamps[matched_index]), msg_recieved["stamp"]) + + # Test that we can send data over ROS2 and get it back successfully + p = Process(target=self.util_ROS2_test, args=(lidar_data, PointCloud2, '/points_raw', msg_to_dict_fn, assert_data_dict_equal, True)) + p.start() + p.join() + self.assertEqual(p.exitcode, 0, f"Child process failed with exit code {p.exitcode}") + + # ================== Test ImuData ================== + # Create a ImuData object + file_path = Path(Path('.'), 'tests', 'files', os.path.basename(__file__)[:-3], + inspect.currentframe().f_code.co_name, 'ImuData', 'imu.txt').absolute() + imu_data = ImuData.from_txt_file(file_path, "base_link", CoordinateFrame.NED, True) + + # Write msg_to_dict_fn + Imu = ModuleImporter.get_module_attribute('sensor_msgs.msg', 'Imu') + def msg_to_dict_fn(msg: Imu) -> dict: + return { + "linear_acceleration": np.array([msg.linear_acceleration.x, + msg.linear_acceleration.y, + msg.linear_acceleration.z]), + "linear_acceleration_covariance": msg.linear_acceleration_covariance, + "angular_velocity": np.array([msg.angular_velocity.x, + msg.angular_velocity.y, + msg.angular_velocity.z]), + "angular_velocity_covariance": msg.angular_velocity_covariance, + "orientation": np.array([msg.orientation.x, + msg.orientation.y, + msg.orientation.z, + msg.orientation.w]), + "orientation_covariance": msg.orientation_covariance, + "frame_id": msg.header.frame_id, + "stamp": msg.header.stamp.sec + msg.header.stamp.nanosec * 1e-9 + } + + # Write assert_data_dict_equal function + def 
assert_data_dict_equal(data_sent: ImuData, matched_index: int, msg_recieved: dict): + np.testing.assert_array_equal( data_sent.lin_acc[matched_index].astype(float), msg_recieved["linear_acceleration"]) + np.testing.assert_array_equal( np.zeros(9), msg_recieved["linear_acceleration_covariance"]) + np.testing.assert_array_equal( data_sent.ang_vel[matched_index].astype(float), msg_recieved["angular_velocity"]) + np.testing.assert_array_equal( np.zeros(9), msg_recieved["angular_velocity_covariance"]) + np.testing.assert_array_equal( data_sent.orientations[matched_index].astype(float), msg_recieved["orientation"]) + np.testing.assert_array_equal( np.zeros(9), msg_recieved["orientation_covariance"]) + np.testing.assert_equal( data_sent.frame_id, msg_recieved["frame_id"]) + np.testing.assert_almost_equal(float(data_sent.timestamps[matched_index]), msg_recieved["stamp"]) + + # Test that we can send data over ROS2 and get it back successfully + p = Process(target=self.util_ROS2_test, args=(imu_data, Imu, '/imu0', msg_to_dict_fn, assert_data_dict_equal, False)) + p.start() + p.join() + self.assertEqual(p.exitcode, 0, f"Child process failed with exit code {p.exitcode}") + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/visualize_lidar_npy.py b/visualize_lidar_npy.py new file mode 100644 index 0000000..c168413 --- /dev/null +++ b/visualize_lidar_npy.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 +""" +Visualize a LiDAR point cloud from a .npy file. + +Usage: + python visualize_lidar_npy.py + +Example: + python visualize_lidar_npy.py /path/to/lidar/1234567890.123.npy +""" + +import sys +import numpy as np +import matplotlib.pyplot as plt +from pathlib import Path + +def visualize_point_cloud(npy_path: str): + """ + Visualize a point cloud from a .npy file. 
+ + Args: + npy_path: Path to the .npy file containing the point cloud + """ + + # Load the point cloud + points = np.load(npy_path) + + print(f"Loaded point cloud from: {npy_path}") + print(f"Shape: {points.shape}") + print(f"Number of points: {points.shape[0]}") + print(f"Data range:") + print(f" X: [{points[:, 0].min():.3f}, {points[:, 0].max():.3f}]") + print(f" Y: [{points[:, 1].min():.3f}, {points[:, 1].max():.3f}]") + print(f" Z: [{points[:, 2].min():.3f}, {points[:, 2].max():.3f}]") + + # Extract timestamp from filename + filename = Path(npy_path).stem + + # Create figure with multiple views + fig = plt.figure(figsize=(15, 10)) + fig.suptitle(f'Point Cloud Visualization - Timestamp: {filename}', fontsize=16) + + # 3D view + ax1 = fig.add_subplot(2, 2, 1, projection='3d') + scatter = ax1.scatter(points[:, 0], points[:, 1], points[:, 2], + c=points[:, 2], cmap='viridis', s=1, alpha=0.6) + ax1.set_xlabel('X (m)') + ax1.set_ylabel('Y (m)') + ax1.set_zlabel('Z (m)') + ax1.set_title('3D View (colored by Z height)') + plt.colorbar(scatter, ax=ax1, label='Z (m)', shrink=0.5) + + # Top-down view (X-Y plane) + ax2 = fig.add_subplot(2, 2, 2) + scatter2 = ax2.scatter(points[:, 0], points[:, 1], + c=points[:, 2], cmap='viridis', s=1, alpha=0.6) + ax2.set_xlabel('X (m)') + ax2.set_ylabel('Y (m)') + ax2.set_title('Top-Down View (X-Y, colored by Z)') + ax2.axis('equal') + plt.colorbar(scatter2, ax=ax2, label='Z (m)') + + # Side view (X-Z plane) + ax3 = fig.add_subplot(2, 2, 3) + ax3.scatter(points[:, 0], points[:, 2], s=1, alpha=0.6, c='blue') + ax3.set_xlabel('X (m)') + ax3.set_ylabel('Z (m)') + ax3.set_title('Side View (X-Z)') + ax3.axis('equal') + + # Front view (Y-Z plane) + ax4 = fig.add_subplot(2, 2, 4) + ax4.scatter(points[:, 1], points[:, 2], s=1, alpha=0.6, c='green') + ax4.set_xlabel('Y (m)') + ax4.set_ylabel('Z (m)') + ax4.set_title('Front View (Y-Z)') + ax4.axis('equal') + + plt.tight_layout() + plt.show() + +def main(): + if len(sys.argv) != 2: + print("Usage: python visualize_lidar_npy.py <path_to_npy_file>") + print("\nExample:") + print(" python visualize_lidar_npy.py /path/to/lidar/1234567890.123.npy") + sys.exit(1) + + npy_path = sys.argv[1] + + if not Path(npy_path).exists(): + print(f"Error: File not found: {npy_path}") + sys.exit(1) + + visualize_point_cloud(npy_path) + +if __name__ == "__main__": + main()
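For reference, the PointCloud2 layout exercised by test_get_ros_msg and the publisher round-trip test above is: a 24-byte point_step, little-endian, with fields x, y, z, ring, time and intensity at offsets 0, 4, 8, 12, 16 and 20, and the publisher filling time with 0.0 and intensity with 255.0 for every point. Below is a minimal packing/unpacking sketch of that layout, assuming ring is UINT32 and time/intensity are FLOAT32 (the tests only pin the offsets, the point step, and FLOAT32 for x/y/z); POINT_FMT, pack_points and unpack_points are illustrative names, not part of robotdataprocess.

import struct
import numpy as np

POINT_FMT = '<fffIff'                        # little-endian: x, y, z, ring, time, intensity (assumed field types)
POINT_STEP = struct.calcsize(POINT_FMT)      # 24 bytes per point, matching ros_msg.point_step in the tests

def pack_points(xyz: np.ndarray, ring: np.ndarray) -> bytes:
    """Serialize an (N, 3) cloud plus per-point ring index into PointCloud2-style bytes."""
    buf = bytearray()
    for (x, y, z), r in zip(xyz, ring):
        # time defaults to 0.0 and intensity to 255.0, as asserted in the round-trip test
        buf += struct.pack(POINT_FMT, float(x), float(y), float(z), int(r), 0.0, 255.0)
    return bytes(buf)

def unpack_points(data: bytes):
    """Decode the buffer back into xyz, ring, time and intensity arrays."""
    n = len(data) // POINT_STEP
    rows = [struct.unpack_from(POINT_FMT, data, i * POINT_STEP) for i in range(n)]
    arr = np.array(rows, dtype=np.float64)
    return arr[:, :3], arr[:, 3].astype(np.int32), arr[:, 4], arr[:, 5]

if __name__ == '__main__':
    xyz = np.array([[0.0, 1.0, 2.0], [-1.0, -3.0, 4.0]], dtype=np.float32)
    ring = np.array([0, 1], dtype=np.uint32)
    data = pack_points(xyz, ring)
    assert len(data) == 2 * POINT_STEP       # row_step == point_step * number of points
    xyz_out, ring_out, time_out, intensity_out = unpack_points(data)
    np.testing.assert_array_equal(xyz_out.astype(np.float32), xyz)
    np.testing.assert_array_equal(ring_out, [0, 1])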
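Similarly, the imu.txt fixture added under test__run_ROS2_publisher_process/ImuData is an 11-column whitespace-separated file. A minimal reader sketch follows, assuming the columns are timestamp, linear acceleration x/y/z, angular velocity x/y/z, and an orientation quaternion in the last four columns (the nine-axis case); the column interpretation, the quaternion component order, and the read_imu_txt helper are assumptions for illustration, not the ImuData.from_txt_file implementation.

import numpy as np

def read_imu_txt(path: str):
    """Parse a whitespace-separated IMU text file with optional '#' header lines."""
    rows = np.loadtxt(path, comments='#', ndmin=2)
    timestamps = rows[:, 0]                                  # seconds
    lin_acc = rows[:, 1:4]                                   # m/s^2
    ang_vel = rows[:, 4:7]                                   # rad/s
    quat = rows[:, 7:11] if rows.shape[1] >= 11 else None    # only present for nine-axis files
    return timestamps, lin_acc, ang_vel, quat

# Example against the fixture above: under this assumed layout the first row is
# t = 104.112 s with lin_acc ~ [40.709373, -0.010616, -9.709079].
# t, a, w, q = read_imu_txt('tests/files/test_RosPublisher/test__run_ROS2_publisher_process/ImuData/imu.txt')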