Fill out some of the readme.
kevin-robb committed Nov 20, 2023
1 parent 9005c51 commit b9d0548
Showing 3 changed files with 302 additions and 14 deletions.
224 changes: 224 additions & 0 deletions environment.yml
@@ -0,0 +1,224 @@
name: cmn_env
channels:
- pytorch
- aihabitat
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_gnu
- blas=1.0=mkl
- brotlipy=0.7.0=py39hb9d737c_1005
- bzip2=1.0.8=h7b6447c_0
- c-ares=1.19.0=h5eee18b_0
- ca-certificates=2023.01.10=h06a4308_0
- cffi=1.15.1=py39he91dace_3
- charset-normalizer=3.1.0=pyhd8ed1ab_0
- cloudpickle=2.2.1=py39h06a4308_0
- colorama=0.4.6=pyhd8ed1ab_0
- cryptography=40.0.2=py39h079d5ae_0
- cudatoolkit=11.3.1=h2bc3f7f_2
- cycler=0.11.0=pyhd8ed1ab_0
- cytoolz=0.12.0=py39h5eee18b_0
- dask-core=2023.4.1=py39h06a4308_0
- expat=2.4.9=h6a678d5_0
- ffmpeg=4.3.2=hca11adc_0
- freetype=2.10.4=h0708190_1
- fsspec=2023.4.0=py39h06a4308_0
- future=0.18.3=py39h06a4308_0
- giflib=5.2.1=h5eee18b_3
- gitdb=4.0.10=pyhd8ed1ab_0
- gitpython=3.1.31=pyhd8ed1ab_0
- gmp=6.2.1=h58526e2_0
- gnutls=3.6.13=h85f3911_1
- habitat-sim-mutex=1.0=display_bullet
- idna=3.4=pyhd8ed1ab_0
- imageio=2.27.0=pyh24c5eb1_0
- imageio-ffmpeg=0.4.8=pyhd8ed1ab_0
- intel-openmp=2023.1.0=hdb19cb5_46305
- jbig=2.1=h7f98852_2003
- jpeg=9e=h0b41bf4_3
- keyutils=1.6.1=h166bdaf_0
- kiwisolver=1.4.4=py39hf939315_1
- krb5=1.19.3=h08a2579_0
- lame=3.100=h166bdaf_1003
- lcms2=2.12=hddcbb42_0
- ld_impl_linux-64=2.38=h1181459_1
- lerc=2.2.1=h9c3ff4c_0
- libcurl=7.79.1=h494985f_1
- libdeflate=1.7=h7f98852_5
- libedit=3.1.20221030=h5eee18b_0
- libev=4.33=h7f8727e_1
- libffi=3.4.2=h6a678d5_6
- libgcc-ng=12.2.0=h65d4601_19
- libgfortran-ng=12.2.0=h69a702a_19
- libgfortran5=12.2.0=h337968e_19
- libgomp=12.2.0=h65d4601_19
- libllvm11=11.1.0=hf817b99_2
- libnghttp2=1.43.0=ha19adfc_1
- libopenblas=0.3.21=pthreads_h78a6416_3
- libpng=1.6.37=h21135ba_2
- libprotobuf=3.20.3=he621ea3_0
- libssh2=1.10.0=ha35d2d1_2
- libstdcxx-ng=12.2.0=h46fd767_19
- libtiff=4.3.0=hf544144_1
- libuv=1.44.2=h5eee18b_0
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- libxcb=1.13=h7f98852_1004
- locket=1.0.0=py39h06a4308_0
- lz4-c=1.9.3=h9c3ff4c_1
- matplotlib-base=3.3.2=py39h98787fa_1
- mkl=2023.1.0=h6d00ec8_46342
- mkl-service=2.4.0=py39h5eee18b_1
- mkl_fft=1.3.6=py39h417a72b_1
- mkl_random=1.2.2=py39h417a72b_1
- ncurses=6.4=h6a678d5_0
- nettle=3.6=he412f7d_0
- ninja=1.10.2=h06a4308_5
- ninja-base=1.10.2=hd09550d_5
- numpy-base=1.20.3=py39h7e635b3_1
- olefile=0.46=pyh9f0ad1d_1
- openh264=2.1.1=h780b84a_0
- openjpeg=2.4.0=hb52868f_1
- openssl=3.1.0=h0b41bf4_0
- packaging=23.1=pyhd8ed1ab_0
- partd=1.2.0=pyhd3eb1b0_1
- pip=23.0.1=py39h06a4308_0
- platformdirs=3.2.0=pyhd8ed1ab_0
- pooch=1.7.0=pyha770c72_3
- pthread-stubs=0.4=h36c2ea0_1001
- pycparser=2.21=pyhd8ed1ab_0
- pyopenssl=23.1.1=pyhd8ed1ab_0
- pyparsing=3.0.9=pyhd8ed1ab_0
- pysocks=1.7.1=pyha2e5f31_6
- python=3.9.7=hf930737_3_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python_abi=3.9=3_cp39
- pytorch=1.11.0=py3.9_cuda11.3_cudnn8.2.0_0
- pytorch-mutex=1.0=cuda
- pywavelets=1.4.1=py39h5eee18b_0
- pyyaml=6.0=py39h5eee18b_1
- quaternion=2022.4.3=py39h389d5f1_0
- readline=8.2=h5eee18b_0
- requests=2.28.2=pyhd8ed1ab_1
- rhash=1.4.3=h166bdaf_0
- scikit-image=0.16.2=py39ha9443f7_0
- six=1.16.0=pyh6c4a22f_0
- smmap=3.0.5=pyh44b312d_0
- sqlite=3.41.2=h5eee18b_0
- tbb=2021.8.0=hdb19cb5_0
- tk=8.6.12=h1ccaba5_0
- toolz=0.12.0=py39h06a4308_0
- torchaudio=0.11.0=py39_cu113
- torchvision=0.12.0=py39_cu113
- tornado=6.3=py39h72bdee0_0
- tqdm=4.65.0=pyhd8ed1ab_1
- typing-extensions=4.5.0=hd8ed1ab_0
- typing_extensions=4.5.0=pyha770c72_0
- tzdata=2023c=h04d1e81_0
- urllib3=1.26.15=pyhd8ed1ab_0
- wheel=0.38.4=py39h06a4308_0
- withbullet=2.0=0
- x264=1!161.3030=h7f98852_1
- xorg-fixesproto=5.0=h7f98852_1002
- xorg-inputproto=2.3.2=h7f98852_1002
- xorg-kbproto=1.0.7=h7f98852_1002
- xorg-libx11=1.8.4=h0b41bf4_0
- xorg-libxau=1.0.9=h7f98852_0
- xorg-libxcursor=1.2.0=h0b41bf4_1
- xorg-libxdmcp=1.1.3=h7f98852_0
- xorg-libxext=1.3.4=h0b41bf4_2
- xorg-libxfixes=5.0.3=h7f98852_1004
- xorg-libxi=1.7.10=h7f98852_0
- xorg-libxinerama=1.1.5=h27087fc_0
- xorg-libxrandr=1.5.2=h7f98852_1
- xorg-libxrender=0.9.10=h7f98852_1003
- xorg-randrproto=1.5.0=h7f98852_1001
- xorg-renderproto=0.11.1=h7f98852_1002
- xorg-xextproto=7.3.0=h0b41bf4_1003
- xorg-xproto=7.0.31=h7f98852_1007
- xz=5.2.10=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- zlib=1.2.13=h5eee18b_0
- zstd=1.5.0=ha95c52a_0
- pip:
- absl-py==1.4.0
- antlr4-python3-runtime==4.9.3
- attrs==23.1.0
- braceexpand==0.1.7
- bresenham==0.2.1
- cachetools==5.3.0
- catkin-pkg==0.5.2
- certifi==2022.12.7
- click==8.1.3
- cmake==3.26.3
- contourpy==1.0.7
- cython==0.29.34
- decorator==4.4.2
- distro==1.8.0
- docutils==0.19
- faster-fifo==1.4.5
- filelock==3.12.0
- fonttools==4.39.3
- google-auth==2.17.3
- google-auth-oauthlib==0.4.6
- grpcio==1.54.0
- gym==0.23.0
- gym-notices==0.0.8
- habitat-sim==0.2.4
- hydra-core==1.3.2
- ifcfg==0.24
- importlib-metadata==6.5.0
- importlib-resources==5.12.0
- jinja2==3.1.2
- lit==16.0.1
- llvmlite==0.39.1
- lmdb==1.4.1
- markdown==3.4.3
- markupsafe==2.1.2
- matplotlib==3.7.1
- moviepy==1.0.3
- mpmath==1.3.0
- msgpack==1.0.5
- networkx==3.1
- numba==0.56.4
- numpy==1.23.5
- nvidia-cublas-cu11==11.10.3.66
- nvidia-cuda-cupti-cu11==11.7.101
- nvidia-cuda-nvrtc-cu11==11.7.99
- nvidia-cuda-runtime-cu11==11.7.99
- nvidia-cudnn-cu11==8.5.0.96
- nvidia-cufft-cu11==10.9.0.58
- nvidia-curand-cu11==10.2.10.91
- nvidia-cusolver-cu11==11.4.0.1
- nvidia-cusparse-cu11==11.7.4.91
- nvidia-nccl-cu11==2.14.3
- nvidia-nvtx-cu11==11.7.91
- oauthlib==3.2.2
- objectio==0.2.29
- omegaconf==2.3.0
- opencv-python==4.7.0.72
- pillow==9.5.0
- proglog==0.1.10
- protobuf==3.20.1
- pyasn1==0.5.0
- pyasn1-modules==0.3.0
- requests-oauthlib==1.3.1
- rospkg==1.5.0
- rsa==4.9
- scipy==1.10.1
- setuptools==67.6.1
- simplejson==3.19.1
- sympy==1.11.1
- tensorboard==2.8.0
- tensorboard-data-server==0.6.1
- tensorboard-plugin-wit==1.8.1
- threadpoolctl==3.1.0
- torch==2.0.0
- triton==2.0.0
- typer==0.7.0
- webdataset==0.1.40
- werkzeug==2.2.3
- zipp==3.15.0
prefix: /home/kevin-robb/miniconda3/envs/hab
Binary file added images/discrete-on-robot.png
92 changes: 78 additions & 14 deletions locobot_setup.md
@@ -95,20 +95,6 @@ rosrun teleop_twist_keyboard teleop_twist_keyboard.py cmd_vel:=/locobot/mobile_b
```
You can now use the keyboard commands printed to the console to drive the locobot around.

## Running my code with the robot
The workspace with my contribution to this project is located [on GitHub](https://github.com/kevin-robb/coarse-map-turtlebot). Clone the repo onto your host PC; I recommend making a `~/dev` directory if you haven't already, and cloning it there.

This workspace includes my custom code for this project, which handles our overall architecture of perception, localization, and motion planning. At its most basic level, we can simply run the `test_motion.launch` file, which will send test velocity commands on the same topic the robot listens to. If everything is set up properly, this will allow us to drive the robot around.

After cloning the repo (and again after making any changes requiring a new build), run
```
cd ~/dev/coarse-map-turtlebot/cmn_ws
catkin_make
source devel/setup.bash
```

Examine the available launch files in the `cmn_pkg/launch` directory, and choose one to run. Some will run the simulator on the host PC, while others will send commands to control the robot.

## Getting a "Ground Truth" Map with Cartographer-ROS
### Setup
Install Cartographer on the locobot using [this guide](https://google-cartographer-ros.readthedocs.io/en/latest/compilation.html).
@@ -196,3 +182,81 @@ When attempting to run cartographer from rosbags I gathered, it seems our IMU da

Note that you can analyze a rosbag in detail with the utility `rqt_bag`. Simply run `rqt_bag` in the terminal and select your bag from the "File" menu.


---

# Running this project
## Setup
Clone and set up this repository on your host PC.
```
cd ~/dev
git clone [email protected]:kevin-robb/coarse-map-nav-integration.git
cd coarse-map-nav-integration
git submodule update --init
```
You should also set up the conda environment to install all needed Python dependencies.
```
conda env create -f environment.yml
conda activate cmn_env
```
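
As an optional sanity check (my suggestion, not part of the original setup), you can confirm the heavyweight dependencies import cleanly before moving on:
```
# Should print the torch version and whether CUDA is visible; habitat_sim
# importing without error confirms the simulator installed correctly.
python -c "import torch, habitat_sim; print(torch.__version__, torch.cuda.is_available())"
```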

## Non-ROS runner
The only ROS dependency is communication with the physical robot, so I have provided a non-ROS runner for the project in simulation.
```
cd ~/dev/coarse-map-nav-integration/cmn_ws
python3 src/cmn_pkg/src/non_ros_runner.py -m <run_mode>
```
where `<run_mode>` is one of `discrete`, `discrete_random`, or `continuous`.
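
For example, to run the standard discrete mode:
```
python3 src/cmn_pkg/src/non_ros_runner.py -m discrete
```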

TODO sim image.

<!--
<p align="center">
<img src="images/discrete-on-robot.png"/>
<i>The CMN viz when running live on the physical robot.</i>
</p> -->

## Testing deep model predictions
There is also a non-ROS runner to test the model predictions on saved panoramic RGB images.
```
cd ~/dev/coarse-map-nav-integration/cmn_ws
python3 src/cmn_pkg/src/scripts/cmn/run_model_on_saved_pano.py \
-m src/cmn_pkg/src/scripts/cmn/model/trained_local_occupancy_predictor_model.pt \
-d src/cmn_pkg/data
```

<p align="center">
<img src="images/chair-hallway-prediction-1.png"/>

<i>Four RGB images making up a panorama (front, right, back, left) are read from file and run through the saved model to produce the predicted local occupancy shown. In this case, the robot is between two rows of desks with chairs, which appear in the prediction.</i>
</p>

## ROS runner
Start bringup on the robot as described above, and set your host PC to use the locobot as the ROS master. Then build and source our ROS workspace.
```
cd ~/dev/coarse-map-nav-integration/cmn_ws
catkin_make
source devel/setup.bash
```
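
Pointing the host PC at the locobot's ROS master is typically done with environment variables; this is a sketch assuming a standard ROS 1 network setup, with the bracketed values as placeholders for your actual addresses:
```
# Run on the host PC; replace the placeholders with the robot's and host's
# actual IPs (or resolvable hostnames) on your network.
export ROS_MASTER_URI=http://<LOCOBOT_IP>:11311
export ROS_IP=<HOST_IP>
```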

If using the depth pointcloud to generate local occupancy measurements, you must start the depth proc:
```
roslaunch cmn_pkg start_depth_proc.launch
```
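
To verify the pointcloud is actually publishing, something like the following should report a nonzero rate (the topic name here is an assumption; run `rostopic list` to find the actual one on your setup):
```
rostopic hz /locobot/camera/depth/points
```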

To just evaluate local occupancy generation from LiDAR/depth data, use the locobot interface node:
```
roslaunch cmn_pkg test_lidar.launch
```

Now you can start CMN with one of the launch files in the `cmn_pkg`. These launch files also expose parameters you can adjust. The primary one for running the project on the locobot is:
```
roslaunch cmn_pkg discrete_with_viz.launch
```
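
Note that roslaunch accepts argument overrides on the command line, so exposed parameters can be changed without editing the launch file; `some_arg` below is a placeholder, not an actual argument of this launch file:
```
roslaunch cmn_pkg discrete_with_viz.launch some_arg:=value
```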

<p align="center">
<img src="images/discrete-on-robot.png"/>

<i>The CMN viz when running live on the physical robot.</i>
</p>
