conda create -p ./venv python=3.6
source activate ./venv
sh ./build.sh && python -m gibson2.utils.assets_utils --download_assets
- Gibson
-
Get the dataset here.
-
Copy the URL of
gibson_v2_4+.tar.gz.
-
Run the command:
python -m gibson2.utils.assets_utils --download_dataset {URL}
- Matterport3D
-
Get the dataset according to its README.
-
Run the command:
python2 download_mp.py --task_data igibson -o .
- Move each scene folder to the Gibson dataset path.
You can check the Gibson dataset path by running:
python -m gibson2.utils.assets_utils --download_assets
- Train
python main.py --global_lr 5e-4 --exp_name 'ma3_history' --critic_lr_coef 5e-2 --train_global 1 --dump_location train --scenes_file scenes/train.scenes
- Test (Example)
python main.py --exp_name 'eval_coscan_mp3dhq0f' --scenes_file scenes/mp3dhq0-f.scenes --dump_location std --num_episodes 10 --load_global best.global
- Test via scripts
python eval.py --load best.global --dataset mp3d --method rl -n 10
- Specify GPU Index
export CUDA_VISIBLE_DEVICES=3
export GIBSON_DEVICE_ID=4
If you find our work helpful in your research, please consider citing:
@InProceedings{Ye_2022_CVPR,
author = {Ye, Kai and Dong, Siyan and Fan, Qingnan and Wang, He and Yi, Li and Xia, Fei and Wang, Jue and Chen, Baoquan},
title = {Multi-Robot Active Mapping via Neural Bipartite Graph Matching},
booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {June},
year = {2022},
pages = {14839-14848}
}