From 0374e780e2894fd4b495ad36b521c83b47fa7980 Mon Sep 17 00:00:00 2001 From: Pavel Mokeev <75482510+pmokeev@users.noreply.github.com> Date: Sun, 14 Apr 2024 17:19:34 +0300 Subject: [PATCH] feat(pip): add pip package publication (#33) * feat(pip): add test PyPi package publication * feat(pip): fix GHA pipeline * feat(pip): fix numpy version * fix(pip): back rename voxel-slam -> slam * fix(ci): remove typos stage * feat(project): migrate to sova project naming * feat(md): updated DEVELOP.md * feat(pip): new logo * feat(pip): fix notebook * feat(pip): change link to mrob wheel * feat(pip): minor README fixes * feat(pip): python only 3.10 * fix(pip): revert >3.10.0 * fix(pip): fix dependecies * feat(pip): python 3.10.* * feat(pip): mrob >=0.0.12 * feat(pip): removed last mentions of voxel-slam --- .github/workflows/lint.yaml | 46 -------- .github/workflows/publish.yaml | 33 ++++++ .github/workflows/tests.yaml | 72 ++++++++++++- DEVELOP.md | 25 +++++ README.md | 43 +++----- _typos.toml | 2 - assets/logo.png | Bin 0 -> 18759 bytes assets/logo.svg | 21 ---- examples/pipeline.py | 4 +- examples/segmentation.py | 6 +- notebooks/benchmark.ipynb | 101 +++++++++--------- pyproject.toml | 33 ++++++ requirements.txt | 7 +- slam/__init__.py | 1 - slam/backend/__init__.py | 10 -- slam/filter/__init__.py | 6 -- slam/pipeline/__init__.py | 9 -- slam/pipeline/configuration/__init__.py | 6 -- slam/segmenter/__init__.py | 13 --- slam/subdivider/__init__.py | 10 -- slam/typing/__init__.py | 4 - slam/utils/dataset_reader/__init__.py | 10 -- sova/__init__.py | 1 + sova/backend/__init__.py | 10 ++ {slam => sova}/backend/backend.py | 2 +- {slam => sova}/backend/bareg.py | 2 +- {slam => sova}/backend/eigen_factor.py | 2 +- {slam => sova}/backend/mrob_backend.py | 2 +- sova/filter/__init__.py | 6 ++ {slam => sova}/filter/empty_voxel.py | 4 +- {slam => sova}/filter/filter.py | 2 +- sova/pipeline/__init__.py | 9 ++ sova/pipeline/configuration/__init__.py | 6 ++ .../pipeline/configuration/reader.py | 8 +- {slam => sova}/pipeline/configuration/yaml.py | 2 +- {slam => sova}/pipeline/pipeline.py | 10 +- .../pipeline/sequential_pipeline.py | 4 +- sova/segmenter/__init__.py | 13 +++ {slam => sova}/segmenter/cape.py | 4 +- {slam => sova}/segmenter/count.py | 4 +- {slam => sova}/segmenter/identical.py | 4 +- {slam => sova}/segmenter/ransac.py | 4 +- {slam => sova}/segmenter/segmenter.py | 2 +- sova/subdivider/__init__.py | 10 ++ {slam => sova}/subdivider/count.py | 4 +- {slam => sova}/subdivider/eigen_value.py | 4 +- {slam => sova}/subdivider/size.py | 4 +- {slam => sova}/subdivider/subdivider.py | 2 +- sova/typing/__init__.py | 4 + {slam => sova}/typing/hints.py | 0 {slam => sova}/utils/__init__.py | 6 +- sova/utils/dataset_reader/__init__.py | 10 ++ {slam => sova}/utils/dataset_reader/hilti.py | 4 +- {slam => sova}/utils/dataset_reader/kitti.py | 4 +- .../utils/dataset_reader/nuscenes.py | 4 +- {slam => sova}/utils/dataset_reader/reader.py | 2 +- {slam => sova}/utils/pose_readwriter.py | 2 +- tests/test_cape_segmenter.py | 4 +- tests/test_count_segmenter.py | 4 +- tests/test_count_subdivider.py | 4 +- tests/test_eigen_value_subdivider.py | 4 +- tests/test_empty_voxel_filter.py | 4 +- tests/test_identical_segmenter.py | 4 +- tests/test_pose_readwriter.py | 4 +- tests/test_ransac_segmenter.py | 4 +- tests/test_sequential_pipeline.py | 12 +-- tests/test_size_subdivider.py | 4 +- tests/test_yaml_configuration_reader.py | 10 +- 68 files changed, 380 insertions(+), 301 deletions(-) delete mode 100644 .github/workflows/lint.yaml 
create mode 100644 .github/workflows/publish.yaml create mode 100644 DEVELOP.md delete mode 100644 _typos.toml create mode 100644 assets/logo.png delete mode 100644 assets/logo.svg create mode 100644 pyproject.toml delete mode 100644 slam/__init__.py delete mode 100644 slam/backend/__init__.py delete mode 100644 slam/filter/__init__.py delete mode 100644 slam/pipeline/__init__.py delete mode 100644 slam/pipeline/configuration/__init__.py delete mode 100644 slam/segmenter/__init__.py delete mode 100644 slam/subdivider/__init__.py delete mode 100644 slam/typing/__init__.py delete mode 100644 slam/utils/dataset_reader/__init__.py create mode 100644 sova/__init__.py create mode 100644 sova/backend/__init__.py rename {slam => sova}/backend/backend.py (98%) rename {slam => sova}/backend/bareg.py (95%) rename {slam => sova}/backend/eigen_factor.py (95%) rename {slam => sova}/backend/mrob_backend.py (98%) create mode 100644 sova/filter/__init__.py rename {slam => sova}/filter/empty_voxel.py (89%) rename {slam => sova}/filter/filter.py (95%) create mode 100644 sova/pipeline/__init__.py create mode 100644 sova/pipeline/configuration/__init__.py rename {slam => sova}/pipeline/configuration/reader.py (98%) rename {slam => sova}/pipeline/configuration/yaml.py (82%) rename {slam => sova}/pipeline/pipeline.py (93%) rename {slam => sova}/pipeline/sequential_pipeline.py (95%) create mode 100644 sova/segmenter/__init__.py rename {slam => sova}/segmenter/cape.py (95%) rename {slam => sova}/segmenter/count.py (92%) rename {slam => sova}/segmenter/identical.py (88%) rename {slam => sova}/segmenter/ransac.py (96%) rename {slam => sova}/segmenter/segmenter.py (94%) create mode 100644 sova/subdivider/__init__.py rename {slam => sova}/subdivider/count.py (91%) rename {slam => sova}/subdivider/eigen_value.py (95%) rename {slam => sova}/subdivider/size.py (93%) rename {slam => sova}/subdivider/subdivider.py (95%) create mode 100644 sova/typing/__init__.py rename {slam => sova}/typing/hints.py (100%) rename {slam => sova}/utils/__init__.py (58%) create mode 100644 sova/utils/dataset_reader/__init__.py rename {slam => sova}/utils/dataset_reader/hilti.py (89%) rename {slam => sova}/utils/dataset_reader/kitti.py (92%) rename {slam => sova}/utils/dataset_reader/nuscenes.py (91%) rename {slam => sova}/utils/dataset_reader/reader.py (94%) rename {slam => sova}/utils/pose_readwriter.py (97%) diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml deleted file mode 100644 index 1df7abc..0000000 --- a/.github/workflows/lint.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: Linters - -on: [push] - -jobs: - test-and-lint: - strategy: - matrix: - os: [ ubuntu-22.04 ] - python-version: [ "3.10" ] - - runs-on: ${{ matrix.os }} - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Install Python${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install -r requirements.txt - - - name: Check code typos - uses: crate-ci/typos@master - with: - files: ./slam ./examples - config: _typos.toml - - - name: Check imports - uses: isort/isort-action@master - with: - configuration: "--settings-file=.isort.cfg --profile=black" - - - name: Run flake8 - uses: py-actions/flake8@v2 - - - name: Run black - uses: psf/black@stable - with: - options: "--check --verbose --diff --extend-exclude '__init__.py'" - version: "22.8.0" diff --git 
a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000..0807c69 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,33 @@ +name: Publish pip on PyPi + +on: workflow_dispatch + +jobs: + publish-to-production-pypi: + strategy: + matrix: + os: [ ubuntu-latest ] + python-version: [ "3.10" ] + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install poetry + + - name: Publish to production PyPI + run: | + poetry version $(git describe --tags --abbrev=0) + poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} + poetry publish --build \ No newline at end of file diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 1ebb7d8..b6c3cd4 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -3,7 +3,43 @@ name: Tests on: [push] jobs: - test-and-lint: + lint: + strategy: + matrix: + os: [ ubuntu-22.04 ] + python-version: [ "3.10" ] + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Python${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + + - name: Check imports + uses: isort/isort-action@master + with: + configuration: "--settings-file=.isort.cfg --profile=black" + + - name: Run flake8 + uses: py-actions/flake8@v2 + + - name: Run black + uses: psf/black@stable + with: + options: "--check --verbose --diff --extend-exclude '__init__.py'" + version: "22.8.0" + + tests: strategy: matrix: os: [ ubuntu-22.04 ] @@ -28,3 +64,37 @@ jobs: - name: Run pytest run: | python -m pytest tests/ + + + publish-package: + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + needs: [lint, tests] + + strategy: + matrix: + os: [ ubuntu-latest ] + python-version: [ "3.10" ] + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install poetry + + - name: Publish to test PyPI + run: | + poetry version $(git describe --tags --abbrev=0) + poetry config repositories.test-pypi https://test.pypi.org/legacy/ + poetry config pypi-token.test-pypi ${{ secrets.TEST_PYPI_TOKEN }} + poetry publish -r test-pypi --build diff --git a/DEVELOP.md b/DEVELOP.md new file mode 100644 index 0000000..2730bad --- /dev/null +++ b/DEVELOP.md @@ -0,0 +1,25 @@ +# Publish package to PyPi + +Firstly you have to ensure, that package properly builds, publishes and works. +To do this, you must first publish it on https://test.pypi.org + +1. Create tag on main branch: + ```commandline + git tag + ``` +2. Push created tag to repository + ```commandline + git push origin + ``` + After this GitHubActions will automatically build pip package and push it to test PyPi registry +3. 
Ensure, that installed pip package from https://test.pypi.org works properly + ```commandline + pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple sova== + ``` +4. Publish pip package manually on production PyPi using the following steps: + - Go to Actions page + - Go to "Publish pip on PyPi" workflow on left side of the screen + - Go to "Run workflow" and choose last tag which has been created from tags + - Click "Run workflow" + + After this GitHubActions will automatically build pip package and push it to test production registry diff --git a/README.md b/README.md index a84d314..e470ff7 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,35 @@ - -Voxel-slam repository is an open-source Python library designed for fast + +SOVA is a SLAM on Voxel Alignment and an open-source Python library, designed for fast and adaptive comparison of different approaches to solving the voxel-based planar SLAM problem. Our main goal is to provide extendable, simple and efficient interfaces for -testing various hypotheses, which include different subdivision/segmenter/backend criteria. +testing various voxel SLAM hypotheses, which include different subdivision/segmenter/backend criteria. -[![Linters](https://github.com/prime-slam/voxel-slam/actions/workflows/lint.yaml/badge.svg)](https://github.com/prime-slam/voxel-slam/actions/workflows/lint.yaml) +[![Linters](https://github.com/prime-slam/sova/actions/workflows/lint.yaml/badge.svg)](https://github.com/prime-slam/voxel-slam/actions/workflows/lint.yaml) [![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) - - - # Installation -To use this library you need to download this repository and install necessary dependencies. -How to do this: +To use this library you need to: 1. Download and install Python 3.10 from the [official website](https://www.python.org/downloads/). -2. Clone this repository - ```bash - git clone https://github.com/prime-slam/voxel-slam.git - ``` -3. Create and activate virtual environment - ```bash - python3 -m venv venv && source venv/bin/activate - ``` -4. Install dependencies - ```bash - pip install -r requirements.txt - ``` - If you want to use `MROBBackend` type, you have to install mrob library manually: - 1. Download [wheels](https://github.com/prime-slam/mrob/actions/runs/6841598615) from source - 2. Install mrob from wheels - ```bash - pip3 install mrob --find-links=... --force-reinstall - ``` +2. Install pip package: + ```bash + pip install sova + ``` + If you want to use `MROBBackend` robust optimisations, you have to install mrob library manually: + 1. Download [wheel from source](https://drive.google.com/file/d/1rUdbybNvHx80ykr62aceAcBIPtlntWIz/view?usp=sharing) + 2. Install mrob from wheels + ```bash + python -m pip install mrob --no-index --find-links wheel/ --force-reinstall + ``` Now you have everything you need to run your voxel-based pipeline. # Examples -Examples of using the voxel-based pipeline are presented in the [`examples`](https://github.com/prime-slam/voxel-slam/tree/main/examples) +Examples of using the voxel-based pipeline are presented in the [`examples`](https://github.com/prime-slam/sova/tree/main/examples) directory with the all necessary instructions of how to run them. # Contributing @@ -46,7 +37,7 @@ directory with the all necessary instructions of how to run them. To contribute to the project you must: 1. 
Get to know the project structure: ``` - slam + sova ├── backend ├── filter ├── pipeline diff --git a/_typos.toml b/_typos.toml deleted file mode 100644 index 8b54a79..0000000 --- a/_typos.toml +++ /dev/null @@ -1,2 +0,0 @@ -[default] -extend-ignore-re = ["NDArray"] \ No newline at end of file diff --git a/assets/logo.png b/assets/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..58fca7e500cbd4b5db89ecd9e3dfda883364d454 GIT binary patch literal 18759 zcmeHvS6EYBw=RS(QY;ifilU$(O`3EN5tSy=q>F;|AiX9bAfkexAiV^UB3a1cOH?E zQ9yxz@6%EPe~+eOHOR<#$?o4#HSo1uMbms_QQskKI+=b?U(3sX!Wc9f`-GMJT;Jn%pVd$9UN8q?zMT-gQo8^fgYIB$(2nfpj9N-XZ2_{oQC&v;5`1 zhAB@ASc@KSkjf3JrGY}BccKo03xoN~_C)&mo*29N?s!8XW4W5B@G~l8z;34eON-@9b#4{Ek7MbwHF*w**6W>8EQHp5DVc=KYnlW)28T+*nH$9~%+i3D32PtFT_yL~d)|uYe`iVM)<*7KaWNwgz1JPFp z#Nct(SNgMC*Gs{py7p7S{c=wgvCHB$CYY*?WF&*qfCWFL?g?LFPO%xYZY;!bSKB zYj4XkI$-;>LKL#FW}ftpTl~RLk;zO2{hHbz`IA%LKEt~~ADS9=`8^tBn)WHc8rh2{ zO!NdFe_Qq~jtNX&&GgSzkyC;IA3&dyLUa68BgKZ`K%uB&8`bTqFPT3J_F(7QSj@Ib z_J4h>$^fwG3?;P)CO~}ley^YuG=v{f)6)hXhcY8N=Q+MOOrmv>PtE3&~Y3`(%SGtzW#vgs@sDqzJ>nP zVsZ^YRU)gZ*vTmc&-N-bfU^}s;v6<=g{>;OJ{qxOeVww3$h>7#9a3w&4!JW7cdR3> zlfHI1Q)K;n8tYTcgwR}M>6v@4gQ)dQgDmL39iSz{T6b{@Er;@m+m+h1jQ7wR)RgufBQ&+tUbcaV2zJv$!? z*hV`E&zW>|a^xrSfVwNuQZm1|l5Tdz120ecD9w61&+xg~LV8ohJ<}|Kx75^+sBd>F zYYC>Q93egz|7-J2g=BDqne3VxZzrYXwT|x2(vHgnv}BYn6S!}pN-Sw!aEV+@b(SQY zAdmbAW9NDNg7cLC^3I9B*Z{Pu1wnB)NM)8vl19g4nwS?7FBtOGYuPAK+%~VgP~wz! zC9J|8W$Nzk9w@B&$&0pdA9luX{o07cU!SpI6x4(?)Ad*5)dSVwvU0qZ91$38r%8uZ z`9>UJBGL*^D&zk&FS(DFB!0Io*l6SZb}qHPFX>>xMZ~3k_MmK z#K!4#yM^ncE^44b90o_2#Sfn^SDxa?*?U@WVoMugz#UnEK(vqlm>p($Gz&*~{-Y`_ zmHL-(gs~tA8KrwIcK6HwFv1`+;F>DkC&E;2QHHNq)Coiiv7@1)MSm)X@(rk!G6 zh>g}E)cQx4=y({{|Cp8vEuhVp(((5wGkBj27D$j=G5ZBB4;7SmNEF;GUydG;^c})6 z8Ri?~54X#XFb~g>@n7}b;fkO+@mE1tRi@g)f>h$@7NuJ!>KKjus2h{P`U63tV81}H z1t3!jmBtRsj~C+&kF1H5IMRd;>+T;zcy9~5+C+M~4(v_87>x?tW z4;*b^(W~rVTCzGeK>o@Or3mpkp6Hs`As-d)lS8utk;}IE3jXb=PyzQzE6_@9 zPMMD0%ZxnNR|`~qNEzhtkFegBbyG5>@epuhqu0L1pAZxY?Ak3w>Ssg{1Gz9VT}TI7 z9z920;F8L}5Mk61G;gr9quWiw2P793RXsX_7oRzCs1E?6-&Ev%FGk)ijzYLxpKUrW=P(I4A0(@bsiuxjr0X7R)wI%f4tbFKS>uoHKjl-IVoT0_noj#s zI9(Mp;>JWU%2Rdv;ZS%BM<5=8ytvDMW(=8beZ~@{YPa0y`X8B!GH#F081T63G-VplmvhAzBg(pjhqy?*$xAQ%RH4EbwTMKksW+%H0gWL;U#tv_rxdj#c9^J^BQJHrVaKV!Gan&}fDN?T6|r9jjZ^aLLb~uc2LFUkz#;uQd^_TkonM2d4|q#` zU@48Bl+Uz(A8*nBH;&wS45(#G%=cdAYr0c&EXDqBV+02pY&%&K#l4d38QFyc$-AeW z`)Xnh@(kMXEo(L1m8ysIEwKA{|Auoo@NTM>E;Z&29Tj}tniSRep>q<2^a;LE=264U z(b``R1BZsDrH+_#dm8JbTw_OzbS7DKU-(S-C`X$-PadeYVHLe`AceAiV zZH|L^Cf{?towp+x>0p6C_lfeZ|CjskOr;7_?lQcVyoJlh?r2!d4Zr{+WEnYd z6iZGXZXO%DVDq^^?fsYn{U6c3IF4J}3s~A)tn6mQ^nouXs7(mibtH~t7ks(!aHs0} z#+na`w-|sR?w0lb4b=YFNi~Y5#Cgc!1Y@phUD0pHjrbBB8Ij^6MVdO_tM`@v_=7uE zfZ+E%e$bv7(cxWeDwb>nA6$H10oy-w+Tm+XXvd-XVCjME;nBf*3BE_(EdD?bciThi zAkih&x*@;F(Yt-!KK|wMi4W!dd2DHE3a`>qLDn?T>s}TND2gXH0ZGOd{p6o=n#?o& zqWaL!O23_~H$0k+V-edd>>s2#@~%mA<-*k7pB+&O09KbGrF*;S9Q+FllzhGH^TuiEWb-XreCU}=)D!anVUz}TXpLYX)`-9T|usv;FM>v8}E2JUd zZp8WS3#BMY+J2PK=dVk)c6C|Beg0u;+M6D=b0VJfPD~!xq8fZYa=@)VJ`ef!Cm2l? 
z17M@EVJk9d0i<`D_((Lj`bErJ0fPim4ozi*lo#!1R?6zcYZj{oUqQv}kjuakM@|HpH2`(#<6Ei zt$uQacI4~}PC}NB>wu}-Dn0J1fI!M030K4hlIUI*1SQS!o#-k^~FsX3-V27{Vi5zjjH-tMUw_N}ao!TvJnBw2sLubaB(w{f*EBpN3|{&8kxpgvXr zrtc8N_=vj0id^t{e(?IVAN~u@ard-ly@J{-aZ81-PcXtASkJ6 z_+t4no}DF)CXQiXBn$6JKfC2eiek^J+p|z9&E@NbAG+sz6Xc$kKQg|7iq+~3@NB)| z1zQ?5b^mm?FlmS`MbC6|K%wDHYpZ-pglSqL1{0cE{hQ+i%e^iF0)*$ns}GkRW~)3@ z-jL#T*T3A9?;w;YlT+RUIY9TR+pOFj^8PLNEj*!2S?8kgs3ZYph!mq}GQhqw z%;wLCVIA<9w$U;V|0^!Q?S%K3i=Cx(rlUNhQ0{tik3xc>SMnh$JLmn9W2UiyonvE7 zUUt4yF6-=@oT~!nc!H;94s{jSv=lH=oclnnY&9LpBfxBAk$%{Z16zi+tslKzz25Ia#bg9duh z$@>(DIG3?|-+#XAW_s!)b+vq;Ejl#3AjS-X`Dz_zJ%{PN&mrmfc>0B%7J75mSahu2Bn(;67UxI2vB!x%t!&GUzyb__1O07)J~lt8>L#ZWk|( zRU;Lj1!o^ZIEQQ3>-biSt9!giG=@s|90!%O9fyjp3Z%N1ur#fgp>&E4P@LP1muj9p z*=z~Ltproy_UsUL13WT(K`P!reb{z55MM#Z{JMoHP}nb>rk-$S3wu5*xI-)_zhJ7X za|+qcWG>IBM-W+wD9v#4l+x67aRD=)9qTR#^c}P`uQ6ETgcJu7`!vRDR#3aotLsK% zS(I9qWd#q#&=t8ftWq*Gbcd@0`t|;3m%{?9N*zgG$Wfx*T)Y*u{R<4F91vJ@v#?x! zc|Z|qI*KpF#$nWvjl3}eIydvt>j!(+_V1DbH4^~N*j)B|w{nK|fCef^*c4hs5t{?F zEv?R*8po^~%P}E4r;4{8*zvDl-0}I)KC(T@7pd^*)r6nM-Fyp{cdC_0MQ13lvI6$f z-fAW0%1YCEL}^bncYQ$n^z(x}%xb=9V>HdrNOY0&8h^ zqLq)*Gs|9@_^n)h+m@=Qd!x>0{;5{X9T(VLc$p>sv3+Zyq~Qg1A<@M?;WR0&4YtsV zvGaPv-z^`=hz2XK>U)&c%=%2MiCw5%=bHDK_U_6(h*Vs=bpb+pJQ{$2+*}o0Jp_D1 zG4fq>ay(ndTLd+=;=xvh(aT`){u=@+SVGT0qYO1= z%>lH0fl0P&{c=n*kNe`Ko11+aHtRt;;K6T>mIDGjFz2J-PtQZucK!1gOcqjvDO@=( z-U~9D499jXuYr5#tDJX%~TBU5ccWKX|3?ooUM;@wQL z3h#4}^(9IA{H-8}%vuZ*``(1Rric?X-K0QRg43U8~90^7Sn-kgxFPM-#k@ zSQXL6$#9?Z{%SAG0JpT}8$0;*8pkC?yLZ6byltk5VN-CYT`n;nD^Q7O-> zng^r~hAxia@>fcCm2mNgM^BBIqhe=tZ%xkj4aRmDPLOOv+2B?=MHZQ>Uc=9C&n?%J z0_Rw&bHDnG=_?;g>y?G~sCt@7S+7_187qNpSgI~$OWY%AHG!AARRSm(^B=?9#A~nK zs3gEK!s--R0O~FhxMVSv$%PUB4PFlOKR=GM?VO#rga}?V&4w>88Ri#+SWZ+_Q9&7! zCF9I!SY)9VhvM?q6Su~NF4Wva)fo!q!kYIYD@~qr5d6v&$Hb$*bF$WdeMr5vYCLU0 z36;l9$hyBw39#e}8*8>G%7Ii!33c;8HxN_Y9WlU``hhKdNaLmsYl=J;APn4oId>Mf z!62`SXvktSyMMTExx(EP?Xcmavx>54ybXlp<>n;Lu%v#5Z7s3+OvRlIK3r4%v($-_ zRa0TMzQV6LDSj1)6tXwzh>`Nsl%*^$g6+9Kk7t_&l~Yf@g*6B+4;95|CrfuQVMx5S zNgQFYBYZ>l`5$MD4jXG0hU1^*8F?MBFDogodJJ_5bgX{wY;xUxfRDdOwML5m-V?4Q z_ssuimTEuCi{aJUns>w6sOrO|PElLDPqHn3zI#Y|J+)i#w`IVp?feeX{fC~0DM=nB z4O>&VAZ`C58~p+(z+Slnz`RT6+MR5oRC<*+B--meN+5lmJaN!K2Jes(EhE2G1eOT-$PtMM%lCJ*(XEExGq7wf}D14ud44gDC%*ZYFP%^3ysAJ}3d9F=%4?dU) zSZ@%jc1c~}W7SQ&B9P`8{du}!bH6cDKTJVmcck7I_p!}6^?bhBB{K^z!@D_FJ$~Ns zi!YkU#~HLq?oa354%@Wm!-#kUlhRlDE%#znKj0~Lm<#=|Wel8w$z3r2BGh;lH#z0m zYbsOmP50+wqlJMJ5qIWk{S{wLIyeC+X|-k6lJ%93oKEd9&88BV;vHZ@2P?J3vg|Wd zwOivwwb|;L$fcM=+s4D$qu*}sgbx}Y&z2im8nwI$RV;BX|`h)VuQTk^;Z*2Fl01G#Ja@d+$(| zcE9%5ED`|@4TNIPwpkFJwjHs3)%lARcSOfjW0^`KXGL8*bCdVjB|Tr8k4!yn`)<}S zaFKVQ_BX}L6wkb{$O-YMRVx63(E*oM@K)57BtCn_P$9YK-aqM&*VcC}s*f@2x0HwC z)Q&@7cD+vLMaf3uslEQ$QQLmc@WQk!2fOhkaQ=(y8ug6|IK>=FSIH~T)kNkDIoj~=EWkw{;^xh@Pj76&?>yyr zYUW%k+ATayGc7Q4`_eAzOvH^6bZ=pPS@jM_n+0QoSCBp2M?f2o=1!AV@93nuQCvKU z4)`U+Z;#7Jh0Z75bS$XHQAKG(A9J$@AlRZ9>Z0<1D$bD3SnKwZT?)_gtlx3c)OQ{T zJo9y8f7eDeqAZqyMy`T(A|MhoshX`Cg6@<5Z$xD zUz5WOhdeyXpAVhW{k3&nU!AGvSD8fsME?3>V94ORc{|AVpfS!0k1R-(=pL#Mt*CW{ zXqyQ&_BhmDDpv87LM>;jj?CujUqpcSl4RA(G!{f6!%(|JW|>&qc)6N&bR{NkXSl+{ zQ`e6Tqx@+^ZcfqyMRt&qB(J^FzJUae1OS@}hfy$Ov>Ay7ptc!oxor8m;c zJiFni%TJS@x!?C)c8mFwpRTgY+}p#Qao3_(S>uHNiOEC)TWp_GNT^WXx}-T~?+qH-S1_n5z4 zW&X)TT!G|e^ETh#ihg!l?XYni$ zvnnn4`CP&nMSD3Qrsuqn^*d#2MCkJFP_q~Y);QRgkaE=eWL6T?~qCfOJr5v^$=V={|R|z5%o=3!nNV_b&lD6sfn(>CBy9Z-VKeRWkUX! 
zYR&K|8TQ>Jum&8{^2b(VOQ`I7VTg`nTh^pTWvru1%Uj1*S;%flkq^fedmax&5}(iM z3rmDe{};%voU~|&Hr!93&}M)>|FfJdAp_SOrrP0^O48AzgEG_| zO>uh>?58juBWt}K9w-z~v4Eb(MeD(W6qJsaJ`St48mXr*=IsHVYH7fWfe~Sk&j7 zRlBPCuR3Ua!8xVWG9a;SFBdkGueKd8H`V+aP<&fs_c^Za%(J_liFSzC9Y6oqI-SqE z11%UPy@oC3GF~xcTF?B<7}rhZ-qPQ{x)m_!(giGh)vvE$#Nox4ZigwEuU0SsCDvw6MYlB#X^Tjf$KsjxYrQXSQr> zy7ilXvlFmYn%o7ycYly@ouWoM>0s-DmN84|+GdS*yGiAF%S7C-&z6jzyBc<+&FB#p zCBOPSB&|x0Mi**2k_V>j#$t4T0onb^Vxp;&j#H1ULrt{wW?z^K`uBXZZ&we`RBA{#n@IGYM_QcR_L7|GCf|m~o$glJD?qbS^qYQX_*FyR z0veZn9+6;I_vuz#w+!Z1 zjeys;B7YW!>h8XJIf3Ca`?)Vc^+h&IZ=csxSfl-`D-4}9AUVqt+w0|Zw;SP=9pBox z>~`ZKiVwDjtvYY0KC*11gkFe2k%Qha17-9dlDM;@SD5FSh+4+8E0i$sB(iv~_!>=5 zemXK=L0$%IsW?tg0$PR9$%uujDc@}5(d>+FCIL=kGwp%28;IpjcVDy)lU@33meW&$ zlX5lY?&NHm(d~{?V^sXnM=-VYiw)eG4vSp7?zfrQ#_e|TGz;22ziwm2qarPrJU&k6 zn||hGc#K*n$3Q592g67V6@CMV zOM{zqh^tnD78?r{1V7VTY-}Hmjgk53uFFG(?x`lBU#`J=*S>xy0_ap6?PVHN+h`)& zS3;&LBYYZm@ct5Rnug@Iy+}oG^&&9+ezsq#4t1Wum!Q9{9URZMUjplEQ_Ny#ty#y?dTQTI*3Q%7YzgOC~S)2r$6I7ZLSn~SCT zms{EwE{3kRwELng0!F68I=7!6@!YccYCiGRx(4~{E*sakK{fYccJ$Rhs; zE&r`Eqb`-`lnK_Qa!TkY9R3C6<>M-RV(t@O+L3dwJD7;P!ODvF2e9z-JLfwa9XsQM z`&XhnZNhV@j7!vI7g26^=W2fqG;SRN?ZXfGrjqge@shES9WNtJk(h5cEXp2(tE-Dw zzpG4KTsv#yA|(@6D+!?gz{Bm~L^0Fd6BIi81z{cH8ndU16vn3ksyvXwAt$kFMZ0Jv zGH2;gSm3v-sDUh6d}=W*1htQzaVbq>XtLFygo;d8F~DCQ%hdU{U+OIV9V|@)1}Ltw zE*?;`5FL7RsJe%XMxh5510fApXrQ^ueKj?Ya=;eVReEZ&GX)Y9vRX6W+L<~jo~_P( zVc>y8rFiv>MjlaQLY5_n;DGhgD23V)lSIn=l<%_HDIvKb2@djK+-(z;mgzJ+jP$ze zp%ma){&>r1@5kYmkL_7Rc1y6Qw$iP8=*O4Y2e!5c^R=}b;%y7*?kevus^QtR%x2qo zU}erb6CS#w--)Fp@`KK18lKCBTCx#*QF4!T2Z=7yAs#_m z>70R*4t=UGiCRq(hdCV%*6mMKSUg=F;;n=Xf`#vNToDu6-)DlZ zt+Se+R$+L+1I2e#@9bLdp!2d$>F~{aujS!*Yaa}Du@951Z^4-c`}EoTwgwXc)2ist z)-fn-CRgKiG~svCx%Y|2u7T&bBDtOv3A3_n$w_#?`G>z+(X5^zVM;x8gE=rBc|wUJ_VE9!{MpYXXUD>=&s1M zYl>uw7re4_r*l-pW2Arg)S9P)P5UIq%mdy{0GUOM^{);2wdhe@qFYB^+@9}mY)(IS z9uBXod>=2A)DC1oYl|jW5%80ST=MxVx&o_d(|k7bTNiY|-@FP@ECwKDvjX#%D!+kF z?OEm4x+ndV8AJ+9=aepfrRKcnnDug2PtFk15lF9>$sSSwZJ`-kxXIwPQOVv;V2BB% zRcy+viinizz^I$V0p?~VNh%>hOGrltozUk($wN|G?zXd;`Z^UxHA_}6pkpB_f=TPg zarxVBgwDZz1qGM4Gb|eDyJO!VVP%lo`gAu`c0Vxo^?7{z;m3T@+LnQM&r0EMbzu(N z<69=WNxWOU<1>9z+HC+tX}lLTS>F{{aW9}4z4u^TFPFY1*@9*t!&SRF?%ELB$stwo zbIpry18@5Ai1`;IU|cH+c9S<%L3$+ky81?Zl8BAOL> zf2$c$GHksyOS5pWzCWyT@zbTTsCaTjgW5w?tBVwkY^1#o#^wa-i8_I-Y*8a5u9?f@-=O1tzeLzg%m2pG)*!e0%V@Mr+CVNW=Qo?N#|UxYh$_mmGNN) z4!Jub&M&^LE*m%OBzZ0mjP7iEiz*yUc-&#C>Z?<(eaE6)`^Y>@r3ARqXvM}l3lLQX zH{p79d^2JvhkPnr^N4n=tgB_L+^O?4rIcu~e{M>h)blwp!z5MXEkWa=%l;PS&+^Uo z0;HUV-2)#6ry>P^rHe+wF`Yt|!ZCUt8Cx1l{jHiSV)8ZfH51EMNw0Jw*ZRU?glU>; z0NjTO0fE%e?~XZ% zmKs)so4XnMhxJ3w5VBBn7{2#hKr%Thj;yl{ru@xJxL!HjY?YENwwNw@AC71UPs{eh zBm~&jj6TJB3Dtdzl5~QW&wxd5?J6vf9C~VdG)b~`IUF2lioOAw!g)Dtg_C>Rwu=@$ zjLCfqj_8K@=T37#IcmLCtYc+H%j4|wYjJF4j~rLiFzK+kT-boZOL^?ex7{Se83|{f z!=qOW{T0=Rg`Zfw3Tin#2TeWhqszu!IWObqZ^w&CYXd~G=cs8q7R4^?y ztC^ac<404Z6)lYDE^TzReKac-mo9$gzdrz>|~&TI9ul;fZ$I&AV*`>dpsrNsZ0 z08T)s?DRL-jC$h-nshPCouUoz0pHD$akgkm?q<8l;6MN@9@O zDDy_u$_YU$=w8;$xEH7GveVcD)dkLrt}=sR5ZtCfe}^;QR)&2*O*7ANJ!Sesd&aY! 
z3@lt*xa)q}RZV5zKG$}o6>brITpE)yArGh~}f-XWy5x>J)Zr~hb4EloiU%O<^^>93>am%9**yRtb53G&CdpBlekUL8aES>Qn^+& zO)Y!$4yAnzhRC8m;Wt4ERxR3?Mk&X-?Y55Wwt>^Zr!@1V`vIP~@3^UvgPYiv&3aQ0x21Cug}d#U zSjQnCj~%!C;B|0A@%{VT{+Gf-nE3IM=CAscmspk@B{$awK})3i-6n?;8yNGqbqPZs#Xhi!CW0`FE?X< zuGYRdM~L;bXauJ!xOOPrFew{x)+62$-dhV;0T0?c2cXLxiiIJ~Q)V<(g5saTel1$x z)N=)rYoOjk1j<2hrorx=_WqES@q@eh2Aa$+^C>yAK{4f1E5%(iBU`6OMg$diH2}3ipFT?B#rHz zMw0M)1HLHmXw(je_Q7=MDj!}6SgVe=r;9qLtJI#i{e@~KRa$Wl_x7i@1>1Db`hZ@M z6JmMm8{DF?=i^=MquzEDg=flX%LSYHl+ldwXJ|>qTk1>?>GH~#do6Kttd!eT?ugGc zJgZJdv=ZIq1CK7mOFN(6KM(^3qG|5}=`y=rT5yovKhhftcREynL;_`Z-svQd^0o&A>lCLYf|oRYw)Vf<)Yhx$Ou}TB2Nt7?Pw7^@;xI;TD#r$Ynbu-&%&EUnf1znI zlm_5O_yUn^dK7O%X-+rsG&u9F_9~BXG1Kw%QkfHaXb>M1vZk*yQ{E|Xe9TZ`-qO|yaYUQk1fxtjTV^JX9M++7b2Le|g6%Vnsps8y#fhgWuw zbMpNMj~gKud@zS4j_NP=lI4y(kokPTvvC~dj{e(%)LvD0* zenb7nFmTGm;g^Qb9Zs)iV;>|?AK=rVrbc!9Ny#D#wF`@RZNK)|EUzsLyytQkir^xY+r*J6fr8R=i;%U_a6B zu`2O`T`Kk#VSn*EQ(?_Vj|Bx`t3b=mQ&#FB#>y6<3bj4eC7mL3uJ49oF)YP}@2}Fc zwP%EizCJQZ2ka1c455|V>a0OjX=XJegG`5m^O6q=M`J&-R@O-?PqZa9Qhw39%=v{OY*73RI}GBQa@1?O}~XVt63w`z~(Vm=qhO zwEu9@s!`QaCdag4vlyn1qbW}7_wBzf&?#=1p9G8ia9kZB`eDofQ1y5{Q`W?+(pie+ z{6VyV*kXEN>?^dsKwM~e9gyszbn~rBASu4?wH%*Hkz32^xqJI#GBiNSap<@e_Hn6= z+lg!bv(&D;yI{MnGRdYpnYswpH5%|dYn2BW@Id(PKiFqet#S7UDvs?+^P`PN`}y15 zf%xv>5-3`34|%|IXXyoxu2mW^D4uS9OoL&he0ptaJ}7>`$#Da?cg|;xxSaKvZaAW7 z3#i4IS)(O|wfnAOELJ6DkTcvT<96?GWq*+}{eaBT|{J#{?Yn-VK`#G02p31u@$Maw@#bKwh~Up#K|dBfLUu&t3`B`m|`V9sRO$R9k? z)F(soAi!sw6?;>6d*@VDA=daqLeakV4UtIoBLJT&ch5K>5U!~Yv9`;=u<&*3uU(xY z*gxacKvDmAwm;$up}Dx!TEXgXJ}Fu_XG+(yIC#V_pr|# zYptaAmRj9Q?!w=Rbpp_{Uf>h4;A4KN!cSGV8lj5^2sn^Iq|2y#0dw?BC(fUs}NJNhq(~u&&KDzZNSV>gxisUvF-W=f{8 zf@5+6%C${#M-()KXy8$gp>qnZYlC6wqN=c;ek^}fZ~|bTRL9OJQC=2&Ld611!QAam zYKZC`JCcRowl|}lVI??lTY)WUoXm1CE_)@m^StrRpSs^+)-_JJD7;gE6p)E*#(d12 zZ`Y4U zA*d+N-bl}fp#xYR00bQfOcT8$+vQ9$x2=Fv4+0tNAi%!d2S(N{hi8O39~^>GIMapl7T5?gDNZlyag zslLB0{_#fXCvwD)>35}OB@OIMTyh`P#K2(HF0>m1b+Si_vH%}^j(tl z2X2nXfMZCWH*kEhYWHMySHAZz1@Yg3dEQ4L)wH8(?{1C@5>G!an4S52mVh-H1K0b% zjYDnJ76N(1D^`Y6HbRA%_1k~%fH?&Kn=ghG=-~HG+4mk7jE;GNy8zGS$NibM0M@dX zTm*-Wl;n7UnoxksC$;w~33qySb5B_NWQ8mGfGQ{AJIpRkJyz*dE))F~4|Hh(4qS!v z=+jt^-om$uVExlV8=7;Qxry_d`+bl60*`4wZ)VwCwCX%Cdf*>rVWP0C4scu7jEjAYc&!0ENpuF==^pXBUu5$2WxNTN}qQg`KN z-2LCpZ+0i#927*V{L82%0xd7RTCCEUd+9;IymGb6@tZFsLgzY*n{b z;oEesNNd--6i~om>g7rc92i9ciJ8}Fo ze)JF9C;fQTI(lA~_R2#QifqyvMqqlk{zjs?V-}nnZxE2NacDV^Judf`Lhj!v<^yJPRg;H?J^ITm z)2XBUgTgT`=%jL^rW{p4pbqngn?ZZ5yjqa7cPh83j){d$$E1h`$jt{YuXaANZ_?lf zxFBICTo8b@43xzbNp*^#20Html=&V9Mhj`FZq)(vAHw_sAO8J?i971RqJk35x0r$3 zFueT9{t53KPs4}!e-QvP?Hv_7X`hXOHj}8vE4tMf25_D~ zslzTxtNnY)8G?Xf4<=fh(_S%9ahfat$I?dbL(h z|15@-;}`fjEb6>4o$9~a_lflq|7$qBJ%DW8(iDG2s(`4O8E$A&)f)o#iBnvPcnGbab?EkCp(ZiKL#aHd>L0sWrbS-Y=-jG znEum&_fEF{Ui(0#m%zJ>NjGufh^dBDqB)-ST zzo_89&#rK?;-3il9$+4e)}JvxiI*oJ@Y^vtXM0QB_dl0a;RT?jl|Nt6AMWQLG7*5d zj*GO**k>F6eQW-J)@|K2|4S75^PfLVfwv8z+NMsdTcU1uY`9HS&x*T|0 zK~7ZjNi06u8WuQmKc^eOO#hE9Gdlrqd#uY7a&jRj3-tp>&XW0k>OYrN;r*Ahda~l5 sh5lE-|104CRH-19{|6e}M51VFzuE!62p$K1Ac^e0y3U=V+fRc34|US@TmS$7 literal 0 HcmV?d00001 diff --git a/assets/logo.svg b/assets/logo.svg deleted file mode 100644 index f727382..0000000 --- a/assets/logo.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/examples/pipeline.py b/examples/pipeline.py index 5228824..a1dcc4f 100644 --- a/examples/pipeline.py +++ b/examples/pipeline.py @@ -31,12 +31,12 @@ import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -from slam.pipeline import ( +from sova.pipeline import ( SequentialPipeline, 
SequentialPipelineRuntimeParameters, YAMLConfigurationReader, ) -from slam.utils import ( +from sova.utils import ( DatasetReader, HiltiReader, KittiReader, diff --git a/examples/segmentation.py b/examples/segmentation.py index 2ba3dbe..ea817d7 100644 --- a/examples/segmentation.py +++ b/examples/segmentation.py @@ -38,9 +38,9 @@ import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -from slam.segmenter import RansacSegmenter -from slam.subdivider import SizeSubdivider -from slam.utils import HiltiReader, KittiReader, NuscenesReader, Reader +from sova.segmenter import RansacSegmenter +from sova.subdivider import SizeSubdivider +from sova.utils import HiltiReader, KittiReader, NuscenesReader, Reader if __name__ == "__main__": parser = argparse.ArgumentParser(prog="Segmentation") diff --git a/notebooks/benchmark.ipynb b/notebooks/benchmark.ipynb index 1bcb64a..3b7f802 100644 --- a/notebooks/benchmark.ipynb +++ b/notebooks/benchmark.ipynb @@ -10,38 +10,27 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 11, "id": "d009f3b8-3f04-4aac-b10a-4a2e0a34836e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Jupyter environment detected. Enabling Open3D WebVisualizer.\n", - "[Open3D INFO] WebRTC GUI backend enabled.\n", - "[Open3D INFO] WebRTCWindowSystem: HTTP handshake server disabled.\n" - ] - } - ], + "outputs": [], "source": [ "import sys\n", "import mrob\n", "import time\n", "import os\n", - "from octreelib.grid import Grid, GridConfig, VisualizationConfig\n", - "from octreelib.octree import MultiPoseOctree, OctreeConfig\n", + "from octreelib.grid import VisualizationConfig, Grid, GridConfig\n", "from typing import Tuple, List\n", "from dataclasses import dataclass\n", "import numpy as np\n", "import open3d as o3d\n", "\n", "sys.path.append(\"..\")\n", - "from slam.backend import BaregBackend, EigenFactorBackend, Backend, BackendOutput\n", - "from slam.pipeline import StaticPipeline, StaticPipelineRuntimeParameters\n", - "from slam.segmenter import Segmenter, CAPESegmenter, RansacSegmenter\n", - "from slam.subdivider import Subdivider, CountSubdivider, EigenValueSubdivider, SizeSubdivider\n", - "from slam.utils import Reader, HiltiReader, KittiReader" + "from sova.backend import BaregBackend, EigenFactorBackend, Backend, BackendOutput\n", + "from sova.pipeline import SequentialPipeline, SequentialPipelineRuntimeParameters\n", + "from sova.segmenter import Segmenter, CAPESegmenter, RansacSegmenter\n", + "from sova.subdivider import Subdivider, CountSubdivider, EigenValueSubdivider, SizeSubdivider\n", + "from sova.utils import DatasetReader, HiltiReader, KittiReader" ] }, { @@ -54,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 4, "id": "4e183c65-5a53-4152-a1e0-9eeb8afc49de", "metadata": {}, "outputs": [], @@ -72,7 +61,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 5, "id": "49fcabc0-83d2-4bdb-821e-4f5735930ca1", "metadata": {}, "outputs": [], @@ -90,7 +79,7 @@ " timestamps = np.array(timestamps)\n", " print_metrics(timestamps)\n", "\n", - "def read_patch(reader: Reader,path: str, start: int, end: int) -> List[o3d.geometry.PointCloud]:\n", + "def read_patch(reader: DatasetReader, path: str, start: int, end: int) -> List[o3d.geometry.PointCloud]:\n", " \"\"\"\n", " Reads patch of point clouds\n", " \"\"\"\n", @@ -121,7 +110,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 13, "id": 
"22088f09-504b-4221-a0df-8bd09ad2a15c", "metadata": {}, "outputs": [], @@ -159,7 +148,7 @@ " initialization_start = time.perf_counter()\n", " pipeline.grid.insert_points(\n", " middle_pose_number,\n", - " point_clouds[middle_pose_number].points,\n", + " np.array(point_clouds[middle_pose_number].points),\n", " )\n", " initialization_end = time.perf_counter() - initialization_start\n", " \n", @@ -171,7 +160,7 @@ " for pose_number, point_cloud in enumerate(point_clouds):\n", " if pose_number == middle_pose_number:\n", " continue\n", - " pipeline.grid.insert_points(pose_number, point_cloud.points)\n", + " pipeline.grid.insert_points(pose_number, np.array(point_cloud.points))\n", " distribution_end = time.perf_counter() - distribution_start\n", "\n", " segmenters_start = time.perf_counter()\n", @@ -202,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 14, "id": "1156e0c8-d70d-4c12-a9ff-773ca383912b", "metadata": {}, "outputs": [ @@ -211,39 +200,49 @@ "output_type": "stream", "text": [ "Patch 0 -> 3; Samples count = 10\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", + "[WARNING] Most likely you are not using robust optimisations\n", "First point clouds insertion stage\n", - "\tmin = 2.647s\n", - "\tmax = 2.863s\n", - "\tmean = 2.708s\n", - "\tstd = 0.066s\n", + "\tmin = 0.025s\n", + "\tmax = 0.03s\n", + "\tmean = 0.026s\n", + "\tstd = 0.001s\n", "Subdividers stage\n", - "\tmin = 8.991s\n", - "\tmax = 9.287s\n", - "\tmean = 9.077s\n", - "\tstd = 0.087s\n", + "\tmin = 0.041s\n", + "\tmax = 0.045s\n", + "\tmean = 0.043s\n", + "\tstd = 0.001s\n", "Distribution stage\n", - "\tmin = 20.751s\n", - "\tmax = 21.297s\n", - "\tmean = 20.877s\n", - "\tstd = 0.151s\n", + "\tmin = 0.086s\n", + "\tmax = 0.099s\n", + "\tmean = 0.089s\n", + "\tstd = 0.004s\n", "Segmenters stage\n", - "\tmin = 7.337s\n", - "\tmax = 9.898s\n", - "\tmean = 7.762s\n", - "\tstd = 0.769s\n", + "\tmin = 0.252s\n", + "\tmax = 0.319s\n", + "\tmean = 0.277s\n", + "\tstd = 0.02s\n", "Backend stage\n", - "\tmin = 0.096s\n", - "\tmax = 0.155s\n", - "\tmean = 0.104s\n", - "\tstd = 0.017s\n" + "\tmin = 0.004s\n", + "\tmax = 0.005s\n", + "\tmean = 0.004s\n", + "\tstd = 0.0s\n" ] } ], "source": [ "# Pipeline configuration\n", "# TODO(user): You can manipulate configuration spec below as you want\n", - "dataset_path = \"../evaluation/kitti\"\n", - "dataset_reader = KittiReader\n", + "dataset_path = \"../evaluation/hilti\"\n", + "dataset_reader = HiltiReader\n", "start = 0\n", "end = 3\n", "step = 3\n", @@ -274,9 +273,7 @@ "\n", " grid = Grid(\n", " GridConfig(\n", - " octree_type=MultiPoseOctree,\n", - " octree_config=OctreeConfig(),\n", - " grid_voxel_edge_length=4,\n", + " voxel_edge_length=initial_voxel_size,\n", " )\n", " )\n", " \n", @@ -337,7 +334,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.10.0" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml new file 
mode 100644 index 0000000..c0b5567 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,33 @@ +[tool.poetry] +name = "sova" +version = "0.0.0" +description = "SLAM on Voxel Alignment" +authors = [ + "Pavel Mokeev " +] +license = "APACHE" +readme = "README.md" +homepage = "https://github.com/prime-slam/" +repository = "https://github.com/prime-slam/sova" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Intended Audience :: Developers", + "Topic :: Software Development :: Libraries :: Python Modules", +] +exclude = ["tests"] + +[tool.poetry.dependencies] +python = "3.10.*" +mrob = "^0.0.12" +numpy = "^1.26.0" +open3d = "^0.17.0" +octreelib = "^0.0.6" +scikit-learn = "^1.3.1" +PyYAML = "~6.0.1" + +[tool.poetry.dev-dependencies] +pytest = "^7.4.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 806a377..53c97e3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,7 @@ mrob==0.0.12 -numpy==1.25.2 +numpy==1.26.0 open3d==0.17.0 -octreelib @ git+https://github.com/true-real-michael/octreelib@v0.0.5 +octreelib==0.0.6 scikit-learn==1.3.1 PyYAML~=6.0.1 -pytest~=7.4.3 -k3d==2.16.0 \ No newline at end of file +pytest~=7.4.3 \ No newline at end of file diff --git a/slam/__init__.py b/slam/__init__.py deleted file mode 100644 index 3fb3349..0000000 --- a/slam/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from slam import backend, filter, pipeline, segmenter, subdivider, typing, utils diff --git a/slam/backend/__init__.py b/slam/backend/__init__.py deleted file mode 100644 index 50831fa..0000000 --- a/slam/backend/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -import slam.backend.backend as backend_module -import slam.backend.bareg as bareg_module -import slam.backend.eigen_factor as eigen_factor_module -import slam.backend.mrob_backend as mrob_backend_module -from slam.backend.backend import * -from slam.backend.bareg import * -from slam.backend.eigen_factor import * -from slam.backend.mrob_backend import * - -__all__ = backend_module.__all__ + bareg_module.__all__ + eigen_factor_module.__all__ + mrob_backend_module.__all__ diff --git a/slam/filter/__init__.py b/slam/filter/__init__.py deleted file mode 100644 index cf38cb3..0000000 --- a/slam/filter/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import slam.filter.empty_voxel as empty_voxel_module -import slam.filter.filter as filter_module -from slam.filter.empty_voxel import * -from slam.filter.filter import * - -__all__ = empty_voxel_module.__all__ + filter_module.__all__ diff --git a/slam/pipeline/__init__.py b/slam/pipeline/__init__.py deleted file mode 100644 index cc84ac1..0000000 --- a/slam/pipeline/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -import slam.pipeline.pipeline as pipeline_module -import slam.pipeline.sequential_pipeline as sequential_pipeline_module -from slam.pipeline.configuration import ConfigurationReader, YAMLConfigurationReader -from slam.pipeline.pipeline import * -from slam.pipeline.sequential_pipeline import * - -__all__ = (pipeline_module.__all__ + - sequential_pipeline_module.__all__ + - ["ConfigurationReader", "YAMLConfigurationReader"]) diff --git a/slam/pipeline/configuration/__init__.py b/slam/pipeline/configuration/__init__.py deleted file mode 100644 index 366b47b..0000000 --- a/slam/pipeline/configuration/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import slam.pipeline.configuration.reader as reader_module -import slam.pipeline.configuration.yaml as yaml_reader_module 
-from slam.pipeline.configuration.reader import * -from slam.pipeline.configuration.yaml import * - -__all__ = reader_module.__all__ + yaml_reader_module.__all__ diff --git a/slam/segmenter/__init__.py b/slam/segmenter/__init__.py deleted file mode 100644 index 44d22c9..0000000 --- a/slam/segmenter/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -import slam.segmenter.cape as cape_module -import slam.segmenter.count as count_module -import slam.segmenter.identical as identical_module -import slam.segmenter.ransac as ransac_module -import slam.segmenter.segmenter as segmenter_module -from slam.segmenter.cape import * -from slam.segmenter.count import * -from slam.segmenter.identical import * -from slam.segmenter.ransac import * -from slam.segmenter.segmenter import * - -__all__ = (cape_module.__all__ + count_module.__all__ + segmenter_module.__all__ + - ransac_module.__all__ + identical_module.__all__) diff --git a/slam/subdivider/__init__.py b/slam/subdivider/__init__.py deleted file mode 100644 index 9327f16..0000000 --- a/slam/subdivider/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -import slam.subdivider.count as count_module -import slam.subdivider.eigen_value as eigen_value_module -import slam.subdivider.size as size_module -import slam.subdivider.subdivider as subdivider_base_module -from slam.subdivider.count import * -from slam.subdivider.eigen_value import * -from slam.subdivider.size import * -from slam.subdivider.subdivider import * - -__all__ = count_module.__all__ + eigen_value_module.__all__ + subdivider_base_module.__all__ + size_module.__all__ diff --git a/slam/typing/__init__.py b/slam/typing/__init__.py deleted file mode 100644 index 1ef26be..0000000 --- a/slam/typing/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -import slam.typing.hints as hints_module -from slam.typing.hints import * - -__all__ = hints_module.__all__ diff --git a/slam/utils/dataset_reader/__init__.py b/slam/utils/dataset_reader/__init__.py deleted file mode 100644 index a856fcc..0000000 --- a/slam/utils/dataset_reader/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -import slam.utils.dataset_reader.hilti as hilti_module -import slam.utils.dataset_reader.kitti as kitti_module -import slam.utils.dataset_reader.nuscenes as nuscenes_module -import slam.utils.dataset_reader.reader as reader_module -from slam.utils.dataset_reader.hilti import * -from slam.utils.dataset_reader.kitti import * -from slam.utils.dataset_reader.nuscenes import * -from slam.utils.dataset_reader.reader import * - -__all__ = hilti_module.__all__ + kitti_module.__all__ + nuscenes_module.__all__ + reader_module.__all__ diff --git a/sova/__init__.py b/sova/__init__.py new file mode 100644 index 0000000..a385efb --- /dev/null +++ b/sova/__init__.py @@ -0,0 +1 @@ +from sova import backend, filter, pipeline, segmenter, subdivider, typing, utils diff --git a/sova/backend/__init__.py b/sova/backend/__init__.py new file mode 100644 index 0000000..f07648a --- /dev/null +++ b/sova/backend/__init__.py @@ -0,0 +1,10 @@ +import sova.backend.backend as backend_module +import sova.backend.bareg as bareg_module +import sova.backend.eigen_factor as eigen_factor_module +import sova.backend.mrob_backend as mrob_backend_module +from sova.backend.backend import * +from sova.backend.bareg import * +from sova.backend.eigen_factor import * +from sova.backend.mrob_backend import * + +__all__ = backend_module.__all__ + bareg_module.__all__ + eigen_factor_module.__all__ + mrob_backend_module.__all__ diff --git a/slam/backend/backend.py b/sova/backend/backend.py similarity 
index 98% rename from slam/backend/backend.py rename to sova/backend/backend.py index 9e574f9..f8eef4b 100644 --- a/slam/backend/backend.py +++ b/sova/backend/backend.py @@ -3,7 +3,7 @@ from abc import ABC, abstractmethod from typing import List -from slam.typing import ArrayNx4x4 +from sova.typing import ArrayNx4x4 __all__ = ["Metric", "BackendOutput", "Backend"] diff --git a/slam/backend/bareg.py b/sova/backend/bareg.py similarity index 95% rename from slam/backend/bareg.py rename to sova/backend/bareg.py index cc2a84d..658cc91 100644 --- a/slam/backend/bareg.py +++ b/sova/backend/bareg.py @@ -1,6 +1,6 @@ from octreelib.grid import GridBase -from slam.backend.mrob_backend import MROBBackend +from sova.backend.mrob_backend import MROBBackend __all__ = ["BaregBackend"] diff --git a/slam/backend/eigen_factor.py b/sova/backend/eigen_factor.py similarity index 95% rename from slam/backend/eigen_factor.py rename to sova/backend/eigen_factor.py index e356f78..0f78885 100644 --- a/slam/backend/eigen_factor.py +++ b/sova/backend/eigen_factor.py @@ -1,6 +1,6 @@ from octreelib.grid import GridBase -from slam.backend.mrob_backend import MROBBackend +from sova.backend.mrob_backend import MROBBackend __all__ = ["EigenFactorBackend"] diff --git a/slam/backend/mrob_backend.py b/sova/backend/mrob_backend.py similarity index 98% rename from slam/backend/mrob_backend.py rename to sova/backend/mrob_backend.py index 716280a..0fdfdbd 100644 --- a/slam/backend/mrob_backend.py +++ b/sova/backend/mrob_backend.py @@ -4,7 +4,7 @@ from abc import abstractmethod from typing import Dict, List -from slam.backend.backend import Backend, BackendOutput, Metric +from sova.backend.backend import Backend, BackendOutput, Metric __all__ = ["MROBBackend"] diff --git a/sova/filter/__init__.py b/sova/filter/__init__.py new file mode 100644 index 0000000..5741fe7 --- /dev/null +++ b/sova/filter/__init__.py @@ -0,0 +1,6 @@ +import sova.filter.empty_voxel as empty_voxel_module +import sova.filter.filter as filter_module +from sova.filter.empty_voxel import * +from sova.filter.filter import * + +__all__ = empty_voxel_module.__all__ + filter_module.__all__ diff --git a/slam/filter/empty_voxel.py b/sova/filter/empty_voxel.py similarity index 89% rename from slam/filter/empty_voxel.py rename to sova/filter/empty_voxel.py index 2d286b4..5f27762 100644 --- a/slam/filter/empty_voxel.py +++ b/sova/filter/empty_voxel.py @@ -1,5 +1,5 @@ -from slam.filter.filter import Filter -from slam.typing import ArrayNx3 +from sova.filter.filter import Filter +from sova.typing import ArrayNx3 __all__ = ["EmptyVoxel"] diff --git a/slam/filter/filter.py b/sova/filter/filter.py similarity index 95% rename from slam/filter/filter.py rename to sova/filter/filter.py index 29b156d..b513353 100644 --- a/slam/filter/filter.py +++ b/sova/filter/filter.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from slam.typing import ArrayNx3 +from sova.typing import ArrayNx3 __all__ = ["Filter"] diff --git a/sova/pipeline/__init__.py b/sova/pipeline/__init__.py new file mode 100644 index 0000000..e6a37fa --- /dev/null +++ b/sova/pipeline/__init__.py @@ -0,0 +1,9 @@ +import sova.pipeline.pipeline as pipeline_module +import sova.pipeline.sequential_pipeline as sequential_pipeline_module +from sova.pipeline.configuration import ConfigurationReader, YAMLConfigurationReader +from sova.pipeline.pipeline import * +from sova.pipeline.sequential_pipeline import * + +__all__ = (pipeline_module.__all__ + + sequential_pipeline_module.__all__ + + ["ConfigurationReader", 
"YAMLConfigurationReader"]) diff --git a/sova/pipeline/configuration/__init__.py b/sova/pipeline/configuration/__init__.py new file mode 100644 index 0000000..c51e82b --- /dev/null +++ b/sova/pipeline/configuration/__init__.py @@ -0,0 +1,6 @@ +import sova.pipeline.configuration.reader as reader_module +import sova.pipeline.configuration.yaml as yaml_reader_module +from sova.pipeline.configuration.reader import * +from sova.pipeline.configuration.yaml import * + +__all__ = reader_module.__all__ + yaml_reader_module.__all__ diff --git a/slam/pipeline/configuration/reader.py b/sova/pipeline/configuration/reader.py similarity index 98% rename from slam/pipeline/configuration/reader.py rename to sova/pipeline/configuration/reader.py index 0500553..71ab93d 100644 --- a/slam/pipeline/configuration/reader.py +++ b/sova/pipeline/configuration/reader.py @@ -5,16 +5,16 @@ from abc import ABC, abstractmethod from typing import List -from slam.backend import Backend, BaregBackend, EigenFactorBackend -from slam.filter import Filter -from slam.segmenter import ( +from sova.backend import Backend, BaregBackend, EigenFactorBackend +from sova.filter import Filter +from sova.segmenter import ( CAPESegmenter, CountSegmenter, IdenticalSegmenter, RansacSegmenter, Segmenter, ) -from slam.subdivider import ( +from sova.subdivider import ( CountSubdivider, EigenValueSubdivider, SizeSubdivider, diff --git a/slam/pipeline/configuration/yaml.py b/sova/pipeline/configuration/yaml.py similarity index 82% rename from slam/pipeline/configuration/yaml.py rename to sova/pipeline/configuration/yaml.py index a3887d6..9dbe994 100644 --- a/slam/pipeline/configuration/yaml.py +++ b/sova/pipeline/configuration/yaml.py @@ -1,6 +1,6 @@ import yaml -from slam.pipeline.configuration.reader import ConfigurationReader +from sova.pipeline.configuration.reader import ConfigurationReader __all__ = ["YAMLConfigurationReader"] diff --git a/slam/pipeline/pipeline.py b/sova/pipeline/pipeline.py similarity index 93% rename from slam/pipeline/pipeline.py rename to sova/pipeline/pipeline.py index 8024f22..fe3fcf2 100644 --- a/slam/pipeline/pipeline.py +++ b/sova/pipeline/pipeline.py @@ -6,11 +6,11 @@ from dataclasses import dataclass from typing import List -from slam.backend.backend import Backend, BackendOutput -from slam.filter.filter import Filter -from slam.segmenter import Segmenter -from slam.subdivider.subdivider import Subdivider -from slam.typing.hints import ArrayNx4x4 +from sova.backend.backend import Backend, BackendOutput +from sova.filter.filter import Filter +from sova.segmenter import Segmenter +from sova.subdivider.subdivider import Subdivider +from sova.typing.hints import ArrayNx4x4 __all__ = ["PipelineRuntimeParameters", "Pipeline"] diff --git a/slam/pipeline/sequential_pipeline.py b/sova/pipeline/sequential_pipeline.py similarity index 95% rename from slam/pipeline/sequential_pipeline.py rename to sova/pipeline/sequential_pipeline.py index 74798d9..f4676c4 100644 --- a/slam/pipeline/sequential_pipeline.py +++ b/sova/pipeline/sequential_pipeline.py @@ -3,8 +3,8 @@ from dataclasses import dataclass -from slam.backend import BackendOutput -from slam.pipeline.pipeline import Pipeline, PipelineRuntimeParameters +from sova.backend import BackendOutput +from sova.pipeline.pipeline import Pipeline, PipelineRuntimeParameters __all__ = ["SequentialPipelineRuntimeParameters", "SequentialPipeline"] diff --git a/sova/segmenter/__init__.py b/sova/segmenter/__init__.py new file mode 100644 index 0000000..0593a01 --- /dev/null +++ 
b/sova/segmenter/__init__.py @@ -0,0 +1,13 @@ +import sova.segmenter.cape as cape_module +import sova.segmenter.count as count_module +import sova.segmenter.identical as identical_module +import sova.segmenter.ransac as ransac_module +import sova.segmenter.segmenter as segmenter_module +from sova.segmenter.cape import * +from sova.segmenter.count import * +from sova.segmenter.identical import * +from sova.segmenter.ransac import * +from sova.segmenter.segmenter import * + +__all__ = (cape_module.__all__ + count_module.__all__ + segmenter_module.__all__ + + ransac_module.__all__ + identical_module.__all__) diff --git a/slam/segmenter/cape.py b/sova/segmenter/cape.py similarity index 95% rename from slam/segmenter/cape.py rename to sova/segmenter/cape.py index bd0f273..aa389c7 100644 --- a/slam/segmenter/cape.py +++ b/sova/segmenter/cape.py @@ -1,8 +1,8 @@ import numpy as np from sklearn.decomposition import PCA -from slam.segmenter.segmenter import Segmenter -from slam.typing import ArrayNx3 +from sova.segmenter.segmenter import Segmenter +from sova.typing import ArrayNx3 __all__ = ["CAPESegmenter"] diff --git a/slam/segmenter/count.py b/sova/segmenter/count.py similarity index 92% rename from slam/segmenter/count.py rename to sova/segmenter/count.py index ab1cd47..ebcff42 100644 --- a/slam/segmenter/count.py +++ b/sova/segmenter/count.py @@ -1,7 +1,7 @@ import numpy as np -from slam.segmenter.segmenter import Segmenter -from slam.typing import ArrayNx3 +from sova.segmenter.segmenter import Segmenter +from sova.typing import ArrayNx3 __all__ = ["CountSegmenter"] diff --git a/slam/segmenter/identical.py b/sova/segmenter/identical.py similarity index 88% rename from slam/segmenter/identical.py rename to sova/segmenter/identical.py index 591d525..35b04fa 100644 --- a/slam/segmenter/identical.py +++ b/sova/segmenter/identical.py @@ -1,5 +1,5 @@ -from slam.segmenter.segmenter import Segmenter -from slam.typing import ArrayNx3 +from sova.segmenter.segmenter import Segmenter +from sova.typing import ArrayNx3 __all__ = ["IdenticalSegmenter"] diff --git a/slam/segmenter/ransac.py b/sova/segmenter/ransac.py similarity index 96% rename from slam/segmenter/ransac.py rename to sova/segmenter/ransac.py index 837e345..7c053f0 100644 --- a/slam/segmenter/ransac.py +++ b/sova/segmenter/ransac.py @@ -1,8 +1,8 @@ import numpy as np import open3d as o3d -from slam.segmenter.segmenter import Segmenter -from slam.typing.hints import ArrayNx3 +from sova.segmenter.segmenter import Segmenter +from sova.typing.hints import ArrayNx3 __all__ = ["RansacSegmenter"] diff --git a/slam/segmenter/segmenter.py b/sova/segmenter/segmenter.py similarity index 94% rename from slam/segmenter/segmenter.py rename to sova/segmenter/segmenter.py index f657965..91286a1 100644 --- a/slam/segmenter/segmenter.py +++ b/sova/segmenter/segmenter.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from slam.typing.hints import ArrayNx3 +from sova.typing.hints import ArrayNx3 __all__ = ["Segmenter"] diff --git a/sova/subdivider/__init__.py b/sova/subdivider/__init__.py new file mode 100644 index 0000000..c621f05 --- /dev/null +++ b/sova/subdivider/__init__.py @@ -0,0 +1,10 @@ +import sova.subdivider.count as count_module +import sova.subdivider.eigen_value as eigen_value_module +import sova.subdivider.size as size_module +import sova.subdivider.subdivider as subdivider_base_module +from sova.subdivider.count import * +from sova.subdivider.eigen_value import * +from sova.subdivider.size import * +from sova.subdivider.subdivider import * + 
+__all__ = count_module.__all__ + eigen_value_module.__all__ + subdivider_base_module.__all__ + size_module.__all__ diff --git a/slam/subdivider/count.py b/sova/subdivider/count.py similarity index 91% rename from slam/subdivider/count.py rename to sova/subdivider/count.py index c5b197e..882d1cb 100644 --- a/slam/subdivider/count.py +++ b/sova/subdivider/count.py @@ -1,5 +1,5 @@ -from slam.subdivider.subdivider import Subdivider -from slam.typing import ArrayNx3 +from sova.subdivider.subdivider import Subdivider +from sova.typing import ArrayNx3 __all__ = ["CountSubdivider"] diff --git a/slam/subdivider/eigen_value.py b/sova/subdivider/eigen_value.py similarity index 95% rename from slam/subdivider/eigen_value.py rename to sova/subdivider/eigen_value.py index 9cd1feb..2e00648 100644 --- a/slam/subdivider/eigen_value.py +++ b/sova/subdivider/eigen_value.py @@ -1,8 +1,8 @@ import numpy as np from sklearn.decomposition import PCA -from slam.subdivider.subdivider import Subdivider -from slam.typing import ArrayNx3 +from sova.subdivider.subdivider import Subdivider +from sova.typing import ArrayNx3 __all__ = ["EigenValueSubdivider"] diff --git a/slam/subdivider/size.py b/sova/subdivider/size.py similarity index 93% rename from slam/subdivider/size.py rename to sova/subdivider/size.py index 5c0d1d6..d2630aa 100644 --- a/slam/subdivider/size.py +++ b/sova/subdivider/size.py @@ -3,8 +3,8 @@ import math -from slam.subdivider.subdivider import Subdivider -from slam.typing import ArrayNx3 +from sova.subdivider.subdivider import Subdivider +from sova.typing import ArrayNx3 __all__ = ["SizeSubdivider"] diff --git a/slam/subdivider/subdivider.py b/sova/subdivider/subdivider.py similarity index 95% rename from slam/subdivider/subdivider.py rename to sova/subdivider/subdivider.py index fd23334..18719d3 100644 --- a/slam/subdivider/subdivider.py +++ b/sova/subdivider/subdivider.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from slam.typing import ArrayNx3 +from sova.typing import ArrayNx3 __all__ = ["Subdivider"] diff --git a/sova/typing/__init__.py b/sova/typing/__init__.py new file mode 100644 index 0000000..c4a33f8 --- /dev/null +++ b/sova/typing/__init__.py @@ -0,0 +1,4 @@ +import sova.typing.hints as hints_module +from sova.typing.hints import * + +__all__ = hints_module.__all__ diff --git a/slam/typing/hints.py b/sova/typing/hints.py similarity index 100% rename from slam/typing/hints.py rename to sova/typing/hints.py diff --git a/slam/utils/__init__.py b/sova/utils/__init__.py similarity index 58% rename from slam/utils/__init__.py rename to sova/utils/__init__.py index 257f59c..c592661 100644 --- a/slam/utils/__init__.py +++ b/sova/utils/__init__.py @@ -1,11 +1,11 @@ -import slam.utils.pose_readwriter as pose_readwriter_module -from slam.utils.dataset_reader import ( +import sova.utils.pose_readwriter as pose_readwriter_module +from sova.utils.dataset_reader import ( DatasetReader, HiltiReader, KittiReader, NuscenesReader, ) -from slam.utils.pose_readwriter import * +from sova.utils.pose_readwriter import * __all__ = (pose_readwriter_module.__all__ + ["DatasetReader", "HiltiReader", "KittiReader", "NuscenesReader"]) diff --git a/sova/utils/dataset_reader/__init__.py b/sova/utils/dataset_reader/__init__.py new file mode 100644 index 0000000..8ad9617 --- /dev/null +++ b/sova/utils/dataset_reader/__init__.py @@ -0,0 +1,10 @@ +import sova.utils.dataset_reader.hilti as hilti_module +import sova.utils.dataset_reader.kitti as kitti_module +import sova.utils.dataset_reader.nuscenes as 
nuscenes_module +import sova.utils.dataset_reader.reader as reader_module +from sova.utils.dataset_reader.hilti import * +from sova.utils.dataset_reader.kitti import * +from sova.utils.dataset_reader.nuscenes import * +from sova.utils.dataset_reader.reader import * + +__all__ = hilti_module.__all__ + kitti_module.__all__ + nuscenes_module.__all__ + reader_module.__all__ diff --git a/slam/utils/dataset_reader/hilti.py b/sova/utils/dataset_reader/hilti.py similarity index 89% rename from slam/utils/dataset_reader/hilti.py rename to sova/utils/dataset_reader/hilti.py index 3f3aa6a..260e3ff 100644 --- a/slam/utils/dataset_reader/hilti.py +++ b/sova/utils/dataset_reader/hilti.py @@ -1,8 +1,8 @@ import numpy as np import open3d as o3d -from slam.typing import ArrayNx4x4 -from slam.utils.dataset_reader.reader import DatasetReader +from sova.typing import ArrayNx4x4 +from sova.utils.dataset_reader.reader import DatasetReader __all__ = ["HiltiReader"] diff --git a/slam/utils/dataset_reader/kitti.py b/sova/utils/dataset_reader/kitti.py similarity index 92% rename from slam/utils/dataset_reader/kitti.py rename to sova/utils/dataset_reader/kitti.py index e901f69..b731228 100644 --- a/slam/utils/dataset_reader/kitti.py +++ b/sova/utils/dataset_reader/kitti.py @@ -3,8 +3,8 @@ import os.path -from slam.typing import ArrayNx4x4 -from slam.utils.dataset_reader.reader import DatasetReader +from sova.typing import ArrayNx4x4 +from sova.utils.dataset_reader.reader import DatasetReader __all__ = ["KittiReader"] diff --git a/slam/utils/dataset_reader/nuscenes.py b/sova/utils/dataset_reader/nuscenes.py similarity index 91% rename from slam/utils/dataset_reader/nuscenes.py rename to sova/utils/dataset_reader/nuscenes.py index dbaa162..e973d29 100644 --- a/slam/utils/dataset_reader/nuscenes.py +++ b/sova/utils/dataset_reader/nuscenes.py @@ -1,8 +1,8 @@ import numpy as np import open3d as o3d -from slam.typing import ArrayNx4x4 -from slam.utils.dataset_reader.reader import DatasetReader +from sova.typing import ArrayNx4x4 +from sova.utils.dataset_reader.reader import DatasetReader __all__ = ["NuscenesReader"] diff --git a/slam/utils/dataset_reader/reader.py b/sova/utils/dataset_reader/reader.py similarity index 94% rename from slam/utils/dataset_reader/reader.py rename to sova/utils/dataset_reader/reader.py index b43c4b8..58c7b9a 100644 --- a/slam/utils/dataset_reader/reader.py +++ b/sova/utils/dataset_reader/reader.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod -from slam.typing import ArrayNx4x4 +from sova.typing import ArrayNx4x4 __all__ = ["DatasetReader"] diff --git a/slam/utils/pose_readwriter.py b/sova/utils/pose_readwriter.py similarity index 97% rename from slam/utils/pose_readwriter.py rename to sova/utils/pose_readwriter.py index 913a190..77a67c1 100644 --- a/slam/utils/pose_readwriter.py +++ b/sova/utils/pose_readwriter.py @@ -1,6 +1,6 @@ import numpy as np -from slam.typing import Array4x4, ArrayNx4x4 +from sova.typing import Array4x4, ArrayNx4x4 __all__ = ["OptimisedPoseReadWriter"] diff --git a/tests/test_cape_segmenter.py b/tests/test_cape_segmenter.py index 5e4cf9f..4f490a7 100644 --- a/tests/test_cape_segmenter.py +++ b/tests/test_cape_segmenter.py @@ -3,8 +3,8 @@ import random -from slam.segmenter import CAPESegmenter -from slam.typing import ArrayNx3 +from sova.segmenter import CAPESegmenter +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_count_segmenter.py b/tests/test_count_segmenter.py index d59d52f..c70cc68 100644 --- 
a/tests/test_count_segmenter.py +++ b/tests/test_count_segmenter.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.segmenter import CountSegmenter -from slam.typing import ArrayNx3 +from sova.segmenter import CountSegmenter +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_count_subdivider.py b/tests/test_count_subdivider.py index 39303f9..cfe6e3a 100644 --- a/tests/test_count_subdivider.py +++ b/tests/test_count_subdivider.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.subdivider import CountSubdivider -from slam.typing import ArrayNx3 +from sova.subdivider import CountSubdivider +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_eigen_value_subdivider.py b/tests/test_eigen_value_subdivider.py index b089d85..8e504db 100644 --- a/tests/test_eigen_value_subdivider.py +++ b/tests/test_eigen_value_subdivider.py @@ -3,8 +3,8 @@ import random -from slam.subdivider import EigenValueSubdivider -from slam.typing import ArrayNx3 +from sova.subdivider import EigenValueSubdivider +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_empty_voxel_filter.py b/tests/test_empty_voxel_filter.py index ea1a3f7..c28b46a 100644 --- a/tests/test_empty_voxel_filter.py +++ b/tests/test_empty_voxel_filter.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.filter import EmptyVoxel -from slam.typing import ArrayNx3 +from sova.filter import EmptyVoxel +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_identical_segmenter.py b/tests/test_identical_segmenter.py index 8621f58..d807045 100644 --- a/tests/test_identical_segmenter.py +++ b/tests/test_identical_segmenter.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.segmenter import IdenticalSegmenter -from slam.typing import ArrayNx3 +from sova.segmenter import IdenticalSegmenter +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_pose_readwriter.py b/tests/test_pose_readwriter.py index 2f2b439..331a20e 100644 --- a/tests/test_pose_readwriter.py +++ b/tests/test_pose_readwriter.py @@ -3,8 +3,8 @@ import os -from slam.typing import ArrayNx4x4 -from slam.utils import OptimisedPoseReadWriter +from sova.typing import ArrayNx4x4 +from sova.utils import OptimisedPoseReadWriter @pytest.mark.parametrize( diff --git a/tests/test_ransac_segmenter.py b/tests/test_ransac_segmenter.py index b80faa5..1ba2d10 100644 --- a/tests/test_ransac_segmenter.py +++ b/tests/test_ransac_segmenter.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.segmenter import RansacSegmenter -from slam.typing import ArrayNx3 +from sova.segmenter import RansacSegmenter +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_sequential_pipeline.py b/tests/test_sequential_pipeline.py index c801bab..a79d7c5 100644 --- a/tests/test_sequential_pipeline.py +++ b/tests/test_sequential_pipeline.py @@ -7,12 +7,12 @@ import os from typing import List -from slam.backend import Backend, EigenFactorBackend -from slam.filter import Filter -from slam.pipeline import SequentialPipeline, SequentialPipelineRuntimeParameters -from slam.segmenter import CountSegmenter, Segmenter -from slam.subdivider import CountSubdivider, Subdivider -from slam.typing import ArrayNx3, ArrayNx4x4 +from sova.backend import Backend, EigenFactorBackend +from sova.filter import Filter +from sova.pipeline import SequentialPipeline, SequentialPipelineRuntimeParameters +from sova.segmenter import 
CountSegmenter, Segmenter +from sova.subdivider import CountSubdivider, Subdivider +from sova.typing import ArrayNx3, ArrayNx4x4 @pytest.mark.parametrize( diff --git a/tests/test_size_subdivider.py b/tests/test_size_subdivider.py index 3bb4494..9e5c57b 100644 --- a/tests/test_size_subdivider.py +++ b/tests/test_size_subdivider.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from slam.subdivider import SizeSubdivider -from slam.typing import ArrayNx3 +from sova.subdivider import SizeSubdivider +from sova.typing import ArrayNx3 @pytest.mark.parametrize( diff --git a/tests/test_yaml_configuration_reader.py b/tests/test_yaml_configuration_reader.py index fda8362..6395fa4 100644 --- a/tests/test_yaml_configuration_reader.py +++ b/tests/test_yaml_configuration_reader.py @@ -4,11 +4,11 @@ from typing import List -from slam.backend import Backend, EigenFactorBackend -from slam.filter import Filter -from slam.pipeline import YAMLConfigurationReader -from slam.segmenter import RansacSegmenter, Segmenter -from slam.subdivider import SizeSubdivider, Subdivider +from sova.backend import Backend, EigenFactorBackend +from sova.filter import Filter +from sova.pipeline import YAMLConfigurationReader +from sova.segmenter import RansacSegmenter, Segmenter +from sova.subdivider import SizeSubdivider, Subdivider @pytest.mark.parametrize(
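
---

As a quick sanity check after this rename lands, the sketch below (not part of the patch itself) imports the public API exactly as the diff moves it from `slam.*` to `sova.*`. It assumes the package was installed from test PyPI as described in step 3 of DEVELOP.md, that the mrob wheel from README.md is installed (needed by `sova.backend`), and that `<version>` is whatever tag was pushed; nothing here relies on constructor signatures that the diff does not show.

```python
# Minimal import smoke test for the renamed `sova` package.
# Assumes something like the following has already succeeded:
#   pip install --index-url https://test.pypi.org/simple/ \
#       --extra-index-url https://pypi.org/simple sova==<version>
# and that the mrob wheel from README.md is installed.

from sova.backend import Backend, BaregBackend, EigenFactorBackend
from sova.pipeline import (
    SequentialPipeline,
    SequentialPipelineRuntimeParameters,
    YAMLConfigurationReader,
)
from sova.segmenter import CAPESegmenter, RansacSegmenter, Segmenter
from sova.subdivider import SizeSubdivider, Subdivider
from sova.utils import DatasetReader, HiltiReader, KittiReader, NuscenesReader

# If these imports resolve, the wheel was built from the `sova/` package
# layout declared in pyproject.toml rather than the old `slam/` layout.
print("sova public API imports OK")
```

If any of these imports fail after installing from test PyPI, the wheel was most likely built from the old `slam/` tree, which is exactly the regression this patch is meant to rule out before the manual "Publish pip on PyPi" workflow is run against production.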