Skip to content

Commit

Permalink
Merge pull request #102 from IGNF/epsg
Browse files Browse the repository at this point in the history
Epsg
  • Loading branch information
leavauchier authored Apr 10, 2024
2 parents 8824a5b + abca16a commit fc99627
Show file tree
Hide file tree
Showing 23 changed files with 604 additions and 280 deletions.
191 changes: 95 additions & 96 deletions .github/workflows/cicd.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,105 +12,104 @@ on:
- main
- staging-*
env:
nexus_server: 10.128.81.69:8082
nexus_server: docker-registry.ign.fr

jobs:
build_test_deploy:
runs-on: self-hosted

steps:

- name: Checkout branch
uses: actions/checkout@v2

- name: replace BD_UNI credentials
run: |
cp configs/bd_uni_connection_params/credentials_template.yaml configs/bd_uni_connection_params/credentials.yaml
sed -i '/user:/c\user: invite' configs/bd_uni_connection_params/credentials.yaml
sed -i '/pwd:/c\pwd: ${{ secrets.PASSWORD_BD_UNI }}' configs/bd_uni_connection_params/credentials.yaml
- name: build docker image
run: docker build -t lidar_prod .

- name: Check code neatness (linter)
run: docker run lidar_prod flake8

- name: Run tests & get coverage - fast ones go first.
run: >
docker run --network host
lidar_prod
python -m
pytest -rA -v -m "not slow" --ignore=actions-runner
- name: Run slow tests last (evaluation on large file)
run: >
docker run --network host
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/evaluation/:/lidar/tests/files/large/
lidar_prod
python -m
pytest -rA -v -m "slow" --ignore=actions-runner --no-cov
- name: Test building module from CLI on a LAS subset.
run: >
docker run --network host
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/:/inputs/
-v /var/data/cicd/CICD_github_assets/M11.1/outputs/:/outputs/
lidar_prod
python
lidar_prod/run.py
+task=apply_on_building
paths.src_las=/inputs/Semis_2021_0937_6537_LA93_IGN69.150mx100m.for_full_building_module.las
paths.output_dir=/outputs/
- name: Test vegetation/unclassified detection from CLI on a LAS subset.
run: >
docker run
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/:/inputs/
-v /var/data/cicd/CICD_github_assets/M11.1/outputs/:/outputs/
lidar_prod
python
lidar_prod/run.py
+task=identify_vegetation_unclassified
data_format=vegetation_unclassified.yaml
paths.src_las=/inputs/888000_6614000.subset.las
paths.output_dir=/outputs/
- name: Tag the docker image with branch name
if: github.event_name == 'push'
run: |
docker tag lidar_prod:latest lidar_prod:${{github.ref_name}}
docker run lidar_prod:${{github.ref_name}} bash # Dry run image so that it is not pruned
# docker save lidar_prod:${{github.ref_name}} -o /var/data/cicd/CICD_github_assets/CICD_docker_images/lidar_prod_${{github.ref_name}}.tar # This needs writing rights to the mounted path
# get version number and date, to tag the image pushed to nexus
- name: Get version number
id: tag
run: |
echo "::set-output name=version::$(docker run lidar_prod grep '__version__' package_metadata.yaml| cut -d\" -f2)"
echo "::set-output name=date::$(date '+%Y.%m.%d')"
# show possible tags, for debugging purpose
- name: Print tag
run: |
echo "${{steps.tag.outputs.version}}"
echo "${{steps.tag.outputs.date}}"
- name: push main docker on nexus (tagged with a date)
# we push on nexus an image from the main branch when it has been updated (push or accepted pull request)
if: ((github.ref_name == 'main') && (github.event_name == 'push'))
run: |
docker tag lidar_prod $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{steps.tag.outputs.date}}
docker login $nexus_server --username svc_lidarhd --password ${{ secrets.PASSWORD_SVC_LIDARHD }}
docker push $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{steps.tag.outputs.date}}
- name: push branch docker on nexus (tagged with the branch name)
# we push on nexus an image from a branch when it's pushed
if: ((github.event_name == 'push') && (github.ref_name != 'main'))
run: |
docker tag lidar_prod $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{github.ref_name}}
docker login $nexus_server --username svc_lidarhd --password ${{ secrets.PASSWORD_SVC_LIDARHD }}
docker push $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{github.ref_name}}
- name: Clean dangling docker images
if: always() # always do it, even if something failed
run: docker system prune --force # remove dangling docker images, without asking user for confirmation
- name: Checkout branch
uses: actions/checkout@v4

- name: replace BD_UNI credentials
run: |
cp configs/bd_uni_connection_params/credentials_template.yaml configs/bd_uni_connection_params/credentials.yaml
sed -i '/user:/c\user: invite' configs/bd_uni_connection_params/credentials.yaml
sed -i '/pwd:/c\pwd: ${{ secrets.PASSWORD_BD_UNI }}' configs/bd_uni_connection_params/credentials.yaml
- name: build docker image
run: docker build --build-arg http_proxy=http://proxy.ign.fr:3128/ --build-arg https_proxy=http://proxy.ign.fr:3128/ -t lidar_prod .

- name: Check code neatness (linter)
run: docker run lidar_prod flake8

- name: Run tests & get coverage - fast ones go first.
run: >
docker run --network host
lidar_prod
python -m
pytest -rA -vv -m "not slow" --ignore=actions-runner
- name: Run slow tests last (evaluation on large file)
run: >
docker run --network host
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/evaluation/:/lidar/tests/files/large/
lidar_prod
python -m
pytest -rA -v -m "slow" --ignore=actions-runner --no-cov
- name: Test building module from CLI on a LAS subset.
run: >
docker run --network host
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/:/inputs/
-v /var/data/cicd/CICD_github_assets/M11.1/outputs/:/outputs/
lidar_prod
python
lidar_prod/run.py
+task=apply_on_building
paths.src_las=/inputs/Semis_2021_0937_6537_LA93_IGN69.150mx100m.for_full_building_module.las
paths.output_dir=/outputs/
- name: Test vegetation/unclassified detection from CLI on a LAS subset.
run: >
docker run
-v /var/data/cicd/CICD_github_assets/M11.1/inputs/:/inputs/
-v /var/data/cicd/CICD_github_assets/M11.1/outputs/:/outputs/
lidar_prod
python
lidar_prod/run.py
+task=identify_vegetation_unclassified
data_format=vegetation_unclassified.yaml
paths.src_las=/inputs/888000_6614000.subset.las
paths.output_dir=/outputs/
- name: Tag the docker image with branch name
if: github.event_name == 'push'
run: |
docker tag lidar_prod:latest lidar_prod:${{github.ref_name}}
docker run lidar_prod:${{github.ref_name}} bash # Dry run image so that it is not pruned
# docker save lidar_prod:${{github.ref_name}} -o /var/data/cicd/CICD_github_assets/CICD_docker_images/lidar_prod_${{github.ref_name}}.tar # This needs writing rights to the mounted path
# get version number and date, to tag the image pushed to nexus
- name: Get version number
id: tag
run: |
echo "::set-output name=version::$(docker run lidar_prod grep '__version__' package_metadata.yaml| cut -d\" -f2)"
echo "::set-output name=date::$(date '+%Y.%m.%d')"
# show possible tags, for debugging purpose
- name: Print tag
run: |
echo "${{steps.tag.outputs.version}}"
echo "${{steps.tag.outputs.date}}"
- name: push main docker on nexus (tagged with a date)
# we push on nexus an image from the main branch when it has been updated (push or accepted pull request)
if: ((github.ref_name == 'main') && (github.event_name == 'push'))
run: |
docker tag lidar_prod $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{steps.tag.outputs.date}}
docker login $nexus_server --username svc_lidarhd --password ${{ secrets.PASSWORD_SVC_LIDARHD }}
docker push $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{steps.tag.outputs.date}}
- name: push branch docker on nexus (tagged with the branch name)
# we push on nexus an image from a branch when it's pushed
if: ((github.event_name == 'push') && (github.ref_name != 'main'))
run: |
docker tag lidar_prod $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{github.ref_name}}
docker login $nexus_server --username svc_lidarhd --password ${{ secrets.PASSWORD_SVC_LIDARHD }}
docker push $nexus_server/lidar_hd/lidar_prod:${{steps.tag.outputs.version}}-${{github.ref_name}}
- name: Clean dangling docker images
if: always() # always do it, even if something failed
run: docker system prune --force # remove dangling docker images, without asking user for confirmation
21 changes: 10 additions & 11 deletions .github/workflows/gh-pages.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,9 @@ name: "Documentation Build"
on:
push:
branches:
- main # <- only on main branch
- main # <- only on main branch

jobs:

build-and-deploy:
runs-on: ubuntu-latest

Expand All @@ -20,20 +19,20 @@ jobs:
steps:
# Checkout the repository
- name: "Checkout"
uses: actions/checkout@v2
uses: actions/checkout@v4

# See https://github.com/conda-incubator/setup-miniconda#caching-environments

# Setup empty conda environment
- name: Setup a conda-incubator with an empty conda env
uses: conda-incubator/setup-miniconda@v2
with:
python-version: 3.9.12
miniforge-variant: Mambaforge
miniforge-version: latest
use-mamba: true
# Environment to create and activate for next steps
activate-environment: lidar_prod
python-version: 3.9.12
miniforge-variant: Mambaforge
miniforge-version: latest
use-mamba: true
# Environment to create and activate for next steps
activate-environment: lidar_prod

# Cache the env
# See https://github.com/conda-incubator/setup-miniconda#caching-environments
Expand Down Expand Up @@ -66,5 +65,5 @@ jobs:
- name: "Deploy Github Pages"
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
BRANCH: gh-pages # <- Branch where generated doc files will be committed
FOLDER: ./docs/build/html/ # <- Dir where .nojekyll is created and from which to deploy github pages.
BRANCH: gh-pages # <- Branch where generated doc files will be committed
FOLDER: ./docs/build/html/ # <- Dir where .nojekyll is created and from which to deploy github pages.
10 changes: 5 additions & 5 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.4.0
rev: v4.6.0
hooks:
# list of supported hooks: https://pre-commit.com/hooks.html
- id: trailing-whitespace
Expand All @@ -12,26 +12,26 @@ repos:

# python code formatting
- repo: https://github.com/psf/black
rev: 20.8b1
rev: 24.3.0
hooks:
- id: black
args: [--line-length, "99"]

# python import sorting
- repo: https://github.com/PyCQA/isort
rev: 5.8.0
rev: 5.13.2
hooks:
- id: isort

# yaml formatting
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.3.0
rev: v4.0.0-alpha.8
hooks:
- id: prettier
types: [yaml]

# python code analysis
- repo: https://github.com/PyCQA/flake8
rev: 3.9.2
rev: 7.0.0
hooks:
- id: flake8
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# main

## 1.10.0
- Add support for EPSG reference other than 2154

### 1.9.14
- Be robust to pgsql2shp warnings when dealing with empty tables (i.e. no buildings).

Expand Down
21 changes: 8 additions & 13 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,17 +1,12 @@
FROM mambaorg/micromamba:latest

# set the IGN proxy, otherwise apt-get and other applications don't work
# from within our self-hosted action runner
ENV http_proxy 'http://192.168.4.9:3128/'
ENV https_proxy 'http://192.168.4.9:3128/'
# Fix mambaorg/micromamba tag (lastest was not updated on the ci computer)
FROM mambaorg/micromamba:bookworm-slim

# all the apt-get installs
USER root
RUN apt-get update && apt-get upgrade -y && apt-get install -y \
software-properties-common \
wget \
git \
postgis

RUN apt-get update \
&& apt-get upgrade -y \
&& apt-get install -y postgis>=3.3.0

# Only copy necessary files to set up the environment, in order
# to use docker caching if requirements files were not updated.
Expand All @@ -20,7 +15,7 @@ WORKDIR /tmp
COPY ./setup_env/ .

# install the python packages via anaconda
RUN micromamba create --yes --file /tmp/requirements.yml
RUN micromamba create --file /tmp/requirements.yml

# Sets the environment name (since it is not named "base")
# This ensures that env is activated when using "docker run ..."
Expand All @@ -33,7 +28,7 @@ RUN micromamba list
RUN echo "Make sure pdal is installed:"
RUN python -c "import pdal"

# /lidar becomes the working directory, where the repo content
# /lidar becomes the working directory, where the repo content
# (the context of this Dockerfile) is copied.
WORKDIR /lidar
COPY . .
Expand Down
5 changes: 4 additions & 1 deletion configs/data_format/default.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# EPSG code to override the las spatial reference
epsg: 2154

# Those names connect the logics between successive tasks
las_dimensions:
# input
Expand All @@ -18,7 +21,7 @@ las_dimensions:
# Intermediary channels
cluster_id: ClusterID # pdal-defined -> created by clustering operations
uni_db_overlay: BDTopoOverlay # user-defined -> a 0/1 flag for presence of a BDUni vector
candidate_buildings_flag: F_CandidateB # -> a 0/1 flag identifying candidate buildings found by rule- based classification
candidate_buildings_flag: F_CandidateB # -> a 0/1 flag identifying candidate buildings found by rule- based classification
ClusterID_candidate_building: CID_CandidateB # -> Cluster index from BuildingValidator, 0 if no cluster, 1-n otherwise
ClusterID_confirmed_or_high_proba: CID_IsolatedOrConfirmed # -> Cluster index from BuildingCompletor, 0 if no cluster, 1-n otherwise
completion_non_candidate_flag: F_NonCandidateCompletion # --> a 0/1 flag for non candidates points with high proba and close to confirmed buildings
Expand Down
5 changes: 4 additions & 1 deletion configs/data_format/vegetation_unclassified.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# EPSG code to override the las spatial reference
epsg: 2154

# Those names connect the logics between successive tasks
las_dimensions:
# input
Expand All @@ -14,7 +17,7 @@ las_dimensions:
# Intermediary channels
cluster_id: ClusterID # pdal-defined -> created by clustering operations
uni_db_overlay: BDTopoOverlay # user-defined -> a 0/1 flag for presence of a BDUni vector
candidate_buildings_flag: F_CandidateB # -> a 0/1 flag identifying candidate buildings found by rule- based classification
candidate_buildings_flag: F_CandidateB # -> a 0/1 flag identifying candidate buildings found by rule- based classification
ClusterID_candidate_building: CID_CandidateB # -> Cluster index from BuildingValidator, 0 if no cluster, 1-n otherwise
ClusterID_confirmed_or_high_proba: CID_IsolatedOrConfirmed # -> Cluster index from BuildingCompletor, 0 if no cluster, 1-n otherwise

Expand Down
Loading

0 comments on commit fc99627

Please sign in to comment.