diff --git a/.gitmodules b/.gitmodules index 17c4132e..ca441591 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "data"] path = data - url = http://gitlab.forge-idi.ign.fr/Lidar/lidro-data.git + url = git@github.com:IGNF/lidro-data.git diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..dafba783 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM mambaorg/micromamba:latest as mamba_lidro +COPY environment.yml /environment.yml +USER root +RUN micromamba env create -n lidro -f /environment.yml + +FROM debian:bullseye-slim + + +WORKDIR /lidro +RUN mkdir tmp +COPY lidro lidro +COPY test test +COPY configs configs + +# Copy test data that are stored directly in the lidro-data repository ("http://gitlab.forge-idi.ign.fr/Lidar/lidro-data.git") +COPY data/pointcloud data/pointcloud diff --git a/Makefile b/Makefile index 6e6d0c54..9d370296 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,40 @@ +# Makefile to manage main tasks +# cf. https://blog.ianpreston.ca/conda/python/bash/2020/05/13/conda_envs.html#makefile +# Oneshell means I can run multiple lines in a recipe in the same shell, so I don't have to +# chain commands together with semicolon +.ONESHELL: +SHELL = /bin/bash install: - mamba env update -n lidro -f environment.yml + pip install -e . install-precommit: pre-commit install + +testing: + python -m pytest -s ./test -v + +mamba-env-create: + mamba env create -n lidro -f environment.yml + +mamba-env-update: + mamba env update -n lidro -f environment.yml + +############################## +# Docker +############################## + +PROJECT_NAME=ignimagelidar/lidro +VERSION=`python -m lidro.version` + +docker-build: + docker build -t ${PROJECT_NAME}:${VERSION} -f Dockerfile . + +docker-test: + docker run --rm -it ${PROJECT_NAME}:${VERSION} python -m pytest -s + +docker-remove: + docker rmi -f `docker images | grep ${PROJECT_NAME} | tr -s ' ' | cut -d ' ' -f 3` + +docker-deploy: + docker push ${PROJECT_NAME}:${VERSION} diff --git a/README.md b/README.md index df31a28f..8446a3ab 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,82 @@ -Lidro (Applanissement des surfaces d'eaux) est un outil permettant de créer des points virtuels le long des surfaces d'eaux afin de créer des modèles numériques cohérent avec les modèles hydrologiques. Le jeu de données en entrée correspond à un nuage de point lidar classifiés. +# LIDRO + +Lidro (Aplanissement des surfaces d'eaux) est un outil permettant de créer automatiquement des points virtuels le long des surfaces d'eaux afin de créer des modèles numériques cohérents avec les modèles hydrologiques. Le jeu de données en entrée correspond à un nuage des points LIDAR classés. + +## Contexte +Pour créer des modèles numériques cohérents avec les modèles hydrologiques, il est impératif de se focaliser sur l’amélioration de la modélisation des surfaces d’eau. ​ + +Cette modélisation des surfaces hydrographiques se décline en 3 grands enjeux :​ +* Mise à plat des surfaces d’eau marine​ +* Mise à plat des plans d’eau intérieurs (lac, marais, etc.)​ +* Mise en plan des grands cours d’eau (>5m large) pour assurer l’écoulement​. A noter que cette étape sera développée en premier. + +## Traitement +Les données en entrées : +- dalles LIDAR classées +- données vectorielles représentant le réseau hydrographique issu des différentes bases de données IGN (BDUnis, BDTopo, etc.) 
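+
+À titre d'illustration uniquement (hors code LIDRO), ces deux types d'entrées peuvent se lire ainsi en Python ; la dalle LAS citée est celle du jeu de données de test, le chemin du fichier vectoriel est hypothétique :
+```python
+import geopandas as gpd
+import laspy
+import numpy as np
+
+# Dalle LIDAR classée : coordonnées + classe de chaque point
+las = laspy.read("data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las")
+points_xyz = np.vstack((las.x, las.y, las.z)).transpose()
+classes = np.asarray(las.classification)
+print(points_xyz.shape, np.unique(classes))
+
+# Réseau hydrographique vectoriel issu des bases IGN (chemin d'exemple)
+reseau_hydro = gpd.read_file("data/vecteurs/troncons_hydrographiques.geojson")
+print(reseau_hydro.crs, len(reseau_hydro))
+```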
+
+Trois grands axes du processus à mettre en place, en distinguant l'échelle de traitement associée :
+* 1- Création de masques hydrographiques à l'échelle de la dalle LIDAR
+* 2- Création de masques hydrographiques pré-filtrés à l'échelle du chantier, soit :
+  * la suppression de ces masques dans les zones ZICAD/ZIPVA
+  * la suppression des aires < 150 m²
+  * la suppression des aires < 1000 m² hors BD IGN (grands cours d'eau < 5 m de large)
+* 3- Création de points virtuels le long de deux entités hydrographiques :
+  * Grands cours d'eau (> 5 m de large dans la BD Unis).
+  * Surfaces planes (mer, lac, étang, etc.)
+
+### Traitement des grands cours d'eau (> 5 m de large dans la BD Unis)
+
+Il existe plusieurs étapes intermédiaires :
+* 1- création automatique du tronçon hydrographique ("Squelette hydrographique", soit les tronçons hydrographiques dans la BD Unis) à partir de l'emprise du masque hydrographique "écoulement", à apparier, contrôler et corriger par la "production" (SV3D) en amont (étape manuelle)
+* 2- Analyse de la répartition en Z de l'ensemble des points LIDAR "Sol"
+* 3- Création de points virtuels nécessitant plusieurs étapes intermédiaires :
+  * Associer chaque point virtuel 2D au point le plus proche du squelette hydrographique
+  * Traitement "Z" du squelette :
+    * Analyser la répartition en Z de l'ensemble des points LIDAR extraits à l'étape précédente afin d'extraire une seule valeur d'altitude au point fictif Z le plus proche du squelette hydrographique. À noter que l'altitude correspond donc à la ligne basse de la boxplot, soit la valeur minimale en excluant les valeurs aberrantes. Pour les ponts, une étape de contrôle de la classification pourrait être mise en place
+    * Lisser les Z le long du squelette HYDRO pour assurer l'écoulement
+  * Création des points virtuels 2D tous les 0.5 mètres le long des bords du masque hydrographique "écoulement", en cohérence en "Z" avec le squelette hydrographique
+
+### Traitement des surfaces planes (mer, lac, étang, etc.)
+Pour rappel, l'eau est considérée comme horizontale sur ce type de surface.
+
+Il existe plusieurs étapes intermédiaires :
+* 1- Extraction et enregistrement temporaire des points LIDAR classés en « Sol » et « Eau » présents potentiellement à la limite +1 mètre des berges. Pour cela, on s'appuie sur l'emprise du masque hydrographique "surface plane", à apparier, contrôler et corriger par la "production" (SV3D) en amont (étape manuelle). À noter que pour le secteur maritime (Mer), il faut exclure la classe « 9 » (eau) afin d'éviter de mesurer les vagues.
+* 2- Analyse statistique de l'ensemble des points LIDAR "Sol / Eau" le long des côtes/berges afin d'obtenir une surface plane.
+  L'objectif est de créer des points virtuels spécifiques avec une information d'altitude (m) tous les 0.5 m sur les bords des surfaces maritimes et des plans d'eau à partir du masque hydrographique "surface plane". Pour cela, il existe plusieurs étapes intermédiaires :
+  * Filtrer 30% des points LIDAR les plus bas de l'étape 1, afin de supprimer les altitudes trop élevées
+  * Analyser la répartition en Z de ces points afin d'extraire une seule valeur d'altitude selon l'objet hydrographique :
+    * Pour les Plans d'eau : l'altitude correspond à la ligne basse de la boxplot, soit la valeur minimale en excluant les valeurs aberrantes,
+    * Pour la Mer : l'altitude correspond à la ligne basse de la boxplot, soit la valeur minimale en excluant les valeurs aberrantes (voir l'esquisse de calcul ci-dessous)
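+
+À titre d'illustration uniquement (ceci ne fait pas partie du code de LIDRO et les noms de variables sont hypothétiques), le calcul de la « ligne basse de la boxplot », c'est-à-dire la valeur minimale une fois les valeurs aberrantes exclues, peut s'esquisser ainsi avec numpy :
+```python
+import numpy as np
+
+def altitude_ligne_basse(z: np.ndarray) -> float:
+    """Plus petite altitude en excluant les valeurs aberrantes (moustache basse de la boxplot)."""
+    q1, q3 = np.percentile(z, [25, 75])
+    borne_basse = q1 - 1.5 * (q3 - q1)       # seuil classique de la moustache basse
+    return float(z[z >= borne_basse].min())  # valeur minimale non aberrante
+
+# Exemple avec des altitudes fictives de points "Sol / Eau" en bord de berge :
+z_points = np.array([186.1, 186.1, 186.2, 185.9, 186.0, 183.2])  # 183.2 = valeur aberrante
+print(altitude_ligne_basse(z_points))  # 185.9
+```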
+
 ## Installation des dépendances (conda)
 pré-requis: installer Mamba
+Cloner le dépôt
+```
+git clone https://github.com/IGNF/lidro.git
+```
+
+Installer Mamba (gestionnaire de paquets compatible conda), par exemple avec conda :
+```
+conda install -n base -c conda-forge mamba
+```
+ou voir la doc d'installation https://mamba.readthedocs.io/en/latest/
+
+Créer l'environnement : les commandes suivantes doivent être lancées depuis le dossier lidro/ (attention pas lidro/lidro)

 ```
 mamba env update -n lidro -f environment.yml
 conda activate lidro
 ```

-## Données de test
+## Contribuer
+Installer pre-commit
+```
+pre-commit install
+```
+## Données de test
 Les données de test se trouvent dans un autre projet ici : http://gitlab.forge-idi.ign.fr/Lidar/lidro-data

 Ce projet est un sous module git, qui sera téléchargé dans le dossier `data`, via la commande:
@@ -18,8 +85,26 @@ Ce projet est un sous module git, qui sera téléchargé dans le dossier `data`,
 git submodule update --init --recursive
 ```

+## Utilisation
+Lidro se lance sur un seul fichier LAS/LAZ ou sur un dossier
+
+Voir les tests fonctionnels en bas du README.
+
+
 ## Tests
+### Tests fonctionnels
+Tester sur un seul fichier LAS/LAZ
+```
+example_create_mask_by_tile.sh
+```
+Tester sur un dossier
+```
+example_create_mask_default.sh
+```
+
+### Tests unitaires
+Pour lancer les tests :
 ```
 python -m pytest -s
-```
\ No newline at end of file
+```
diff --git a/configs/configs_lidro.yaml b/configs/configs_lidro.yaml
index 88b8c27b..42ed485d 100644
--- a/configs/configs_lidro.yaml
+++ b/configs/configs_lidro.yaml
@@ -17,8 +17,13 @@ io:
   no_data_value: -9999
   tile_size: 1000

+raster:
+  # size for dilation
+  dilation_size: 3
+
 filter:
-  keep_classes: [0, 1, 2, 3, 4, 5, 6, 17, 66]
+  # Classes to be considered as "non-water"
+  keep_classes: [0, 1, 2, 3, 4, 5, 6, 17, 65, 66, 67]

 hydra:
   output_subdir: null
diff --git a/environment.yml b/environment.yml
index e6ffa758..3d43c141 100644
--- a/environment.yml
+++ b/environment.yml
@@ -3,6 +3,7 @@ channels:
   - conda-forge
 dependencies:
   - python==3.11.*
+  - pip
   - pytest
   - gdal
   - laspy
@@ -10,7 +11,6 @@ dependencies:
   - scipy
   - geojson
   - rasterio
-  - shapely
   - geopandas
   - pyproj
   - pdal>=2.6
@@ -24,4 +24,4 @@ dependencies:
   - flake8
   - isort
   - pre-commit
-  - pip
+
diff --git a/example_lidro_by_tile.sh b/example_create_mask_by_tile.sh
similarity index 81%
rename from example_lidro_by_tile.sh
rename to example_create_mask_by_tile.sh
index f9c4de70..fd2dd27b 100644
--- a/example_lidro_by_tile.sh
+++ b/example_create_mask_by_tile.sh
@@ -1,5 +1,5 @@
 # For lauching Mask Hydro
-python -m lidro.main \
+python -m lidro.main_create_mask \
 io.input_filename=LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las \
 io.input_dir=./data/pointcloud/ \
 io.output_dir=./tmp/ \
diff --git a/example_lidro_default.sh b/example_create_mask_default.sh
similarity index 71%
rename from example_lidro_default.sh
rename to example_create_mask_default.sh
index 3b639bee..8818c5b4 100644
--- a/example_lidro_default.sh
+++ b/example_create_mask_default.sh
@@ -1,5 +1,5 @@
 # For lauching Mask Hydro
-python -m lidro.main \
+python -m lidro.main_create_mask \
io.input_dir=./data/pointcloud/ \ io.output_dir=./tmp/ \ diff --git a/lidro/pointcloud/filter_las.py b/lidro/create_mask_hydro/pointcloud/filter_las.py similarity index 85% rename from lidro/pointcloud/filter_las.py rename to lidro/create_mask_hydro/pointcloud/filter_las.py index 8b8c42df..24d91f88 100644 --- a/lidro/pointcloud/filter_las.py +++ b/lidro/create_mask_hydro/pointcloud/filter_las.py @@ -4,14 +4,14 @@ def filter_pointcloud(input_points: np.array, classes: list): - """Filter pointcloud and extracts point coordinates (X, Y, Z) + """Filter pointcloud by class Args: input_points (np.array): Numpy array containing point coordinates (X, Y, Z, classification) classes (list): List of classes to use for the filtering Returns: - filtered_points (np.ndarray) : Numpy array containing point coordinates (X, Y, Z) filtering + filtered_points (np.ndarray) : Numpy array containing point coordinates (X, Y, Z) after filtering """ # Filter pointcloud by classe(s) points_mask = np.isin(input_points[:, -1], classes) diff --git a/lidro/pointcloud/io.py b/lidro/create_mask_hydro/pointcloud/io.py similarity index 100% rename from lidro/pointcloud/io.py rename to lidro/create_mask_hydro/pointcloud/io.py diff --git a/lidro/pointcloud/read_las.py b/lidro/create_mask_hydro/pointcloud/read_las.py similarity index 91% rename from lidro/pointcloud/read_las.py rename to lidro/create_mask_hydro/pointcloud/read_las.py index c2bc7317..1335b106 100644 --- a/lidro/pointcloud/read_las.py +++ b/lidro/create_mask_hydro/pointcloud/read_las.py @@ -11,7 +11,8 @@ def read_pointcloud(las_file: str): las_file (str): Path to the LAS file Returns: - input_point (np.ndarray) : Numpy array containing point coordinates (X, Y, Z, classification) + input_point (np.ndarray) : Numpy array containing point coordinates and classification + (X, Y, Z, classification) crs (dict): a pyproj CRS object """ # Read pointcloud diff --git a/lidro/create_mask_hydro/rasters/create_mask_raster.py b/lidro/create_mask_hydro/rasters/create_mask_raster.py new file mode 100644 index 00000000..18fb41e0 --- /dev/null +++ b/lidro/create_mask_hydro/rasters/create_mask_raster.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" Create mask non hydro from pointcloud filtering +""" +from typing import List, Tuple + +import numpy as np +import scipy.ndimage + +from lidro.create_mask_hydro.pointcloud.filter_las import filter_pointcloud +from lidro.create_mask_hydro.pointcloud.io import get_pointcloud_origin +from lidro.create_mask_hydro.pointcloud.read_las import read_pointcloud + + +def create_occupancy_map(points: np.array, tile_size: int, pixel_size: float, origin: Tuple[int, int]): + """Create a binary image to extract water surfaces + + Args: + points (np.array): array from pointcloud + tile_size (int): size of the raster grid (in meters) + pixel_size (float): distance between each node of the raster grid (in meters) + origin (Tuple[int, int]): Coordinates of the top left corner of the top-left pixel + + Returns: + bins (np.array): 2D binary array (x, y) with 1 for if there is at least one point of the input cloud + in the corresponding pixel, 0 otherwise + """ + # Compute number of points per bin + bins_x = np.arange(origin[0], origin[0] + tile_size + pixel_size, pixel_size) + bins_y = np.arange(origin[1] - tile_size, origin[1] + pixel_size, pixel_size) + _bins, _, _ = np.histogram2d(points[:, 1], points[:, 0], bins=[bins_y, bins_x]) + bins = np.flipud(_bins) + bins = bins > 0 # binarize map + return bins + + +def detect_hydro_by_tile(filename: str, 
tile_size: int, pixel_size: float, classes: List[int], dilation_size: int):
+    """Detect hydrographic surfaces in a tile from the classified points of the input pointcloud
+    A hydrographic surface is defined as a surface where there are no points from any class other than water
+    The output hydrographic surface mask is dilated to make sure that the masks are continuous when merged with their
+    neighbours
+    Args:
+        filename (str): input pointcloud
+        tile_size (int): size of the raster grid (in meters)
+        pixel_size (float): distance between each node of the raster grid (in meters)
+        classes (List[int]): List of classes to use for the binarisation (points with other
+                             classification values are ignored)
+        dilation_size (int): size of the structuring element for dilation
+
+    Returns:
+        water_mask (np.array): 2D binary array (x, y) of the water presence from the point cloud
+        pcd_origin (list): top left corner of the tile containing the point cloud
+                           (inferred from pointcloud bounding box and input tile size)
+    """
+    # Read pointcloud, and extract coordinates (X, Y, Z, and classification) of all points
+    array, crs = read_pointcloud(filename)
+
+    # Extracts parameters for binarisation
+    pcd_origin = get_pointcloud_origin(array, tile_size)
+
+    # Filter pointcloud by classes: keep only points from the selected (non-water) classes
+    array_filter = filter_pointcloud(array, classes)
+
+    # create occupancy map (2D)
+    occupancy = create_occupancy_map(array_filter, tile_size, pixel_size, pcd_origin)
+
+    # Revert occupancy map to keep pixels where there is no point of the selected classes
+    detected_water = np.logical_not(occupancy)
+
+    # Apply a mathematical morphology operation: DILATION
+    # / ! \ NOT "CLOSING", due to the reduction in the size of hydro masks (tile borders)
+    # / ! \ WITH "CLOSING" => Masks Hydro are no longer continuous when they are merged
+    water_mask = scipy.ndimage.binary_dilation(
+        detected_water, structure=np.ones((dilation_size, dilation_size))
+    ).astype(np.uint8)
+
+    return water_mask, pcd_origin
diff --git a/lidro/create_mask_hydro/vectors/convert_to_vector.py b/lidro/create_mask_hydro/vectors/convert_to_vector.py
new file mode 100644
index 00000000..867e2f36
--- /dev/null
+++ b/lidro/create_mask_hydro/vectors/convert_to_vector.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+""" Vectorize
+"""
+import geopandas as gpd
+import numpy as np
+from rasterio.features import shapes as rasterio_shapes
+from rasterio.transform import from_origin
+from shapely.geometry import shape as shapely_shape
+
+from lidro.create_mask_hydro.rasters.create_mask_raster import detect_hydro_by_tile
+
+
+def create_hydro_vector_mask(
+    filename: str, output: str, pixel_size: float, tile_size: int, classes: list, crs: str, dilatation_size: int
+):
+    """Create a vector mask of hydro surfaces in a tile from the points classification of the input LAS/LAZ file,
+    and save it as a GeoJSON file.
+ + + Args: + filename (str): path to the input pointcloud + output (str): path to output geoJSON + pixel_size (float): distance between each node of the intermediate raster grid (in meters) + tile_size (int): size of the intermediate raster grid (in meters) + classes (list): List of classes to consider as water (points with other classification values are ignored) + crs (str): a pyproj CRS object used to create the output GeoJSON file + dilatation_size (int): size for dilatation raster + """ + # Read a binary image representing hydrographic surface(s) + binary_image, pcd_origin = detect_hydro_by_tile(filename, tile_size, pixel_size, classes, dilatation_size) + + # Extract origin + origin_x = pcd_origin[0] + origin_y = pcd_origin[1] + # Calculate "transform" + transform = from_origin(origin_x, origin_y, pixel_size, pixel_size) + + # Convert binary image to vector + geometry = [ + shapely_shape(shapedict) + for shapedict, value in rasterio_shapes( + binary_image.astype(np.int16), mask=None, connectivity=8, transform=transform + ) + if value != 0 + ] + # save the result + gdf = gpd.GeoDataFrame(geometry=geometry, crs=crs) + gdf.to_file(output, driver="GeoJSON", crs=crs) diff --git a/lidro/main.py b/lidro/main.py deleted file mode 100644 index 304a8d12..00000000 --- a/lidro/main.py +++ /dev/null @@ -1,70 +0,0 @@ -""" Main script for calculate Mask HYDRO 1 -""" - -import logging -import os - -import hydra -from omegaconf import DictConfig -from pyproj import CRS - -from lidro.vectors.convert_to_vector import create_hydro_vector_mask - - -@hydra.main(config_path="../configs/", config_name="configs_lidro.yaml", version_base="1.2") -def main(config: DictConfig): - """Run HYDRO - - Args: - config (DictConfig): configs_lidro.yaml with severals parameters - - Raises: - RuntimeError: Check have a las and an input directory - """ - logging.basicConfig(level=logging.INFO) - - # Check input/output files and folders - input_dir = config.io.input_dir - output_dir = config.io.output_dir - - # Check pointcloud tile - initial_las_filename = config.io.input_filename - - # Parameters for creating Mask Hydro - pixel_size = config.io.pixel_size - tile_size = config.io.tile_size - crs = CRS.from_user_input(config.io.srid) - classe = config.filter.keep_classes - - os.makedirs(output_dir, exist_ok=True) - - if initial_las_filename is None and input_dir is not None: - # Lauch creating mask hydro by tiles - if os.path.isdir(input_dir): - for file in os.listdir(input_dir): - tilename, _ = os.path.splitext(file) - input_file = os.path.join(input_dir, f"{tilename}{_}") - output_file = os.path.join(output_dir, f"MaskHydro_{tilename}.GeoJSON") - logging.info(f"\nCreate Mask Hydro 1 for tile : {tilename}") - create_hydro_vector_mask(input_file, output_file, pixel_size, tile_size, classe, crs) - else: - raise RuntimeError( - """An input directory doesn't exist. - For more info run the same command by adding --help""" - ) - elif initial_las_filename is not None and input_dir is not None: - tilename = os.path.splitext(initial_las_filename)[0] - # Lauch creating mask by one tile: - input_file = os.path.join(input_dir, initial_las_filename) - output_file = os.path.join(output_dir, f"MaskHydro_{tilename}.GeoJSON") - logging.info(f"\nCreate Mask Hydro 1 for tile : {tilename}") - create_hydro_vector_mask(input_file, output_file, pixel_size, tile_size, classe, crs) - else: - raise RuntimeError( - """In input you haven't to give a las, an input directory. 
-            For more info run the same command by adding --help"""
-        )
-
-
-if __name__ == "__main__":
-    main()
diff --git a/lidro/main_create_mask.py b/lidro/main_create_mask.py
new file mode 100644
index 00000000..3e01fb1c
--- /dev/null
+++ b/lidro/main_create_mask.py
@@ -0,0 +1,74 @@
+""" Main script for calculating Mask HYDRO 1
+"""
+
+import logging
+import os
+
+import hydra
+from omegaconf import DictConfig
+from pyproj import CRS
+
+from lidro.create_mask_hydro.vectors.convert_to_vector import create_hydro_vector_mask
+
+
+@hydra.main(config_path="../configs/", config_name="configs_lidro.yaml", version_base="1.2")
+def main(config: DictConfig):
+    """Create a vector mask of hydro surfaces from the points classification of the input LAS/LAZ file,
+    and save it as a GeoJSON file.
+
+    It can run either on a single file, or on each file of a folder
+
+    Args:
+        config (DictConfig): hydra configuration (configs/configs_lidro.yaml by default)
+        It contains the algorithm parameters and the input/output parameters
+    """
+    logging.basicConfig(level=logging.INFO)
+
+    # Check input/output files and folders
+    input_dir = config.io.input_dir
+    if input_dir is None:
+        raise ValueError("""config.io.input_dir is empty, please provide an input directory in the configuration""")
+
+    if not os.path.isdir(input_dir):
+        raise FileNotFoundError(f"""The input directory ({input_dir}) doesn't exist.""")
+
+    output_dir = config.io.output_dir
+    if output_dir is None:
+        raise ValueError("""config.io.output_dir is empty, please provide an output directory in the configuration""")
+
+    os.makedirs(output_dir, exist_ok=True)
+
+    # If input filename is not provided, lidro runs on the whole input_dir directory
+    initial_las_filename = config.io.input_filename
+
+    # Parameters for creating Mask Hydro
+    pixel_size = config.io.pixel_size
+    tile_size = config.io.tile_size
+    crs = CRS.from_user_input(config.io.srid)
+    classe = config.filter.keep_classes
+    dilation_size = config.raster.dilation_size
+
+    def main_on_one_tile(filename):
+        """Launch the Mask Hydro creation on one tile
+
+        Args:
+            filename (str): name of the LAS file
+        """
+        tilename = os.path.splitext(filename)[0]  # tile name, i.e. the LAS filename without extension
+        input_file = os.path.join(input_dir, filename)  # path to the LAS file
+        output_file = os.path.join(output_dir, f"MaskHydro_{tilename}.GeoJSON")  # path to the Mask Hydro file
+        logging.info(f"\nCreate Mask Hydro 1 for tile : {tilename}")
+        create_hydro_vector_mask(input_file, output_file, pixel_size, tile_size, classe, crs, dilation_size)
+
+    if initial_las_filename:
+        # Launch the creation of the mask for a single tile
+        main_on_one_tile(initial_las_filename)
+
+    else:
+        # Launch the creation of Mask Hydro tile by tile
+        for file in os.listdir(input_dir):
+            main_on_one_tile(file)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lidro/rasters/create_mask_raster.py b/lidro/rasters/create_mask_raster.py
deleted file mode 100644
index 21b3ae54..00000000
--- a/lidro/rasters/create_mask_raster.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-""" Create mask non hydro from pointcloud filtering
-"""
-from typing import List, Tuple
-
-import numpy as np
-import scipy.ndimage
-
-from lidro.pointcloud.filter_las import filter_pointcloud
-from lidro.pointcloud.io import get_pointcloud_origin
-from lidro.pointcloud.read_las import read_pointcloud
-
-
-def create_occupancy_map(points: np.array, tile_size: int, pixel_size: float, origin: Tuple[int, int]):
-    """Create a binary image to extract water surfaces
-
-    Args:
-        points (np.array): array from pointcloud
-
tile_size (int): size of the raster grid (in meters) - pixel_size (float): distance between each node of the raster grid (in meters) - origin (Tuple[int, int]): - - Returns: - bins (np.array): bins - """ - # Compute number of points per bin - bins_x = np.arange(origin[0], origin[0] + tile_size + pixel_size, pixel_size) - bins_y = np.arange(origin[1] - tile_size, origin[1] + pixel_size, pixel_size) - _bins, _, _ = np.histogram2d(points[:, 1], points[:, 0], bins=[bins_y, bins_x]) - bins = np.flipud(_bins) - bins = np.where(bins > 0, 0, 1) - - return bins - - -def detect_hydro_by_tile(filename: str, tile_size: int, pixel_size: float, classes: List[int]): - """ "Detect hydrographic surfaces by tile origin extracted from the point cloud - - Args: - filename (str): input pointcloud - tile_size (int): size of the raster grid (in meters) - pixel_size (float): distance between each node of the raster grid (in meters) - classes (List[int]): List of classes to use for the binarisation (points with other - classification values are ignored) - - Returns: - bins (np.array): array from pointcloud - pcd_origin (list): extract origin from pointcloud - """ - # Read pointcloud, and extract coordinates (X, Y, Z, and classification) of all points - array, crs = read_pointcloud(filename) - - # Extracts parameters for binarisation - pcd_origin = get_pointcloud_origin(array, tile_size) - - # Filter pointcloud by classes - array_filter = filter_pointcloud(array, classes) - - # create occupancy map (2D) - _bins = create_occupancy_map(array_filter, tile_size, pixel_size, pcd_origin) - - # Apply a mathematical morphology operations: clonsing - closing_bins = scipy.ndimage.binary_closing(_bins, structure=np.ones((5, 5))).astype(np.uint8) - - return closing_bins, pcd_origin diff --git a/lidro/vectors/convert_to_vector.py b/lidro/vectors/convert_to_vector.py deleted file mode 100644 index f7084b6f..00000000 --- a/lidro/vectors/convert_to_vector.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -""" Vectorize -""" -import geopandas as gpd -import numpy as np -from rasterio.features import shapes as rasterio_shapes -from rasterio.transform import from_origin -from shapely.geometry import shape as shapely_shape - -from lidro.rasters.create_mask_raster import detect_hydro_by_tile - - -def create_hydro_vector_mask(filename: str, output: str, pixel_size: float, tile_size: int, classes: list, crs: str): - """Converts a binary array to GeoJSON (multipolygon), with polygon smoothing. 
-
-    Args:
-        filename (str): input pointcloud
-        output (str): path to output
-        pixel_size (float): distance between each node of the raster grid (in meters)
-        tile_size (int): size of the raster grid (in meters)
-        classes (list): List of classes to use for the binarisation (points with other
-                        classification values are ignored)
-        crs (str): a pyproj CRS object
-    """
-    # Read a binary image representing hydrographic surface(s)
-    binary_image, pcd_origin = detect_hydro_by_tile(filename, tile_size, pixel_size, classes)
-
-    # Extract origin
-    origin_x = pcd_origin[0]
-    origin_y = pcd_origin[1]
-    # Calculate "transform"
-    transform = from_origin(origin_x, origin_y, pixel_size, pixel_size)
-
-    # Convert binary image to vector
-    geometry = [
-        shapely_shape(shapedict)
-        for shapedict, value in rasterio_shapes(
-            binary_image.astype(np.int16), mask=None, connectivity=8, transform=transform
-        )
-        if value != 0
-    ]
-    # keep only water's area > 1000 m²
-    filter_geometry = [geom for geom in geometry if geom.area > 1000]
-    nb_filter_geometry = len(filter_geometry)
-
-    gdf = gpd.GeoDataFrame(
-        {
-            "layer": [ii for ii, geom in enumerate(filter_geometry)] * np.ones(nb_filter_geometry),
-            "dalles": filename,
-            "geometry": filter_geometry,
-        },
-        geometry="geometry",
-        crs=crs,
-    )
-
-    # save the result
-    gdf.to_file(output, driver="GeoJSON", crs=crs)
diff --git a/pyproject.toml b/pyproject.toml
index b44f244a..7d7a0398 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,3 +25,9 @@ exclude = '''

 [tool.isort]
 profile = "black"
+
+
+[tool.pytest.ini_options]
+markers = [
+    "returnfile: marks tests that return a file to visualize results (select with '--return-file')"
+]
\ No newline at end of file
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 00000000..07a4d5c1
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,23 @@
+import pytest
+
+
+def pytest_addoption(parser):
+    parser.addoption(
+        "--return-file", action="store_true", default=False, help="run tests that return a file to visualize results"
+    )
+
+
+def pytest_configure(config):
+    config.addinivalue_line(
+        "markers", "returnfile: marks tests that return a file to visualize results (select with '--return-file')"
+    )
+
+
+def pytest_collection_modifyitems(config, items):
+    if config.getoption("--return-file"):
+        # --return-file given in cli: do not skip tests marked with "returnfile"
+        return
+    skip_returnfile = pytest.mark.skip(reason="need --return-file option to run")
+    for item in items:
+        if "returnfile" in item.keywords:
+            item.add_marker(skip_returnfile)
diff --git a/test/pointcloud/test_filter_las.py b/test/pointcloud/test_filter_las.py
new file mode 100644
index 00000000..15ce3bc7
--- /dev/null
+++ b/test/pointcloud/test_filter_las.py
@@ -0,0 +1,45 @@
+import os
+import shutil
+from pathlib import Path
+
+import numpy as np
+
+from lidro.create_mask_hydro.pointcloud.filter_las import filter_pointcloud
+
+TMP_PATH = Path("./tmp/create_mask_hydro/pointcloud/filter_las")
+
+
+def setup_module(module):
+    if TMP_PATH.is_dir():
+        shutil.rmtree(TMP_PATH)
+    os.makedirs(TMP_PATH)
+
+
+def test_filter_pointcloud_default():
+    array_points = np.array(
+        [
+            [6062.44, 6626308.5, 186.1, 2],
+            [6062.44, 6626308.0, 186.1, 2],
+            [6062.44, 6626306.5, 186.1, 2],
+            [6062.44, 6626306.0, 186.12, 1],
+            [6062.44, 6626306.0, 186.1, 1],
+            [6062.5, 6626304.5, 186.22, 2],
+            [6062.06, 6626309.0, 186.08, 2],
+            [6062.06, 6626308.5, 186.1, 5],
+            [6062.06, 6626308.5, 186.1, 5],
+            [6062.06, 6626308.0, 186.1, 5],
+            [6062.06, 6626307.5, 186.1, 5],
+            [6062.13, 6626307.5, 186.12, 2],
+
[6062.13, 6626307.0, 186.12, 2], + [6062.06, 6626306.5, 186.1, 2], + [6062.13, 6626305.5, 186.16, 2], + [6062.19, 6626303.0, 186.26, 2], + [6062.19, 6626301.5, 186.36, 2], + [6062.19, 6626301.0, 186.36, 5], + ] + ) + + output = filter_pointcloud(array_points, [2, 3, 4]) + assert isinstance(output, np.ndarray) is True + assert np.all(np.isin(output[:, -1], [2, 3, 4])) + assert output.shape[0] == 11 diff --git a/test/pointcloud/test_io.py b/test/pointcloud/test_io.py new file mode 100644 index 00000000..c8e79ef0 --- /dev/null +++ b/test/pointcloud/test_io.py @@ -0,0 +1,26 @@ +import os +import shutil +from pathlib import Path + +import laspy +import numpy as np + +from lidro.create_mask_hydro.pointcloud.io import get_pointcloud_origin + +TMP_PATH = Path("./tmp/create_mask_hydro/pointcloud/io") + +LAS_FILE = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" + + +def setup_module(module): + if TMP_PATH.is_dir(): + shutil.rmtree(TMP_PATH) + os.makedirs(TMP_PATH) + + +def test_pointcloud_default(): + las = laspy.read(LAS_FILE) + input_points = np.vstack((las.x, las.y, las.z)).transpose() + expected_origin = (706000, 6627000) + origin_x, origin_y = get_pointcloud_origin(points=input_points, tile_size=1000) + assert (origin_x, origin_y) == expected_origin # get pointcloud origin diff --git a/test/pointcloud/test_read_las.py b/test/pointcloud/test_read_las.py new file mode 100644 index 00000000..38778af8 --- /dev/null +++ b/test/pointcloud/test_read_las.py @@ -0,0 +1,26 @@ +import os +import shutil +from pathlib import Path + +import numpy as np +from pyproj import CRS + +from lidro.create_mask_hydro.pointcloud.read_las import read_pointcloud + +TMP_PATH = Path("./tmp/create_mask_hydro/pointcloud/io") + +LAS_FILE = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" + + +def setup_module(module): + if TMP_PATH.is_dir(): + shutil.rmtree(TMP_PATH) + os.makedirs(TMP_PATH) + + +def test_read_pointcloud_return_format_okay(): + output, crs = read_pointcloud(LAS_FILE) + + assert isinstance(output, np.ndarray) is True + + assert crs == CRS.from_user_input("EPSG:2154") diff --git a/test/rasters/test_create_mask_raster.py b/test/rasters/test_create_mask_raster.py new file mode 100644 index 00000000..078dac2d --- /dev/null +++ b/test/rasters/test_create_mask_raster.py @@ -0,0 +1,75 @@ +import os +import shutil +from pathlib import Path + +import numpy as np +import pytest +import rasterio +from rasterio.transform import from_origin + +from lidro.create_mask_hydro.rasters.create_mask_raster import ( + create_occupancy_map, + detect_hydro_by_tile, +) + +TMP_PATH = Path("./tmp/create_mask_hydro/rasters/create_mask_raster") + +LAS_FILE = "./data/pointcloud/Semis_2021_0830_6291_LA93_IGN69.laz" + + +tile_size = 1000 +pixel_size = 1 + + +def setup_module(module): + if TMP_PATH.is_dir(): + shutil.rmtree(TMP_PATH) + os.makedirs(TMP_PATH) + + +def test_create_occupancy_map_default(): + # test on points that are on a tile with origin = (10, 10) + # if we hav tile_size = 10, this means that the tile extent is: + # x: [10, 20] + # y: [0, 10] + # points are stored as x, y, w + points = np.array([[10.5, 1.5, 0], [10.5, 2.5, 1], [10.5, 1.5, 1]]) + expected_occupancy = np.zeros([10, 10]) + expected_occupancy[-2, 0] = 1 # numpy array is stored as y, x and upper row first (unlike occupancy map) + expected_occupancy[-3, 0] = 1 + print(expected_occupancy) + occupancy_map = create_occupancy_map(points, tile_size=10, pixel_size=1, origin=(10, 10)) + print(occupancy_map) + assert np.all(occupancy_map == 
expected_occupancy) + + +@pytest.mark.returnfile +def test_detect_hydro_by_tile_return_file(): + output_tif = TMP_PATH / "Semis_2021_0830_6291_LA93_IGN69_size.tif" + array, origin = detect_hydro_by_tile( + LAS_FILE, tile_size, pixel_size, classes=[0, 1, 2, 3, 4, 5, 6, 17, 66], dilation_size=1 + ) + + assert isinstance(array, np.ndarray) is True + assert list(array.shape) == [tile_size / pixel_size] * 2 + assert np.any(array) # Check that not all values are 0 + assert not np.all(array) # Check that not all values are 1 + + # Transform + transform = from_origin(origin[0], origin[1], 1, 1) + + # Save the result + with rasterio.open( + output_tif, + "w", + driver="GTiff", + height=array.shape[0], + width=array.shape[1], + count=1, + dtype=rasterio.uint8, + crs="EPSG:2154", + transform=transform, + ) as dst: + dst.write(array, 1) + + assert Path(output_tif).exists() diff --git a/test/test_convert_to_vector.py b/test/test_convert_to_vector.py deleted file mode 100644 index e07373bb..00000000 --- a/test/test_convert_to_vector.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -from pathlib import Path - -from lidro.vectors.convert_to_vector import vectorize_bins - -las_file = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" -output = "./tmp/vectorize_bins/Test_MaskHydro_LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.geojson" - -crs = 'PROJCS["RGF93 v1 / Lambert-93",GEOGCS["RGF93 v1",DATUM["Reseau_Geodesique_Francais_1993_v1",\ - SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],\ - AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],\ - UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],\ - PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["latitude_of_origin",46.5],\ - PARAMETER["central_meridian",3],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],\ - PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],\ - UNIT["metre",1,AUTHORITY["EPSG","9001"]],\ - AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","2154"]]' - - -def setup_module(module): - os.makedirs("tmp/vectorize_bins", exist_ok=True) - - -def test_input_exist(): - assert Path(las_file).exists() - - -def test_vectorize_bins_save_output(): - vectorize_bins(las_file, output, 1, 1000, [0, 1, 2, 3, 4, 5, 6, 17, 66], crs) - - assert Path(output).exists() diff --git a/test/test_create_mask.py b/test/test_create_mask.py deleted file mode 100644 index 107778ae..00000000 --- a/test/test_create_mask.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -from pathlib import Path - -import numpy as np -import rasterio -from rasterio.transform import from_origin - -from lidro.rasters.create_mask_raster import detect_hydro_by_tile - -las_file_test = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" -las_file = "./data/pointcloud/Semis_2021_0830_6291_LA93_IGN69.laz" -output_tif = "./tmp/detect_hydro/Semis_2021_0830_6291_LA93_IGN69_size.tif" - -tile_size = 1000 -pixel_size = 1 - - -def setup_module(module): - os.makedirs("tmp/detect_hydro", exist_ok=True) - - -def test_input_exist(): - assert Path(las_file).exists() - - -def test_detect_hydro_by_tile_check_type(): - array, origin = detect_hydro_by_tile(las_file_test, tile_size, pixel_size, classes=[0, 1, 2, 3, 4, 5, 6, 17, 66]) - assert isinstance(array, np.ndarray) is True - - -def test_detect_hydro_by_tile_save_output(): - array, origin = detect_hydro_by_tile(las_file, tile_size, pixel_size, classes=[0, 1, 2, 3, 4, 5, 6, 17, 66]) - - # Transform - transform = 
from_origin(830000, 6291000, 1, 1) - - # Save the result - with rasterio.open( - output_tif, - "w", - driver="GTiff", - height=array.shape[0], - width=array.shape[1], - count=1, - dtype=rasterio.uint8, - crs="EPSG:2154", - transform=transform, - ) as dst: - dst.write(array, 1) - - assert Path(output_tif).exists() diff --git a/test/test_filter_las.py b/test/test_filter_las.py deleted file mode 100644 index dc4bab31..00000000 --- a/test/test_filter_las.py +++ /dev/null @@ -1,51 +0,0 @@ -import csv -import os -from pathlib import Path - -import numpy as np - -from lidro.pointcloud.filter_las import filter_pointcloud - -array_points = np.array( - [ - [6062.44, 6626308.5, 186.1, 2], - [6062.44, 6626308.0, 186.1, 2], - [6062.44, 6626306.5, 186.1, 2], - [6062.44, 6626306.0, 186.12, 1], - [6062.44, 6626306.0, 186.1, 1], - [6062.5, 6626304.5, 186.22, 2], - [6062.06, 6626309.0, 186.08, 2], - [6062.06, 6626308.5, 186.1, 5], - [6062.06, 6626308.5, 186.1, 5], - [6062.06, 6626308.0, 186.1, 5], - [6062.06, 6626307.5, 186.1, 5], - [6062.13, 6626307.5, 186.12, 2], - [6062.13, 6626307.0, 186.12, 2], - [6062.06, 6626306.5, 186.1, 2], - [6062.13, 6626305.5, 186.16, 2], - [6062.19, 6626303.0, 186.26, 2], - [6062.19, 6626301.5, 186.36, 2], - [6062.19, 6626301.0, 186.36, 5], - ] -) - -csv_file = "./tmp/filter_las/pointcloud_filter.csv" - - -def setup_module(module): - os.makedirs("tmp/filter_las", exist_ok=True) - - -def test_return_format_okay(): - output = filter_pointcloud(array_points, [2, 3, 4, 5]) - assert isinstance(output, np.ndarray) is True - - -def test_filter_pointcloud_save_output(): - array = filter_pointcloud(array_points, [0, 1, 2]) - - with open(csv_file, "w", newline="") as csvfile: - writer = csv.writer(csvfile) - writer.writerows(array) - - assert Path(csv_file).exists() diff --git a/test/test_get_pointcloud_origin.py b/test/test_get_pointcloud_origin.py deleted file mode 100644 index 4e4e7357..00000000 --- a/test/test_get_pointcloud_origin.py +++ /dev/null @@ -1,14 +0,0 @@ -import laspy -import numpy as np - -from lidro.utils.get_pointcloud_origin import get_pointcloud_origin - -LAS_FILE = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" -LAS = laspy.read(LAS_FILE) -INPUT_POINTS = np.vstack((LAS.x, LAS.y, LAS.z)).transpose() -EXPECTED_ORIGIN = (706000, 6627000) - - -def test_get_pointcloud_origin_return_origin(): - origin_x, origin_y = get_pointcloud_origin(points=INPUT_POINTS, tile_size=1000) - assert (origin_x, origin_y) == EXPECTED_ORIGIN diff --git a/test/test_main.py b/test/test_main.py deleted file mode 100644 index 9830f610..00000000 --- a/test/test_main.py +++ /dev/null @@ -1,45 +0,0 @@ -import os -import subprocess as sp -from pathlib import Path - -from hydra import compose, initialize - -from lidro.main import main - -INPUT_DIR = Path("data") / "pointcloud" -OUTPUT_DIR = Path("tmp") - - -def setup_module(module): - os.makedirs("tmp/main", exist_ok=True) - os.makedirs("tmp/main_lidro", exist_ok=True) - - -def test_main_run_okay(): - cmd = """python -m lidro.main \ - io.input_dir="/home/MDupays/Documents/lidro/data/pointcloud/"\ - io.output_dir="/home/MDupays/Documents/lidro/tmp/main/" - """ - sp.run(cmd, shell=True, check=True) - - -def test_main_lidro_default(): - input_dir = INPUT_DIR - output_dir = OUTPUT_DIR / "main_lidro" - pixel_size = 1 - tile_size = 1000 - srid = 2154 - with initialize(version_base="1.2", config_path="../configs"): - # config is relative to a module - cfg = compose( - config_name="configs_lidro", - overrides=[ - f"io.input_dir={input_dir}", 
- f"io.output_dir={output_dir}", - f"io.pixel_size={pixel_size}", - f"io.tile_size={tile_size}", - f"io.srid={srid}", - ], - ) - main(cfg) - assert (Path(output_dir) / "MaskHydro_LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.GeoJSON").is_file() diff --git a/test/test_main_create_mask.py b/test/test_main_create_mask.py new file mode 100644 index 00000000..37e07e4f --- /dev/null +++ b/test/test_main_create_mask.py @@ -0,0 +1,131 @@ +import os +import subprocess as sp +from pathlib import Path + +import pytest +from hydra import compose, initialize + +from lidro.main_create_mask import main + +INPUT_DIR = Path("data") / "pointcloud" +OUTPUT_DIR = Path("tmp") / "create_mask_hydro/main" + + +def setup_module(module): + os.makedirs("tmp/create_mask_hydro/main", exist_ok=True) + + +def test_main_run_okay(): + repo_dir = Path.cwd().parent + cmd = f"""python -m lidro.main_create_mask \ + io.input_dir="{repo_dir}/lidro/data/pointcloud/"\ + io.output_dir="{repo_dir}/lidro/tmp/create_mask_hydro/main/" + """ + sp.run(cmd, shell=True, check=True) + + +def test_main_lidro_default(): + input_dir = INPUT_DIR + output_dir = OUTPUT_DIR / "main_lidro_default" + pixel_size = 1 + tile_size = 1000 + srid = 2154 + with initialize(version_base="1.2", config_path="../configs"): + # config is relative to a module + cfg = compose( + config_name="configs_lidro", + overrides=[ + f"io.input_dir={input_dir}", + f"io.output_dir={output_dir}", + f"io.pixel_size={pixel_size}", + f"io.tile_size={tile_size}", + f"io.srid={srid}", + ], + ) + main(cfg) + assert (Path(output_dir) / "MaskHydro_LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.GeoJSON").is_file() + + +def test_main_lidro_input_file(): + input_dir = INPUT_DIR + output_dir = OUTPUT_DIR / "main_lidro_input_file" + input_filename = "LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" + pixel_size = 1 + tile_size = 1000 + srid = 2154 + with initialize(version_base="1.2", config_path="../configs"): + # config is relative to a module + cfg = compose( + config_name="configs_lidro", + overrides=[ + f"io.input_filename={input_filename}", + f"io.input_dir={input_dir}", + f"io.output_dir={output_dir}", + f"io.pixel_size={pixel_size}", + f"io.tile_size={tile_size}", + f"io.srid={srid}", + ], + ) + main(cfg) + assert (Path(output_dir) / "MaskHydro_LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.GeoJSON").is_file() + + +def test_main_lidro_fail_no_input_dir(): + output_dir = OUTPUT_DIR / "main_no_input_dir" + pixel_size = 1 + tile_size = 1000 + srid = 2154 + with initialize(version_base="1.2", config_path="../configs"): + # config is relative to a module + cfg = compose( + config_name="configs_lidro", + overrides=[ + f"io.output_dir={output_dir}", + f"io.pixel_size={pixel_size}", + f"io.tile_size={tile_size}", + f"io.srid={srid}", + ], + ) + with pytest.raises(ValueError): + main(cfg) + + +def test_main_lidro_fail_wrong_input_dir(): + output_dir = OUTPUT_DIR / "main_wrong_input_dir" + pixel_size = 1 + tile_size = 1000 + srid = 2154 + with initialize(version_base="1.2", config_path="../configs"): + # config is relative to a module + cfg = compose( + config_name="configs_lidro", + overrides=[ + "io.input_dir=some_random_input_dir", + f"io.output_dir={output_dir}", + f"io.pixel_size={pixel_size}", + f"io.tile_size={tile_size}", + f"io.srid={srid}", + ], + ) + with pytest.raises(FileNotFoundError): + main(cfg) + + +def test_main_lidro_fail_no_output_dir(): + input_dir = INPUT_DIR + pixel_size = 1 + tile_size = 1000 + srid = 2154 + with initialize(version_base="1.2", config_path="../configs"): + # config 
is relative to a module + cfg = compose( + config_name="configs_lidro", + overrides=[ + f"io.input_dir={input_dir}", + f"io.pixel_size={pixel_size}", + f"io.tile_size={tile_size}", + f"io.srid={srid}", + ], + ) + with pytest.raises(ValueError): + main(cfg) diff --git a/test/test_read_las.py b/test/test_read_las.py deleted file mode 100644 index 5ea9eb12..00000000 --- a/test/test_read_las.py +++ /dev/null @@ -1,30 +0,0 @@ -from pathlib import Path - -import numpy as np - -from lidro.pointcloud.read_las import read_pointcloud - -las_file = "./data/pointcloud/LHD_FXX_0706_6627_PTS_C_LAMB93_IGN69_TEST.las" - - -def test_input_exist(): - assert Path(las_file).exists() - - -def test_read_pointcloud_return_format_okay(): - output, crs = read_pointcloud(las_file) - - assert isinstance(output, np.ndarray) is True - - assert ( - crs - == 'PROJCS["RGF93 v1 / Lambert-93",GEOGCS["RGF93 v1",DATUM["Reseau_Geodesique_Francais_1993_v1",\ - SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],\ - AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],\ - UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],\ - PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["latitude_of_origin",46.5],\ - PARAMETER["central_meridian",3],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],\ - PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],\ - UNIT["metre",1,AUTHORITY["EPSG","9001"]],\ - AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","2154"]]' - ) diff --git a/test/vectors/test_convert_to_vector.py b/test/vectors/test_convert_to_vector.py new file mode 100644 index 00000000..8678c381 --- /dev/null +++ b/test/vectors/test_convert_to_vector.py @@ -0,0 +1,60 @@ +import json +import os +import shutil +from pathlib import Path + +from lidro.create_mask_hydro.vectors.convert_to_vector import create_hydro_vector_mask + +TMP_PATH = Path("./tmp/create_mask_hydro/vectors/convert_to_vector") + +las_file = "./data/pointcloud/Semis_2021_0830_6291_LA93_IGN69.laz" +output = "./tmp/create_mask_hydro/vectors/convert_to_vector/MaskHydro_Semis_2021_0830_6291_LA93_IGN69.GeoJSON" +output_main = "./tmp/create_mask_hydro/main/main_lidro_default/MaskHydro_Semis_2021_0830_6291_LA93_IGN69.GeoJSON" + + +def setup_module(module): + if TMP_PATH.is_dir(): + shutil.rmtree(TMP_PATH) + os.makedirs(TMP_PATH) + + +def test_create_hydro_vector_mask_default(): + crs = 'PROJCS["RGF93 v1 / Lambert-93",GEOGCS["RGF93 v1",DATUM["Reseau_Geodesique_Francais_1993_v1",\ + SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],\ + AUTHORITY["EPSG","6171"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],\ + UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4171"]],\ + PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["latitude_of_origin",46.5],\ + PARAMETER["central_meridian",3],PARAMETER["standard_parallel_1",49],PARAMETER["standard_parallel_2",44],\ + PARAMETER["false_easting",700000],PARAMETER["false_northing",6600000],\ + UNIT["metre",1,AUTHORITY["EPSG","9001"]],\ + AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","2154"]]' + + create_hydro_vector_mask(las_file, output, 1, 1000, [0, 1, 2, 3, 4, 5, 6, 17, 66], crs, 3) + + assert Path(output).exists() + + +def test_check_structure_default(): + # Output + with open(output, "r") as f: + geojson_data = json.load(f) + + with open(output_main, "r") as f: + geojson_data_main = json.load(f) + + # CHECK STRUCTURE + assert "type" in geojson_data + assert 
geojson_data["type"] == "FeatureCollection" + assert "features" in geojson_data + assert isinstance(geojson_data["features"], list) + + # CHECK POLYGON + for feature in geojson_data["features"]: + geometry = feature["geometry"] + coordinates = geometry["coordinates"] + + for feature in geojson_data_main["features"]: + geometry_main = feature["geometry"] + coordinates_main = geometry_main["coordinates"] + + assert coordinates[0] == coordinates_main[0]