From 9701a4a9eef739f14984cc246e81131e676c08ec Mon Sep 17 00:00:00 2001 From: Sharon Fitzpatrick Date: Thu, 22 Feb 2024 09:29:24 -0800 Subject: [PATCH] #228 add new tests and update shoreline.py --- src/coastseg/shoreline.py | 39 +++++-- tests/conftest.py | 17 ++++ tests/test_coastseg_map.py | 123 ++++++++++++++++++---- tests/test_common.py | 202 ++++++++++--------------------------- tests/test_shoreline.py | 25 +++++ 5 files changed, 228 insertions(+), 178 deletions(-) diff --git a/src/coastseg/shoreline.py b/src/coastseg/shoreline.py index 93c75d86..a79b7ba1 100644 --- a/src/coastseg/shoreline.py +++ b/src/coastseg/shoreline.py @@ -93,7 +93,14 @@ def __str__(self): if not self.gdf.empty: geom_str = str(self.gdf.iloc[0]["geometry"])[:100] + "...)" # Get CRS information - crs_info = f"CRS: {self.gdf.crs}" if self.gdf.crs else "CRS: None" + if self.gdf.empty: + crs_info = "CRS: None" + else: + if self.gdf is not None and hasattr(self.gdf, 'crs'): + crs_info = f"CRS: {self.gdf.crs}" if self.gdf.crs else "CRS: None" + else: + crs_info = "CRS: None" + ids = [] if "id" in self.gdf.columns: ids = self.gdf["id"].astype(str) return f"Shoreline:\nself.gdf:\n\n{crs_info}\n- Columns and Data Types:\n{col_info}\n\n- First 3 Rows:\n{first_rows}\n geometry: {geom_str}\nIDs:\n{ids}" @@ -110,11 +117,18 @@ def __repr__(self): if not self.gdf.empty: geom_str = str(self.gdf.iloc[0]["geometry"])[:100] + "...)" # Get CRS information - crs_info = f"CRS: {self.gdf.crs}" if self.gdf.crs else "CRS: None" + if self.gdf.empty: + crs_info = "CRS: None" + else: + if self.gdf is not None and hasattr(self.gdf, 'crs'): + crs_info = f"CRS: {self.gdf.crs}" if self.gdf.crs else "CRS: None" + else: + crs_info = "CRS: None" + + ids = [] if "id" in self.gdf.columns: ids = self.gdf["id"].astype(str) return f"Shoreline:\nself.gdf:\n\n{crs_info}\n- Columns and Data Types:\n{col_info}\n\n- First 3 Rows:\n{first_rows}\n geometry: {geom_str}\nIDs:\n{ids}" - def initialize_shorelines( self, bbox: Optional[gpd.GeoDataFrame] = None, @@ -210,6 +224,21 @@ def get_clipped_shoreline( def get_intersecting_shoreline_files( self, bbox: gpd.GeoDataFrame, bounding_boxes_location: str = "" ) -> List[str]: + """ + Retrieves a list of intersecting shoreline files based on the given bounding box. + + Args: + bbox (gpd.GeoDataFrame): The bounding box to use for finding intersecting shoreline files. + bounding_boxes_location (str, optional): The location to store the bounding box files. If not provided, + it defaults to the download location specified during object initialization. + + Returns: + List[str]: A list of intersecting shoreline file paths. + + Raises: + ValueError: If no intersecting shorelines were available within the bounding box. + FileNotFoundError: If no shoreline files were found at the download location. + """ # load the intersecting shoreline files bounding_boxes_location = ( bounding_boxes_location @@ -220,10 +249,8 @@ def get_intersecting_shoreline_files( intersecting_files = get_intersecting_files(bbox, bounding_boxes_location) if not intersecting_files: + logger.warning("No intersecting shoreline files were found.") return [] - raise ValueError( - "No intersecting shorelines shorelines were available within the bounding box. Try drawing a new bounding box elsewhere." 
- ) # Download any missing shoreline files shoreline_files = self.get_shoreline_files( diff --git a/tests/conftest.py b/tests/conftest.py index 9a87eb39..d9fff1b3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ # This file is meant to hold fixtures that can be used for testing # These fixtures set up data that can be used as inputs for the tests, so that no code is repeated import os +import io import json import pytest import tempfile @@ -16,6 +17,22 @@ script_dir = os.path.dirname(os.path.abspath(__file__)) +@pytest.fixture(scope="session") +def box_no_shorelines_transects(): + geojson = { + "type": "FeatureCollection", + "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, + "features": [ + { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -82.823127, 44.023466 ], [ -82.823127, 44.041917 ], [ -82.802875, 44.041917 ], [ -82.802875, 44.023466 ], [ -82.823127, 44.023466 ] ] ] } } + ] + } + + # Convert the GeoJSON into a string + geojson_str = json.dumps(geojson) + # Convert the string into a file-like object + geojson_file = io.StringIO(geojson_str) + # Read the GeoJSON file into a GeoDataFrame + return gpd.read_file(geojson_file) @pytest.fixture(scope="session") def config_json_no_sitename_dir(): diff --git a/tests/test_coastseg_map.py b/tests/test_coastseg_map.py index f099c642..3191000f 100644 --- a/tests/test_coastseg_map.py +++ b/tests/test_coastseg_map.py @@ -4,6 +4,7 @@ from coastseg import transects from coastseg import roi from coastseg import exceptions +from coastseg import bbox from coastseg import file_utilities from coastseg import coastseg_map from coastseg import common @@ -11,7 +12,6 @@ import pytest import geopandas as gpd from ipyleaflet import GeoJSON -from shapely import geometry # def test_set_roi_settings(): @@ -28,7 +28,26 @@ def test_imports(): def test_init_coastseg_map_no_map(): coastsegmap=coastseg_map.CoastSeg_Map(create_map=False) assert coastsegmap.map ==None + +def test_get_roi_ids_no_rois(): + """ + Test case to verify the behavior of get_roi_ids() method when there are no ROIs present. + """ + coastsegmap = coastseg_map.CoastSeg_Map() + assert coastsegmap.get_roi_ids() == [] +def test_get_roi_ids_with_rois(valid_coastseg_map_with_settings, valid_rois_filepath): + """ + Test the get_roi_ids() method of CoastSegMap class when ROIs are loaded onto the map. + + Args: + valid_coastseg_map_with_settings (CoastSegMap): A valid instance of CoastSegMap class. + valid_rois_filepath (str): The filepath of the valid ROIs file. 
+ """ + actual_coastsegmap = valid_coastseg_map_with_settings + # test if rois will be correctly loaded onto map + actual_coastsegmap.load_feature_on_map("rois", file=valid_rois_filepath) + assert actual_coastsegmap.get_roi_ids() == ["17","30","35"] def test_save_config_invalid_inputs( valid_coastseg_map, @@ -54,6 +73,40 @@ def test_save_config_invalid_inputs( # save config will not work without ROIs loaded onto map valid_coastseg_map_with_settings.save_config() +def test_load_feature_on_map_fail_load_default_shorelines(box_no_shorelines_transects): + """Fail to load default shorelines if the bbox doesn't contain any shorelines""" + coastsegmap=coastseg_map.CoastSeg_Map(create_map=False) + coastsegmap.bbox = bbox.Bounding_Box(box_no_shorelines_transects) + # attempt to load default shorelines on the map ( it should fail) + with pytest.raises(exceptions.Object_Not_Found): + coastsegmap.load_feature_on_map("shorelines") + +def test_load_feature_on_map_fail_load_default_transects(box_no_shorelines_transects): + """Fail to load default transects if the bbox doesn't contain any transects""" + coastsegmap=coastseg_map.CoastSeg_Map(create_map=False) + coastsegmap.bbox = bbox.Bounding_Box(box_no_shorelines_transects) + # attempt to load default transects on the map ( it should fail) + with pytest.raises(exceptions.Object_Not_Found): + coastsegmap.load_feature_on_map("transects") + + +def test_load_feature_on_map_map_off(valid_bbox_gdf ): + """Fail to load default transects if the bbox doesn't contain any transects""" + coastsegmap=coastseg_map.CoastSeg_Map(create_map=False) + coastsegmap.bbox = bbox.Bounding_Box(valid_bbox_gdf ) + # attempt to load default transects on the map + coastsegmap.load_feature_on_map("transects") + # attempt to load default shorelines on the map + coastsegmap.load_feature_on_map("transects") + +def test_load_feature_on_map_map_on(valid_bbox_gdf ): + """Fail to load default transects if the bbox doesn't contain any transects""" + coastsegmap=coastseg_map.CoastSeg_Map(create_map=True) + coastsegmap.bbox = bbox.Bounding_Box(valid_bbox_gdf ) + # attempt to load default transects on the map + coastsegmap.load_feature_on_map("transects") + # attempt to load default shorelines on the map + coastsegmap.load_feature_on_map("transects") def test_save_config(coastseg_map_with_selected_roi_layer, tmp_path): """tests if save configs will save both a config.json and @@ -182,27 +235,6 @@ def test_load_json_config_downloaded( for roi_id, item in actual_config.get("settings", {}).items(): assert actual_coastsegmap.settings[roi_id] == item - -# I don't think this can even happen in the current version of coastseg -# this test is a relic from when coastseg had a save_config button that allowed users to save -# not downloaded sessions. 
-# def test_load_json_config_when_data_path_not_exist( -# valid_coastseg_map_with_settings, -# valid_rois_filepath, -# config_json_no_sitename_dir, -# ): -# config_path, temp_dir = config_json_no_sitename_dir -# # tests if load_json_config will load contents into rois.roi_settings when rois have not been downloaded before -# # create instance of Coastseg_Map with settings and ROIs initially loaded -# actual_coastsegmap = valid_coastseg_map_with_settings -# actual_coastsegmap.load_feature_on_map("rois", file=valid_rois_filepath) -# # test if settings are correctly loaded when valid json config without 'filepath' & 'sitename' keys is loaded - -# json_data = actual_coastsegmap.load_json_config(config_path) -# actual_coastsegmap.roi_settings = common.process_roi_settings(json_data, temp_dir) -# with pytest.raises(exceptions.WarningException): - - def test_valid_shoreline_gdf(valid_shoreline_gdf: gpd.GeoDataFrame): """tests if a Shoreline will be created from a valid shoreline thats a gpd.GeoDataFrame Args: @@ -396,3 +428,50 @@ def test_load_rois_on_map_with_file( # test if roi layer was added to map existing_layer = actual_coastsegmap.map.find_layer(roi.ROI.LAYER_NAME) assert existing_layer is not None + + +def test_load_feature_on_map_generate_rois(valid_bbox_gdf): + coastsegmap=coastseg_map.CoastSeg_Map() + # if no bounding box loaded on map this should raise an error + with pytest.raises(exceptions.Object_Not_Found): + coastsegmap.load_feature_on_map( + "rois", + lg_area=20, + sm_area=0, + units='km²', + ) + # load bbox on map + coastsegmap.load_feature_on_map('bbox',gdf=valid_bbox_gdf) + # now that bbox is loaded on map, this should work + # this will automatically load a shoreline within the bbox + coastsegmap.load_feature_on_map( + "rois", + lg_area=20, + sm_area=0, + units='km²', + ) + assert coastsegmap.rois is not None + +def test_load_feature_on_map_rois_no_shoreline(box_no_shorelines_transects): + coastsegmap=coastseg_map.CoastSeg_Map() + # load bbox on map where no default shorelines are available + coastsegmap.load_feature_on_map('bbox',gdf=box_no_shorelines_transects) + # now that bbox is loaded on map, this should work + # this will automatically load a shoreline within the bbox + with pytest.raises(exceptions.Object_Not_Found): + coastsegmap.load_feature_on_map( + "rois", + lg_area=20, + sm_area=0, + units='km²', + ) + +def test_load_feature_on_map_rois_custom(box_no_shorelines_transects): + # this box has no default shorelines available but it can still load because its a custom ROI + coastsegmap=coastseg_map.CoastSeg_Map() + coastsegmap.load_feature_on_map( + "rois", + gdf=box_no_shorelines_transects + ) + + \ No newline at end of file diff --git a/tests/test_common.py b/tests/test_common.py index 7c866588..6db88a8e 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -22,6 +22,58 @@ from typing import Dict, List, Union from unittest.mock import patch +def test_get_missing_roi_dirs(): + roi_settings = { + "mgm8": { + "dates": ["2017-12-01", "2018-01-01"], + "sat_list": ["L8"], + "roi_id": "mgm8", + "polygon": [ + [ + [-122.58841842370852, 37.82808364277896], + [-122.58819431715895, 37.868390556469954], + [-122.53734956261897, 37.86820174354389], + [-122.5376013368467, 37.827895102082195], + [-122.58841842370852, 37.82808364277896] + ] + ], + "landsat_collection": "C02", + "sitename": "ID_mgm8_datetimefake", + "filepath": "C:\\development\\doodleverse\\coastseg\\CoastSeg\\data", + "include_T2": False + }, + "roi_ids": ["mgm8"], + "settings": { + 
"landsat_collection": "C02", + "dates": ["2017-12-01", "2018-01-01"], + "sat_list": ["L8"], + "cloud_thresh": 0.5, + "dist_clouds": 300, + "output_epsg": 4326, + "check_detection": False, + "adjust_detection": False, + "save_figure": True, + "min_beach_area": 4500, + "min_length_sl": 100, + "cloud_mask_issue": False, + "sand_color": "default", + "pan_off": False, + "max_dist_ref": 25, + "along_dist": 25, + "min_points": 3, + "max_std": 15, + "max_range": 30, + "min_chainage": -100, + "multiple_inter": "auto", + "prc_multiple": 0.1, + "apply_cloud_mask": True, + "image_size_filter": True + } + } + missing_directories = common.get_missing_roi_dirs(roi_settings,roi_ids=["mgm8"]) + assert missing_directories == {"mgm8": "ID_mgm8_datetimefake"} + + # if the file does not exist, then nothing should be updated def test_update_downloaded_configs_tmp_path(config_json_temp_file): # Setup @@ -252,156 +304,6 @@ def test_update_downloaded_configs_mult_shared_roi(config_json_multiple_shared_r assert updated_config["roi_ids"] == config["roi_ids"] -# def test_update_downloaded_configs_mult_shared_roi(): -# temp_dir = tempfile.mkdtemp() -# # The dictionary you want to write to the JSON file -# config = { -# "zih2": { -# "dates": ["2018-12-01", "2019-03-01"], -# "sat_list": ["L5", "L7", "L8", "L9", "S2"], -# "roi_id": "zih2", -# "polygon": [ -# [ -# [-121.84020033533233, 36.74441575726833], -# [-121.83959312681607, 36.784722827004146], -# [-121.78948275983468, 36.78422337939962], -# [-121.79011617443447, 36.74391703739083], -# [-121.84020033533233, 36.74441575726833], -# ] -# ], -# "landsat_collection": "C02", -# "sitename": "ID_zih2_datetime11-15-23__09_56_01", -# "filepath": "fake/path", -# }, -# "zih1": { -# "dates": ["2018-12-01", "2019-03-01"], -# "sat_list": ["L5", "L7", "L8", "L9", "S2"], -# "roi_id": "zih1", -# "polygon": [ -# [ -# [-124.84020033533233, 36.74441575726833], -# [-121.83959312681607, 36.784722827004146], -# [-121.78948275983468, 36.78422337939962], -# [-121.79011617443447, 36.74391703739083], -# [-124.84020033533233, 36.74441575726833], -# ] -# ], -# "landsat_collection": "C02", -# "sitename": "ID_zih1_datetime11-15-23__09_56_01", -# "filepath": "fake/path", -# }, -# "roi_ids": ["zih2","zih1"], -# "settings": { -# "landsat_collection": "C02", -# "dates": ["2018-12-01", "2019-03-01"], -# "sat_list": ["L5", "L7", "L8", "L9", "S2"], -# "cloud_thresh": 0.8, -# "dist_clouds": 350, -# "output_epsg": 32610, -# "check_detection": False, -# "adjust_detection": False, -# "save_figure": True, -# "min_beach_area": 1050, -# "min_length_sl": 600, -# "cloud_mask_issue": True, -# "sand_color": "default", -# "pan_off": "False", -# "max_dist_ref": 200, -# "along_dist": 28, -# "min_points": 4, -# "max_std": 16.0, -# "max_range": 38.0, -# "min_chainage": -105.0, -# "multiple_inter": "auto", -# "prc_multiple": 0.2, -# "apply_cloud_mask": False, -# "image_size_filter": False, -# }, -# } -# # create subdiretory for each ROI -# for roi_id in config['roi_ids']: -# sitename = config[roi_id]['sitename'] -# subdir_path = os.path.join(temp_dir, sitename) -# # Create the subdirectory -# os.makedirs(subdir_path) - -# # Create a temporary file -# temp_file_path = os.path.join(subdir_path, "config.json") - -# with open(temp_file_path, 'w') as temp_file: -# json.dump(config, temp_file) - -# roi_id1 = "zih2" -# roi_id2 ="zih1" -# sitename= "ID_zih2_datetime11-15-23__09_56_01" -# sitename2= "ID_zih1_datetime11-15-23__09_56_01" -# # filepath,config = config_json_multiple_shared_roi_temp_file -# filepath = temp_dir -# # 
ROI settings to update config with -# roi_settings = { -# roi_id1: { -# "dates": ["2017-12-01", "2019-03-01"], -# "sat_list": ["L5", "L7", "L8", "L9", "S2"], -# "roi_id": "zih2", -# "polygon": [ -# [ -# [-121.84020033533233, 36.74441575726833], -# [-121.83959312681607, 36.784722827004146], -# [-121.78948275983468, 36.78422337939962], -# [-121.79011617443447, 36.74391703739083], -# [-121.84020033533233, 36.74441575726833], -# ] -# ], -# "landsat_collection": "C02", -# "sitename": sitename, -# "filepath": str(filepath), -# "roi_id": roi_id1, -# }, -# roi_id2: { -# "dates": ["2018-12-01", "2019-03-01"], -# "sat_list": ["L5", "L7", "L8", "L9", "S2"], -# "roi_id": "zih2", -# "polygon": [ -# [ -# [-124.84020033533233, 36.74441575726833], -# [-121.83959312681607, 36.784722827004146], -# [-121.78948275983468, 36.78422337939962], -# [-121.79011617443447, 36.74391703739083], -# [-124.84020033533233, 36.74441575726833], -# ] -# ], -# "landsat_collection": "C02", -# "sitename": sitename2, -# "filepath": str(filepath), -# "roi_id": roi_id2 -# } -# } - -# # Call the function -# common.update_downloaded_configs(roi_settings, [roi_id1,roi_id2], str(filepath)) - -# # Verify the result - -# for roi_id in [roi_id1, roi_id2]: -# config_path = os.path.join(filepath,roi_settings[roi_id]["sitename"], "config.json") -# with open(config_path, "r") as file: -# updated_config = json.load(file) -# assert updated_config[roi_id].keys() == roi_settings[roi_id].keys() -# assert updated_config[roi_id]["polygon"] == roi_settings[roi_id]["polygon"] -# assert updated_config[roi_id]["sat_list"] == roi_settings[roi_id]["sat_list"] -# assert updated_config[roi_id]["dates"] == roi_settings[roi_id]["dates"] -# assert updated_config[roi_id]["landsat_collection"] == roi_settings[roi_id]["landsat_collection"] -# assert updated_config[roi_id]["sitename"] == roi_settings[roi_id]["sitename"] -# assert updated_config[roi_id]["filepath"] == roi_settings[roi_id]["filepath"] -# assert updated_config[roi_id]["roi_id"] == roi_settings[roi_id]["roi_id"] -# assert updated_config['settings'] == config['settings'] -# assert updated_config["roi_ids"] == config["roi_ids"] - -# # Cleanup: delete the file and subdirectory -# os.remove(temp_file_path) -# os.rmdir(subdir_path) -# shutil.rmtree(temp_dir) - def test_update_config_existing_roi(): # Testing update of existing ROI config_json = { diff --git a/tests/test_shoreline.py b/tests/test_shoreline.py index da5c29f6..60ad6eeb 100644 --- a/tests/test_shoreline.py +++ b/tests/test_shoreline.py @@ -130,9 +130,34 @@ def test_initialize_shorelines_with_wrong_CRS(valid_shoreline_gdf): assert not any(actual_shoreline.gdf["id"].duplicated()) == True assert actual_shoreline.gdf.crs.to_string() == "EPSG:4326" +def test_intersecting_files(box_no_shorelines_transects): + """ + Test case to verify the behavior of get_intersecting_shoreline_files + when there are no intersecting shoreline files. + + Args: + box_no_shorelines_transects: The box with no default shoreline or transects. + + """ + sl = Shoreline() + assert sl.get_intersecting_shoreline_files(box_no_shorelines_transects) == [] + +def test_intersecting_files_valid_bbox(valid_bbox_gdf): + """ + Test case to check if the get_intersecting_shoreline_files method returns a non-empty list + when provided with a valid bounding box GeoDataFrame. + """ + sl = Shoreline() + assert sl.get_intersecting_shoreline_files(valid_bbox_gdf) != [] # 3. 
load shorelines from a shorelines geodataframe with empty ids def test_initialize_shorelines_with_empty_id_column(valid_shoreline_gdf): + """ + Test case to verify the initialization of shorelines with an empty 'id' column. + + Args: + valid_shoreline_gdf (geopandas.GeoDataFrame): A valid GeoDataFrame containing shoreline data. + """ # change the crs of the geodataframe shorelines_diff_crs = valid_shoreline_gdf.to_crs("EPSG:4326", inplace=False) # make id column empty
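# Illustrative sketch (not part of this patch): how calling code can consume the behavior
# pinned down by test_intersecting_files above, where get_intersecting_shoreline_files now
# logs a warning and returns [] instead of raising ValueError. The import path and the
# helper function below are assumptions for illustration only.
import geopandas as gpd

from coastseg.shoreline import Shoreline


def default_shoreline_files(bbox: gpd.GeoDataFrame) -> list:
    """Return shoreline files intersecting bbox, or [] when none are available."""
    shoreline = Shoreline()
    files = shoreline.get_intersecting_shoreline_files(bbox)
    if not files:
        # Callers now branch on the empty list rather than catching ValueError.
        print("No default shorelines intersect this bounding box.")
    return files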