From 446cd0a8774bef093da64f84c187475153e8e87a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 16:38:15 +0000 Subject: [PATCH 1/2] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/commitizen-tools/commitizen: v3.6.0 → 3.10.1](https://github.com/commitizen-tools/commitizen/compare/v3.6.0...3.10.1) - [github.com/psf/black: 23.7.0 → 23.9.1](https://github.com/psf/black/compare/23.7.0...23.9.1) - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.0.285 → v0.0.292](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.285...v0.0.292) - [github.com/pre-commit/mirrors-prettier: v3.0.2 → v3.0.3](https://github.com/pre-commit/mirrors-prettier/compare/v3.0.2...v3.0.3) - [github.com/igorshubovych/markdownlint-cli: v0.35.0 → v0.37.0](https://github.com/igorshubovych/markdownlint-cli/compare/v0.35.0...v0.37.0) --- .pre-commit-config.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2fc7560..7f95bf8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,35 +1,35 @@ repos: # Versioning: Commit messages & changelog - repo: https://github.com/commitizen-tools/commitizen - rev: v3.6.0 + rev: 3.10.1 hooks: - id: commitizen stages: [commit-msg] # Autoformat: Python code - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.9.1 hooks: - id: black # Lint / autoformat: Python code - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: "v0.0.285" + rev: "v0.0.292" hooks: - id: ruff args: [--exit-non-zero-on-fix] # Autoformat: YAML, JSON, Markdown, etc. 
- repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.2 + rev: v3.0.3 hooks: - id: prettier args: [--ignore-unknown, --no-error-on-unmatched-pattern, "!chart/**"] # Lint: Markdown - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.35.0 + rev: v0.37.0 hooks: - id: markdownlint args: [--fix] From fb2f0c5d1c27c5dec3a6e12eadc0b260f2e2deb0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 16:38:54 +0000 Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- LICENSE.md | 2 +- README.md | 1 - fmtm_splitter/FMTMSplitterBuildings.py | 93 ++++++++++++-------------- fmtm_splitter/overpass.py | 93 +++++++++++++------------- fmtm_splitter/splitter.py | 55 +++++++-------- 5 files changed, 118 insertions(+), 126 deletions(-) diff --git a/LICENSE.md b/LICENSE.md index 54eb436..2e54fe6 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -385,7 +385,7 @@ notice like this when it starts in an interactive mode: This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it -under certain conditions; type `show c' for details. +under certain conditions; type `show c' for details. The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". diff --git a/README.md b/README.md index 6f058e5..efae75c 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,6 @@ To install fmtm-splitter, you can use pip. Here are two options: - Latest on PyPi: `pip install fmtm-splitter` - ## Using the Container Image - fmtm-splitter scripts can be used via the pre-built container images.
diff --git a/fmtm_splitter/FMTMSplitterBuildings.py b/fmtm_splitter/FMTMSplitterBuildings.py index 89bcf3b..bebf214 100755 --- a/fmtm_splitter/FMTMSplitterBuildings.py +++ b/fmtm_splitter/FMTMSplitterBuildings.py @@ -16,44 +16,48 @@ # along with fmtm-splitter. If not, see . # -import logging import argparse -import sys +import logging import os +import sys from sys import argv + import psycopg2 from osgeo import gdal # Instantiate logger log = logging.getLogger(__name__) -def splitByBuildings(aoi: str, # GeoJSON polygon input file - queries: list, # list of SQL queries - dbd: list, # database host, dbname, user, password - ): - """Split the polygon by buildings in the database using an SQL query""" - dbstring = (f"PG:host={dbd[0]} dbname={dbd[1]} " - f"user={dbd[2]} password={dbd[3]}") - dbshell = psycopg2.connect(host=dbd[0], database = dbd[1], - user = dbd[2], password = dbd[3]) + +def splitByBuildings( + aoi: str, # GeoJSON polygon input file + queries: list, # list of SQL queries + dbd: list, # database host, dbname, user, password +): + """Split the polygon by buildings in the database using an SQL query.""" + dbstring = f"PG:host={dbd[0]} dbname={dbd[1]} " f"user={dbd[2]} password={dbd[3]}" + dbshell = psycopg2.connect(host=dbd[0], database=dbd[1], user=dbd[2], password=dbd[3]) dbshell.autocommit = True dbcursor = dbshell.cursor() - dbcursor.execute('DROP TABLE IF EXISTS aoi;') + dbcursor.execute("DROP TABLE IF EXISTS aoi;") # Add the AOI to the database - log.info(f'Writing {aoi} to database as aoi layer.') - gdal.VectorTranslate(dbstring, aoi, layerName='aoi') - dbcursor.execute('DROP TABLE IF EXISTS project_aoi;' - 'CREATE TABLE project_aoi AS (SELECT ' - 'ogc_fid as fid,wkb_geometry AS geom FROM aoi);' - 'ALTER TABLE project_aoi ADD PRIMARY KEY(fid);' - 'CREATE INDEX project_aoi_idx ' - 'ON project_aoi USING GIST (geom);' - 'DROP TABLE aoi;') - dbcursor.execute('VACUUM ANALYZE') + log.info(f"Writing {aoi} to database as aoi layer.") + 
gdal.VectorTranslate(dbstring, aoi, layerName="aoi") + dbcursor.execute( + "DROP TABLE IF EXISTS project_aoi;" + "CREATE TABLE project_aoi AS (SELECT " + "ogc_fid as fid,wkb_geometry AS geom FROM aoi);" + "ALTER TABLE project_aoi ADD PRIMARY KEY(fid);" + "CREATE INDEX project_aoi_idx " + "ON project_aoi USING GIST (geom);" + "DROP TABLE aoi;" + ) + dbcursor.execute("VACUUM ANALYZE") for query in queries: dbcursor.execute(query) - dbcursor.execute('VACUUM ANALYZE') - log.info('Might very well have completed successfully') + dbcursor.execute("VACUUM ANALYZE") + log.info("Might very well have completed successfully") + if __name__ == "__main__": # Command Line options @@ -64,19 +68,13 @@ def splitByBuildings(aoi: str, # GeoJSON polygon input file epilog=""" This program splits a Polygon (the Area Of Interest) examples: - """ + """, ) - p.add_argument("-b", "--boundary", required=True, - help="Polygon AOI GeoJSON file") - p.add_argument("-n", "--numfeatures", default=20, - help="Number of features on average desired per task") - p.add_argument("-v", "--verbose", - action="store_true", help="verbose output") - p.add_argument("-o", "--outfile", - default='fmtm.geojson', - help="Output file from splitting") - p.add_argument("-ho", "--host", help="Database host", - default='localhost') + p.add_argument("-b", "--boundary", required=True, help="Polygon AOI GeoJSON file") + p.add_argument("-n", "--numfeatures", default=20, help="Number of features on average desired per task") + p.add_argument("-v", "--verbose", action="store_true", help="verbose output") + p.add_argument("-o", "--outfile", default="fmtm.geojson", help="Output file from splitting") + p.add_argument("-ho", "--host", help="Database host", default="localhost") p.add_argument("-db", "--database", help="Database to use") p.add_argument("-u", "--user", help="Database username") p.add_argument("-p", "--password", help="Database password") @@ -87,41 +85,34 @@ def splitByBuildings(aoi: str, # GeoJSON polygon input 
file quit() # if verbose, dump to the terminal. - formatter = logging.Formatter( - "%(threadName)10s - %(name)s - %(levelname)s - %(message)s" - ) + formatter = logging.Formatter("%(threadName)10s - %(name)s - %(levelname)s - %(message)s") level = logging.DEBUG if args.verbose: log.setLevel(level) - else: + else: log.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(level) ch.setFormatter(formatter) log.addHandler(ch) - + # log.debug("debug") # log.info("info") # log.info("warning") # Read in the project AOI, a GeoJSON file containing a polygon aoi = args.boundary - modulardir = os.path.join(os.path.dirname(__file__), - 'fmtm-splitter_osm_buildings') + modulardir = os.path.join(os.path.dirname(__file__), "fmtm-splitter_osm_buildings") modularsqlfiles = [ "fmtm-split_01_split_AOI_by_existing_line_features.sql", "fmtm-split_02_count_buildings_for_subsplitting.sql", "fmtm-split_03_cluster_buildings.sql", "fmtm-split_04_create_polygons_around_clustered_buildings.sql", - "fmtm-split_05_clean_temp_files.sql" + "fmtm-split_05_clean_temp_files.sql", ] modularqueries = [] for sqlfile in modularsqlfiles: - with open(os.path.join(modulardir, sqlfile), 'r') as sql: - modularqueries.append(sql.read() - .replace('{%numfeatures%}', - str(args.numfeatures))) + with open(os.path.join(modulardir, sqlfile), "r") as sql: + modularqueries.append(sql.read().replace("{%numfeatures%}", str(args.numfeatures))) dbdetails = [args.host, args.database, args.user, args.password] - features = splitByBuildings(aoi, - modularqueries, - dbdetails) + features = splitByBuildings(aoi, modularqueries, dbdetails) diff --git a/fmtm_splitter/overpass.py b/fmtm_splitter/overpass.py index 8167560..0ccc17c 100755 --- a/fmtm_splitter/overpass.py +++ b/fmtm_splitter/overpass.py @@ -1,81 +1,82 @@ #!/bin/python3 -import sys, os import argparse -import requests +import os +import subprocess from datetime import datetime + +import requests from osgeo import ogr -import subprocess + def 
aoiextent(aoifile): - """ - Accept a GeoJSON file, return its extent as a bbox string - """ - indriver = ogr.GetDriverByName('GeoJSON') + """Accept a GeoJSON file, return its extent as a bbox string.""" + indriver = ogr.GetDriverByName("GeoJSON") indata = indriver.Open(aoifile) inlayer = indata.GetLayer() e = list(inlayer.GetExtent()) - bboxstring = f'{e[2]},{e[0]},{e[3]},{e[1]}' - return(bboxstring) - + bboxstring = f"{e[2]},{e[0]},{e[3]},{e[1]}" + return bboxstring + + def query(query_string, overpass_url): - """ - Accept a query in Overpass API query language, + """Accept a query in Overpass API query language, return an osm dataset. """ try: - response = requests.get(overpass_url, - params={'data': query_string}) + response = requests.get(overpass_url, params={"data": query_string}) except: print("overpass did not want to answer that one\n") if response.status_code == 200: - print(f'The overpass API at {overpass_url} accepted the query and '\ - f'returned something.') + print(f"The overpass API at {overpass_url} accepted the query and " f"returned something.") return response.text else: print(response) - print("Yeah, that didn't work. We reached the Overpass API but "\ - "something went wrong on the server side.") + print("Yeah, that didn't work. 
We reached the Overpass API but " "something went wrong on the server side.") + def dbpush(infile, dbd): - """ - Accept an osm file, push it to PostGIS layers using the Underpass schema - """ + """Accept an osm file, push it to PostGIS layers using the Underpass schema.""" try: - print(f'Trying to turn {infile} into a PostGIS layer') - style = os.path.join('fmtm_splitter','raw.lua') - pg = ["osm2pgsql", "--create", - "-d", - f"postgresql://{dbd[0]}:{dbd[1]}@{dbd[2]}:{dbd[4]}/{dbd[3]}", - "--extra-attributes", "--output=flex", - "--style", style, infile] - print(pg) # just to visually check that this command makes sense - p = subprocess.run(pg, capture_output=True, encoding='utf-8') + print(f"Trying to turn {infile} into a PostGIS layer") + style = os.path.join("fmtm_splitter", "raw.lua") + pg = [ + "osm2pgsql", + "--create", + "-d", + f"postgresql://{dbd[0]}:{dbd[1]}@{dbd[2]}:{dbd[4]}/{dbd[3]}", + "--extra-attributes", + "--output=flex", + "--style", + style, + infile, + ] + print(pg) # just to visually check that this command makes sense + p = subprocess.run(pg, capture_output=True, encoding="utf-8") response = p.stdout error = p.stderr - print(f'osm2pgsql seems to have accepted {infile} and '\ - f'returned {response} \nand\n{error}') + print(f"osm2pgsql seems to have accepted {infile} and " f"returned {response} \nand\n{error}") return response except Exception as e: print(e) + if __name__ == "__main__": """return a file of raw OSM data from Overpass API from an input file of text containing working Overpass Query Language, and push that file to a PostGIS database as a layer. 
""" p = argparse.ArgumentParser(usage="usage: attachments [options]") - p.add_argument('-q', '--query', help="Text file in overpass query language") - p.add_argument('-b', '--boundary', help="AOI as GeoJSON file") - p.add_argument('-url', '--overpass_url', help='Overpass API server URL', - default="https://overpass.kumi.systems/api/interpreter") - p.add_argument("-ho", "--host", help="Database host", - default='localhost') + p.add_argument("-q", "--query", help="Text file in overpass query language") + p.add_argument("-b", "--boundary", help="AOI as GeoJSON file") + p.add_argument( + "-url", "--overpass_url", help="Overpass API server URL", default="https://overpass.kumi.systems/api/interpreter" + ) + p.add_argument("-ho", "--host", help="Database host", default="localhost") p.add_argument("-db", "--database", help="Database to use") p.add_argument("-u", "--user", help="Database username") p.add_argument("-p", "--password", help="Database password") - p.add_argument("-po", "--port", help="Database port", - default='5432') + p.add_argument("-po", "--port", help="Database port", default="5432") args = p.parse_args() @@ -84,16 +85,16 @@ def dbpush(infile, dbd): (basefilename, extension) = os.path.splitext(args.boundary) date = datetime.now().strftime("%Y_%m_%d") dirdate = os.path.join(directory, date) - osmfilepath = f'{dirdate}_{basename_no_ext}.osm' - + osmfilepath = f"{dirdate}_{basename_no_ext}.osm" + q = open(args.query) # TODO get bbox from GeoJSON aoi bbox = aoiextent(args.boundary) - qstring = q.read().replace('{{bbox}}', bbox) + qstring = q.read().replace("{{bbox}}", bbox) data = query(qstring, args.overpass_url) - with open(osmfilepath, 'w') as of: + with open(osmfilepath, "w") as of: of.write(data) - print(f'Wrote {osmfilepath}') - + print(f"Wrote {osmfilepath}") + dbdetails = [args.user, args.password, args.host, args.database, args.port] dblayers = dbpush(osmfilepath, dbdetails) diff --git a/fmtm_splitter/splitter.py b/fmtm_splitter/splitter.py index 
106f677..a9542b7 100755 --- a/fmtm_splitter/splitter.py +++ b/fmtm_splitter/splitter.py @@ -35,12 +35,15 @@ # Splitting algorythm choices choices = ("squares", "file", "custom") + class FMTMSplitter(object): """A class to split polygons.""" - def __init__(self, - boundary: gpd.GeoDataFrame, - algorythm: str = None, - ): + + def __init__( + self, + boundary: gpd.GeoDataFrame, + algorythm: str = None, + ): """This class splits a polygon into tasks using a variety of algorythms. Args: @@ -50,7 +53,7 @@ def __init__(self, Returns: instance (FMTMSplitter): An instance of this class """ - self.size = 50 # 50 meters + self.size = 50 # 50 meters self.boundary = boundary self.algorythm = algorythm if algorythm == "squares": @@ -60,9 +63,10 @@ def __init__(self, elif algorythm == "custom": pass - def splitBySquare(self, - meters: int, - ): + def splitBySquare( + self, + meters: int, + ): """Split the polygon into squares. Args: @@ -86,19 +90,14 @@ def splitBySquare(self, polygons = [] for x in cols[:-1]: for y in rows[:-1]: - polygons.append(Polygon([(x,y), (x+wide, y), (x+wide, y+length), (x, y+length)])) + polygons.append(Polygon([(x, y), (x + wide, y), (x + wide, y + length), (x, y + length)])) - grid = gpd.GeoDataFrame({"geometry":polygons}) + grid = gpd.GeoDataFrame({"geometry": polygons}) clipped = gpd.clip(grid, gdf) data = geojson.loads(clipped.to_json()) return data - def splitBySQL(self, - aoi: gpd.GeoDataFrame, - sql: str, - dburl: dict, - buildings: int - ): + def splitBySQL(self, aoi: gpd.GeoDataFrame, sql: str, dburl: dict, buildings: int): """Split the polygon by features in the database using an SQL query. 
Args: @@ -136,13 +135,16 @@ def splitBySQL(self, features = result[0][0]["features"] # clean up the temporary tables, we don't care about the result - dbcursor.execute("DROP TABLE buildings; DROP TABLE clusteredbuildings; DROP TABLE dumpedpoints; DROP TABLE lowfeaturecountpolygons; DROP TABLE voronois; DROP TABLE taskpolygons; DROP TABLE splitpolygons") + dbcursor.execute( + "DROP TABLE buildings; DROP TABLE clusteredbuildings; DROP TABLE dumpedpoints; DROP TABLE lowfeaturecountpolygons; DROP TABLE voronois; DROP TABLE taskpolygons; DROP TABLE splitpolygons" + ) return features - def splitByFeature(self, - aoi: gpd.GeoDataFrame, - features: gpd.GeoDataFrame, - ): + def splitByFeature( + self, + aoi: gpd.GeoDataFrame, + features: gpd.GeoDataFrame, + ): """Split the polygon by features in the database.""" # gdf[(gdf['highway'] != 'turning_circle') | (gdf['highway'] != 'milestone')] # gdf[(gdf.geom_type != 'Point')] @@ -151,6 +153,7 @@ def splitByFeature(self, polygons = gpd.GeoSeries(polygonize(gdf.geometry)) return polygons + def main(): """This main function lets this class be run standalone by a bash script.""" parser = argparse.ArgumentParser( @@ -176,14 +179,14 @@ def main(): fmtm-splitter -b AOI -b 20 -c custom.sql This will use a custom SQL query for splitting by map feature, and adjust task sizes based on the number of buildings. 
- """ + """, ) # the size of each task wheh using square splitting # the number of buildings in a task when using feature splitting buildings = 5 # The default SQL query for feature splitting query = "fmtm_algorithm.sql" - parser.add_argument("-v", "--verbose", action="store_true", help="verbose output") + parser.add_argument("-v", "--verbose", action="store_true", help="verbose output") parser.add_argument("-o", "--outfile", default="fmtm.geojson", help="Output file from splitting") # parser.add_argument("-a", "--algorythm", default='squares', choices=choices, help="Splitting Algorthm to use") parser.add_argument("-m", "--meters", help="Size in meters if using square splitting") @@ -198,9 +201,7 @@ def main(): quit() # if verbose, dump to the terminal. - formatter = logging.Formatter( - "%(threadName)10s - %(name)s - %(levelname)s - %(message)s" - ) + formatter = logging.Formatter("%(threadName)10s - %(name)s - %(levelname)s - %(message)s") level = logging.DEBUG if args.verbose: log.setLevel(level) @@ -251,7 +252,7 @@ def main(): # log.info(f"Wrote {args.outfile}") + if __name__ == "__main__": """This is just a hook so this file can be run standlone during development.""" main() -