From 33416b3c7e1c291524ccf1cbac8f034b812d4e1f Mon Sep 17 00:00:00 2001 From: Qiusheng Wu Date: Fri, 29 Mar 2024 00:33:23 -0400 Subject: [PATCH] Add pre-commit --- .github/workflows/docs-build.yml | 2 +- .pre-commit-config.yaml | 29 + docs/examples/arcgis.ipynb | 6 +- .../automatic_mask_generator_hq.ipynb | 12 +- docs/examples/box_prompts.ipynb | 7 +- docs/examples/fast_sam.ipynb | 4 +- docs/examples/input_prompts.ipynb | 7 +- docs/examples/input_prompts_hq.ipynb | 3 +- docs/examples/maxar_open_data.ipynb | 22 +- docs/examples/swimming_pools.ipynb | 19 +- docs/examples/text_prompts.ipynb | 19 +- docs/examples/text_prompts_batch.ipynb | 11 +- docs/samgeo.md | 2 +- docs/workshops/cn_workshop.ipynb | 1116 +++++++++++++++++ docs/workshops/purdue.ipynb | 133 +- mkdocs.yml | 1 + paper/paper.bib | 10 +- pyproject-codespell.precommit-toml | 3 + requirements.txt | 22 +- requirements_dev.txt | 4 +- requirements_docs.txt | 26 +- samgeo/__init__.py | 2 +- samgeo/common.py | 6 +- samgeo/hq_sam.py | 12 +- samgeo/samgeo.py | 10 +- setup.py | 2 +- tests/test_samgeo.py | 4 +- 27 files changed, 1381 insertions(+), 113 deletions(-) create mode 100644 .pre-commit-config.yaml create mode 100644 docs/workshops/cn_workshop.ipynb create mode 100644 pyproject-codespell.precommit-toml diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml index ea431b78..79ddd234 100644 --- a/.github/workflows/docs-build.yml +++ b/.github/workflows/docs-build.yml @@ -27,7 +27,7 @@ jobs: pip install -r requirements.txt -r requirements_dev.txt -r requirements_docs.txt pip install . - name: Discover typos with codespell - run: codespell --skip="*.csv,*.geojson,*.json,*.js,*.html,*cff,*.pdf" --ignore-words-list="aci,acount,acounts,fallow,hart,hist,nd,ned,ois,wqs,pres" + run: codespell --skip="*.csv,*.geojson,*.json,*.js,*.html,*cff,*.pdf,*.md" --ignore-words-list="aci,acount,acounts,fallow,hart,hist,nd,ned,ois,wqs" # - name: PKG-TEST # run: | # python -m unittest discover tests/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..3a836f59 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,29 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + types: [python] + - id: trailing-whitespace + - id: requirements-txt-fixer + - id: check-added-large-files + args: ["--maxkb=500"] + + - repo: https://github.com/psf/black + rev: 24.3.0 + hooks: + - id: black-jupyter + language_version: python3.11 + + - repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + args: [--toml, pyproject-codespell.precommit-toml] + + - repo: https://github.com/kynan/nbstripout + rev: 0.7.1 + hooks: + - id: nbstripout diff --git a/docs/examples/arcgis.ipynb b/docs/examples/arcgis.ipynb index b40533af..96fe8c0d 100644 --- a/docs/examples/arcgis.ipynb +++ b/docs/examples/arcgis.ipynb @@ -231,9 +231,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "sam.show_anns(axis=\"off\", alpha=1, output=\"ag_annotations.tif\")" @@ -431,7 +429,7 @@ "metadata": {}, "outputs": [], "source": [ - "sam.generate('agriculture.tif', output=\"ag_masks2.tif\", foreground=True)" + "sam.generate(\"agriculture.tif\", output=\"ag_masks2.tif\", foreground=True)" ] }, { diff --git a/docs/examples/automatic_mask_generator_hq.ipynb b/docs/examples/automatic_mask_generator_hq.ipynb index 
a510a0aa..4f3c4a91 100644 --- a/docs/examples/automatic_mask_generator_hq.ipynb +++ b/docs/examples/automatic_mask_generator_hq.ipynb @@ -39,7 +39,13 @@ "source": [ "import os\n", "import leafmap\n", - "from samgeo.hq_sam import SamGeo, show_image, download_file, overlay_images, tms_to_geotiff" + "from samgeo.hq_sam import (\n", + " SamGeo,\n", + " show_image,\n", + " download_file,\n", + " overlay_images,\n", + " tms_to_geotiff,\n", + ")" ] }, { @@ -147,7 +153,7 @@ "outputs": [], "source": [ "sam = SamGeo(\n", - " model_type=\"vit_h\", # can be vit_h, vit_b, vit_l, vit_tiny\n", + " model_type=\"vit_h\", # can be vit_h, vit_b, vit_l, vit_tiny\n", " sam_kwargs=None,\n", ")" ] @@ -288,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ diff --git a/docs/examples/box_prompts.ipynb b/docs/examples/box_prompts.ipynb index 5858943e..80fd433f 100644 --- a/docs/examples/box_prompts.ipynb +++ b/docs/examples/box_prompts.ipynb @@ -241,7 +241,7 @@ "metadata": {}, "outputs": [], "source": [ - "m.add_raster('mask.tif', cmap='viridis', nodata=0, layer_name='Mask')\n", + "m.add_raster(\"mask.tif\", cmap=\"viridis\", nodata=0, layer_name=\"Mask\")\n", "m" ] }, @@ -260,7 +260,7 @@ "metadata": {}, "outputs": [], "source": [ - "url = 'https://opengeos.github.io/data/sam/tree_boxes.geojson'\n", + "url = \"https://opengeos.github.io/data/sam/tree_boxes.geojson\"\n", "geojson = \"tree_boxes.geojson\"\n", "leafmap.download_file(url, geojson)" ] @@ -350,8 +350,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.8" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/fast_sam.ipynb b/docs/examples/fast_sam.ipynb index 153595a8..14a98fa5 100644 --- a/docs/examples/fast_sam.ipynb +++ b/docs/examples/fast_sam.ipynb @@ -142,6 +142,7 @@ "outputs": [], "source": [ "from samgeo.fast_sam import SamGeo\n", + "\n", "sam = SamGeo(model=\"FastSAM-x.pt\")" ] }, @@ -259,8 +260,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/input_prompts.ipynb b/docs/examples/input_prompts.ipynb index d9c4b879..cf036e6b 100644 --- a/docs/examples/input_prompts.ipynb +++ b/docs/examples/input_prompts.ipynb @@ -37,7 +37,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -147,7 +147,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -260,8 +260,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.8" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/input_prompts_hq.ipynb b/docs/examples/input_prompts_hq.ipynb index 11329aa4..40c5d61d 100644 --- a/docs/examples/input_prompts_hq.ipynb +++ b/docs/examples/input_prompts_hq.ipynb @@ -258,8 +258,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/maxar_open_data.ipynb b/docs/examples/maxar_open_data.ipynb index f6faaad1..347e6bcc 100644 --- a/docs/examples/maxar_open_data.ipynb +++ b/docs/examples/maxar_open_data.ipynb @@ -63,7 +63,9 @@ }, "outputs": [], "source": [ - "url = 'https://drive.google.com/file/d/1jIIC5hvSPeJEC0fbDhtxVWk2XV9AxsQD/view?usp=sharing'" + "url 
= (\n", + " \"https://drive.google.com/file/d/1jIIC5hvSPeJEC0fbDhtxVWk2XV9AxsQD/view?usp=sharing\"\n", + ")" ] }, { @@ -74,7 +76,7 @@ }, "outputs": [], "source": [ - "leafmap.download_file(url, output='image.tif')" + "leafmap.download_file(url, output=\"image.tif\")" ] }, { @@ -94,7 +96,7 @@ "source": [ "m = leafmap.Map(height=\"600px\")\n", "m.add_basemap(\"SATELLITE\")\n", - "m.add_raster('image.tif', layer_name=\"Image\")\n", + "m.add_raster(\"image.tif\", layer_name=\"Image\")\n", "m.add_layer_manager()\n", "m" ] @@ -160,7 +162,7 @@ }, "outputs": [], "source": [ - "sam.generate('image.tif', output=\"mask.tif\", foreground=True)" + "sam.generate(\"image.tif\", output=\"mask.tif\", foreground=True)" ] }, { @@ -178,7 +180,7 @@ }, "outputs": [], "source": [ - "raster_to_vector('mask.tif', output='mask.shp')" + "raster_to_vector(\"mask.tif\", output=\"mask.shp\")" ] }, { @@ -235,7 +237,7 @@ "outputs": [], "source": [ "leafmap.image_comparison(\n", - " 'image.tif',\n", + " \"image.tif\",\n", " \"annotation.tif\",\n", " label1=\"Image\",\n", " label2=\"Segmentation\",\n", @@ -257,7 +259,7 @@ }, "outputs": [], "source": [ - "overlay_images('image.tif', \"annotation.tif\", backend=\"TkAgg\")" + "overlay_images(\"image.tif\", \"annotation.tif\", backend=\"TkAgg\")" ] }, { @@ -273,8 +275,8 @@ "metadata": {}, "outputs": [], "source": [ - "m.add_raster('mask.tif', layer_name='Mask', nodata=0)\n", - "m.add_raster('annotation.tif', layer_name='Annotation')\n", + "m.add_raster(\"mask.tif\", layer_name=\"Mask\", nodata=0)\n", + "m.add_raster(\"annotation.tif\", layer_name=\"Annotation\")\n", "m" ] }, @@ -286,7 +288,7 @@ }, "outputs": [], "source": [ - "m.add_vector('mask.shp', layer_name='Vector', info_mode=None)" + "m.add_vector(\"mask.shp\", layer_name=\"Vector\", info_mode=None)" ] }, { diff --git a/docs/examples/swimming_pools.ipynb b/docs/examples/swimming_pools.ipynb index d49e3498..d301e53c 100644 --- a/docs/examples/swimming_pools.ipynb +++ b/docs/examples/swimming_pools.ipynb @@ -200,9 +200,9 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Blues',\n", - " box_color='red',\n", - " title='Automatic Segmentation of Swimming Pools',\n", + " cmap=\"Blues\",\n", + " box_color=\"red\",\n", + " title=\"Automatic Segmentation of Swimming Pools\",\n", " blend=True,\n", ")" ] @@ -228,10 +228,10 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Blues',\n", + " cmap=\"Blues\",\n", " add_boxes=False,\n", " alpha=0.5,\n", - " title='Automatic Segmentation of Swimming Pools',\n", + " title=\"Automatic Segmentation of Swimming Pools\",\n", ")" ] }, @@ -256,12 +256,12 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greys_r',\n", + " cmap=\"Greys_r\",\n", " add_boxes=False,\n", " alpha=1,\n", - " title='Automatic Segmentation of Swimming Pools',\n", + " title=\"Automatic Segmentation of Swimming Pools\",\n", " blend=False,\n", - " output='pools.tif',\n", + " output=\"pools.tif\",\n", ")" ] }, @@ -360,8 +360,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/text_prompts.ipynb b/docs/examples/text_prompts.ipynb index 19de7f55..11d2719e 100644 --- a/docs/examples/text_prompts.ipynb +++ b/docs/examples/text_prompts.ipynb @@ -200,9 +200,9 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greens',\n", - " box_color='red',\n", - " title='Automatic Segmentation of Trees',\n", + " cmap=\"Greens\",\n", + " box_color=\"red\",\n", + " 
title=\"Automatic Segmentation of Trees\",\n", " blend=True,\n", ")" ] @@ -228,10 +228,10 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greens',\n", + " cmap=\"Greens\",\n", " add_boxes=False,\n", " alpha=0.5,\n", - " title='Automatic Segmentation of Trees',\n", + " title=\"Automatic Segmentation of Trees\",\n", ")" ] }, @@ -256,12 +256,12 @@ "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greys_r',\n", + " cmap=\"Greys_r\",\n", " add_boxes=False,\n", " alpha=1,\n", - " title='Automatic Segmentation of Trees',\n", + " title=\"Automatic Segmentation of Trees\",\n", " blend=False,\n", - " output='trees.tif',\n", + " output=\"trees.tif\",\n", ")" ] }, @@ -353,8 +353,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/examples/text_prompts_batch.ipynb b/docs/examples/text_prompts_batch.ipynb index 7f5c160c..cbb100eb 100644 --- a/docs/examples/text_prompts_batch.ipynb +++ b/docs/examples/text_prompts_batch.ipynb @@ -198,13 +198,13 @@ "outputs": [], "source": [ "sam.predict_batch(\n", - " images='tiles',\n", - " out_dir='masks',\n", + " images=\"tiles\",\n", + " out_dir=\"masks\",\n", " text_prompt=text_prompt,\n", " box_threshold=0.24,\n", " text_threshold=0.24,\n", " mask_multiplier=255,\n", - " dtype='uint8',\n", + " dtype=\"uint8\",\n", " merge=True,\n", " verbose=True,\n", ")" @@ -223,7 +223,7 @@ "metadata": {}, "outputs": [], "source": [ - "m.add_raster('masks/merged.tif', cmap='viridis', nodata=0, layer_name='Mask')\n", + "m.add_raster(\"masks/merged.tif\", cmap=\"viridis\", nodata=0, layer_name=\"Mask\")\n", "m.add_layer_manager()\n", "m" ] @@ -253,8 +253,7 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" - }, - "orig_nbformat": 4 + } }, "nbformat": 4, "nbformat_minor": 2 diff --git a/docs/samgeo.md b/docs/samgeo.md index 4efc0605..744c6cb7 100644 --- a/docs/samgeo.md +++ b/docs/samgeo.md @@ -1,4 +1,4 @@ - + # samgeo module ::: samgeo.samgeo \ No newline at end of file diff --git a/docs/workshops/cn_workshop.ipynb b/docs/workshops/cn_workshop.ipynb new file mode 100644 index 00000000..95f133e5 --- /dev/null +++ b/docs/workshops/cn_workshop.ipynb @@ -0,0 +1,1116 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# SamGeo Workshop\n", + "\n", + "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/opengeos/segment-geospatial/blob/main/docs/workshops/cn_workshop.ipynb)\n", + "\n", + "This notebook is for the workshop presented at the 第七届地球空间大数据与云计算前沿会议与集中学习.\n", + "\n", + "## Install dependencies\n", + "\n", + "Uncomment and run the following cell to install the required dependencies." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "# %pip install segment-geospatial groundingdino-py leafmap localtileserver" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## Import libraries" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import leafmap\n", + "from samgeo import SamGeo\n", + "from samgeo.text_sam import LangSAM" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "## Download sample data\n", + "\n", + "### Create an interactive map" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[40.427495, -86.913638], zoom=18, height=700)\n", + "m.add_basemap(\"SATELLITE\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "Pan and zoom the map to select the area of interest. Use the draw tools to draw a polygon or rectangle on the map" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "if m.user_roi_bounds() is not None:\n", + " bbox = m.user_roi_bounds()\n", + "else:\n", + " bbox = [-86.9167, 40.4262, -86.9105, 40.4289]" + ] + }, + { + "cell_type": "markdown", + "id": "8", + "metadata": {}, + "source": [ + "### Download map tiles\n", + "\n", + "Download maps tiles and mosaic them into a single GeoTIFF file" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "image = \"image.tif\"" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "Specify the basemap as the source." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "leafmap.map_tiles_to_geotiff(\n", + " output=image, bbox=bbox, zoom=18, source=\"Satellite\", overwrite=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "You can also use your own image. Uncomment and run the following cell to use your own image." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "# image = '/path/to/your/own/image.tif'" + ] + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": [ + "Display the downloaded image on the map." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m.layers[-1].visible = False # turn off the basemap\n", + "m.add_raster(image, layer_name=\"Image\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/YHwrpS2.png)" + ] + }, + { + "cell_type": "markdown", + "id": "17", + "metadata": {}, + "source": [ + "## Automatic mask generation\n", + "\n", + "### Initialize SAM class" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam = SamGeo(\n", + " model_type=\"vit_h\",\n", + " sam_kwargs=None,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "### Automatic mask generation\n", + "\n", + "Segment the image and save the results to a GeoTIFF file. Set `unique=True` to assign a unique ID to each object." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.generate(image, output=\"masks.tif\", foreground=True, unique=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_masks(cmap=\"binary_r\")" + ] + }, + { + "cell_type": "markdown", + "id": "22", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/kWqLVuL.png)" + ] + }, + { + "cell_type": "markdown", + "id": "23", + "metadata": {}, + "source": [ + "Show the object annotations (objects with random color) on the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_anns(axis=\"off\", alpha=1, output=\"annotations.tif\")" + ] + }, + { + "cell_type": "markdown", + "id": "25", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/J6Ie0Zj.png)" + ] + }, + { + "cell_type": "markdown", + "id": "26", + "metadata": {}, + "source": [ + "Compare images with a slider." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "leafmap.image_comparison(\n", + " \"image.tif\",\n", + " \"annotations.tif\",\n", + " label1=\"Satellite Image\",\n", + " label2=\"Image Segmentation\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "28", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/cm4QyaR.png)" + ] + }, + { + "cell_type": "markdown", + "id": "29", + "metadata": {}, + "source": [ + "Add image to the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "30", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m.add_raster(\"annotations.tif\", opacity=0.5, layer_name=\"Masks\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "31", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/Y6EaGVN.png)" + ] + }, + { + "cell_type": "markdown", + "id": "32", + "metadata": {}, + "source": [ + "Convert the object annotations to vector format, such as GeoPackage, Shapefile, or GeoJSON." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.raster_to_vector(\"masks.tif\", \"masks.shp\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "34", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m.add_vector(\"masks.shp\", layer_name=\"Masks vector\")" + ] + }, + { + "cell_type": "markdown", + "id": "35", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/N0xVt9S.png)" + ] + }, + { + "cell_type": "markdown", + "id": "36", + "metadata": {}, + "source": [ + "### Automatic mask generation options\n", + "\n", + "There are several tunable parameters in automatic mask generation that control how densely points are sampled and what the thresholds are for removing low quality or duplicate masks. Additionally, generation can be automatically run on crops of the image to get improved performance on smaller objects, and post-processing can remove stray pixels and holes. Here is an example configuration that samples more masks:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam_kwargs = {\n", + " \"points_per_side\": 32,\n", + " \"pred_iou_thresh\": 0.86,\n", + " \"stability_score_thresh\": 0.92,\n", + " \"crop_n_layers\": 1,\n", + " \"crop_n_points_downscale_factor\": 2,\n", + " \"min_mask_region_area\": 100,\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam = SamGeo(\n", + " model_type=\"vit_h\",\n", + " sam_kwargs=sam_kwargs,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.generate(image, output=\"masks2.tif\", foreground=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "40", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_masks(cmap=\"binary_r\")" + ] + }, + { + "cell_type": "markdown", + "id": "41", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/S2LYen8.png)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "42", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_anns(axis=\"off\", opacity=1, output=\"annotations2.tif\")" + ] + }, + { + "cell_type": "markdown", + "id": "43", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/opEKsUu.png)" + ] + }, + { + "cell_type": "markdown", + "id": "44", + "metadata": {}, + "source": [ + "Compare images with a slider." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "45", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "leafmap.image_comparison(\n", + " image,\n", + " \"annotations.tif\",\n", + " label1=\"Image\",\n", + " label2=\"Image Segmentation\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "46", + "metadata": {}, + "source": [ + "## Use points as input prompts\n", + "\n", + "### Initialize SAM class" + ] + }, + { + "cell_type": "markdown", + "id": "47", + "metadata": {}, + "source": [ + "Set `automatic=False` to disable the `SamAutomaticMaskGenerator` and enable the `SamPredictor`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[40.427495, -86.913638], zoom=18, height=700)\n", + "image = \"image.tif\"\n", + "m.add_raster(image, layer_name=\"Image\")\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "49", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam = SamGeo(\n", + " model_type=\"vit_h\",\n", + " automatic=False,\n", + " sam_kwargs=None,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "50", + "metadata": {}, + "source": [ + "Specify the image to segment." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "51", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.set_image(image)" + ] + }, + { + "cell_type": "markdown", + "id": "52", + "metadata": {}, + "source": [ + "### Image segmentation with input points\n", + "\n", + "A single point can be used to segment an object. The point can be specified as a tuple of (x, y), such as (col, row) or (lon, lat). The points can also be specified as a file path to a vector dataset. For non (col, row) input points, specify the `point_crs` parameter, which will automatically transform the points to the image column and row coordinates.\n", + "\n", + "Try a single point input:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "point_coords = [[-86.913162, 40.427157]]\n", + "sam.predict(point_coords, point_labels=1, point_crs=\"EPSG:4326\", output=\"mask1.tif\")\n", + "m.add_raster(\"mask1.tif\", layer_name=\"Mask1\", nodata=0, cmap=\"Blues\", opacity=1)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "54", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/zUMLUsn.png)" + ] + }, + { + "cell_type": "markdown", + "id": "55", + "metadata": {}, + "source": [ + "Try multiple points input:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "point_coords = [\n", + " [-86.913162, 40.427157],\n", + " [-86.913425, 40.427157],\n", + " [-86.91343, 40.427721],\n", + " [-86.913012, 40.427741],\n", + "]\n", + "sam.predict(point_coords, point_labels=1, point_crs=\"EPSG:4326\", output=\"mask2.tif\")\n", + "m.add_raster(\"mask2.tif\", layer_name=\"Mask2\", nodata=0, cmap=\"Greens\", opacity=1)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "57", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/zUMLUsn.png)" + ] + }, + { + "cell_type": "markdown", + "id": "58", + "metadata": {}, + "source": [ + "### Interactive segmentation\n", + "\n", + "Display the interactive map and use the marker tool to draw points on the map. Then click on the `Segment` button to segment the objects. The results will be added to the map automatically. Click on the `Reset` button to clear the points and the results." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "59", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m = sam.show_map()\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "60", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/3W7JGqP.png)" + ] + }, + { + "cell_type": "markdown", + "id": "61", + "metadata": {}, + "source": [ + "## Bounding box input prompts" + ] + }, + { + "cell_type": "markdown", + "id": "62", + "metadata": {}, + "source": [ + "### Create an interactive map" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "63", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[40.427495, -86.913638], zoom=18, height=700)\n", + "image = \"image.tif\"\n", + "m.add_raster(image, layer_name=\"Image\")\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "64", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam = SamGeo(\n", + " model_type=\"vit_h\",\n", + " automatic=False,\n", + " sam_kwargs=None,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "65", + "metadata": {}, + "source": [ + "Specify the image to segment. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "66", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.set_image(image)" + ] + }, + { + "cell_type": "markdown", + "id": "67", + "metadata": {}, + "source": [ + "### Create bounding boxes\n", + "\n", + "If no rectangles are drawn, the default bounding boxes will be used as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "if m.user_rois is not None:\n", + " boxes = m.user_rois\n", + "else:\n", + " boxes = [\n", + " [-86.913654, 40.426967, -86.912774, 40.427881],\n", + " [-86.914780, 40.426256, -86.913997, 40.426852],\n", + " [-86.913632, 40.426215, -86.912581, 40.426820],\n", + " ]" + ] + }, + { + "cell_type": "markdown", + "id": "69", + "metadata": {}, + "source": [ + "## Segment the image\n", + "\n", + "Use the `predict()` method to segment the image with specified bounding boxes. The `boxes` parameter accepts a list of bounding box coordinates in the format of [[left, bottom, right, top], [left, bottom, right, top], ...], a GeoJSON dictionary, or a file path to a GeoJSON file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "70", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.predict(boxes=boxes, point_crs=\"EPSG:4326\", output=\"mask.tif\", dtype=\"uint8\")" + ] + }, + { + "cell_type": "markdown", + "id": "71", + "metadata": {}, + "source": [ + "## Display the result\n", + "\n", + "Add the segmented image to the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "72", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m.add_raster(\"mask.tif\", cmap=\"viridis\", nodata=0, opacity=0.6, layer_name=\"Mask\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "73", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/9y31xUH.png)" + ] + }, + { + "cell_type": "markdown", + "id": "74", + "metadata": {}, + "source": [ + "## Text promots\n", + "\n", + "### Initialize LangSAM class\n", + "\n", + "The initialization of the LangSAM class might take a few minutes. The initialization downloads the model weights and sets up the model for inference." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[40.427495, -86.913638], zoom=18, height=700)\n", + "image = \"image.tif\"\n", + "m.add_raster(image, layer_name=\"Image\")\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam = LangSAM()" + ] + }, + { + "cell_type": "markdown", + "id": "77", + "metadata": {}, + "source": [ + "### Specify text prompts" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "78", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "text_prompt = \"tree\"" + ] + }, + { + "cell_type": "markdown", + "id": "79", + "metadata": {}, + "source": [ + "### Segment the image\n", + "\n", + "Part of the model prediction includes setting appropriate thresholds for object detection and text association with the detected objects. These threshold values range from 0 to 1 and are set while calling the predict method of the LangSAM class.\n", + "\n", + "`box_threshold`: This value is used for object detection in the image. A higher value makes the model more selective, identifying only the most confident object instances, leading to fewer overall detections. A lower value, conversely, makes the model more tolerant, leading to increased detections, including potentially less confident ones.\n", + "\n", + "`text_threshold`: This value is used to associate the detected objects with the provided text prompt. A higher value requires a stronger association between the object and the text prompt, leading to more precise but potentially fewer associations. A lower value allows for looser associations, which could increase the number of associations but also introduce less precise matches.\n", + "\n", + "Remember to test different threshold values on your specific data. The optimal threshold can vary depending on the quality and nature of your images, as well as the specificity of your text prompts. Make sure to choose a balance that suits your requirements, whether that's precision or recall." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "80", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.predict(image, text_prompt, box_threshold=0.24, text_threshold=0.24)" + ] + }, + { + "cell_type": "markdown", + "id": "81", + "metadata": {}, + "source": [ + "### Visualize the results\n", + "\n", + "Show the result with bounding boxes on the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_anns(\n", + " cmap=\"Greens\",\n", + " box_color=\"red\",\n", + " title=\"Automatic Segmentation of Trees\",\n", + " blend=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "83", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/qRcy16Z.png)" + ] + }, + { + "cell_type": "markdown", + "id": "84", + "metadata": {}, + "source": [ + "Show the result without bounding boxes on the map." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_anns(\n", + " cmap=\"Greens\",\n", + " add_boxes=False,\n", + " alpha=0.5,\n", + " title=\"Automatic Segmentation of Trees\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "86", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/TvqGByH.png)" + ] + }, + { + "cell_type": "markdown", + "id": "87", + "metadata": {}, + "source": [ + "Show the result as a grayscale image." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "88", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_anns(\n", + " cmap=\"Greys_r\",\n", + " add_boxes=False,\n", + " alpha=1,\n", + " title=\"Automatic Segmentation of Trees\",\n", + " blend=False,\n", + " output=\"trees.tif\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "89", + "metadata": {}, + "source": [ + "Convert the result to a vector format." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "90", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.raster_to_vector(\"trees.tif\", \"trees.shp\")" + ] + }, + { + "cell_type": "markdown", + "id": "91", + "metadata": {}, + "source": [ + "Show the results on the interactive map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "m.add_raster(\"trees.tif\", layer_name=\"Trees\", palette=\"Greens\", opacity=0.5, nodata=0)\n", + "style = {\n", + " \"color\": \"#3388ff\",\n", + " \"weight\": 2,\n", + " \"fillColor\": \"#7c4185\",\n", + " \"fillOpacity\": 0.5,\n", + "}\n", + "m.add_vector(\"trees.shp\", layer_name=\"Vector\", style=style)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "id": "93", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/WDQgECD.png)" + ] + }, + { + "cell_type": "markdown", + "id": "94", + "metadata": {}, + "source": [ + "### Interactive segmentation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "95", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sam.show_map()" + ] + }, + { + "cell_type": "markdown", + "id": "96", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/Zn7Dwty.png)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/workshops/purdue.ipynb b/docs/workshops/purdue.ipynb index 6a5d1fcc..7ab6a21b 100644 --- a/docs/workshops/purdue.ipynb +++ b/docs/workshops/purdue.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "0", "metadata": {}, "source": [ "# Purdue SamGeo Workshop\n", @@ -21,6 +22,7 @@ { "cell_type": "code", "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -29,6 +31,7 @@ }, { "cell_type": "markdown", + "id": "2", "metadata": {}, "source": [ "## Import libraries" @@ -37,6 +40,7 @@ { "cell_type": "code", "execution_count": null, + "id": "3", "metadata": { "tags": [] }, @@ -49,6 +53,7 @@ }, { "cell_type": "markdown", + "id": "4", "metadata": {}, "source": [ "## Download sample data\n", @@ -59,6 +64,7 @@ 
{ "cell_type": "code", "execution_count": null, + "id": "5", "metadata": { "tags": [] }, @@ -71,6 +77,7 @@ }, { "cell_type": "markdown", + "id": "6", "metadata": {}, "source": [ "Pan and zoom the map to select the area of interest. Use the draw tools to draw a polygon or rectangle on the map" @@ -79,6 +86,7 @@ { "cell_type": "code", "execution_count": null, + "id": "7", "metadata": { "tags": [] }, @@ -92,6 +100,7 @@ }, { "cell_type": "markdown", + "id": "8", "metadata": {}, "source": [ "### Download map tiles\n", @@ -102,6 +111,7 @@ { "cell_type": "code", "execution_count": null, + "id": "9", "metadata": { "tags": [] }, @@ -112,6 +122,7 @@ }, { "cell_type": "markdown", + "id": "10", "metadata": {}, "source": [ "Specify the basemap as the source." @@ -120,16 +131,20 @@ { "cell_type": "code", "execution_count": null, + "id": "11", "metadata": { "tags": [] }, "outputs": [], "source": [ - "leafmap.map_tiles_to_geotiff(output=image, bbox=bbox, zoom=18, source=\"Satellite\", overwrite=True)" + "leafmap.map_tiles_to_geotiff(\n", + " output=image, bbox=bbox, zoom=18, source=\"Satellite\", overwrite=True\n", + ")" ] }, { "cell_type": "markdown", + "id": "12", "metadata": {}, "source": [ "You can also use your own image. Uncomment and run the following cell to use your own image." @@ -138,6 +153,7 @@ { "cell_type": "code", "execution_count": null, + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -146,6 +162,7 @@ }, { "cell_type": "markdown", + "id": "14", "metadata": {}, "source": [ "Display the downloaded image on the map." @@ -154,6 +171,7 @@ { "cell_type": "code", "execution_count": null, + "id": "15", "metadata": { "tags": [] }, @@ -166,6 +184,7 @@ }, { "cell_type": "markdown", + "id": "16", "metadata": {}, "source": [ "![](https://i.imgur.com/YHwrpS2.png)" @@ -173,6 +192,7 @@ }, { "cell_type": "markdown", + "id": "17", "metadata": {}, "source": [ "## Automatic mask generation\n", @@ -183,6 +203,7 @@ { "cell_type": "code", "execution_count": null, + "id": "18", "metadata": { "tags": [] }, @@ -196,6 +217,7 @@ }, { "cell_type": "markdown", + "id": "19", "metadata": {}, "source": [ "### Automatic mask generation\n", @@ -206,6 +228,7 @@ { "cell_type": "code", "execution_count": null, + "id": "20", "metadata": { "tags": [] }, @@ -217,6 +240,7 @@ { "cell_type": "code", "execution_count": null, + "id": "21", "metadata": { "tags": [] }, @@ -227,6 +251,7 @@ }, { "cell_type": "markdown", + "id": "22", "metadata": {}, "source": [ "![](https://i.imgur.com/kWqLVuL.png)" @@ -234,6 +259,7 @@ }, { "cell_type": "markdown", + "id": "23", "metadata": {}, "source": [ "Show the object annotations (objects with random color) on the map." @@ -242,6 +268,7 @@ { "cell_type": "code", "execution_count": null, + "id": "24", "metadata": { "tags": [] }, @@ -252,6 +279,7 @@ }, { "cell_type": "markdown", + "id": "25", "metadata": {}, "source": [ "![](https://i.imgur.com/J6Ie0Zj.png)" @@ -259,6 +287,7 @@ }, { "cell_type": "markdown", + "id": "26", "metadata": {}, "source": [ "Compare images with a slider." @@ -267,6 +296,7 @@ { "cell_type": "code", "execution_count": null, + "id": "27", "metadata": { "tags": [] }, @@ -282,6 +312,7 @@ }, { "cell_type": "markdown", + "id": "28", "metadata": {}, "source": [ "![](https://i.imgur.com/cm4QyaR.png)" @@ -289,6 +320,7 @@ }, { "cell_type": "markdown", + "id": "29", "metadata": {}, "source": [ "Add image to the map." 
@@ -297,6 +329,7 @@ { "cell_type": "code", "execution_count": null, + "id": "30", "metadata": { "tags": [] }, @@ -308,6 +341,7 @@ }, { "cell_type": "markdown", + "id": "31", "metadata": {}, "source": [ "![](https://i.imgur.com/Y6EaGVN.png)" @@ -315,6 +349,7 @@ }, { "cell_type": "markdown", + "id": "32", "metadata": {}, "source": [ "Convert the object annotations to vector format, such as GeoPackage, Shapefile, or GeoJSON." @@ -323,6 +358,7 @@ { "cell_type": "code", "execution_count": null, + "id": "33", "metadata": { "tags": [] }, @@ -334,16 +370,18 @@ { "cell_type": "code", "execution_count": null, + "id": "34", "metadata": { "tags": [] }, "outputs": [], "source": [ - "m.add_vector('masks.shp', layer_name='Masks vector')" + "m.add_vector(\"masks.shp\", layer_name=\"Masks vector\")" ] }, { "cell_type": "markdown", + "id": "35", "metadata": {}, "source": [ "![](https://i.imgur.com/N0xVt9S.png)" @@ -351,6 +389,7 @@ }, { "cell_type": "markdown", + "id": "36", "metadata": {}, "source": [ "### Automatic mask generation options\n", @@ -361,6 +400,7 @@ { "cell_type": "code", "execution_count": null, + "id": "37", "metadata": { "tags": [] }, @@ -379,6 +419,7 @@ { "cell_type": "code", "execution_count": null, + "id": "38", "metadata": { "tags": [] }, @@ -393,6 +434,7 @@ { "cell_type": "code", "execution_count": null, + "id": "39", "metadata": { "tags": [] }, @@ -404,6 +446,7 @@ { "cell_type": "code", "execution_count": null, + "id": "40", "metadata": { "tags": [] }, @@ -414,6 +457,7 @@ }, { "cell_type": "markdown", + "id": "41", "metadata": {}, "source": [ "![](https://i.imgur.com/S2LYen8.png)" @@ -422,6 +466,7 @@ { "cell_type": "code", "execution_count": null, + "id": "42", "metadata": { "tags": [] }, @@ -432,6 +477,7 @@ }, { "cell_type": "markdown", + "id": "43", "metadata": {}, "source": [ "![](https://i.imgur.com/opEKsUu.png)" @@ -439,6 +485,7 @@ }, { "cell_type": "markdown", + "id": "44", "metadata": {}, "source": [ "Compare images with a slider." @@ -447,6 +494,7 @@ { "cell_type": "code", "execution_count": null, + "id": "45", "metadata": { "tags": [] }, @@ -462,6 +510,7 @@ }, { "cell_type": "markdown", + "id": "46", "metadata": {}, "source": [ "## Use points as input prompts\n", @@ -471,6 +520,7 @@ }, { "cell_type": "markdown", + "id": "47", "metadata": {}, "source": [ "Set `automatic=False` to disable the `SamAutomaticMaskGenerator` and enable the `SamPredictor`." @@ -479,6 +529,7 @@ { "cell_type": "code", "execution_count": null, + "id": "48", "metadata": { "tags": [] }, @@ -493,6 +544,7 @@ { "cell_type": "code", "execution_count": null, + "id": "49", "metadata": { "tags": [] }, @@ -507,6 +559,7 @@ }, { "cell_type": "markdown", + "id": "50", "metadata": {}, "source": [ "Specify the image to segment." 
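The point-prompt markdown in the workshop notebooks (added as cn_workshop.ipynb above and reformatted in purdue.ipynb here) notes that input points "can also be specified as a file path to a vector dataset", but every cell passes a coordinate list. A minimal sketch of the file-path variant, not part of the patch; `points.geojson` is a hypothetical file of point features in EPSG:4326.

```python
from samgeo import SamGeo

sam = SamGeo(model_type="vit_h", automatic=False, sam_kwargs=None)
sam.set_image("image.tif")

# "points.geojson" is a hypothetical vector file of point features; point_crs
# asks predict() to transform the lon/lat coordinates to image (col, row)
# space, as the notebook markdown describes for non-pixel inputs.
sam.predict(
    point_coords="points.geojson",
    point_labels=1,
    point_crs="EPSG:4326",
    output="mask_from_points_file.tif",
)
```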
@@ -515,6 +568,7 @@ { "cell_type": "code", "execution_count": null, + "id": "51", "metadata": { "tags": [] }, @@ -525,6 +579,7 @@ }, { "cell_type": "markdown", + "id": "52", "metadata": {}, "source": [ "### Image segmentation with input points\n", @@ -537,6 +592,7 @@ { "cell_type": "code", "execution_count": null, + "id": "53", "metadata": { "tags": [] }, @@ -550,6 +606,7 @@ }, { "cell_type": "markdown", + "id": "54", "metadata": {}, "source": [ "![](https://i.imgur.com/zUMLUsn.png)" @@ -557,6 +614,7 @@ }, { "cell_type": "markdown", + "id": "55", "metadata": {}, "source": [ "Try multiple points input:" @@ -565,15 +623,18 @@ { "cell_type": "code", "execution_count": null, + "id": "56", "metadata": { "tags": [] }, "outputs": [], "source": [ - "point_coords = [[-86.913162, 40.427157],\n", - " [-86.913425, 40.427157],\n", - " [-86.91343, 40.427721],\n", - " [-86.913012, 40.427741]]\n", + "point_coords = [\n", + " [-86.913162, 40.427157],\n", + " [-86.913425, 40.427157],\n", + " [-86.91343, 40.427721],\n", + " [-86.913012, 40.427741],\n", + "]\n", "sam.predict(point_coords, point_labels=1, point_crs=\"EPSG:4326\", output=\"mask2.tif\")\n", "m.add_raster(\"mask2.tif\", layer_name=\"Mask2\", nodata=0, cmap=\"Greens\", opacity=1)\n", "m" @@ -581,6 +642,7 @@ }, { "cell_type": "markdown", + "id": "57", "metadata": {}, "source": [ "![](https://i.imgur.com/zUMLUsn.png)" @@ -588,6 +650,7 @@ }, { "cell_type": "markdown", + "id": "58", "metadata": {}, "source": [ "### Interactive segmentation\n", @@ -598,6 +661,7 @@ { "cell_type": "code", "execution_count": null, + "id": "59", "metadata": { "tags": [] }, @@ -609,6 +673,7 @@ }, { "cell_type": "markdown", + "id": "60", "metadata": {}, "source": [ "![](https://i.imgur.com/3W7JGqP.png)" @@ -616,6 +681,7 @@ }, { "cell_type": "markdown", + "id": "61", "metadata": {}, "source": [ "## Bounding box input prompts" @@ -623,6 +689,7 @@ }, { "cell_type": "markdown", + "id": "62", "metadata": {}, "source": [ "### Create an interactive map" @@ -631,6 +698,7 @@ { "cell_type": "code", "execution_count": null, + "id": "63", "metadata": { "tags": [] }, @@ -645,6 +713,7 @@ { "cell_type": "code", "execution_count": null, + "id": "64", "metadata": { "tags": [] }, @@ -659,6 +728,7 @@ }, { "cell_type": "markdown", + "id": "65", "metadata": {}, "source": [ "Specify the image to segment. 
" @@ -667,6 +737,7 @@ { "cell_type": "code", "execution_count": null, + "id": "66", "metadata": { "tags": [] }, @@ -677,6 +748,7 @@ }, { "cell_type": "markdown", + "id": "67", "metadata": {}, "source": [ "### Create bounding boxes\n", @@ -687,6 +759,7 @@ { "cell_type": "code", "execution_count": null, + "id": "68", "metadata": { "tags": [] }, @@ -698,12 +771,13 @@ " boxes = [\n", " [-86.913654, 40.426967, -86.912774, 40.427881],\n", " [-86.914780, 40.426256, -86.913997, 40.426852],\n", - " [-86.913632, 40.426215, -86.912581, 40.426820]\n", + " [-86.913632, 40.426215, -86.912581, 40.426820],\n", " ]" ] }, { "cell_type": "markdown", + "id": "69", "metadata": {}, "source": [ "## Segment the image\n", @@ -714,6 +788,7 @@ { "cell_type": "code", "execution_count": null, + "id": "70", "metadata": { "tags": [] }, @@ -724,6 +799,7 @@ }, { "cell_type": "markdown", + "id": "71", "metadata": {}, "source": [ "## Display the result\n", @@ -734,17 +810,19 @@ { "cell_type": "code", "execution_count": null, + "id": "72", "metadata": { "tags": [] }, "outputs": [], "source": [ - "m.add_raster('mask.tif', cmap='viridis', nodata=0, opacity=0.6, layer_name='Mask')\n", + "m.add_raster(\"mask.tif\", cmap=\"viridis\", nodata=0, opacity=0.6, layer_name=\"Mask\")\n", "m" ] }, { "cell_type": "markdown", + "id": "73", "metadata": {}, "source": [ "![](https://i.imgur.com/9y31xUH.png)" @@ -752,6 +830,7 @@ }, { "cell_type": "markdown", + "id": "74", "metadata": {}, "source": [ "## Text promots\n", @@ -764,6 +843,7 @@ { "cell_type": "code", "execution_count": null, + "id": "75", "metadata": { "tags": [] }, @@ -778,6 +858,7 @@ { "cell_type": "code", "execution_count": null, + "id": "76", "metadata": { "tags": [] }, @@ -788,6 +869,7 @@ }, { "cell_type": "markdown", + "id": "77", "metadata": {}, "source": [ "### Specify text prompts" @@ -796,6 +878,7 @@ { "cell_type": "code", "execution_count": null, + "id": "78", "metadata": { "tags": [] }, @@ -806,6 +889,7 @@ }, { "cell_type": "markdown", + "id": "79", "metadata": {}, "source": [ "### Segment the image\n", @@ -822,6 +906,7 @@ { "cell_type": "code", "execution_count": null, + "id": "80", "metadata": { "tags": [] }, @@ -832,6 +917,7 @@ }, { "cell_type": "markdown", + "id": "81", "metadata": {}, "source": [ "### Visualize the results\n", @@ -842,21 +928,23 @@ { "cell_type": "code", "execution_count": null, + "id": "82", "metadata": { "tags": [] }, "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greens',\n", - " box_color='red',\n", - " title='Automatic Segmentation of Trees',\n", + " cmap=\"Greens\",\n", + " box_color=\"red\",\n", + " title=\"Automatic Segmentation of Trees\",\n", " blend=True,\n", ")" ] }, { "cell_type": "markdown", + "id": "83", "metadata": {}, "source": [ "![](https://i.imgur.com/qRcy16Z.png)" @@ -864,6 +952,7 @@ }, { "cell_type": "markdown", + "id": "84", "metadata": {}, "source": [ "Show the result without bounding boxes on the map." @@ -872,21 +961,23 @@ { "cell_type": "code", "execution_count": null, + "id": "85", "metadata": { "tags": [] }, "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greens',\n", + " cmap=\"Greens\",\n", " add_boxes=False,\n", " alpha=0.5,\n", - " title='Automatic Segmentation of Trees',\n", + " title=\"Automatic Segmentation of Trees\",\n", ")" ] }, { "cell_type": "markdown", + "id": "86", "metadata": {}, "source": [ "![](https://i.imgur.com/TvqGByH.png)" @@ -894,6 +985,7 @@ }, { "cell_type": "markdown", + "id": "87", "metadata": {}, "source": [ "Show the result as a grayscale image." 
@@ -902,23 +994,25 @@ { "cell_type": "code", "execution_count": null, + "id": "88", "metadata": { "tags": [] }, "outputs": [], "source": [ "sam.show_anns(\n", - " cmap='Greys_r',\n", + " cmap=\"Greys_r\",\n", " add_boxes=False,\n", " alpha=1,\n", - " title='Automatic Segmentation of Trees',\n", + " title=\"Automatic Segmentation of Trees\",\n", " blend=False,\n", - " output='trees.tif',\n", + " output=\"trees.tif\",\n", ")" ] }, { "cell_type": "markdown", + "id": "89", "metadata": {}, "source": [ "Convert the result to a vector format." @@ -927,6 +1021,7 @@ { "cell_type": "code", "execution_count": null, + "id": "90", "metadata": { "tags": [] }, @@ -937,6 +1032,7 @@ }, { "cell_type": "markdown", + "id": "91", "metadata": {}, "source": [ "Show the results on the interactive map." @@ -945,6 +1041,7 @@ { "cell_type": "code", "execution_count": null, + "id": "92", "metadata": { "tags": [] }, @@ -963,6 +1060,7 @@ }, { "cell_type": "markdown", + "id": "93", "metadata": {}, "source": [ "![](https://i.imgur.com/WDQgECD.png)" @@ -970,6 +1068,7 @@ }, { "cell_type": "markdown", + "id": "94", "metadata": {}, "source": [ "### Interactive segmentation" @@ -978,6 +1077,7 @@ { "cell_type": "code", "execution_count": null, + "id": "95", "metadata": { "tags": [] }, @@ -988,6 +1088,7 @@ }, { "cell_type": "markdown", + "id": "96", "metadata": {}, "source": [ "![](https://i.imgur.com/Zn7Dwty.png)" diff --git a/mkdocs.yml b/mkdocs.yml index 63068b6f..27b8c3ab 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -63,6 +63,7 @@ nav: - examples/maxar_open_data.ipynb - Workshops: - workshops/purdue.ipynb + - workshops/cn_workshop.ipynb - API Reference: - common module: common.md - samgeo module: samgeo.md diff --git a/paper/paper.bib b/paper/paper.bib index 4f2f8120..249b7cda 100644 --- a/paper/paper.bib +++ b/paper/paper.bib @@ -57,7 +57,7 @@ @ARTICLE{Gillies2013 } @SOFTWARE{Wu2023, - title = {segment-anything-py: An unofficial Python package + title = {segment-anything-py: An unofficial Python package for Meta AI's Segment Anything Model}, author = {Qiusheng Wu}, month = may, @@ -70,7 +70,7 @@ @SOFTWARE{Wu2023 @ARTICLE{Zhang2023, - title={A Comprehensive Survey on Segment Anything Model for Vision and Beyond}, + title={A Comprehensive Survey on Segment Anything Model for Vision and Beyond}, author={Chunhui Zhang and Li Liu and Yawen Cui and Guanjie Huang and Weilin Lin and Yiqian Yang and Yuehong Hu}, year={2023}, eprint={2305.08196}, @@ -82,7 +82,7 @@ @ARTICLE{Zhang2023 @ARTICLE{Ma2023, - title={Segment Anything in Medical Images}, + title={Segment Anything in Medical Images}, author={Jun Ma and Bo Wang}, year={2023}, eprint={2304.12306}, @@ -104,7 +104,7 @@ @SOFTWARE{Hancharenka2023 @ARTICLE{Ji2023, - title={Segment Anything Is Not Always Perfect: An Investigation of SAM on Different Real-world Applications}, + title={Segment Anything Is Not Always Perfect: An Investigation of SAM on Different Real-world Applications}, author={Wei Ji and Jingjing Li and Qi Bi and Wenbo Li and Li Cheng}, year={2023}, eprint={2304.05750}, @@ -116,7 +116,7 @@ @ARTICLE{Ji2023 @ARTICLE{liu2023, - title={Grounding DINO: Marrying DINO with Grounded Pre-Training for Open-Set Object Detection}, + title={Grounding DINO: Marrying DINO with Grounded Pre-Training for Open-Set Object Detection}, author={Shilong Liu and Zhaoyang Zeng and Tianhe Ren and Feng Li and Hao Zhang and Jie Yang and Chunyuan Li and Jianwei Yang and Hang Su and Jun Zhu and Lei Zhang}, year={2023}, eprint={2303.05499}, diff --git a/pyproject-codespell.precommit-toml 
b/pyproject-codespell.precommit-toml new file mode 100644 index 00000000..8aa8fdfc --- /dev/null +++ b/pyproject-codespell.precommit-toml @@ -0,0 +1,3 @@ +[tool.codespell] +ignore-words-list = "aci,acount,acounts,fallow,ges,hart,hist,nd,ned,ois,wqs,watermask,tre,pres" +skip="*.csv,*.geojson,*.json,*.yml*.js,*.html,*cff,*.pdf" diff --git a/requirements.txt b/requirements.txt index 4a635aa5..07cb902d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,15 @@ -segment-anything-py -segment-anything-hq -opencv-python -pycocotools -matplotlib -huggingface_hub +gdown geopandas +huggingface_hub +leafmap +localtileserver +matplotlib +opencv-python +pycocotools +pyproj rasterio +segment-anything-hq +segment-anything-py +timm tqdm -gdown xyzservices -pyproj -leafmap -localtileserver -timm \ No newline at end of file diff --git a/requirements_dev.txt b/requirements_dev.txt index e05d4492..5a3ac7ad 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,3 +1,3 @@ -rio-cogeo groundingdino-py -segment-anything-fast \ No newline at end of file +rio-cogeo +segment-anything-fast diff --git a/requirements_docs.txt b/requirements_docs.txt index 1cca6f0e..b5c8e72b 100644 --- a/requirements_docs.txt +++ b/requirements_docs.txt @@ -1,28 +1,28 @@ black black[jupyter] -codespell -deadlink bump2version +codespell coverage +deadlink flake8 ipykernel livereload -nbconvert -nbformat -pip -sphinx -tox -twine -watchdog -wheel mkdocs -mkdocs-git-revision-date-plugin mkdocs-git-revision-date-localized-plugin +mkdocs-git-revision-date-plugin mkdocs-jupyter>=0.24.0 -mkdocs-material>=9.1.3 +mkdocs-material>=9.1.3 mkdocs-pdf-export-plugin mkdocstrings mkdocstrings-crystal mkdocstrings-python-legacy +nbconvert +nbformat +pip pygments -pymdown-extensions \ No newline at end of file +pymdown-extensions +sphinx +tox +twine +watchdog +wheel diff --git a/samgeo/__init__.py b/samgeo/__init__.py index f54cdf75..eae7964f 100644 --- a/samgeo/__init__.py +++ b/samgeo/__init__.py @@ -2,7 +2,7 @@ __author__ = """Qiusheng Wu""" __email__ = "giswqs@gmail.com" -__version__ = '0.10.2' +__version__ = "0.10.2" from .samgeo import * diff --git a/samgeo/common.py b/samgeo/common.py index 83d5f34f..ed3fb40a 100644 --- a/samgeo/common.py +++ b/samgeo/common.py @@ -780,7 +780,11 @@ def geojson_to_coords( def coords_to_xy( - src_fp: str, coords: list, coord_crs: str = "epsg:4326", return_out_of_bounds=False, **kwargs + src_fp: str, + coords: list, + coord_crs: str = "epsg:4326", + return_out_of_bounds=False, + **kwargs, ) -> list: """Converts a list of coordinates to pixel coordinates, i.e., (col, row) coordinates. 
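The samgeo/common.py hunk above only reflows the new `coords_to_xy` signature, which now takes `return_out_of_bounds` and `**kwargs`. A minimal usage sketch, assuming an `image.tif` and a lon/lat pair borrowed from the workshop notebooks; the samgeo.py hunk later in this patch consumes the same two-value return inside `SamGeo.predict()`.

```python
from samgeo.common import coords_to_xy

# With return_out_of_bounds=True the helper returns the pixel (col, row)
# coordinates plus a second value flagging any input points that fall outside
# the raster, matching the call added to SamGeo.predict() in this patch.
xy, out_of_bounds = coords_to_xy(
    "image.tif",
    [[-86.913162, 40.427157]],
    coord_crs="epsg:4326",
    return_out_of_bounds=True,
)
print(xy, out_of_bounds)
```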
diff --git a/samgeo/hq_sam.py b/samgeo/hq_sam.py index 3a17b438..f5fd9cd4 100644 --- a/samgeo/hq_sam.py +++ b/samgeo/hq_sam.py @@ -7,7 +7,11 @@ import cv2 import torch import numpy as np -from segment_anything_hq import sam_model_registry, SamAutomaticMaskGenerator, SamPredictor +from segment_anything_hq import ( + sam_model_registry, + SamAutomaticMaskGenerator, + SamPredictor, +) from .common import * @@ -573,7 +577,11 @@ def predict( self.boxes = input_boxes - if boxes is None or (len(boxes) == 1) or (len(boxes) == 4 and isinstance(boxes[0], float)): + if ( + boxes is None + or (len(boxes) == 1) + or (len(boxes) == 4 and isinstance(boxes[0], float)) + ): if isinstance(boxes, list) and isinstance(boxes[0], list): boxes = boxes[0] masks, scores, logits = predictor.predict( diff --git a/samgeo/samgeo.py b/samgeo/samgeo.py index 86bc2160..70f4f86e 100644 --- a/samgeo/samgeo.py +++ b/samgeo/samgeo.py @@ -530,7 +530,9 @@ def predict( point_labels = self.point_labels if (point_crs is not None) and (point_coords is not None): - point_coords, out_of_bounds = coords_to_xy(self.source, point_coords, point_crs, return_out_of_bounds=True) + point_coords, out_of_bounds = coords_to_xy( + self.source, point_coords, point_crs, return_out_of_bounds=True + ) if isinstance(point_coords, list): point_coords = np.array(point_coords) @@ -578,7 +580,11 @@ def predict( self.boxes = input_boxes - if boxes is None or (len(boxes) == 1) or (len(boxes) == 4 and isinstance(boxes[0], float)): + if ( + boxes is None + or (len(boxes) == 1) + or (len(boxes) == 4 and isinstance(boxes[0], float)) + ): if isinstance(boxes, list) and isinstance(boxes[0], list): boxes = boxes[0] masks, scores, logits = predictor.predict( diff --git a/setup.py b/setup.py index d44cb392..cde73cb7 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,6 @@ test_suite="tests", tests_require=test_requirements, url="https://github.com/opengeos/segment-geospatial", - version='0.10.2', + version="0.10.2", zip_safe=False, ) diff --git a/tests/test_samgeo.py b/tests/test_samgeo.py index 95be51fc..61390e4c 100644 --- a/tests/test_samgeo.py +++ b/tests/test_samgeo.py @@ -61,7 +61,7 @@ def test_predict(self): sam.set_image(self.source) point_coords = [[-122.1419, 37.6383]] sam.predict( - point_coords, point_labels=1, point_crs="EPSG:4326", output='mask1.tif' + point_coords, point_labels=1, point_crs="EPSG:4326", output="mask1.tif" ) self.assertTrue(os.path.exists("mask1.tif")) @@ -71,6 +71,6 @@ def test_predict(self): [-122.1451, 37.6395], ] sam.predict( - point_coords, point_labels=1, point_crs="EPSG:4326", output='mask2.tif' + point_coords, point_labels=1, point_crs="EPSG:4326", output="mask2.tif" ) self.assertTrue(os.path.exists("mask2.tif"))
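The bounding-box sections of the workshop notebooks state that `predict(boxes=...)` also accepts a GeoJSON dictionary or a file path to a GeoJSON file, but only the coordinate-list form is exercised. A hedged sketch of the file-path form, not part of the patch; `tree_boxes.geojson` is the sample file downloaded in docs/examples/box_prompts.ipynb earlier in this patch, and `image.tif` is a placeholder for the raster those boxes were drawn on.

```python
import leafmap
from samgeo import SamGeo

# Sample box prompts from docs/examples/box_prompts.ipynb in this patch.
url = "https://opengeos.github.io/data/sam/tree_boxes.geojson"
leafmap.download_file(url, "tree_boxes.geojson")

sam = SamGeo(model_type="vit_h", automatic=False, sam_kwargs=None)
sam.set_image("image.tif")  # placeholder; use the raster the boxes refer to

# The notebooks say boxes= also accepts a GeoJSON file path; point_crs keeps
# the lon/lat box coordinates consistent with the raster, as in the list form.
sam.predict(
    boxes="tree_boxes.geojson",
    point_crs="EPSG:4326",
    output="mask.tif",
    dtype="uint8",
)
```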