diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml index 57096bc..6d24027 100644 --- a/.github/workflows/docs-build.yml +++ b/.github/workflows/docs-build.yml @@ -35,17 +35,17 @@ jobs: - name: Build docs run: | mkdocs build - # - name: Deploy to Netlify - # uses: nwtgck/actions-netlify@v2.0 - # with: - # publish-dir: "./site" - # production-branch: master - # github-token: ${{ secrets.GITHUB_TOKEN }} - # deploy-message: "Deploy from GitHub Actions" - # enable-pull-request-comment: true - # enable-commit-comment: false - # overwrites-pull-request-comment: true - # env: - # NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - # NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} - # timeout-minutes: 10 + - name: Deploy to Netlify + uses: nwtgck/actions-netlify@v2.0 + with: + publish-dir: "./site" + production-branch: master + github-token: ${{ secrets.GITHUB_TOKEN }} + deploy-message: "Deploy from GitHub Actions" + enable-pull-request-comment: true + enable-commit-comment: false + overwrites-pull-request-comment: true + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + timeout-minutes: 10 diff --git a/.gitignore b/.gitignore index c03f38a..438c708 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,9 @@ private/ # C extensions *.so +**/*.tif +**/*.zip +**/*.las # Distribution / packaging .Python diff --git a/README.md b/README.md index c4234be..35926b5 100644 --- a/README.md +++ b/README.md @@ -10,5 +10,6 @@ ## Features +- Visualizing geospatial data, including vector, raster, and LiDAR data - Segmenting remote sensing imagery with the Segment Anything Model - Classifying remote sensing imagery with deep learning models diff --git a/docs/examples/dataviz/lidar_viz.ipynb b/docs/examples/dataviz/lidar_viz.ipynb new file mode 100644 index 0000000..3036a34 --- /dev/null +++ b/docs/examples/dataviz/lidar_viz.ipynb @@ -0,0 +1,321 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[![image](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/opengeos/geoai/blob/main/examples/dataviz/lidar_viz.ipynb)\n", + "[![image](https://img.shields.io/badge/Open-Planetary%20Computer-black?style=flat&logo=microsoft)](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/user-redirect/git-pull?repo=https://github.com/opengeos/geoai&urlpath=lab/tree/geoai/examples/dataviz/lidar_viz.ipynb&branch=main)\n", + "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/opengeos/geoai/blob/main/examples/dataviz/lidar_viz.ipynb)\n", + "\n", + "# Visualizing LiDAR Data with Leafmap\n", + "\n", + "This notebook demonstrates how to visualize LiDAR data using [leafmap](https://leafmap.org).\n", + "\n", + "## Installation\n", + "\n", + "Uncomment and run the following cell to install the required Python packages." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# %pip install \"leafmap[lidar]\" open3d" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Import libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import leafmap" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Download a [sample LiDAR dataset](https://drive.google.com/file/d/1H_X1190vL63BoFYa_cVBDxtIa8rG-Usb/view?usp=sharing) from Google Drive. The zip file is 52.1 MB and the uncompressed LAS file is 109 MB." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://open.gishub.org/data/lidar/madison.zip'\n", + "filename = 'madison.las'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.download_file(url, 'madison.zip', unzip=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Metadata" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Read the LiDAR data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las = leafmap.read_lidar(filename)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The LAS header." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.header" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The number of points." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.header.point_count" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The list of features." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "list(las.point_format.dimension_names)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Read data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Inspect data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.X" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.Y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.Z" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "las.intensity" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## PyVista\n", + "\n", + "Visualize LiDAR data using the [pyvista](https://github.com/pyvista/pyvista) backend." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_lidar(filename, cmap='terrain', backend='pyvista')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/xezcgMP.gif)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ipygany\n", + "\n", + "Visualize LiDAR data using the [ipygany](https://github.com/QuantStack/ipygany) backend." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_lidar(filename, backend='ipygany', background='white')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/MyMWW4I.gif)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Panel\n", + "\n", + "Visualize LiDAR data using the [panel](https://github.com/holoviz/panel) backend." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_lidar(filename, cmap='terrain', backend='panel', background='white')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/XQGWbJk.gif)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Open3D\n", + "\n", + "Visualize LiDAR data using the [open3d](http://www.open3d.org) backend." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_lidar(filename, backend='open3d')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/rL85fbl.gif)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "geo", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/examples/dataviz/raster_viz.ipynb b/docs/examples/dataviz/raster_viz.ipynb new file mode 100644 index 0000000..ad39f3b --- /dev/null +++ b/docs/examples/dataviz/raster_viz.ipynb @@ -0,0 +1,433 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[![image](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/opengeos/geoai/blob/main/examples/dataviz/raster_viz.ipynb)\n", + "[![image](https://img.shields.io/badge/Open-Planetary%20Computer-black?style=flat&logo=microsoft)](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/user-redirect/git-pull?repo=https://github.com/opengeos/geoai&urlpath=lab/tree/geoai/examples/dataviz/raster_viz.ipynb&branch=main)\n", + "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/opengeos/geoai/blob/main/examples/dataviz/raster_viz.ipynb)\n", + "\n", + "# Visualizing Raster Data with Leafmap\n", + "\n", + "This notebook demonstrates how to visualize raster data using [leafmap](https://leafmap.org/). Leafmap can visualize raster data (e.g., Cloud Optimized GeoTIFF) stored in a local file or on the cloud (e.g., AWS S3). It can also visualize raster data stored in a [STAC](https://stacspec.org/) catalog.\n", + "\n", + "## Installation\n", + "\n", + "Uncomment the following line to install the required packages if needed." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# %pip install \"leafmap[raster]\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Import packages" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import leafmap" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## COG\n", + "\n", + "A Cloud Optimized GeoTIFF (COG) is a regular GeoTIFF file, aimed at being hosted on an HTTP file server, with an internal organization that enables more efficient workflows on the cloud. It does this by leveraging the ability of clients issuing HTTP GET range requests to ask for just the parts of a file they need. More information about COG can be found at the [COG website](https://www.cogeo.org).\n", + "\n", + "For this demo, we will use data from https://www.maxar.com/open-data/california-colorado-fires for mapping the California and Colorado fires. Let's create an interactive map and add the COG to the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "url = 'https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-02-16/pine-gulch-fire20/1030010076004E00.tif'\n", + "m.add_cog_layer(url, name=\"Fire (pre-event)\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can add multiple COGs to the map. Let's add another one." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url2 = 'https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-14/pine-gulch-fire20/10300100AAC8DD00.tif'\n", + "m.add_cog_layer(url2, name=\"Fire (post-event)\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/J9fLbYh.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a split map for comparing the two COGs." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.split_map(left_layer=url, right_layer=url2)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/FJa0Yta.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Local Raster\n", + "\n", + "Leafmap can also visualize local GeoTIFF files. Let's download some sample data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dem_url = 'https://open.gishub.org/data/raster/srtm90.tif'\n", + "leafmap.download_file(dem_url, unzip=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize a single-band raster." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_raster('srtm90.tif', cmap='terrain', layer_name='DEM')\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/7g9huvY.png)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "landsat_url = 'https://open.gishub.org/data/raster/cog.tif'\n", + "leafmap.download_file(landsat_url)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize a multi-band raster."
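+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Before plotting, it can help to check how many bands the downloaded file has and what CRS it uses. The next cell is an optional sketch that assumes `rasterio` is available (it typically comes with the raster extras); the cell after it renders a three-band combination on the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Optional sketch (assumes rasterio is installed): inspect the local GeoTIFF.\n", + "import rasterio\n", + "\n", + "with rasterio.open('cog.tif') as src:\n", + "    print(src.count, 'bands,', src.crs, src.width, 'x', src.height)"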
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_raster('cog.tif', bands=[4, 3, 2], layer_name='Landsat')\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/euhkajs.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## STAC\n", + "\n", + "The SpatioTemporal Asset Catalog (STAC) specification provides a common language to describe a range of geospatial information so that it can more easily be indexed and discovered. A SpatioTemporal Asset is any file that represents information about the earth captured in a certain space and time. STAC aims to enable that next generation of geospatial search engines, while also supporting web best practices so geospatial information is more easily surfaced in traditional search engines. More information about STAC can be found at the [STAC website](https://stacspec.org). In this example, we will use a STAC item from the [SPOT Orthoimages of Canada](https://stacindex.org/catalogs/spot-orthoimages-canada-2005) available through the link below:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create an interactive map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://canada-spot-ortho.s3.amazonaws.com/canada_spot_orthoimages/canada_spot5_orthoimages/S5_2007/S5_11055_6057_20070622/S5_11055_6057_20070622.json'\n", + "leafmap.stac_bands(url)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Add STAC layers to the map." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_stac_layer(url, bands=['pan'], name='Panchromatic')\n", + "m.add_stac_layer(url, bands=['B3', 'B2', 'B1'], name='False color')\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/IlqsJXK.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Custom STAC Catalog" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Provide custom STAC API endpoints as a dictionary in the format of `{\"name\": \"url\"}`. The name will show up in the dropdown menu, while the url is the STAC API endpoint that will be used to search for items." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "catalogs = {\n", + " \"Element84 Earth Search\": \"https://earth-search.aws.element84.com/v1\",\n", + " \"Microsoft Planetary Computer\": \"https://planetarycomputer.microsoft.com/api/stac/v1\",\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[40, -100], zoom=4)\n", + "m.set_catalog_source(catalogs)\n", + "m.add_stac_gui()\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Once the catalog panel is open, you can search for items from the custom STAC API endpoints. Simply draw a bounding box on the map or zoom to a location of interest. Click on the **Collections** button to retrieve the collections from the custom STAC API endpoints. Next, select a collection from the dropdown menu. Then, click on the **Items** button to retrieve the items from the selected collection. 
Finally, click on the **Display** button to add the selected item to the map." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/M8IbRsM.png)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m.stac_gdf # The GeoDataFrame of the STAC search results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m.stac_dict # The STAC search results as a dictionary" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m.stac_item # The selected STAC item of the search result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## AWS S3" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To be able to run this notebook, you'll need to have AWS credentials available as environment variables. Uncomment the following lines to set the environment variables." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# import os\n", + "# os.environ[\"AWS_ACCESS_KEY_ID\"] = \"YOUR AWS ACCESS ID HERE\"\n", + "# os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"YOUR AWS ACCESS KEY HERE\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example, we will use datasets from the [Maxar Open Data Program on AWS](https://registry.opendata.aws/maxar-open-data/)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "BUCKET = \"maxar-opendata\"\n", + "FOLDER = \"events/Kahramanmaras-turkey-earthquake-23/\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "List all the datasets in the bucket. Specify a file extension to filter the results if needed." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "items = leafmap.s3_list_objects(BUCKET, FOLDER, ext=\".tif\")\n", + "items[:10]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize raster datasets from the bucket."
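+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The items returned by `s3_list_objects` can be passed directly to `add_cog_layer`. As an optional aside, the next cell is a minimal, hypothetical sketch that compares two of the returned scenes side by side with `split_map` (the indices are arbitrary; pick any two scenes that overlap). The cell after it displays a single scene." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Hypothetical sketch: compare two of the returned scenes in a split map.\n", + "# The indices are arbitrary; pick any two scenes that overlap.\n", + "m = leafmap.Map()\n", + "m.split_map(left_layer=items[0], right_layer=items[-1])\n", + "m"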
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_cog_layer(items[2], name=\"Maxar\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/NkTZ6Lj.png)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "geo", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/examples/dataviz/vector_viz.ipynb b/docs/examples/dataviz/vector_viz.ipynb new file mode 100644 index 0000000..ef3253e --- /dev/null +++ b/docs/examples/dataviz/vector_viz.ipynb @@ -0,0 +1,643 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[![image](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/opengeos/geoai/blob/main/examples/dataviz/vector_viz.ipynb)\n", + "[![image](https://img.shields.io/badge/Open-Planetary%20Computer-black?style=flat&logo=microsoft)](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/user-redirect/git-pull?repo=https://github.com/opengeos/geoai&urlpath=lab/tree/geoai/examples/dataviz/vector_viz.ipynb&branch=main)\n", + "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/opengeos/geoai/blob/main/examples/dataviz/vector_viz.ipynb)\n", + "\n", + "# Visualizing Vector Data with Leafmap\n", + "\n", + "## Installation\n", + "\n", + "Uncomment the following line to install [leafmap](https://leafmap.org) if needed." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# %pip install \"leafmap[vector]\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Import libraries" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import leafmap" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize vector data\n", + "\n", + "You can visualize vector data using the `add_vector` function. It supports common vector data formats, including GeoJSON, Shapefile, GeoPackage, and any other formats supported by [geopandas](https://geopandas.org)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[0, 0], zoom=2)\n", + "data = 'https://open.gishub.org/data/vector/cables.geojson'\n", + "m.add_vector(data, layer_name=\"Cable lines\", info_mode=\"on_hover\")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/NIcnLWs.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can style the vector with custom style callback functions. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[20, 0], zoom=2)\n", + "m.add_basemap(\"CartoDB.DarkMatter\")\n", + "data = 'https://open.gishub.org/data/vector/cables.geojson'\n", + "callback = lambda feat: {\"color\": feat[\"properties\"][\"color\"], \"weight\": 1}\n", + "m.add_vector(data, layer_name=\"Cable lines\", style_callback=callback)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/mQnV53U.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Choropleth map\n", + "\n", + "You can create a choropleth map using the `add_data` function. It supports GeoJSON, Shapefile, GeoPackage, and any other formats supported by [geopandas](https://geopandas.org). \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "data = 'https://raw.githubusercontent.com/opengeos/leafmap/master/examples/data/countries.geojson'\n", + "m.add_data(\n", + " data, \n", + " column='POP_EST', \n", + " scheme='Quantiles', \n", + " cmap='Blues', \n", + " legend_title='Population'\n", + ")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/4FV6f72.png)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_data(\n", + " data,\n", + " column='POP_EST',\n", + " scheme='EqualInterval',\n", + " cmap='Blues',\n", + " legend_title='Population',\n", + ")\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/KEY6zEj.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## GeoParquet\n", + "\n", + "Visualize GeoParquet data with leafmap and lonboard." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://open.gishub.org/data/duckdb/cities.parquet'\n", + "gdf = leafmap.read_parquet(url, return_type='gdf', src_crs='EPSG:4326')\n", + "gdf.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize point data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_vector(\n", + " gdf, \n", + " get_radius=20000, \n", + " get_fill_color='blue', \n", + " zoom_to_layer=False, \n", + " map_args={'center': (40, -100), 'zoom': 3, 'height': 500}\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/BBsLjvx.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualizing polygon data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://data.source.coop/giswqs/nwi/wetlands/DC_Wetlands.parquet'\n", + "gdf = leafmap.read_parquet(url, return_type='gdf', src_crs='EPSG:5070', dst_crs='EPSG:4326')\n", + "gdf.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_vector(gdf, get_fill_color=[0, 0, 255, 128])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![vector](https://i.imgur.com/HRtpiVd.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Alternatively, you can specify a color map to visualize the data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "color_map = {\n", + " \"Freshwater Forested/Shrub Wetland\": (0, 136, 55),\n", + " \"Freshwater Emergent Wetland\": (127, 195, 28),\n", + " \"Freshwater Pond\": (104, 140, 192),\n", + " \"Estuarine and Marine Wetland\": (102, 194, 165),\n", + " \"Riverine\": (1, 144, 191),\n", + " \"Lake\": (19, 0, 124),\n", + " \"Estuarine and Marine Deepwater\": (0, 124, 136),\n", + " \"Other\": (178, 134, 86),\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.view_vector(gdf, color_column='WETLAND_TYPE', color_map=color_map, opacity=0.5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![vector-color](https://i.imgur.com/Ejh8hK6.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Display a legend for the data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.Legend(title=\"Wetland Type\", legend_dict=color_map)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![legend](https://i.imgur.com/fxzHHFN.png)\n", + "\n", + "## PMTiles\n", + "\n", + "[PMTiles](https://github.com/protomaps/PMTiles) is a single-file archive format for tiled data. A PMTiles archive can be hosted on a commodity storage platform such as S3, and enables low-cost, zero-maintenance map applications that are \"serverless\" - free of a custom tile backend or third party provider." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Remote PMTiles\n", + "\n", + "Leafmap can visualize PMTiles hosted locally or remotely." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Overture data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import leafmap.foliumap as leafmap" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Check the metadata of the PMTiles." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = \"https://storage.googleapis.com/ahp-research/overture/pmtiles/overture.pmtiles\"\n", + "metadata = leafmap.pmtiles_metadata(url)\n", + "print(f\"layer names: {metadata['layer_names']}\")\n", + "print(f\"bounds: {metadata['bounds']}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize the PMTiles." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "m.add_basemap('CartoDB.DarkMatter')\n", + "\n", + "style = {\n", + " \"version\": 8,\n", + " \"sources\": {\n", + " \"example_source\": {\n", + " \"type\": \"vector\",\n", + " \"url\": \"pmtiles://\" + url,\n", + " \"attribution\": 'PMTiles',\n", + " }\n", + " },\n", + " \"layers\": [\n", + " {\n", + " \"id\": \"admins\",\n", + " \"source\": \"example_source\",\n", + " \"source-layer\": \"admins\",\n", + " \"type\": \"fill\",\n", + " \"paint\": {\"fill-color\": \"#BDD3C7\", \"fill-opacity\": 0.1},\n", + " },\n", + " {\n", + " \"id\": \"buildings\",\n", + " \"source\": \"example_source\",\n", + " \"source-layer\": \"buildings\",\n", + " \"type\": \"fill\",\n", + " \"paint\": {\"fill-color\": \"#FFFFB3\", \"fill-opacity\": 0.5},\n", + " },\n", + " {\n", + " \"id\": \"places\",\n", + " \"source\": \"example_source\",\n", + " \"source-layer\": \"places\",\n", + " \"type\": \"fill\",\n", + " \"paint\": {\"fill-color\": \"#BEBADA\", \"fill-opacity\": 0.5},\n", + " },\n", + " {\n", + " \"id\": \"roads\",\n", + " \"source\": \"example_source\",\n", + " \"source-layer\": \"roads\",\n", + " \"type\": \"line\",\n", + " \"paint\": {\"line-color\": \"#FB8072\"},\n", + " },\n", + " ],\n", + "}\n", + "\n", + "m.add_pmtiles(\n", + " url, name='PMTiles', style=style, overlay=True, show=True, zoom_to_layer=True\n", + ")\n", + "\n", + "legend_dict = {\n", + " 'admins': 'BDD3C7',\n", + " 'buildings': 'FFFFB3',\n", + " 'places': 'BEBADA',\n", + " 'roads': 'FB8072',\n", + "}\n", + "\n", + "m.add_legend(legend_dict=legend_dict)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/fjWMOu3.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Source Cooperative\n", + "\n", + "Visualize the [Google-Microsoft Open Buildings data](https://beta.source.coop/repositories/vida/google-microsoft-open-buildings/description) hosted on Source Cooperative.\n", + "\n", + "Check the metadata of the PMTiles." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://data.source.coop/vida/google-microsoft-open-buildings/pmtiles/go_ms_building_footprints.pmtiles'\n", + "metadata = leafmap.pmtiles_metadata(url)\n", + "print(f\"layer names: {metadata['layer_names']}\")\n", + "print(f\"bounds: {metadata['bounds']}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize the PMTiles." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map(center=[20, 0], zoom=2)\n", + "m.add_basemap('CartoDB.DarkMatter')\n", + "m.add_basemap('Esri.WorldImagery', show=False)\n", + "\n", + "style = {\n", + "    \"version\": 8,\n", + "    \"sources\": {\n", + "        \"example_source\": {\n", + "            \"type\": \"vector\",\n", + "            \"url\": \"pmtiles://\" + url,\n", + "            \"attribution\": 'PMTiles',\n", + "        }\n", + "    },\n", + "    \"layers\": [\n", + "        {\n", + "            \"id\": \"buildings\",\n", + "            \"source\": \"example_source\",\n", + "            \"source-layer\": \"building_footprints\",\n", + "            \"type\": \"fill\",\n", + "            \"paint\": {\"fill-color\": \"#3388ff\", \"fill-opacity\": 0.5},\n", + "        },\n", + "    ],\n", + "}\n", + "\n", + "m.add_pmtiles(\n", + "    url, name='Buildings', style=style, overlay=True, show=True, zoom_to_layer=False\n", + ")\n", + "\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/kT6ng6k.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Local PMTiles\n", + "\n", + "tippecanoe is required to convert vector data to PMTiles. Install it with `conda install -c conda-forge tippecanoe`.\n", + "\n", + "Download the [building footprints](https://github.com/opengeos/open-data/blob/main/datasets/libya/Derna_buildings.geojson) of Derna, Libya." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = 'https://raw.githubusercontent.com/opengeos/open-data/main/datasets/libya/Derna_buildings.geojson'\n", + "leafmap.download_file(url, 'buildings.geojson')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Convert the vector data to PMTiles." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pmtiles = 'buildings.pmtiles'\n", + "leafmap.geojson_to_pmtiles(\n", + "    'buildings.geojson',\n", + "    pmtiles,\n", + "    layer_name='buildings',\n", + "    overwrite=True,\n", + "    quiet=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Start an HTTP server to serve the PMTiles file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "leafmap.start_server(port=8000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "url = f'http://127.0.0.1:8000/{pmtiles}'\n", + "leafmap.pmtiles_metadata(url)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Display the PMTiles on the map."
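+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `style` dictionary passed to `add_pmtiles` in the next cell follows the MapLibre style specification, and each `source-layer` must match one of the layer names reported by `pmtiles_metadata` above. As a small optional sketch (reusing the local `url` defined above), you could look the layer name up programmatically instead of hard-coding `'buildings'`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Optional sketch: derive the source-layer name from the PMTiles metadata\n", + "# instead of hard-coding it (reuses the local `url` defined above).\n", + "layer_name = leafmap.pmtiles_metadata(url)['layer_names'][0]\n", + "layer_name"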
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = leafmap.Map()\n", + "\n", + "style = {\n", + " \"version\": 8,\n", + " \"sources\": {\n", + " \"example_source\": {\n", + " \"type\": \"vector\",\n", + " \"url\": \"pmtiles://\" + url,\n", + " \"attribution\": 'PMTiles',\n", + " }\n", + " },\n", + " \"layers\": [\n", + " {\n", + " \"id\": \"buildings\",\n", + " \"source\": \"example_source\",\n", + " \"source-layer\": \"buildings\",\n", + " \"type\": \"fill\",\n", + " \"paint\": {\"fill-color\": \"#3388ff\", \"fill-opacity\": 0.5},\n", + " },\n", + " ],\n", + "}\n", + "\n", + "m.add_pmtiles(url, name='Buildings', show=True, zoom_to_layer=True, style=style)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![](https://i.imgur.com/lhGri31.png)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "geo", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/index.md b/docs/index.md index 80df477..95bc233 100644 --- a/docs/index.md +++ b/docs/index.md @@ -10,5 +10,6 @@ ## Features +- Visualizing geospatial data, including vector, raster, and LiDAR data - Segmenting remote sensing imagery with the Segment Anything Model - Classifying remote sensing imagery with deep learning models diff --git a/mkdocs.yml b/mkdocs.yml index 4b5a833..95567ba 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -93,6 +93,9 @@ nav: - examples/samgeo/arcgis.ipynb - examples/samgeo/maxar_open_data.ipynb - examples/rastervision/semantic_segmentation.ipynb + - examples/dataviz/raster_viz.ipynb + - examples/dataviz/vector_viz.ipynb + - examples/dataviz/lidar_viz.ipynb - API Reference: - geoai module: geoai.md - common module: common.md