From 2c38a9d7524d5e0a1dd8d3eddb972bb15a8b4c3e Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 21 Dec 2024 13:26:09 +0000 Subject: [PATCH] Deploy preview for PR #42 --- pr-previews/pr-42/404.html | 67 ++ pr-previews/pr-42/contributing/index.html | 69 +- .../time_series_api_limits}/index.html | 862 ++++++++++-------- .../time_series_api_limits.ipynb | 312 +++++++ .../rasterio_backend_example/index.html | 781 +++++----------- .../examples/time_series_example/index.html | 155 +++- .../time_series_example.ipynb | 7 +- .../xarray_backend_example/index.html | 91 +- pr-previews/pr-42/index.html | 67 ++ pr-previews/pr-42/release-notes/index.html | 67 ++ pr-previews/pr-42/search/search_index.json | 2 +- pr-previews/pr-42/sitemap.xml | 16 +- pr-previews/pr-42/sitemap.xml.gz | Bin 282 -> 280 bytes .../time_series_performance_benchmarks.ipynb | 163 ---- 14 files changed, 1527 insertions(+), 1132 deletions(-) rename pr-previews/pr-42/{time_series_performance_benchmarks => deployment/time_series_api_limits}/index.html (93%) create mode 100644 pr-previews/pr-42/deployment/time_series_api_limits/time_series_api_limits.ipynb delete mode 100644 pr-previews/pr-42/time_series_performance_benchmarks/time_series_performance_benchmarks.ipynb diff --git a/pr-previews/pr-42/404.html b/pr-previews/pr-42/404.html index 6f3a70a..393749b 100644 --- a/pr-previews/pr-42/404.html +++ b/pr-previews/pr-42/404.html @@ -399,6 +399,73 @@ + + + + + + + + +
diff --git a/pr-previews/pr-42/search/search_index.json b/pr-previews/pr-42/search/search_index.json
index c635598..8eb86b5 100644
--- a/pr-previews/pr-42/search/search_index.json
+++ b/pr-previews/pr-42/search/search_index.json
@@ -1 +1 @@
-{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":"

    A modern dynamic tile server with a NASA CMR backend built on top of FastAPI and Rasterio/GDAL.

    "},{"location":"#titiler-cmr","title":"titiler-cmr","text":"

    An API for creating image tiles from CMR queries.

    "},{"location":"#features","title":"Features","text":"
    • Render tiles from assets discovered via queries to NASA's CMR
• Uses the earthaccess Python package to query the CMR
    • Built on top of titiler
• Support for multiple projections (see TileMatrixSets) via morecantile.
    • JPEG / JP2 / PNG / WEBP / GTIFF / NumpyTile output format support
    • Automatic OpenAPI documentation (FastAPI builtin)
    • Example of AWS Lambda / ECS deployment (via CDK)
    "},{"location":"#installation","title":"Installation","text":"

To install from source and run for development, install uv, then:

    git clone https://github.com/developmentseed/titiler-cmr.git\ncd titiler-cmr\n\nuv sync --all-extras\n
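To confirm the development install is importable, here is a minimal check (a sketch; it assumes nothing beyond the package name):

```python
# quick sanity check that the development install is importable
import titiler.cmr

print(titiler.cmr.__name__)
```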
    "},{"location":"#authentication-for-data-read-access","title":"Authentication for data read access","text":"

    titiler-cmr can read data either over HTTP (external) or directly from AWS S3 (direct) depending on the app configuration. The behavior of the application is controlled by the S3 authentication settings in settings.py, which you can set either with environment variables (TITILER_CMR_S3_AUTH_ACCESS, TITILER_CMR_S3_AUTH_STRATEGY) or in an environment file (.env).
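For example, a minimal sketch of setting these variables from Python before the app starts; "external" and "direct" are the access modes described on this page, while the strategy value shown is purely illustrative (see settings.py for the real options):

```python
import os

# "direct" = read straight from S3, "external" = read over HTTP (see below)
os.environ["TITILER_CMR_S3_AUTH_ACCESS"] = "direct"
# illustrative value only; settings.py documents the actual strategy options
os.environ["TITILER_CMR_S3_AUTH_STRATEGY"] = "iam"
```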

    "},{"location":"#direct-from-s3","title":"Direct from S3","text":"

    When running in an AWS context (e.g. Lambda), you should configure the application to access the data directly from S3. You can do this in two ways:

    • Configure an AWS IAM role for your runtime environment that has read access to the NASA buckets so that rasterio/GDAL can find the AWS credentials when reading data
• Set the EARTHDATA_USERNAME and EARTHDATA_PASSWORD environment variables so that the earthaccess package can issue temporary AWS credentials (see the sketch below)
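A minimal sketch of the second approach with earthaccess (the DAAC name is an illustrative choice):

```python
import earthaccess

# assumes EARTHDATA_USERNAME / EARTHDATA_PASSWORD are set in the environment
earthaccess.login(strategy="environment")

# request temporary S3 credentials from a DAAC (the name here is illustrative)
creds = earthaccess.get_s3_credentials(daac="PODAAC")
```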

    Note

Direct S3 access will only work if the application is running in the same AWS region where the data are stored!

    "},{"location":"#external-access","title":"External access","text":"

When running outside of an AWS context (e.g. locally), you will need to configure the application to access data over HTTP. You can do this by creating an Earthdata account, configuring your .netrc file with your Earthdata login credentials (which GDAL will find when trying to access data over the network), and setting a few environment variables:

    # environment variables for GDAL to read data from NASA over HTTP\nexport GDAL_DISABLE_READDIR_ON_OPEN=YES\nexport CPL_VSIL_CURL_USE_HEAD=FALSE\nexport GDAL_HTTP_COOKIEFILE=/tmp/cookies.txt\nexport GDAL_HTTP_COOKIEJAR=/tmp/cookies.txt\nexport EARTHDATA_USERNAME={your earthdata username}\nexport EARTHDATA_PASSWORD={your earthdata password}\n\n# write your .netrc file to the home directory\necho \"machine urs.earthdata.nasa.gov login ${EARTHDATA_USERNAME} password ${EARTHDATA_PASSWORD}\" > ~/.netrc\n
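Alternatively, earthaccess can do the same .netrc setup from Python; a sketch:

```python
import earthaccess

# reads EARTHDATA_USERNAME / EARTHDATA_PASSWORD from the environment
# and persists the credentials to ~/.netrc for GDAL to find
earthaccess.login(strategy="environment", persist=True)
```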

    Note

    See NASA's docs for details

    "},{"location":"#docker-deployment","title":"Docker deployment","text":"

You can run the application in a Docker container using the docker-compose.yml file. The container is configured to read the EARTHDATA_USERNAME and EARTHDATA_PASSWORD environment variables, so make sure to set those before starting the Docker network.

    docker compose up --build \n

    The application will be available at this address: http://localhost:8081/api.html
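A quick smoke test against that address (a sketch using httpx):

```python
import httpx

# the API docs page should respond once the container is up
resp = httpx.get("http://localhost:8081/api.html", timeout=10)
assert resp.status_code == 200
```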

    "},{"location":"#local-deployment","title":"Local deployment","text":"

To run the application directly in your local environment, configure it to access data over HTTP, then run it with uvicorn:

    TITILER_CMR_S3_AUTH_ACCESS=external uvicorn titiler.cmr.main:app --reload\n

    The application will be available at this address: http://localhost:8000/api.html

    "},{"location":"#contribution-development","title":"Contribution & Development","text":"

    See CONTRIBUTING.md

    "},{"location":"#license","title":"License","text":"

    See LICENSE

    "},{"location":"#authors","title":"Authors","text":"

    Created by Development Seed

    See contributors for a listing of individual contributors.

    "},{"location":"#changes","title":"Changes","text":"

    See CHANGES.md.

    "},{"location":"contributing/","title":"Development - Contributing","text":"

    Issues and pull requests are more than welcome: github.com/developmentseed/titiler-cmr/issues

    dev install

This project uses uv to manage the Python environment and dependencies. To install the package for development, follow these steps:

    # install uv\n\n# unix\ncurl -LsSf https://astral.sh/uv/install.sh | sh\n\n# or windows\n# powershell -c \"irm https://astral.sh/uv/install.ps1 | iex\"\n\ngit clone https://github.com/developmentseed/titiler-cmr.git\ncd titiler-cmr\nuv sync --all-extras\n
    "},{"location":"contributing/#linting","title":"Linting","text":"

This repo is set up to use pre-commit to run isort, flake8, pydocstyle, black (\"uncompromising Python code formatter\"), and mypy when committing new code.

uv run pre-commit install\n
    "},{"location":"contributing/#testing","title":"Testing","text":"

    You can then run the tests with the following command:

    uv run pytest\n

The tests use vcrpy (https://vcrpy.readthedocs.io/en/latest/) to mock API calls with \"pre-recorded\" API responses. When adding new tests that incur actual network traffic, use the @pytest.mark.vcr decorator to indicate that vcrpy should be used. Record the new responses and commit them to the repository.

    uv run pytest -v -s --record-mode new_episodes\n
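For example, a hypothetical test using the marker (the route and the app_client fixture are illustrative, and @pytest.mark.vcr assumes a plugin such as pytest-recording is installed):

```python
import pytest


@pytest.mark.vcr  # record/replay this test's HTTP traffic with vcrpy
def test_landing_page(app_client):  # app_client is a hypothetical fixture
    response = app_client.get("/")
    assert response.status_code == 200
```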
    "},{"location":"contributing/#documentation","title":"Documentation","text":"

The documentation is generated using mkdocs and gets built and deployed to GitHub Pages when new tags are released and on pushes to the develop branch.

    To preview the documentation in your browser you can run:

    uv run mkdocs serve -o\n
    "},{"location":"release-notes/","title":"Changelog","text":"

    All notable changes to this project will be documented in this file.

    The format is based on Keep a Changelog.

    "},{"location":"release-notes/#unreleased","title":"Unreleased","text":""},{"location":"release-notes/#added","title":"Added","text":"
    • Initial implementation of STAC metadata structure
    "},{"location":"release-notes/#deprecated","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#012","title":"0.1.2","text":""},{"location":"release-notes/#added_1","title":"Added","text":"
    • Support for /timeseries endpoints (#33)
    "},{"location":"release-notes/#deprecated_1","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_1","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_1","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#011","title":"0.1.1","text":""},{"location":"release-notes/#added_2","title":"Added","text":"
    • Add /bbox, /feature, and /statistics endpoints (#30)
    "},{"location":"release-notes/#deprecated_2","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_2","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_2","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#011_1","title":"0.1.1","text":""},{"location":"release-notes/#added_3","title":"Added","text":"

• Ability to run locally with Earthdata authentication (#28)

    "},{"location":"release-notes/#deprecated_3","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_3","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_3","title":"Fixed","text":"
    • Nothing.
    "},{"location":"time_series_performance_benchmarks/","title":"Time series performance benchmarks","text":"In\u00a0[1]: Copied!
    import benchmark_analysis as ba\n
    \n---------------------------------------------------------------------------\nModuleNotFoundError                       Traceback (most recent call last)\nCell In[1], line 1\n----> 1 import benchmark_analysis as ba\n\nModuleNotFoundError: No module named 'benchmark_analysis'
    In\u00a0[2]: Copied!
    for dataset, df in ba.dfs[\"statistics\"].items():\n    fig = ba.plot_error_rate_heatmap(\n        df=df,\n        x=\"num_timepoints\",\n        y=\"bbox_dims\",\n        z=\"error_rate\",\n        labels={\"x\": \"number of time points\", \"y\": \"bbox dimensions\", \"color\": \"error rate\"},\n        title=f\"{dataset}: error rate by bbox size and number of time points\",\n    )\n    fig.show()\n
    \n---------------------------------------------------------------------------\nNameError                                 Traceback (most recent call last)\nCell In[2], line 1\n----> 1 for dataset, df in ba.dfs[\"statistics\"].items():\n      2     fig = ba.plot_error_rate_heatmap(\n      3         df=df,\n      4         x=\"num_timepoints\",\n   (...)\n      8         title=f\"{dataset}: error rate by bbox size and number of time points\",\n      9     )\n     10     fig.show()\n\nNameError: name 'ba' is not defined

    In general, the size of the area you want to analyze will have minimal impact on the runtime! This is because titiler.xarray has to read the entire granule into memory before subsetting, so reducing the size of the AOI does not reduce the overall footprint of the computation.
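Conceptually, the read pattern looks like this sketch (the filename and coordinate names are illustrative): the granule is opened and decoded in full, and the AOI subset is applied afterwards, so shrinking the bounding box does not shrink the initial read.

```python
import xarray as xr

# the full granule is fetched and decoded before any spatial subsetting
ds = xr.open_dataset("granule.nc")

# subsetting to the AOI happens only after the read
subset = ds["sea_ice_fraction"].sel(lat=slice(60, 80), lon=slice(-10, 10))
```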

    In\u00a0[3]: Copied!
    for dataset, df in ba.dfs[\"statistics\"].items():\n    ba.plot_line_with_error_bars(\n        df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n        color=\"bbox_dims\",\n        title=f\"{dataset}: statistics runtime\",\n    ).show()\n
    \n---------------------------------------------------------------------------\nNameError                                 Traceback (most recent call last)\nCell In[3], line 1\n----> 1 for dataset, df in ba.dfs[\"statistics\"].items():\n      2     ba.plot_line_with_error_bars(\n      3         df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n      4         color=\"bbox_dims\",\n      5         title=f\"{dataset}: statistics runtime\",\n      6     ).show()\n\nNameError: name 'ba' is not defined
    In\u00a0[4]: Copied!
    for dataset, df in ba.dfs[\"bbox\"].items():\n    for img_size in sorted(df[\"img_size\"].unique()):\n        img_size_df = df[df[\"img_size\"] == img_size]\n        img_dims = img_size_df[\"img_dims\"].unique()[0]\n        ba.plot_error_rate_heatmap(\n            df=img_size_df,\n            x=\"num_timepoints\",\n            y=\"bbox_dims\",\n            z=\"error_rate\",\n            labels={\"x\": \"number of time points\", \"y\": \"bbox dimensions\", \"color\": \"error rate\"},\n            title=f\"{dataset}: image size {img_dims}\",\n        ).show()\n
    \n---------------------------------------------------------------------------\nNameError                                 Traceback (most recent call last)\nCell In[4], line 1\n----> 1 for dataset, df in ba.dfs[\"bbox\"].items():\n      2     for img_size in sorted(df[\"img_size\"].unique()):\n      3         img_size_df = df[df[\"img_size\"] == img_size]\n\nNameError: name 'ba' is not defined

Larger areas of interest increase the response time, especially for requests for higher-resolution images.

    In\u00a0[5]: Copied!
    for dataset, df in ba.dfs[\"bbox\"].items():\n    ba.plot_line_with_error_bars(\n        df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n        color=\"bbox_dims\",\n        facet_row=\"img_dims\",\n        title=f\"{dataset}: runtime by bbox size and image dimensions\"\n    ).show()\n
    \n---------------------------------------------------------------------------\nNameError                                 Traceback (most recent call last)\nCell In[5], line 1\n----> 1 for dataset, df in ba.dfs[\"bbox\"].items():\n      2     ba.plot_line_with_error_bars(\n      3         df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n      4         color=\"bbox_dims\",\n      5         facet_row=\"img_dims\",\n      6         title=f\"{dataset}: runtime by bbox size and image dimensions\"\n      7     ).show()\n\nNameError: name 'ba' is not defined
    "},{"location":"time_series_performance_benchmarks/#time-series-performance-benchmarks","title":"Time series performance benchmarks\u00b6","text":"

The titiler-cmr API is deployed as a Lambda function in the SMCE VEDA AWS account. For small time series requests (<500 time points) you can expect a response from any of the endpoints within ~20 seconds. For larger time series requests, you run the risk of bumping into Lambda concurrency or timeout limits. This report shows some results from the test_timeseries_benchmarks.py script, which sends many requests while varying the time series length and several other parameters that affect runtime.
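For example, a small request sketch against the deployed endpoint; the route and parameter names are assumptions modeled on the examples elsewhere in these docs, so check /api.html for the actual /timeseries API:

```python
import httpx

# illustrative request; verify the real /timeseries routes at {endpoint}/api.html
resp = httpx.get(
    "https://dev-titiler-cmr.delta-backend.com/timeseries/statistics",
    params={
        "concept_id": "C1996881146-POCLOUD",  # GAMSSA SST collection
        "backend": "xarray",
        "variable": "sea_ice_fraction",
        "datetime": "2024-10-01T00:00:00Z/2024-10-10T23:59:59Z",
    },
    timeout=120,  # small requests should come back within ~20 seconds
)
```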

    "},{"location":"time_series_performance_benchmarks/#xarray-backend","title":"xarray backend\u00b6","text":"

The following tests use these datasets to evaluate the limits of the /timeseries endpoints for the xarray backend:

    • GAMSSA 28km SST: a daily 0.25 degree (~28 km) resolution dataset with sea surface temperature and sea ice fraction variables
    • MUR SST: a daily 0.01 degree (~1km) resolution dataset with sea surface temperature variables
    "},{"location":"time_series_performance_benchmarks/#statistics","title":"statistics\u00b6","text":"

Under the current deployment configuration, the statistics endpoint can process time series requests with up to ~1000 points. Requests that involve more than 1000 points are likely to fail.
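Because the limit is on the number of time points, you can estimate the point count of a request before sending it; a sketch assuming a daily step:

```python
from datetime import datetime, timedelta

start, end = datetime(2023, 1, 1), datetime(2024, 1, 1)
step = timedelta(days=1)  # one time point per day

num_points = (end - start) // step + 1
assert num_points <= 1000, "requests above ~1000 points are likely to fail"
```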

    "},{"location":"time_series_performance_benchmarks/#bbox-animations","title":"bbox (animations)\u00b6","text":"

Under the current deployment configuration, the bbox endpoint can reliably process time series requests with up to ~500 points. Requests that involve more than 500 points may fail if the area of interest is very large.
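One way to stay under these limits is to split a long time series into several smaller requests and stitch the results together afterwards; a sketch:

```python
from datetime import datetime, timedelta


def split_datetime_range(start, end, step, max_points=500):
    """Yield (start, end) windows spanning at most max_points steps each."""
    window = step * max_points
    while start < end:
        yield start, min(start + window, end)
        start += window


# three years of daily points becomes three requests of <= 500 points each
for chunk_start, chunk_end in split_datetime_range(
    datetime(2021, 1, 1), datetime(2024, 1, 1), timedelta(days=1)
):
    print(f"{chunk_start.isoformat()}/{chunk_end.isoformat()}")
```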

    "},{"location":"examples/rasterio_backend_example/","title":"rasterio backend example: HLS","text":"In\u00a0[1]: Copied!
    import earthaccess\nimport geojson_pydantic\nimport httpx\nimport json\n\n\nfrom folium import GeoJson, Map, TileLayer\n
In\u00a0[2]: Copied!
    # titiler_endpoint = \"http://localhost:8081\"  # docker network endpoint\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"  # deployed endpoint\n
In\u00a0[3]: Copied!
    datasets = earthaccess.search_datasets(doi=\"10.5067/HLS/HLSL30.002\")\nds = datasets[0]\n\nconcept_id = ds[\"meta\"][\"concept-id\"]\nprint(\"Concept-Id: \", concept_id)\nprint(\"Abstract: \", ds[\"umm\"][\"Abstract\"])\n
    Concept-Id:  C2021957657-LPCLOUD\nAbstract:  The Harmonized Landsat Sentinel-2 (HLS) project provides consistent surface reflectance (SR) and top of atmosphere (TOA) brightness data from a virtual constellation of satellite sensors. The Operational Land Imager (OLI) is housed aboard the joint NASA/USGS Landsat 8 and Landsat 9 satellites, while the Multi-Spectral Instrument (MSI) is mounted aboard Europe\u2019s Copernicus Sentinel-2A and Sentinel-2B satellites. The combined measurement enables global observations of the land every 2\u20133 days at 30-meter (m) spatial resolution. The HLS project uses a set of algorithms to obtain seamless products from OLI and MSI that include atmospheric correction, cloud and cloud-shadow masking, spatial co-registration and common gridding, illumination and view angle normalization, and spectral bandpass adjustment.\r\n\r\nThe HLSL30 product provides 30-m Nadir Bidirectional Reflectance Distribution Function (BRDF)-Adjusted Reflectance (NBAR) and is derived from Landsat 8/9 OLI data products. The HLSS30 and HLSL30 products are gridded to the same resolution and Military Grid Reference System (MGRS)(https://hls.gsfc.nasa.gov/products-description/tiling-system/) tiling system, and thus are \u201cstackable\u201d for time series analysis.\r\n\r\nThe HLSL30 product is provided in Cloud Optimized GeoTIFF (COG) format, and each band is distributed as a separate file. There are 11 bands included in the HLSL30 product along with one quality assessment (QA) band and four angle bands. See the User Guide for a more detailed description of the individual bands provided in the HLSL30 product.\n
    In\u00a0[4]: Copied!
    import earthaccess\nimport morecantile\n\ntms = morecantile.tms.get(\"WebMercatorQuad\")\n\nbounds = tms.bounds(62, 44, 7)\nxmin, ymin, xmax, ymax = (round(n, 8) for n in bounds)\n\nresults = earthaccess.search_data(\n    bounding_box=(xmin, ymin, xmax, ymax),\n    count=1,\n    concept_id=concept_id,\n    temporal=(\"2024-02-11\", \"2024-02-13\"),\n)\nprint(\"Granules:\")\nprint(results)\nprint()\nprint(\"Example of COGs URL: \")\nfor link in results[0].data_links(access=\"direct\"):\n    print(link)\n
    Granules:\n[Collection: {'EntryTitle': 'HLS Landsat Operational Land Imager Surface Reflectance and TOA Brightness Daily Global 30m v2.0'}\nSpatial coverage: {'HorizontalSpatialDomain': {'Geometry': {'GPolygons': [{'Boundary': {'Points': [{'Longitude': -2.64743819, 'Latitude': 48.6644919}, {'Longitude': -2.21521695, 'Latitude': 49.65006328}, {'Longitude': -3.00027708, 'Latitude': 49.65272281}, {'Longitude': -3.00027162, 'Latitude': 48.66503141}, {'Longitude': -2.64743819, 'Latitude': 48.6644919}]}}]}}}\nTemporal coverage: {'RangeDateTime': {'BeginningDateTime': '2024-02-12T11:05:26.302Z', 'EndingDateTime': '2024-02-12T11:05:50.181Z'}}\nSize(MB): 56.62721920013428\nData: ['https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SAA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SZA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.Fmask.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VAA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VZA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif']]\n\nExample of COGs URL: 
\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SAA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SZA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.Fmask.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VAA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VZA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif\n
    In\u00a0[5]: Copied!
    from titiler.cmr.backend import CMRBackend\nfrom titiler.cmr.reader import MultiFilesBandsReader\n\nwith CMRBackend(reader=MultiFilesBandsReader) as backend:\n    assets = backend.assets_for_tile(\n        x=62,\n        y=44,\n        z=7,\n        bands_regex=\"B[0-9][0-9]\",\n        concept_id=concept_id,\n        temporal=(\"2024-02-11\", \"2024-02-13\")\n    )\n\nprint(assets[0])\n
    {'url': {'B02': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif', 'B06': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif', 'B01': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif', 'B07': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif', 'B03': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif', 'B04': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif', 'B05': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif', 'B11': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif', 'B10': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif', 'B09': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif'}, 'provider': 'LPCLOUD'}\n
    In\u00a0[6]: Copied!
    from IPython.display import IFrame\nIFrame(f\"{titiler_endpoint}/api.html\", 900,500)\n
Out[6]: In\u00a0[7]: Copied!
r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-10-01T00:00:00Z/2024-10-10T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as one file per band\n        # so we need to pass a `bands_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG datasets\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # True Color Image B04,B03,B02\n        (\"bands\", \"B04\"),\n        (\"bands\", \"B03\"),\n        (\"bands\", \"B02\"),\n        # The data is of type Uint16 so we need to apply some\n        # rescaling/color_formula in order to create PNGs\n        (\"color_formula\", \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\"),\n        # We need to set min/max zoom because we don't want to use low zoom levels (e.g. 0)\n        # which would result in needlessly large queries\n        (\"minzoom\", 8),\n        (\"maxzoom\", 13),\n    )\n).json()\n\nprint(r)\n
\n---------------------------------------------------------------------------\nReadTimeout                               Traceback (most recent call last)\nCell In[7], line 1\n----> 1 r = httpx.get(\n      2     f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n   (...)\n     26 ).json()\n\nReadTimeout: The read operation timed out
    In\u00a0[8]: Copied!
    bounds = r[\"bounds\"]\nm = Map(\n    location=(47.590266824611675, -91.03729840730689),\n    zoom_start=r[\"maxzoom\"] - 2\n)\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\nm\n
    \n---------------------------------------------------------------------------\nNameError                                 Traceback (most recent call last)\nCell In[8], line 1\n----> 1 bounds = r[\"bounds\"]\n      2 m = Map(\n      3     location=(47.590266824611675, -91.03729840730689),\n      4     zoom_start=r[\"maxzoom\"] - 2\n      5 )\n      7 TileLayer(\n      8     tiles=r[\"tiles\"][0],\n      9     opacity=1,\n     10     attr=\"NASA\",\n     11 ).add_to(m)\n\nNameError: name 'r' is not defined
    In\u00a0[9]: Copied!
r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-06-20T00:00:00Z/2024-06-27T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as one file per band\n        # so we need to pass a `bands_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG datasets\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # NDVI\n        (\"expression\", \"(B05-B04)/(B05+B04)\"),\n        # Need red (B04) and nir (B05) for NDVI\n        (\"bands\", \"B05\"),\n        (\"bands\", \"B04\"),\n        # The data is of type Uint16 so we need to apply some\n        # rescaling/color_formula in order to create PNGs\n        (\"colormap_name\", \"viridis\"),\n        (\"rescale\", \"-1,1\"),\n        # We need to set min/max zoom because we don't want to use low zoom levels (e.g. 0)\n        # which would result in needlessly large queries\n        (\"minzoom\", 8),\n        (\"maxzoom\", 13),\n    )\n).json()\n\nm = Map(\n    location=(47.9221313337365, -91.65432884883238),\n    zoom_start=r[\"maxzoom\"] - 1\n)\n\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\n\nm\n
\n---------------------------------------------------------------------------\nReadTimeout                               Traceback (most recent call last)\nCell In[9], line 1\n----> 1 r = httpx.get(\n      2     f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n   (...)\n     28 ).json()\n\nReadTimeout: The read operation timed out
    In\u00a0[10]: Copied!
    geojson = {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"properties\": {},\n      \"geometry\": {\n        \"coordinates\": [\n          [\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ]\n          ]\n        ],\n        \"type\": \"Polygon\"\n      }\n    }\n  ]\n}\n
    geojson = { \"type\": \"FeatureCollection\", \"features\": [ { \"type\": \"Feature\", \"properties\": {}, \"geometry\": { \"coordinates\": [ [ [ -91.65432884883238, 47.9221313337365 ], [ -91.65432884883238, 47.86503396133904 ], [ -91.53842043960762, 47.86503396133904 ], [ -91.53842043960762, 47.9221313337365 ], [ -91.65432884883238, 47.9221313337365 ] ] ], \"type\": \"Polygon\" } } ] } In\u00a0[11]: Copied!
    import json\n\nr = httpx.post(\n    f\"{titiler_endpoint}/statistics\",\n    params=(\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-07-01T00:00:00Z/2024-07-10T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as File per Band\n        # so we need to pass a `band_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # NDVI\n        (\"expression\", \"(B05-B04)/(B05+B04)\"),\n        # Need red (B04) and nir (B05) for NDVI\n        (\"bands\", \"B05\"),\n        (\"bands\", \"B04\"),\n    ),\n    json=geojson,\n    timeout=30,\n).json()\n\nprint(json.dumps(r, indent=2))\n
    import json r = httpx.post( f\"{titiler_endpoint}/statistics\", params=( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", \"2024-07-01T00:00:00Z/2024-07-10T23:59:59Z\"), # We know that the HLS collection dataset is stored as File per Band # so we need to pass a `band_regex` option to assign `bands` to each URL (\"bands_regex\", \"B[0-9][0-9]\"), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"rasterio\"), # NDVI (\"expression\", \"(B05-B04)/(B05+B04)\"), # Need red (B04) and nir (B05) for NDVI (\"bands\", \"B05\"), (\"bands\", \"B04\"), ), json=geojson, timeout=30, ).json() print(json.dumps(r, indent=2))
    {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [\n          [\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ]\n          ]\n        ]\n      },\n      \"properties\": {\n        \"statistics\": {\n          \"(B05-B04)/(B05+B04)\": {\n            \"min\": -75.4,\n            \"max\": 26.6,\n            \"mean\": 0.5238783261952482,\n            \"count\": 57304.8046875,\n            \"sum\": 30020.745162633113,\n            \"std\": 0.6052277569586431,\n            \"median\": 0.6041512231282431,\n            \"majority\": 0.75,\n            \"minority\": -75.4,\n            \"unique\": 47613.0,\n            \"histogram\": [\n              [\n                1,\n                0,\n                2,\n                1,\n                0,\n                0,\n                16,\n                57764,\n                12,\n                2\n              ],\n              [\n                -75.4,\n                -65.2,\n                -55.00000000000001,\n                -44.80000000000001,\n                -34.60000000000001,\n                -24.400000000000006,\n                -14.20000000000001,\n                -4.000000000000014,\n                6.199999999999989,\n                16.39999999999999,\n                26.6\n              ]\n            ],\n            \"valid_percent\": 100.0,\n            \"masked_pixels\": 0.0,\n            \"valid_pixels\": 57798.0,\n            \"percentile_2\": 0.04382638010956595,\n            \"percentile_98\": 0.8685282140779523\n          }\n        }\n      }\n    }\n  ]\n}\n
    In\u00a0[\u00a0]: Copied!
    \n
    "},{"location":"examples/rasterio_backend_example/#rasterio-backend-example-hls","title":"rasterio backend example: HLS\u00b6","text":"

    The Harmonized Landsat Sentinel-2 dataset is available in two collections in CMR. This example will use data from the HLSL30.002 (Landsat) dataset.

    "},{"location":"examples/rasterio_backend_example/#requirements","title":"Requirements\u00b6","text":"

To run some of the cells in this notebook you will need to install a few packages:

    • earthaccess
    • folium
    • httpx

    !pip install folium httpx earthaccess

    "},{"location":"examples/rasterio_backend_example/#identify-the-dataset","title":"Identify the dataset\u00b6","text":"

    You can find the HLSL30.002 dataset using the earthaccess.search_datasets function.
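For reference, a minimal sketch of that search (the short_name value here is an assumption based on the HLSL30.002 collection name):

import earthaccess

# short_name="HLSL30" is an assumption based on the HLSL30.002 collection name
datasets = earthaccess.search_datasets(short_name="HLSL30")
for ds in datasets:
    print(ds["meta"]["concept-id"], ds["umm"]["ShortName"])
# the cloud-hosted collection (C2021957657-LPCLOUD) is the one used in this example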

    "},{"location":"examples/rasterio_backend_example/#examine-a-granule","title":"Examine a granule\u00b6","text":"

    Each granule contains the data for a single point in time for an MGRS tile.

    "},{"location":"examples/rasterio_backend_example/#demonstrate-assets_for_tile-method","title":"Demonstrate assets_for_tile method\u00b6","text":"

While rendering xyz tile images, titiler-cmr searches for assets using the assets_for_tile method, which converts the xyz tile extent into a bounding box.
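As an illustration of that conversion, morecantile (the library titiler uses for tile matrix set math) can compute the bounding box for any xyz tile; the tile indices below are arbitrary example values:

import morecantile

tms = morecantile.tms.get("WebMercatorQuad")

# arbitrary xyz tile indices for illustration
tile = morecantile.Tile(x=123, y=45, z=8)

# bounding box (left, bottom, right, top) that would be used to search CMR
# for granules intersecting this tile
print(tms.bounds(tile))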

    "},{"location":"examples/rasterio_backend_example/#titilercmr-api-documentation","title":"titiler.cmr API documentation\u00b6","text":""},{"location":"examples/rasterio_backend_example/#display-tiles-in-an-interactive-map","title":"Display tiles in an interactive map\u00b6","text":"

    The /tilejson.json endpoint will provide a parameterized xyz tile URL that can be added to an interactive map.
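A minimal sketch of that workflow, assuming the dev deployment endpoint used throughout these docs, the HLSL30 concept id from the time series example, and an assumed rescale range for a single uint16 band:

import httpx
from folium import Map, TileLayer

titiler_endpoint = "https://dev-titiler-cmr.delta-backend.com"

r = httpx.get(
    f"{titiler_endpoint}/WebMercatorQuad/tilejson.json",
    params=(
        ("concept_id", "C2021957657-LPCLOUD"),
        ("datetime", "2024-06-20T00:00:00Z/2024-06-27T23:59:59Z"),
        ("backend", "rasterio"),
        ("bands_regex", "B[0-9][0-9]"),
        ("bands", "B04"),
        ("rescale", "0,2000"),  # assumed display range for a uint16 band
        ("minzoom", 8),
        ("maxzoom", 13),
    ),
    timeout=60,
).json()

m = Map(location=(47.9, -91.65), zoom_start=r["maxzoom"] - 1)
TileLayer(tiles=r["tiles"][0], opacity=1, attr="NASA").add_to(m)
m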

    "},{"location":"examples/rasterio_backend_example/#render-ndvi-using-the-expression-parameter","title":"Render NDVI using the expression parameter\u00b6","text":"

The expression parameter can be used to render images from a band arithmetic expression that combines the individual bands.
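For reference, the query parameters that drive an NDVI rendering look like this (mirroring the request earlier in this example; repeated ("bands", ...) entries are how multiple bands are passed):

params = (
    ("bands_regex", "B[0-9][0-9]"),         # map band names onto the per-band asset URLs
    ("bands", "B05"),                       # near-infrared
    ("bands", "B04"),                       # red
    ("expression", "(B05-B04)/(B05+B04)"),  # NDVI
    ("rescale", "-1,1"),                    # NDVI range, rescaled for PNG output
    ("colormap_name", "viridis"),
)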

    "},{"location":"examples/rasterio_backend_example/#geojson-statistics","title":"GeoJSON Statistics\u00b6","text":"

    The /statistics endpoint can be used to get summary statistics for a geojson Feature or FeatureCollection.

    "},{"location":"examples/time_series_example/","title":"time series API","text":"In\u00a0[1]: Copied!
    from IPython.display import IFrame\n\n# if running titiler-cmr in the docker network\n# titiler_endpoint = \"http://localhost:8081\"\n\n# titiler-cmr-staging deployment\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"\n\nIFrame(f\"{titiler_endpoint}/api.html#Timeseries\", 900, 500)\n
    from IPython.display import IFrame # if running titiler-cmr in the docker network # titiler_endpoint = \"http://localhost:8081\" # titiler-cmr-staging deployment titiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\" IFrame(f\"{titiler_endpoint}/api.html#Timeseries\", 900, 500) Out[1]: In\u00a0[2]: Copied!
    import json\nfrom datetime import datetime\n\nimport httpx\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom folium import LayerControl, Map, TileLayer\nfrom geojson_pydantic import Feature, Polygon\nfrom IPython.display import Image, display\n
    import json from datetime import datetime import httpx import matplotlib.pyplot as plt import numpy as np from folium import LayerControl, Map, TileLayer from geojson_pydantic import Feature, Polygon from IPython.display import Image, display In\u00a0[3]: Copied!
    concept_id = \"C2036881735-POCLOUD\"\n
    concept_id = \"C2036881735-POCLOUD\"

    The /timeseries GET endpoint is useful for demonstrating how the timeseries family of endpoints constructs sub-requests. It returns the list of titiler.cmr query parameters (datetime and concept_id) that will be used to generate the timeseries results.

    In\u00a0[4]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-05T00:00:01Z\",\n    },\n    timeout=None,\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-05T00:00:01Z\", }, timeout=None, ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-02T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-03T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-04T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-05T12:00:00+00:00\"\n  }\n]\n
    In\u00a0[5]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"point\",\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\", \"step\": \"P1W\", \"temporal_mode\": \"point\", } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-15T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-22T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-29T00:00:01+00:00\"\n  }\n]\n
    In\u00a0[6]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"interval\",\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\", \"step\": \"P1W\", \"temporal_mode\": \"interval\", } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00/2024-10-08T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T00:00:01+00:00/2024-10-15T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-15T00:00:01+00:00/2024-10-22T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-22T00:00:01+00:00/2024-10-29T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-29T00:00:01+00:00/2024-10-30T00:00:01+00:00\"\n  }\n]\n
    In\u00a0[7]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \",\".join(\n            [\"2024-10-01T00:00:01Z\", \"2024-10-07T00:00:01Z/2024-10-09T23:59:59Z\"]\n        ),\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \",\".join( [\"2024-10-01T00:00:01Z\", \"2024-10-07T00:00:01Z/2024-10-09T23:59:59Z\"] ), } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-07T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-09T12:00:00+00:00\"\n  }\n]\n
    In\u00a0[8]: Copied!
    minx, miny, maxx, maxy = -180, -90, 180, 90\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}.gif\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P2W\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"colormap_name\": \"thermal\",\n        \"rescale\": [[273, 315]],\n    },\n    timeout=None,\n)\ndisplay(Image(request.content))\n
    minx, miny, maxx, maxy = -180, -90, 180, 90 request = httpx.get( f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}.gif\", params={ \"concept_id\": concept_id, \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P2W\", \"temporal_mode\": \"point\", \"variable\": \"analysed_sst\", \"backend\": \"xarray\", \"colormap_name\": \"thermal\", \"rescale\": [[273, 315]], }, timeout=None, ) display(Image(request.content)) In\u00a0[9]: Copied!
    minx, miny, maxx, maxy = -91.464,47.353,-90.466,47.974\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}/512x512.gif\",\n    params={\n        \"concept_id\": \"C2021957657-LPCLOUD\",\n        \"datetime\": \"2024-01-01T00:00:00Z/2024-11-30T00:00:00Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"interval\",\n        \"backend\": \"rasterio\",\n        \"bands_regex\":  \"B[0-9][0-9]\",\n        \"bands\": [\"B04\", \"B03\", \"B02\"],\n        \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\",\n        \"fps\": 5,\n    },\n    timeout=None,\n)\ndisplay(Image(request.content))\n
    minx, miny, maxx, maxy = -91.464,47.353,-90.466,47.974 request = httpx.get( f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}/512x512.gif\", params={ \"concept_id\": \"C2021957657-LPCLOUD\", \"datetime\": \"2024-01-01T00:00:00Z/2024-11-30T00:00:00Z\", \"step\": \"P1W\", \"temporal_mode\": \"interval\", \"backend\": \"rasterio\", \"bands_regex\": \"B[0-9][0-9]\", \"bands\": [\"B04\", \"B03\", \"B02\"], \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\", \"fps\": 5, }, timeout=None, ) display(Image(request.content)) In\u00a0[10]: Copied!
    %%time\nminx, miny, maxx, maxy = -98.676, 18.857, -81.623, 31.097\ngeojson = Feature(\n    type=\"Feature\",\n    geometry=Polygon.from_bounds(minx, miny, maxx, maxy),\n    properties={},\n)\nrequest = httpx.post(\n    f\"{titiler_endpoint}/timeseries/statistics\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2022-02-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P1D\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n    },\n    json=geojson.model_dump(exclude_none=True),\n    timeout=None,\n)\nrequest.raise_for_status()\nresponse = request.json()\n
    %%time minx, miny, maxx, maxy = -98.676, 18.857, -81.623, 31.097 geojson = Feature( type=\"Feature\", geometry=Polygon.from_bounds(minx, miny, maxx, maxy), properties={}, ) request = httpx.post( f\"{titiler_endpoint}/timeseries/statistics\", params={ \"concept_id\": concept_id, \"datetime\": \"2022-02-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P1D\", \"temporal_mode\": \"point\", \"variable\": \"analysed_sst\", \"backend\": \"xarray\", }, json=geojson.model_dump(exclude_none=True), timeout=None, ) request.raise_for_status() response = request.json()
    CPU times: user 43.6 ms, sys: 4.7 ms, total: 48.3 ms\nWall time: 28.1 s\n

    The /timeseries/statistics endpoint returns the GeoJSON with statistics for each step in the time series embedded in the properties.

    In\u00a0[11]: Copied!
    stats = response[\"properties\"][\"statistics\"]\nprint(len(stats))\n\nstats_preview = {timestamp: sst_stats for i, (timestamp, sst_stats) in enumerate(stats.items()) if i < 2}\nprint(json.dumps(stats_preview, indent=2))\n
    stats = response[\"properties\"][\"statistics\"] print(len(stats)) stats_preview = {timestamp: sst_stats for i, (timestamp, sst_stats) in enumerate(stats.items()) if i < 2} print(json.dumps(stats_preview, indent=2))
    1001\n{\n  \"2022-02-01T00:00:01+00:00\": {\n    \"analysed_sst\": {\n      \"min\": 285.27000000000004,\n      \"max\": 300.34000000000003,\n      \"mean\": 296.3800266967469,\n      \"count\": 2337.9599609375,\n      \"sum\": 692924.6356385816,\n      \"std\": 2.701563618833078,\n      \"median\": 296.83000000000004,\n      \"majority\": 300.16,\n      \"minority\": 285.27000000000004,\n      \"unique\": 819.0,\n      \"histogram\": [\n        [\n          14,\n          31,\n          40,\n          62,\n          88,\n          154,\n          321,\n          853,\n          378,\n          422\n        ],\n        [\n          285.27000000000004,\n          286.77700000000004,\n          288.28400000000005,\n          289.79100000000005,\n          291.29800000000006,\n          292.80500000000006,\n          294.312,\n          295.819,\n          297.326,\n          298.833,\n          300.34000000000003\n        ]\n      ],\n      \"valid_percent\": 68.49,\n      \"masked_pixels\": 1087.0,\n      \"valid_pixels\": 2363.0,\n      \"percentile_2\": 288.46000000000004,\n      \"percentile_98\": 300.20000000000005\n    }\n  },\n  \"2022-02-02T00:00:01+00:00\": {\n    \"analysed_sst\": {\n      \"min\": 285.45000000000005,\n      \"max\": 300.36,\n      \"mean\": 296.3582956145494,\n      \"count\": 2337.9599609375,\n      \"sum\": 692873.8292384959,\n      \"std\": 2.658495800828904,\n      \"median\": 296.79,\n      \"majority\": 296.59000000000003,\n      \"minority\": 285.45000000000005,\n      \"unique\": 827.0,\n      \"histogram\": [\n        [\n          14,\n          27,\n          51,\n          56,\n          90,\n          157,\n          332,\n          899,\n          329,\n          408\n        ],\n        [\n          285.45000000000005,\n          286.94100000000003,\n          288.432,\n          289.92300000000006,\n          291.41400000000004,\n          292.90500000000003,\n          294.396,\n          295.887,\n          297.37800000000004,\n          298.869,\n          300.36\n        ]\n      ],\n      \"valid_percent\": 68.49,\n      \"masked_pixels\": 1087.0,\n      \"valid_pixels\": 2363.0,\n      \"percentile_2\": 288.69000000000005,\n      \"percentile_98\": 300.15000000000003\n    }\n  }\n}\n

    The statistics output can be used to generate plots like this:

    In\u00a0[12]: Copied!
    data = response['properties']['statistics']\n\ndates = []\nmeans = []\nstds = []\n\nfor date_str, values in data.items():\n    dates.append(datetime.fromisoformat(date_str))\n    means.append(values[\"analysed_sst\"][\"mean\"])\n    stds.append(values[\"analysed_sst\"][\"std\"])\n\nplt.figure(figsize=(10, 6))\n\nplt.plot(dates, means, \"b-\", label=\"Mean\")\n\nplt.fill_between(\n    dates, \n    np.array(means) - np.array(stds),\n    np.array(means) + np.array(stds),\n    alpha=0.2,\n    color=\"b\",\n    label=\"Standard Deviation\",\n)\n\nplt.xlabel(\"Date\")\nplt.ylabel(\"Temperature (K)\")\nplt.title(\"Mean sea surface temperature in the Gulf of Mexico\")\nplt.legend()\n\nplt.xticks(rotation=45)\n\nplt.tight_layout()\n\nplt.show()\n
    data = response['properties']['statistics'] dates = [] means = [] stds = [] for date_str, values in data.items(): dates.append(datetime.fromisoformat(date_str)) means.append(values[\"analysed_sst\"][\"mean\"]) stds.append(values[\"analysed_sst\"][\"std\"]) plt.figure(figsize=(10, 6)) plt.plot(dates, means, \"b-\", label=\"Mean\") plt.fill_between( dates, np.array(means) - np.array(stds), np.array(means) + np.array(stds), alpha=0.2, color=\"b\", label=\"Standard Deviation\", ) plt.xlabel(\"Date\") plt.ylabel(\"Temperature (K)\") plt.title(\"Mean sea surface temperature in the Gulf of Mexico\") plt.legend() plt.xticks(rotation=45) plt.tight_layout() plt.show() In\u00a0[13]: Copied!
    minx, miny, maxx, maxy = -180, -90, 180, 90\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/WebMercatorQuad/tilejson.json\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P1M\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"sea_ice_fraction\",\n        \"backend\": \"xarray\",\n        \"colormap_name\": \"blues_r\",\n        \"rescale\": [[0, 1]],\n    },\n    timeout=None,\n)\ntilejsons = request.json()\ntilejson_preview = {\n    timestamp: tilejson\n    for i, (timestamp, tilejson) in enumerate(tilejsons.items())\n    if i < 2\n}\nprint(json.dumps(tilejson_preview, indent=2))\n
    minx, miny, maxx, maxy = -180, -90, 180, 90 request = httpx.get( f\"{titiler_endpoint}/timeseries/WebMercatorQuad/tilejson.json\", params={ \"concept_id\": concept_id, \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P1M\", \"temporal_mode\": \"point\", \"variable\": \"sea_ice_fraction\", \"backend\": \"xarray\", \"colormap_name\": \"blues_r\", \"rescale\": [[0, 1]], }, timeout=None, ) tilejsons = request.json() tilejson_preview = { timestamp: tilejson for i, (timestamp, tilejson) in enumerate(tilejsons.items()) if i < 2 } print(json.dumps(tilejson_preview, indent=2))
    {\n  \"2023-11-01T00:00:01+00:00\": {\n    \"tilejson\": \"2.2.0\",\n    \"version\": \"1.0.0\",\n    \"scheme\": \"xyz\",\n    \"tiles\": [\n      \"https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C2036881735-POCLOUD&variable=sea_ice_fraction&backend=xarray&colormap_name=blues_r&rescale=%5B0%2C+1%5D&concept_id=C2036881735-POCLOUD&datetime=2023-11-01T00%3A00%3A01%2B00%3A00\"\n    ],\n    \"minzoom\": 0,\n    \"maxzoom\": 24,\n    \"bounds\": [\n      -180.0,\n      -90.0,\n      180.0,\n      90.0\n    ],\n    \"center\": [\n      0.0,\n      0.0,\n      0\n    ]\n  },\n  \"2023-12-01T00:00:01+00:00\": {\n    \"tilejson\": \"2.2.0\",\n    \"version\": \"1.0.0\",\n    \"scheme\": \"xyz\",\n    \"tiles\": [\n      \"https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C2036881735-POCLOUD&variable=sea_ice_fraction&backend=xarray&colormap_name=blues_r&rescale=%5B0%2C+1%5D&concept_id=C2036881735-POCLOUD&datetime=2023-12-01T00%3A00%3A01%2B00%3A00\"\n    ],\n    \"minzoom\": 0,\n    \"maxzoom\": 24,\n    \"bounds\": [\n      -180.0,\n      -90.0,\n      180.0,\n      90.0\n    ],\n    \"center\": [\n      0.0,\n      0.0,\n      0\n    ]\n  }\n}\n
    In\u00a0[14]: Copied!
    m = Map(location=[0, 0], zoom_start=3, min_zoom=3)\nfor datetime_, tilejson in tilejsons.items():\n    label = datetime.fromisoformat(datetime_).strftime(\"%Y-%m\")\n    TileLayer(\n        tiles=tilejson[\"tiles\"][0],\n        attr=\"GAMSSA SST\",\n        overlay=True,\n        name=label,\n        show=False,\n    ).add_to(m)\nLayerControl(collapsed=False).add_to(m)\nm\n
    m = Map(location=[0, 0], zoom_start=3, min_zoom=3) for datetime_, tilejson in tilejsons.items(): label = datetime.fromisoformat(datetime_).strftime(\"%Y-%m\") TileLayer( tiles=tilejson[\"tiles\"][0], attr=\"GAMSSA SST\", overlay=True, name=label, show=False, ).add_to(m) LayerControl(collapsed=False).add_to(m) m Out[14]: Make this Notebook Trusted to load map: File -> Trust Notebook"},{"location":"examples/time_series_example/#time-series-api","title":"time series API\u00b6","text":"

    There is a family of /timeseries endpoints in the titiler.cmr API that can be used to generate time-aware responses.

The timeseries extension provides endpoints for requesting results for all points or intervals along a time series. The /timeseries family of endpoints works by converting the provided time series parameters (datetime, step, and temporal_mode) into a set of datetime query parameters for the corresponding lower-level endpoint, running asynchronous requests to the lower-level endpoint, then collecting the results and assembling them into a single coherent response for the user.

    The time series structure is defined by the datetime, step, and temporal_mode parameters.

The temporal_mode parameter controls whether CMR is queried for a particular point in time (temporal_mode=point) or over an entire interval (temporal_mode=interval). In general, it is best to use temporal_mode=point for datasets whose granules overlap completely in space (e.g. daily sea surface temperature predictions), because the /timeseries endpoints will create a mosaic of all assets returned by the query and the first asset to cover a pixel will be used. For datasets that require granules from multiple timestamps to fully cover an AOI, temporal_mode=interval is appropriate. For example, you can get weekly composites of satellite imagery for visualization purposes with step=P1W and temporal_mode=interval.
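For example, with step=P1W and temporal_mode=point, the service effectively expands the request into one sub-request per timestep. A rough sketch of that expansion (an illustration, not the service's actual implementation; the concept_id is the GAMSSA collection used later in this example):

from datetime import datetime, timedelta

start = datetime.fromisoformat("2024-10-01T00:00:01+00:00")
end = datetime.fromisoformat("2024-10-30T00:00:01+00:00")
step = timedelta(weeks=1)  # step="P1W"

# temporal_mode=point: one sub-request per timestep
subrequests = []
t = start
while t <= end:
    subrequests.append({"concept_id": "C2036881735-POCLOUD", "datetime": t.isoformat()})
    t += step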

    "},{"location":"examples/time_series_example/#time-series-parameters","title":"time series parameters\u00b6","text":"

    The time series API makes it possible to return results for many points along a timeseries with a single request. The available parameters are:

• datetime (str): Either a date-time, an interval, or a comma-separated list of date-times or intervals. Date and time expressions adhere to the RFC 3339 format (e.g. '2020-06-01T09:00:00Z').
• step (str): width of the individual timesteps, expressed as an ISO 8601 duration (e.g. P1D)
    • temporal_mode (str): if \"point\", queries will be made for the individual timestamps along the timeseries. If \"interval\", queries will be made for the periods between each timestamp along the timeseries.

There are many ways to combine the parameters to produce a time series. Each combination is shown as a set of request parameters in the sketch after the list below.

1. Exact points in time from a start to an end datetime:
• provide datetime={start_datetime}/{end_datetime}, step={step_width}, and temporal_mode=point, where step_width is something like P1D for daily or P2W for bi-weekly.
• provide datetime={start_datetime}/{end_datetime} and temporal_mode=point without step to get a point for every unique timestamp in the granules between start_datetime and end_datetime.
2. Fixed-width intervals between a start and end datetime:
• provide datetime={start_datetime}/{end_datetime}, step, and temporal_mode=interval
3. Specific datetimes:
• provide datetime=2024-10-01T00:00:01Z,2024-10-02T00:00:01Z
4. Specific datetime intervals:
• provide datetime=2024-10-01T00:00:01Z/2024-10-01T23:59:59Z,2024-10-05T00:00:01Z/2024-10-05T23:59:59Z
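A sketch of each combination, with parameter values copied from the list above:

# 1. exact points between a start and end datetime
params_points = {
    "datetime": "2024-10-01T00:00:01Z/2024-10-30T00:00:01Z",
    "step": "P1W",
    "temporal_mode": "point",
}

# 2. fixed-width intervals between a start and end datetime
params_intervals = {
    "datetime": "2024-10-01T00:00:01Z/2024-10-30T00:00:01Z",
    "step": "P1W",
    "temporal_mode": "interval",
}

# 3. specific datetimes
params_datetimes = {"datetime": "2024-10-01T00:00:01Z,2024-10-02T00:00:01Z"}

# 4. specific datetime intervals
params_datetime_intervals = {
    "datetime": "2024-10-01T00:00:01Z/2024-10-01T23:59:59Z,2024-10-05T00:00:01Z/2024-10-05T23:59:59Z"
}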
    "},{"location":"examples/time_series_example/#how-to-use-the-timeseries-api-with-titilercmr","title":"How to use the timeseries API with titiler.cmr\u00b6","text":"

The /timeseries endpoints work by interpreting the time series parameters (e.g. datetime and step) and parameterizing a set of lower-level requests to the related endpoint. For example, a request to /timeseries/statistics for a set of four points in time, each one week apart, will fire off four requests to the /statistics endpoint, each with a particular value for the datetime parameter. The results are collected and returned in a coherent format that can be consumed in a table or a chart.

    Every /timeseries request in titiler.cmr will require both a concept_id and a set of time series parameters. The GHRSST Level 4 GAMSSA_28km Global Foundation Sea Surface Temperature Analysis v1.0 dataset (GDS2) is a useful dataset for demo purposes because the granule assets are small (~1MB each).

    "},{"location":"examples/time_series_example/#time-series-for-all-granules-between-a-startend-datetime","title":"Time series for all granules between a start/end datetime\u00b6","text":"

For datasets whose granules are regularly spaced in time (e.g. daily), it is useful to be able to quickly request results for every point in time between a start and end datetime. You can do that by simply providing datetime={start_datetime}/{end_datetime}. The application will query CMR and produce a list of unique datetime values from the results of the granule search. If a granule represents a datetime range rather than an instant, the midpoint between its start and end is used (see the sketch below).
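The daily GAMSSA granules in this example resolve to 12:00:00 in the sub-requests for exactly this reason. A minimal sketch of the midpoint rule (the granule range here is assumed for illustration):

from datetime import datetime

# a granule covering one full day
start = datetime.fromisoformat("2024-10-01T00:00:00+00:00")
end = datetime.fromisoformat("2024-10-02T00:00:00+00:00")

midpoint = start + (end - start) / 2
print(midpoint.isoformat())  # 2024-10-01T12:00:00+00:00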

    "},{"location":"examples/time_series_example/#weekly-timeseries","title":"Weekly timeseries\u00b6","text":"

Sometimes you might be interested in a report with lower temporal resolution than the maximum available for a dataset. By setting step=\"P1W\" and temporal_mode=\"point\", you can get a weekly series.

    "},{"location":"examples/time_series_example/#periodic-timeseries","title":"Periodic timeseries\u00b6","text":"

Some datasets (like satellite imagery) may consist of granules that do not fully cover an arbitrary area of interest. In this case, it is useful to construct a time series from a set of datetime ranges so that granules can be mosaicked to ensure each step has full coverage.

    To create a set of non-overlapping week-long datetime ranges, you can modify the query to use temporal_mode=\"interval\" which will create ranges that start on the weekly values returned in the previous query and extend up to the second before the next value in the series.
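A sketch of how those ranges are constructed (this reproduces the interval output shown in this example, but it is an illustration, not the service's actual code):

from datetime import datetime, timedelta

start = datetime.fromisoformat("2024-10-01T00:00:01+00:00")
end = datetime.fromisoformat("2024-10-30T00:00:01+00:00")
step = timedelta(weeks=1)  # step="P1W"

intervals = []
t = start
while t < end:
    # each range ends one second before the next timestep, or at the series end
    interval_end = min(t + step - timedelta(seconds=1), end)
    intervals.append(f"{t.isoformat()}/{interval_end.isoformat()}")
    t += step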

    "},{"location":"examples/time_series_example/#custom-time-series","title":"Custom time series\u00b6","text":"

If you want to specify the exact datetime values for a time series and you either cannot or do not want to use the time series parameters, you can supply a set of comma-separated datetimes and/or datetime ranges to the datetime parameter.

    "},{"location":"examples/time_series_example/#example-sea-surface-temperature-gif","title":"Example: sea surface temperature GIF\u00b6","text":"

    The /timeseries/bbox endpoint can be used to produce a GIF that shows a visualization of granules over time.

    The example below shows biweekly sea surface temperature estimates from the GAMSSA dataset for the period from November 2023 through October 2024.

    "},{"location":"examples/time_series_example/#example-hlsl30-gif","title":"Example: HLSL30 GIF\u00b6","text":"

    The example below shows a weekly mosaic of imagery from the Harmonized Landsat Sentinel L30 (HLSL30) collection for the period from January to November 2024.

    "},{"location":"examples/time_series_example/#example-sea-surface-temperature-statistics","title":"Example: sea surface temperature statistics\u00b6","text":"

    The /timeseries/statistics endpoint will produce summary statistics for an AOI for all points along a timeseries.

    The example below shows daily sea surface temperature summary statistics for the Gulf of Mexico from the GAMSSA dataset for the period from February 2022 through October 2024.

    "},{"location":"examples/time_series_example/#example-time-series-raster-tiles","title":"Example: Time series raster tiles\u00b6","text":"

    It could be useful to allow users to select a timestep in an interactive map. You can use the /timeseries/tilejson endpoint for that purpose. The following example shows how you could use it to provide time series capability to an interactive map of sea ice cover.

    "},{"location":"examples/xarray_backend_example/","title":"xarray backend: MUR SST","text":"In\u00a0[1]: Copied!
    import json\nfrom datetime import datetime, timezone\n\nimport earthaccess\nimport httpx\nimport xarray as xr\nfrom folium import GeoJson, Map, TileLayer\n\n# titiler_endpoint = \"http://localhost:8081\"  # docker network endpoint\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"  # deployed endpoint\n
    import json from datetime import datetime, timezone import earthaccess import httpx import xarray as xr from folium import GeoJson, Map, TileLayer # titiler_endpoint = \"http://localhost:8081\" # docker network endpoint titiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\" # deployed endpoint In\u00a0[2]: Copied!
    datasets = earthaccess.search_datasets(doi=\"10.5067/GHGMR-4FJ04\")\nds = datasets[0]\n\nconcept_id = ds[\"meta\"][\"concept-id\"]\nprint(\"Concept-Id: \", concept_id)\n\nprint(\"Abstract: \", ds[\"umm\"][\"Abstract\"])\n
    datasets = earthaccess.search_datasets(doi=\"10.5067/GHGMR-4FJ04\") ds = datasets[0] concept_id = ds[\"meta\"][\"concept-id\"] print(\"Concept-Id: \", concept_id) print(\"Abstract: \", ds[\"umm\"][\"Abstract\"])
    Concept-Id:  C1996881146-POCLOUD\nAbstract:  A Group for High Resolution Sea Surface Temperature (GHRSST) Level 4 sea surface temperature analysis produced as a retrospective dataset (four day latency) and near-real-time dataset (one day latency) at the JPL Physical Oceanography DAAC using wavelets as basis functions in an optimal interpolation approach on a global 0.01 degree grid. The version 4 Multiscale Ultrahigh Resolution (MUR) L4 analysis is based upon nighttime GHRSST L2P skin and subskin SST observations from several instruments including the NASA Advanced Microwave Scanning Radiometer-EOS (AMSR-E), the JAXA Advanced Microwave Scanning Radiometer 2 on GCOM-W1, the Moderate Resolution Imaging Spectroradiometers (MODIS) on the NASA Aqua and Terra platforms, the US Navy microwave WindSat radiometer, the Advanced Very High Resolution Radiometer (AVHRR) on several NOAA satellites, and in situ SST observations from the NOAA iQuam project. The ice concentration data are from the archives at the EUMETSAT Ocean and Sea Ice Satellite Application Facility (OSI SAF) High Latitude Processing Center and are also used for an improved SST parameterization for the high-latitudes.  The dataset also contains additional variables for some granules including a SST anomaly derived from a MUR climatology and the temporal distance to the nearest IR measurement for each pixel.This dataset is funded by the NASA MEaSUREs program ( http://earthdata.nasa.gov/our-community/community-data-system-programs/measures-projects ), and created by a team led by Dr. Toshio M. Chin from JPL. It adheres to the GHRSST Data Processing Specification (GDS) version 2 format specifications. Use the file global metadata \"history:\" attribute to determine if a granule is near-realtime or retrospective.\n
    In\u00a0[3]: Copied!
    results = earthaccess.search_data(\n    count=1,\n    concept_id=concept_id,\n    temporal=(\"2024-10-12\", \"2024-10-13\"),\n)\nprint(\"Granules:\")\nprint(results)\nprint()\nprint(\"Example of NetCDF URL: \")\nfor link in results[0].data_links(access=\"external\"):\n    print(link)\n
    results = earthaccess.search_data( count=1, concept_id=concept_id, temporal=(\"2024-10-12\", \"2024-10-13\"), ) print(\"Granules:\") print(results) print() print(\"Example of NetCDF URL: \") for link in results[0].data_links(access=\"external\"): print(link)
    Granules:\n[Collection: {'Version': '4.1', 'ShortName': 'MUR-JPL-L4-GLOB-v4.1'}\nSpatial coverage: {'HorizontalSpatialDomain': {'Geometry': {'BoundingRectangles': [{'WestBoundingCoordinate': -180, 'SouthBoundingCoordinate': -90, 'EastBoundingCoordinate': 180, 'NorthBoundingCoordinate': 90}]}}}\nTemporal coverage: {'RangeDateTime': {'EndingDateTime': '2024-10-12T21:00:00.000Z', 'BeginningDateTime': '2024-10-11T21:00:00.000Z'}}\nSize(MB): 707.340648651123\nData: ['https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20241012090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc']]\n\nExample of NetCDF URL: \nhttps://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20241012090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc\n
    In\u00a0[4]: Copied!
    fs = earthaccess.get_fsspec_https_session()\n\nds = xr.open_dataset(\n    fs.open(results[0].data_links(access=\"external\")[0]),\n    engine=\"h5netcdf\",\n)\nprint(\"Data Variables:\")\nfor var in ds.data_vars:\n    print(str(var))\n\ndisplay(ds)\n
    fs = earthaccess.get_fsspec_https_session() ds = xr.open_dataset( fs.open(results[0].data_links(access=\"external\")[0]), engine=\"h5netcdf\", ) print(\"Data Variables:\") for var in ds.data_vars: print(str(var)) display(ds)
    Data Variables:\nanalysed_sst\nanalysis_error\nmask\nsea_ice_fraction\ndt_1km_data\nsst_anomaly\n
    <xarray.Dataset> Size: 29GB\nDimensions:           (time: 1, lat: 17999, lon: 36000)\nCoordinates:\n  * time              (time) datetime64[ns] 8B 2024-10-12T09:00:00\n  * lat               (lat) float32 72kB -89.99 -89.98 -89.97 ... 89.98 89.99\n  * lon               (lon) float32 144kB -180.0 -180.0 -180.0 ... 180.0 180.0\nData variables:\n    analysed_sst      (time, lat, lon) float64 5GB ...\n    analysis_error    (time, lat, lon) float64 5GB ...\n    mask              (time, lat, lon) float32 3GB ...\n    sea_ice_fraction  (time, lat, lon) float64 5GB ...\n    dt_1km_data       (time, lat, lon) timedelta64[ns] 5GB ...\n    sst_anomaly       (time, lat, lon) float64 5GB ...\nAttributes: (12/47)\n    Conventions:                CF-1.7\n    title:                      Daily MUR SST, Final product\n    summary:                    A merged, multi-sensor L4 Foundation SST anal...\n    references:                 http://podaac.jpl.nasa.gov/Multi-scale_Ultra-...\n    institution:                Jet Propulsion Laboratory\n    history:                    created at nominal 4-day latency; replaced nr...\n    ...                         ...\n    project:                    NASA Making Earth Science Data Records for Us...\n    publisher_name:             GHRSST Project Office\n    publisher_url:              http://www.ghrsst.org\n    publisher_email:            ghrsst-po@nceo.ac.uk\n    processing_level:           L4\n    cdm_data_type:              grid
    xarray.Dataset
    • Dimensions:
      • time: 1
      • lat: 17999
      • lon: 36000
    • Coordinates: (3)
      • time(time)datetime64[ns]2024-10-12T09:00:00long_name :reference time of sst fieldstandard_name :timeaxis :Tcomment :Nominal time of analyzed fields
        array(['2024-10-12T09:00:00.000000000'], dtype='datetime64[ns]')
      • lat(lat)float32-89.99 -89.98 ... 89.98 89.99long_name :latitudestandard_name :latitudeaxis :Yunits :degrees_northvalid_min :-90.0valid_max :90.0comment :geolocations inherited from the input data without correction
        array([-89.99, -89.98, -89.97, ...,  89.97,  89.98,  89.99], dtype=float32)
      • lon(lon)float32-180.0 -180.0 ... 180.0 180.0long_name :longitudestandard_name :longitudeaxis :Xunits :degrees_eastvalid_min :-180.0valid_max :180.0comment :geolocations inherited from the input data without correction
        array([-179.99, -179.98, -179.97, ...,  179.98,  179.99,  180.  ],\n      dtype=float32)
    • Data variables: (6)
      • analysed_sst(time, lat, lon)float64...long_name :analysed sea surface temperaturestandard_name :sea_surface_foundation_temperatureunits :kelvinvalid_min :-32767valid_max :32767comment :\"Final\" version using Multi-Resolution Variational Analysis (MRVA) method for interpolationsource :MODIS_T-JPL, MODIS_A-JPL, AVHRRMTB_G-NAVO, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF
        [647964000 values with dtype=float64]
      • analysis_error(time, lat, lon)float64...long_name :estimated error standard deviation of analysed_sstunits :kelvinvalid_min :0valid_max :32767comment :uncertainty in \"analysed_sst\"
        [647964000 values with dtype=float64]
      • mask(time, lat, lon)float32...long_name :sea/land field composite maskvalid_min :1valid_max :31flag_masks :[ 1 2 4 8 16]flag_meanings :open_sea land open_lake open_sea_with_ice_in_the_grid open_lake_with_ice_in_the_gridcomment :mask can be used to further filter the data.source :GMT \"grdlandmask\", ice flag from sea_ice_fraction data
        [647964000 values with dtype=float32]
      • sea_ice_fraction(time, lat, lon)float64...long_name :sea ice area fractionstandard_name :sea_ice_area_fractionvalid_min :0valid_max :100source :EUMETSAT OSI-SAF, copyright EUMETSATcomment :ice fraction is a dimensionless quantity between 0 and 1; it has been interpolated by a nearest neighbor approach; EUMETSAT OSI-SAF files used: ice_conc_nh_polstere-100_multi_202410121200.nc, ice_conc_sh_polstere-100_multi_202410121200.nc.
        [647964000 values with dtype=float64]
      • dt_1km_data(time, lat, lon)timedelta64[ns]...long_name :time to most recent 1km datavalid_min :-127valid_max :127source :MODIS and VIIRS pixels ingested by MURcomment :The grid value is hours between the analysis time and the most recent MODIS or VIIRS 1km L2P datum within 0.01 degrees from the grid point. \"Fill value\" indicates absence of such 1km data at the grid point.
        [647964000 values with dtype=timedelta64[ns]]
      • sst_anomaly(time, lat, lon)float64...long_name :SST anomaly from a seasonal SST climatology based on the MUR data over 2003-2014 periodunits :kelvinvalid_min :-32767valid_max :32767comment :anomaly reference to the day-of-year average between 2003 and 2014
        [647964000 values with dtype=float64]
    • Indexes: (3)
      • timePandasIndex
        PandasIndex(DatetimeIndex(['2024-10-12 09:00:00'], dtype='datetime64[ns]', name='time', freq=None))
      • latPandasIndex
        PandasIndex(Index([-89.98999786376953,  -89.9800033569336, -89.97000122070312,\n       -89.95999908447266, -89.94999694824219, -89.94000244140625,\n       -89.93000030517578, -89.91999816894531, -89.91000366210938,\n        -89.9000015258789,\n       ...\n         89.9000015258789,  89.91000366210938,  89.91999816894531,\n        89.93000030517578,  89.94000244140625,  89.94999694824219,\n        89.95999908447266,  89.97000122070312,   89.9800033569336,\n        89.98999786376953],\n      dtype='float32', name='lat', length=17999))
      • lonPandasIndex
        PandasIndex(Index([-179.99000549316406, -179.97999572753906, -179.97000122070312,\n        -179.9600067138672,  -179.9499969482422, -179.94000244140625,\n       -179.92999267578125,  -179.9199981689453, -179.91000366210938,\n       -179.89999389648438,\n       ...\n        179.91000366210938,   179.9199981689453,  179.92999267578125,\n        179.94000244140625,   179.9499969482422,   179.9600067138672,\n        179.97000122070312,  179.97999572753906,  179.99000549316406,\n                     180.0],\n      dtype='float32', name='lon', length=36000))
    • Attributes: (47)Conventions :CF-1.7title :Daily MUR SST, Final productsummary :A merged, multi-sensor L4 Foundation SST analysis product from JPL.references :http://podaac.jpl.nasa.gov/Multi-scale_Ultra-high_Resolution_MUR-SSTinstitution :Jet Propulsion Laboratoryhistory :created at nominal 4-day latency; replaced nrt (1-day latency) version.comment :MUR = \"Multi-scale Ultra-high Resolution\"license :These data are available free of charge under data policy of JPL PO.DAAC.id :MUR-JPL-L4-GLOB-v04.1naming_authority :org.ghrsstproduct_version :04.1uuid :27665bc0-d5fc-11e1-9b23-0800200c9a66gds_version_id :2.0netcdf_version_id :4.1date_created :20241021T071625Zstart_time :20241012T090000Zstop_time :20241012T090000Ztime_coverage_start :20241011T210000Ztime_coverage_end :20241012T210000Zfile_quality_level :3source :MODIS_T-JPL, MODIS_A-JPL, AVHRRMTB_G-NAVO, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAFplatform :Terra, Aqua, MetOp-B, Buoys/Shipssensor :MODIS, AVHRR, in-situMetadata_Conventions :Unidata Observation Dataset v1.0metadata_link :http://podaac.jpl.nasa.gov/ws/metadata/dataset/?format=iso&shortName=MUR-JPL-L4-GLOB-v04.1keywords :Oceans > Ocean Temperature > Sea Surface Temperaturekeywords_vocabulary :NASA Global Change Master Directory (GCMD) Science Keywordsstandard_name_vocabulary :NetCDF Climate and Forecast (CF) Metadata Conventionsouthernmost_latitude :-90.0northernmost_latitude :90.0westernmost_longitude :-180.0easternmost_longitude :180.0spatial_resolution :0.01 degreesgeospatial_lat_units :degrees northgeospatial_lat_resolution :0.01geospatial_lon_units :degrees eastgeospatial_lon_resolution :0.01acknowledgment :Please acknowledge the use of these data with the following statement: These data were provided by JPL under support by NASA MEaSUREs program.creator_name :JPL MUR SST projectcreator_email :ghrsst@podaac.jpl.nasa.govcreator_url :http://mur.jpl.nasa.govproject :NASA Making Earth Science Data Records for Use in Research Environments (MEaSUREs) Programpublisher_name :GHRSST Project Officepublisher_url :http://www.ghrsst.orgpublisher_email :ghrsst-po@nceo.ac.ukprocessing_level :L4cdm_data_type :grid
    In\u00a0[5]: Copied!
    variable = \"sea_ice_fraction\"\ndatetime_ = datetime(2024, 10, 10, tzinfo=timezone.utc).isoformat()\n
    variable = \"sea_ice_fraction\" datetime_ = datetime(2024, 10, 10, tzinfo=timezone.utc).isoformat() In\u00a0[6]: Copied!
r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", datetime_),\n        # titiler-cmr can work with both Zarr and COG datasets\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"xarray\"),\n        (\"variable\", variable),\n        # We need to set min/max zoom because we don't want to use lower zoom levels (e.g. 0)\n        # which would result in uselessly large-scale queries\n        (\"minzoom\", 2),\n        (\"maxzoom\", 13),\n        (\"rescale\", \"0,1\"),\n        (\"colormap_name\", \"blues_r\"),\n    )\n).json()\n\nprint(r)\n
r = httpx.get( f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\", params = ( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", datetime_), # titiler-cmr can work with both Zarr and COG datasets # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"xarray\"), (\"variable\", variable), # We need to set min/max zoom because we don't want to use lower zoom levels (e.g. 0) # which would result in uselessly large-scale queries (\"minzoom\", 2), (\"maxzoom\", 13), (\"rescale\", \"0,1\"), (\"colormap_name\", \"blues_r\"), ) ).json() print(r)
    {'tilejson': '2.2.0', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C1996881146-POCLOUD&datetime=2024-10-10T00%3A00%3A00%2B00%3A00&backend=xarray&variable=sea_ice_fraction&rescale=0%2C1&colormap_name=blues_r'], 'minzoom': 2, 'maxzoom': 13, 'bounds': [-180.0, -90.0, 180.0, 90.0], 'center': [0.0, 0.0, 2]}\n
    In\u00a0[7]: Copied!
    bounds = r[\"bounds\"]\nm = Map(\n    location=(70, -40),\n    zoom_start=3\n)\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\nm\n
    bounds = r[\"bounds\"] m = Map( location=(70, -40), zoom_start=3 ) TileLayer( tiles=r[\"tiles\"][0], opacity=1, attr=\"NASA\", ).add_to(m) m Out[7]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[8]: Copied!
    geojson_dict = {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"properties\": {},\n      \"geometry\": {\n        \"coordinates\": [\n          [\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ]\n          ]\n        ],\n        \"type\": \"Polygon\"\n      }\n    }\n  ]\n}\n\nr = httpx.post(\n    f\"{titiler_endpoint}/statistics\",\n    params=(\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", datetime_),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"xarray\"),\n        (\"variable\", variable),\n    ),\n    json=geojson_dict,\n    timeout=60,\n).json()\n\nprint(json.dumps(r, indent=2))\n
    geojson_dict = { \"type\": \"FeatureCollection\", \"features\": [ { \"type\": \"Feature\", \"properties\": {}, \"geometry\": { \"coordinates\": [ [ [ -20.79973248834736, 83.55979308678764 ], [ -20.79973248834736, 75.0115425216471 ], [ 14.483337068956956, 75.0115425216471 ], [ 14.483337068956956, 83.55979308678764 ], [ -20.79973248834736, 83.55979308678764 ] ] ], \"type\": \"Polygon\" } } ] } r = httpx.post( f\"{titiler_endpoint}/statistics\", params=( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", datetime_), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"xarray\"), (\"variable\", variable), ), json=geojson_dict, timeout=60, ).json() print(json.dumps(r, indent=2))
    {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [\n          [\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ]\n          ]\n        ]\n      },\n      \"properties\": {\n        \"statistics\": {\n          \"sea_ice_fraction\": {\n            \"min\": 0.3,\n            \"max\": 0.99,\n            \"mean\": 0.845157064600111,\n            \"count\": 1725290.875,\n            \"sum\": 1458141.771496357,\n            \"std\": 0.1559272507275522,\n            \"median\": 0.9,\n            \"majority\": 0.9500000000000001,\n            \"minority\": 0.36,\n            \"unique\": 70.0,\n            \"histogram\": [\n              [\n                34892,\n                39574,\n                38696,\n                37867,\n                44348,\n                72817,\n                110580,\n                200188,\n                472678,\n                675707\n              ],\n              [\n                0.3,\n                0.369,\n                0.43799999999999994,\n                0.5069999999999999,\n                0.576,\n                0.645,\n                0.714,\n                0.7829999999999999,\n                0.8519999999999999,\n                0.9209999999999998,\n                0.99\n              ]\n            ],\n            \"valid_percent\": 57.18,\n            \"masked_pixels\": 1293477.0,\n            \"valid_pixels\": 1727347.0,\n            \"percentile_2\": 0.36,\n            \"percentile_98\": 0.99\n          }\n        }\n      }\n    }\n  ]\n}\n
    "},{"location":"examples/xarray_backend_example/#xarray-backend-mur-sst","title":"xarray backend: MUR SST\u00b6","text":"

The MUR SST dataset has daily records for sea surface temperature and ice cover fraction. There is a NetCDF file for each record.

To run the titiler-cmr service locally, you can fire up the docker network with this command:

    docker compose up\n
    "},{"location":"examples/xarray_backend_example/#requirements","title":"Requirements\u00b6","text":"

    To run some of the chunks in this notebook you will need to install a few packages: earthaccess, folium, httpx, xarray
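
For example, in a notebook cell (h5netcdf is included here as an assumption, since the notebook opens the NetCDF file with that engine):

!pip install earthaccess folium httpx xarray h5netcdf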

    "},{"location":"examples/xarray_backend_example/#identify-the-dataset","title":"Identify the dataset\u00b6","text":"

    You can find the MUR SST dataset using the earthaccess.search_datasets function.
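
For example, a minimal sketch (the DOI below is assumed to be the MUR SST v4.1 DOI):

import earthaccess\n\ndatasets = earthaccess.search_datasets(doi=\"10.5067/GHGMR-4FJ04\")\nds = datasets[0]\n\nconcept_id = ds[\"meta\"][\"concept-id\"]\nprint(\"Concept-Id: \", concept_id)\n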

    "},{"location":"examples/xarray_backend_example/#examine-a-granule","title":"Examine a granule\u00b6","text":"

    Each granule contains a single day record for the entire globe and has a single data file.
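
A sketch of how you might inspect a granule with earthaccess (the concept_id is the MUR SST collection used elsewhere in these docs):

import earthaccess\n\nresults = earthaccess.search_data(\n    concept_id=\"C1996881146-POCLOUD\",\n    temporal=(\"2024-10-10\", \"2024-10-11\"),\n    count=1,\n)\n\n# each granule holds one day of global data in a single NetCDF file\nprint(results[0].data_links(access=\"external\"))\n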

    "},{"location":"examples/xarray_backend_example/#explore-the-available-variables","title":"Explore the available variables\u00b6","text":"

The NetCDF file can be opened with xarray using the h5netcdf engine. When running outside of AWS region us-west-2, you will need to access the data using \"external\" https links (rather than \"direct\" s3 links). Those links require authentication, which is handled by earthaccess as long as you have your Earthdata credentials stored in the ~/.netrc file!
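
A minimal sketch, assuming h5netcdf is installed and your Earthdata credentials are available to earthaccess:

import earthaccess\nimport xarray as xr\n\nearthaccess.login()  # uses ~/.netrc or EARTHDATA_USERNAME/EARTHDATA_PASSWORD\n\nresults = earthaccess.search_data(\n    concept_id=\"C1996881146-POCLOUD\",\n    temporal=(\"2024-10-10\", \"2024-10-11\"),\n    count=1,\n)\n\n# \"external\" https access works outside of us-west-2\nfiles = earthaccess.open(results)\nds = xr.open_dataset(files[0], engine=\"h5netcdf\")\nprint(ds.data_vars)\n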

    "},{"location":"examples/xarray_backend_example/#define-a-query-for-titiler-cmr","title":"Define a query for titiler-cmr\u00b6","text":"

    To use titiler-cmr's endpoints for a NetCDF dataset like this we need to define a date range for the CMR query and a variable to analyze.
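
For example, the parameters used by the /statistics request above could be defined like this (the values are illustrative):

concept_id = \"C1996881146-POCLOUD\"  # MUR SST collection\nvariable = \"sea_ice_fraction\"\n\n# Datetime in form of `start_date/end_date`\ndatetime_ = \"2024-10-01T00:00:00Z/2024-10-10T23:59:59Z\"\n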

    "},{"location":"examples/xarray_backend_example/#display-tiles-in-an-interactive-map","title":"Display tiles in an interactive map\u00b6","text":"

    The /tilejson.json endpoint will provide a parameterized xyz tile URL that can be added to an interactive map.
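
A sketch of such a request, assuming titiler_endpoint, concept_id, and datetime_ are defined as above (the rescale and colormap values are illustrative choices for sea_ice_fraction):

import httpx\n\nr = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params=(\n        (\"concept_id\", concept_id),\n        (\"datetime\", datetime_),\n        (\"backend\", \"xarray\"),\n        (\"variable\", \"sea_ice_fraction\"),\n        (\"rescale\", \"0,1\"),\n        (\"colormap_name\", \"blues_r\"),\n    ),\n).json()\n\n# parameterized xyz tile URL for folium.TileLayer\nprint(r[\"tiles\"][0])\n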

    "},{"location":"examples/xarray_backend_example/#geojson-statistics","title":"GeoJSON Statistics\u00b6","text":"

    The /statistics endpoint can be used to get summary statistics for a geojson Feature or FeatureCollection.

    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":"

    A modern dynamic tile server with a NASA CMR backend built on top of FastAPI and Rasterio/GDAL.

    "},{"location":"#titiler-cmr","title":"titiler-cmr","text":"

    An API for creating image tiles from CMR queries.

    "},{"location":"#features","title":"Features","text":"
    • Render tiles from assets discovered via queries to NASA's CMR
    • Uses the earthaccess python package to query the CMR
    • Built on top of titiler
    • Multiple projections support (see TileMatrixSets) via morecantile.
    • JPEG / JP2 / PNG / WEBP / GTIFF / NumpyTile output format support
    • Automatic OpenAPI documentation (FastAPI builtin)
    • Example of AWS Lambda / ECS deployment (via CDK)
    "},{"location":"#installation","title":"Installation","text":"

To install from source and run for development, install uv, then:

    git clone https://github.com/developmentseed/titiler-cmr.git\ncd titiler-cmr\n\nuv sync --all-extras\n
    "},{"location":"#authentication-for-data-read-access","title":"Authentication for data read access","text":"

    titiler-cmr can read data either over HTTP (external) or directly from AWS S3 (direct) depending on the app configuration. The behavior of the application is controlled by the S3 authentication settings in settings.py, which you can set either with environment variables (TITILER_CMR_S3_AUTH_ACCESS, TITILER_CMR_S3_AUTH_STRATEGY) or in an environment file (.env).
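
For example, a minimal .env for external (HTTP) access might contain (an illustrative sketch; the valid values are defined in settings.py):

# .env\nTITILER_CMR_S3_AUTH_ACCESS=external\n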

    "},{"location":"#direct-from-s3","title":"Direct from S3","text":"

    When running in an AWS context (e.g. Lambda), you should configure the application to access the data directly from S3. You can do this in two ways:

    • Configure an AWS IAM role for your runtime environment that has read access to the NASA buckets so that rasterio/GDAL can find the AWS credentials when reading data
• Set the EARTHDATA_USERNAME and EARTHDATA_PASSWORD environment variables so that the earthaccess package can issue temporary AWS credentials (see the sketch after the note below)

    Note

Direct S3 access configuration will only work if the application is running in the same AWS region where the data are stored!
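
As a sketch of the second option, earthaccess can mint temporary S3 credentials from your Earthdata login (the DAAC name is an assumption; use the one that hosts your collection):

import earthaccess\n\n# reads EARTHDATA_USERNAME/EARTHDATA_PASSWORD from the environment\nauth = earthaccess.login(strategy=\"environment\")\n\n# temporary accessKeyId/secretAccessKey/sessionToken for direct S3 reads\ncreds = auth.get_s3_credentials(daac=\"PODAAC\")\n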

    "},{"location":"#external-access","title":"External access","text":"

    When running outside of the AWS context (e.g. locally) you will need to configure the application to access data over HTTP. You can do this by creating an Earthdata account, configuring your .netrc file with your Earthdata login credentials (which GDAL will find when trying to access data over the network), and setting a few environment variables:

    # environment variables for GDAL to read data from NASA over HTTP\nexport GDAL_DISABLE_READDIR_ON_OPEN=YES\nexport CPL_VSIL_CURL_USE_HEAD=FALSE\nexport GDAL_HTTP_COOKIEFILE=/tmp/cookies.txt\nexport GDAL_HTTP_COOKIEJAR=/tmp/cookies.txt\nexport EARTHDATA_USERNAME={your earthdata username}\nexport EARTHDATA_PASSWORD={your earthdata password}\n\n# write your .netrc file to the home directory\necho \"machine urs.earthdata.nasa.gov login ${EARTHDATA_USERNAME} password ${EARTHDATA_PASSWORD}\" > ~/.netrc\n

    Note

    See NASA's docs for details

    "},{"location":"#docker-deployment","title":"Docker deployment","text":"

You can run the application in a docker container using the docker-compose.yml file. The docker container is configured to read the EARTHDATA_USERNAME and EARTHDATA_PASSWORD environment variables, so make sure to set those before starting the docker network.

    docker compose up --build \n

    The application will be available at this address: http://localhost:8081/api.html

    "},{"location":"#local-deployment","title":"Local deployment","text":"

    To run the application directly in your local environment, configure the application to access data over HTTP then run it using uvicorn:

    TITILER_CMR_S3_AUTH_ACCESS=external uvicorn titiler.cmr.main:app --reload\n

    The application will be available at this address: http://localhost:8000/api.html

    "},{"location":"#contribution-development","title":"Contribution & Development","text":"

    See CONTRIBUTING.md

    "},{"location":"#license","title":"License","text":"

    See LICENSE

    "},{"location":"#authors","title":"Authors","text":"

    Created by Development Seed

    See contributors for a listing of individual contributors.

    "},{"location":"#changes","title":"Changes","text":"

    See CHANGES.md.

    "},{"location":"contributing/","title":"Development - Contributing","text":"

    Issues and pull requests are more than welcome: github.com/developmentseed/titiler-cmr/issues

    dev install

    This project uses uv to manage the python environment and dependencies. To install the package for development you can follow these steps:

    # install uv\n\n# unix\ncurl -LsSf https://astral.sh/uv/install.sh | sh\n\n# or windows\n# powershell -c \"irm https://astral.sh/uv/install.ps1 | iex\"\n\ngit clone https://github.com/developmentseed/titiler-cmr.git\ncd titiler-cmr\nuv sync --all-extras\n
    "},{"location":"contributing/#linting","title":"Linting","text":"

This repo is set to use pre-commit to run isort, flake8, pydocstyle, black (\"uncompromising Python code formatter\") and mypy when committing new code.

uv run pre-commit install\n
    "},{"location":"contributing/#testing","title":"Testing","text":"

    You can then run the tests with the following command:

    uv run pytest\n

The tests use vcrpy (https://vcrpy.readthedocs.io/en/latest/) to mock API calls with \"pre-recorded\" API responses. When adding new tests that incur actual network traffic, use the @pytest.mark.vcr decorator to indicate that vcrpy should be used. Record the new responses and commit them to the repository.

    uv run pytest -v -s --record-mode new_episodes\n
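
For illustration, a new test using the decorator might look like this (a sketch; the client fixture is hypothetical):

import pytest\n\n\n@pytest.mark.vcr\ndef test_timeseries(client):  # `client` is a hypothetical HTTP test fixture\n    response = client.get(\n        \"/timeseries\",\n        params={\"concept_id\": \"C1996881146-POCLOUD\", \"datetime\": \"2024-10-01T00:00:01Z/2024-10-05T00:00:01Z\"},\n    )\n    assert response.status_code == 200\n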
    "},{"location":"contributing/#documentation","title":"Documentation","text":"

The documentation is generated using mkdocs and gets built and deployed to GitHub Pages when new tags are released and on pushes to the develop branch.

    To preview the documentation in your browser you can run:

    uv run mkdocs serve -o\n
    "},{"location":"release-notes/","title":"Changelog","text":"

    All notable changes to this project will be documented in this file.

    The format is based on Keep a Changelog.

    "},{"location":"release-notes/#unreleased","title":"Unreleased","text":""},{"location":"release-notes/#added","title":"Added","text":"
    • Initial implementation of STAC metadata structure
    "},{"location":"release-notes/#deprecated","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#012","title":"0.1.2","text":""},{"location":"release-notes/#added_1","title":"Added","text":"
    • Support for /timeseries endpoints (#33)
    "},{"location":"release-notes/#deprecated_1","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_1","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_1","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#011","title":"0.1.1","text":""},{"location":"release-notes/#added_2","title":"Added","text":"
    • Add /bbox, /feature, and /statistics endpoints (#30)
    "},{"location":"release-notes/#deprecated_2","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_2","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_2","title":"Fixed","text":"
    • Nothing.
    "},{"location":"release-notes/#011_1","title":"0.1.1","text":""},{"location":"release-notes/#added_3","title":"Added","text":"

• Ability to run locally with Earthdata authentication (#28)

    "},{"location":"release-notes/#deprecated_3","title":"Deprecated","text":"
    • Nothing.
    "},{"location":"release-notes/#removed_3","title":"Removed","text":"
    • Nothing.
    "},{"location":"release-notes/#fixed_3","title":"Fixed","text":"
    • Nothing.
    "},{"location":"deployment/time_series_api_limits/","title":"Time series API limits","text":""},{"location":"deployment/time_series_api_limits/#time-series-api-limits","title":"Time series API limits\u00b6","text":"

    The titiler-cmr API can be deployed as a Lambda function in AWS. Since requests to the time series endpoints will make recursive requests to the Lambda function for the lower-level time step operations, there are some limits in place to avoid making large requests that are likely to overwhelm the API.

    "},{"location":"deployment/time_series_api_limits/#highlights","title":"Highlights\u00b6","text":"
    • Maximum of 995 discrete points or intervals in a time series request (due to Lambda concurrency limits)
• You can use the length of the time series, the AOI size, and the resolution of the dataset to calculate the total number of pixels (x_pixels * y_pixels * n_time), which is helpful for determining whether a request will succeed (see the sketch after this list)
• The /timeseries/bbox endpoint for generating GIFs for a bounding box will struggle with requests for a large AOI and/or a lengthy time series of a high spatial resolution dataset. Based on a coarse evaluation of the API, requests are limited to 100,000,000 (1e8) total pixels; requests that exceed this limit fail fast rather than firing hundreds of doomed Lambda invocations.
• The /timeseries/statistics endpoint can handle larger requests than the /timeseries/bbox endpoint. Based on a coarse evaluation of the API, requests are limited to 15,000,000,000 (1.5e10) total pixels as long as the individual images read by the /statistics endpoint are smaller than 56,250,000 (5.625e7) pixels.
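
Here is a sketch of that back-of-the-envelope check (the function and names are illustrative):

# estimate the total number of pixels in a time series request\ndef total_pixels(bounds, resolution_degrees, n_time):\n    xmin, ymin, xmax, ymax = bounds\n    x_pixels = (xmax - xmin) / resolution_degrees\n    y_pixels = (ymax - ymin) / resolution_degrees\n    return int(x_pixels * y_pixels * n_time)\n\n# 5x5 degree AOI at 0.01 degree resolution with 180 daily steps\nprint(total_pixels((-5, -5, 0, 0), 0.01, 180))  # 45000000 (4.5e7), under the 1e8 cap\n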
    "},{"location":"deployment/time_series_api_limits/#background","title":"Background\u00b6","text":"

The time series API provides rapid access to time series analysis and visualization of collections in the CMR catalog, but the deployment has some limitations that require care when making large requests.

    There are several factors that must be considered in order to make a successful time series request:

    • Spatial resolution of the dataset (especially for the xarray backend)
    • Request AOI size
    • Number of points/intervals in the time series

These factors all influence the runtime and memory footprint of the initial /timeseries request, and requests that are too large in any of these dimensions can result in an API failure. Here are a few guidelines to help you craft successful time series requests.

    "},{"location":"deployment/time_series_api_limits/#details","title":"Details\u00b6","text":""},{"location":"deployment/time_series_api_limits/#number-of-pointsintervals-in-the-time-series","title":"Number of points/intervals in the time series\u00b6","text":"

The top factor that determines whether a request will succeed or fail is the number of points in the time series. In the default deployment, there is a hard cap of 995 time points in any time series request. This cap is in place because there is a concurrency limit of 1000 on the Lambda function that executes the API requests.

    "},{"location":"deployment/time_series_api_limits/#spatial-resolution-and-aoi-size","title":"Spatial resolution and AOI size\u00b6","text":"

    For datasets that use the rasterio backend, there will be very few limitations on maximum array size as long as the data are COGs and you specify a reasonable output image size (or use the max_size parameter) in your request.

    For datasets without overviews/pyramids, titiler-cmr will need to read all of the bytes that overlap the request AOI even if the resulting image is going to be downsampled for a GIF. Therefore, if the area of interest for a /timeseries/statistics or /timeseries/bbox request will create a large array that is likely to exceed the capacity of the Lambda function, the request will fail fast.

    The limits for the xarray backend are:

    • /timeseries/bbox
      • individual image size: 5.6e7 pixels (~7500x7500)
      • total image size (x_pixels * y_pixels * n_time): 1e8 pixels
    • /timeseries/statistics
      • individual image size: 5.6e7 pixels (~7500x7500)
      • total image size: 1.5e10 pixels

    For low-resolution datasets (e.g. 28km or 0.25 degree) you will not run into any issues (unless you request too many time points!) because a request for the full dataset will be reading arrays that are ~1440x720 pixels.

    For higher-resolution datasets (e.g. 1km or 0.01 degree), you will start to run into problems as the size of the raw arrays that titiler-cmr is processing increases (and the number of discrete points or intervals increases).

    "},{"location":"deployment/time_series_api_limits/#examples","title":"Examples\u00b6","text":"

    The MUR-SST dataset is good for demonstrating the limits of the time series endpoints with the xarray backend. It has high resolution (1 km, 0.01 degree) daily global sea surface temperature observations! With this resolution it is easy to craft a request that will break the /timeseries endpoints. Here are some examples of how to manipulate the time series parameters to achieve success with the /timeseries/bbox endpoint.

    from datetime import datetime, timedelta\n\nimport httpx\n

Here is a request that will succeed (if the Lambda is warmed up):

    • 5x5 degree bounding box (500 x 500 pixels)
    • 180 daily observations (180 / P1D)
    • total size: 500 * 500 * 180 = 4.5e7
    bounds = (-5, -5, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=180)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P1D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

    That request is about half of the maximum request size for the /timeseries/bbox endpoint. We can push it to the limit by doubling the length of the time series:

    • 5x5 degree bounding box (500 x 500 pixels)
    • 360 daily observations (360 / P1D)
    • total size: 500 * 500 * 360 = 9.0e7
    bounds = (-5, -5, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=360)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P1D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

    If we increase the length of the time series such that the request exceeds the maximum size, the API will return an error:

    • 5x5 degree bounding box (500 x 500 pixels)
    • 540 daily observations (540 / P1D)
    • total size: 500 * 500 * 540 = 1.35e8 (greater than maximum of 1.0e8!)
    bounds = (-5, -5, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=540)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P1D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

We can get a successful response for the larger time window if we reduce the temporal resolution:

    • 5x5 degree bounding box (500 x 500 pixels)
    • 77 weekly observations (540 / P7D)
    • total size: 500 * 500 * 77 = 1.925e7
    bounds = (-5, -5, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=540)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P7D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

    With the weekly temporal resolution we have some room to increase the size of the bounding box!

    • 10x10 degree bounding box (1000 x 1000 pixels)
    • 77 weekly observations (540 / P7D)
    • total size: 1000 * 1000 * 77 = 7.7e7
    bounds = (-10, -10, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=540)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P7D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

If we double the AOI size again, we will exceed the request size limit:

• 20x20 degree bounding box (2000 x 2000 pixels)
    • 77 weekly observations (540 / P7D)
• total size: 2000 * 2000 * 77 = 3.08e8 (greater than the maximum of 1e8!)
    bounds = (-20, -20, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=540)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P7D\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

    But if we reduce the temporal resolution from weekly to monthly, it will work!

• 20x20 degree bounding box (2000 x 2000 pixels)
    • 18 monthly observations (540 / P1M)
• total size: 2000 * 2000 * 18 = 7.2e7
    bounds = (-20, -20, 0, 0)\nbbox_str = \",\".join(str(x) for x in bounds)\n\nstart_datetime = datetime(year=2011, month=1, day=1, hour=0, minute=0, second=1)\nend_datetime = start_datetime + timedelta(days=540)\n\nresponse = httpx.get(\n    f\"https://dev-titiler-cmr.delta-backend.com/timeseries/bbox/{bbox_str}.gif\",\n    params={\n        \"concept_id\": \"C1996881146-POCLOUD\",\n        \"datetime\": \"/\".join(dt.isoformat() for dt in [start_datetime, end_datetime]),\n        \"step\": \"P1M\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"rescale\": \"273,315\",\n        \"colormap_name\": \"viridis\",\n        \"temporal_mode\": \"point\",\n    },\n    timeout=None,\n)\n

    However, there is a maximum image size that we can read with the xarray backend, so we cannot increase the bounding box indefinitely. The limit imposed on the API at this time is 5.6e7 pixels (7500 x 7500 pixels). In the case of MUR-SST, that is a bounding box of roughly 75 x 75 degrees.

    "},{"location":"deployment/time_series_api_limits/#tips","title":"Tips\u00b6","text":"
    • If you hit an error because the total size of the request is too large, try reducing the temporal resolution of the time series, e.g. from daily (P1D) to weekly (P7D) or greater (P10D)
• If you need higher temporal resolution but the full request cannot be handled in one go, split it into multiple smaller requests and merge the results yourself (see the sketch below)!
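
Here is a sketch of that strategy for the /timeseries/statistics endpoint (it mirrors the request parameters used in the examples above; the 180-day chunk length is an arbitrary choice):

from datetime import datetime, timedelta\n\nimport httpx\nfrom geojson_pydantic import Feature, Polygon\n\nendpoint = \"https://dev-titiler-cmr.delta-backend.com\"\naoi = Feature(\n    type=\"Feature\",\n    geometry=Polygon.from_bounds(-5, -5, 0, 0),\n    properties={},\n)\n\nstart = datetime(2011, 1, 1)\nend = datetime(2013, 1, 1)\nchunk = timedelta(days=180)\n\nmerged_stats = {}\nt0 = start\nwhile t0 < end:\n    t1 = min(t0 + chunk, end)\n    r = httpx.post(\n        f\"{endpoint}/timeseries/statistics\",\n        params={\n            \"concept_id\": \"C1996881146-POCLOUD\",\n            \"datetime\": f\"{t0.isoformat()}/{t1.isoformat()}\",\n            \"step\": \"P1D\",\n            \"temporal_mode\": \"point\",\n            \"variable\": \"analysed_sst\",\n            \"backend\": \"xarray\",\n        },\n        json=aoi.model_dump(exclude_none=True),\n        timeout=None,\n    )\n    r.raise_for_status()\n    # timestamps are the keys, so merging chunks is a dict update\n    merged_stats.update(r.json()[\"properties\"][\"statistics\"])\n    t0 = t1\n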
    "},{"location":"examples/rasterio_backend_example/","title":"rasterio backend example: HLS","text":"In\u00a0[1]: Copied!
    import earthaccess\nimport geojson_pydantic\nimport httpx\nimport json\n\n\nfrom folium import GeoJson, Map, TileLayer\n
    import earthaccess import geojson_pydantic import httpx import json from folium import GeoJson, Map, TileLayer In\u00a0[2]: Copied!
    # titiler_endpoint = \"http://localhost:8081\"  # docker network endpoint\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"  # deployed endpoint\n
    # titiler_endpoint = \"http://localhost:8081\" # docker network endpoint titiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\" # deployed endpoint In\u00a0[3]: Copied!
    datasets = earthaccess.search_datasets(doi=\"10.5067/HLS/HLSL30.002\")\nds = datasets[0]\n\nconcept_id = ds[\"meta\"][\"concept-id\"]\nprint(\"Concept-Id: \", concept_id)\nprint(\"Abstract: \", ds[\"umm\"][\"Abstract\"])\n
    datasets = earthaccess.search_datasets(doi=\"10.5067/HLS/HLSL30.002\") ds = datasets[0] concept_id = ds[\"meta\"][\"concept-id\"] print(\"Concept-Id: \", concept_id) print(\"Abstract: \", ds[\"umm\"][\"Abstract\"])
    Concept-Id:  C2021957657-LPCLOUD\nAbstract:  The Harmonized Landsat Sentinel-2 (HLS) project provides consistent surface reflectance (SR) and top of atmosphere (TOA) brightness data from a virtual constellation of satellite sensors. The Operational Land Imager (OLI) is housed aboard the joint NASA/USGS Landsat 8 and Landsat 9 satellites, while the Multi-Spectral Instrument (MSI) is mounted aboard Europe\u2019s Copernicus Sentinel-2A and Sentinel-2B satellites. The combined measurement enables global observations of the land every 2\u20133 days at 30-meter (m) spatial resolution. The HLS project uses a set of algorithms to obtain seamless products from OLI and MSI that include atmospheric correction, cloud and cloud-shadow masking, spatial co-registration and common gridding, illumination and view angle normalization, and spectral bandpass adjustment.\r\n\r\nThe HLSL30 product provides 30-m Nadir Bidirectional Reflectance Distribution Function (BRDF)-Adjusted Reflectance (NBAR) and is derived from Landsat 8/9 OLI data products. The HLSS30 and HLSL30 products are gridded to the same resolution and Military Grid Reference System (MGRS)(https://hls.gsfc.nasa.gov/products-description/tiling-system/) tiling system, and thus are \u201cstackable\u201d for time series analysis.\r\n\r\nThe HLSL30 product is provided in Cloud Optimized GeoTIFF (COG) format, and each band is distributed as a separate file. There are 11 bands included in the HLSL30 product along with one quality assessment (QA) band and four angle bands. See the User Guide for a more detailed description of the individual bands provided in the HLSL30 product.\n
    In\u00a0[4]: Copied!
    import earthaccess\nimport morecantile\n\ntms = morecantile.tms.get(\"WebMercatorQuad\")\n\nbounds = tms.bounds(62, 44, 7)\nxmin, ymin, xmax, ymax = (round(n, 8) for n in bounds)\n\nresults = earthaccess.search_data(\n    bounding_box=(xmin, ymin, xmax, ymax),\n    count=1,\n    concept_id=concept_id,\n    temporal=(\"2024-02-11\", \"2024-02-13\"),\n)\nprint(\"Granules:\")\nprint(results)\nprint()\nprint(\"Example of COGs URL: \")\nfor link in results[0].data_links(access=\"direct\"):\n    print(link)\n
    import earthaccess import morecantile tms = morecantile.tms.get(\"WebMercatorQuad\") bounds = tms.bounds(62, 44, 7) xmin, ymin, xmax, ymax = (round(n, 8) for n in bounds) results = earthaccess.search_data( bounding_box=(xmin, ymin, xmax, ymax), count=1, concept_id=concept_id, temporal=(\"2024-02-11\", \"2024-02-13\"), ) print(\"Granules:\") print(results) print() print(\"Example of COGs URL: \") for link in results[0].data_links(access=\"direct\"): print(link)
    Granules:\n[Collection: {'EntryTitle': 'HLS Landsat Operational Land Imager Surface Reflectance and TOA Brightness Daily Global 30m v2.0'}\nSpatial coverage: {'HorizontalSpatialDomain': {'Geometry': {'GPolygons': [{'Boundary': {'Points': [{'Longitude': -2.64743819, 'Latitude': 48.6644919}, {'Longitude': -2.21521695, 'Latitude': 49.65006328}, {'Longitude': -3.00027708, 'Latitude': 49.65272281}, {'Longitude': -3.00027162, 'Latitude': 48.66503141}, {'Longitude': -2.64743819, 'Latitude': 48.6644919}]}}]}}}\nTemporal coverage: {'RangeDateTime': {'BeginningDateTime': '2024-02-12T11:05:26.302Z', 'EndingDateTime': '2024-02-12T11:05:50.181Z'}}\nSize(MB): 56.62721920013428\nData: ['https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SAA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SZA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.Fmask.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VAA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VZA.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif', 'https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif']]\n\nExample of COGs URL: 
\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SAA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.SZA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.Fmask.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VAA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.VZA.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif\ns3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif\n
    In\u00a0[5]: Copied!
    from titiler.cmr.backend import CMRBackend\nfrom titiler.cmr.reader import MultiFilesBandsReader\n\nwith CMRBackend(reader=MultiFilesBandsReader) as backend:\n    assets = backend.assets_for_tile(\n        x=62,\n        y=44,\n        z=7,\n        bands_regex=\"B[0-9][0-9]\",\n        concept_id=concept_id,\n        temporal=(\"2024-02-11\", \"2024-02-13\")\n    )\n\nprint(assets[0])\n
    from titiler.cmr.backend import CMRBackend from titiler.cmr.reader import MultiFilesBandsReader with CMRBackend(reader=MultiFilesBandsReader) as backend: assets = backend.assets_for_tile( x=62, y=44, z=7, bands_regex=\"B[0-9][0-9]\", concept_id=concept_id, temporal=(\"2024-02-11\", \"2024-02-13\") ) print(assets[0])
    {'url': {'B02': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B02.tif', 'B06': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B06.tif', 'B01': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B01.tif', 'B07': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B07.tif', 'B03': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B03.tif', 'B04': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B04.tif', 'B05': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B05.tif', 'B11': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B11.tif', 'B10': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B10.tif', 'B09': 's3://lp-prod-protected/HLSL30.020/HLS.L30.T30UWV.2024043T110526.v2.0/HLS.L30.T30UWV.2024043T110526.v2.0.B09.tif'}, 'provider': 'LPCLOUD'}\n
    In\u00a0[6]: Copied!
    from IPython.display import IFrame\nIFrame(f\"{titiler_endpoint}/api.html\", 900,500)\n
    from IPython.display import IFrame IFrame(f\"{titiler_endpoint}/api.html\", 900,500) Out[6]: In\u00a0[7]: Copied!
    r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-10-01T00:00:00Z/2024-10-10T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as File per Band\n        # so we need to pass a `band_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # True Color Image B04,B03,B02\n        (\"bands\", \"B04\"),\n        (\"bands\", \"B03\"),\n        (\"bands\", \"B02\"),\n        # The data is in type of Uint16 so we need to apply some\n        # rescaling/color_formula in order to create PNGs\n        (\"color_formula\", \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\"),\n        # We need to set min/max zoom because we don't want to use lowerzoom level (e.g 0)\n        # which will results in useless large scale query\n        (\"minzoom\", 8),\n        (\"maxzoom\", 13),\n    )\n).json()\n\nprint(r)\n
    r = httpx.get( f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\", params = ( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", \"2024-10-01T00:00:00Z/2024-10-10T23:59:59Z\"), # We know that the HLS collection dataset is stored as File per Band # so we need to pass a `band_regex` option to assign `bands` to each URL (\"bands_regex\", \"B[0-9][0-9]\"), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"rasterio\"), # True Color Image B04,B03,B02 (\"bands\", \"B04\"), (\"bands\", \"B03\"), (\"bands\", \"B02\"), # The data is in type of Uint16 so we need to apply some # rescaling/color_formula in order to create PNGs (\"color_formula\", \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\"), # We need to set min/max zoom because we don't want to use lowerzoom level (e.g 0) # which will results in useless large scale query (\"minzoom\", 8), (\"maxzoom\", 13), ) ).json() print(r)
    {'tilejson': '2.2.0', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C2021957657-LPCLOUD&datetime=2024-10-01T00%3A00%3A00Z%2F2024-10-10T23%3A59%3A59Z&bands_regex=B%5B0-9%5D%5B0-9%5D&backend=rasterio&bands=B04&bands=B03&bands=B02&color_formula=Gamma+RGB+3.5+Saturation+1.7+Sigmoidal+RGB+15+0.35'], 'minzoom': 8, 'maxzoom': 13, 'bounds': [-180.0, -90.0, 180.0, 90.0], 'center': [0.0, 0.0, 8]}\n
    In\u00a0[8]: Copied!
    bounds = r[\"bounds\"]\nm = Map(\n    location=(47.590266824611675, -91.03729840730689),\n    zoom_start=r[\"maxzoom\"] - 2\n)\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\nm\n
    bounds = r[\"bounds\"] m = Map( location=(47.590266824611675, -91.03729840730689), zoom_start=r[\"maxzoom\"] - 2 ) TileLayer( tiles=r[\"tiles\"][0], opacity=1, attr=\"NASA\", ).add_to(m) m Out[8]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[9]: Copied!
    r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-06-20T00:00:00Z/2024-06-27T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as File per Band\n        # so we need to pass a `band_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # NDVI\n        (\"expression\", \"(B05-B04)/(B05+B04)\"),\n        # Need red (B04) and nir (B05) for NDVI\n        (\"bands\", \"B05\"),\n        (\"bands\", \"B04\"),\n        # The data is in type of Uint16 so we need to apply some\n        # rescaling/color_formula in order to create PNGs\n        (\"colormap_name\", \"viridis\"),\n        (\"rescale\", \"-1,1\"),\n        # We need to set min/max zoom because we don't want to use lowerzoom level (e.g 0)\n        # which will results in useless large scale query\n        (\"minzoom\", 8),\n        (\"maxzoom\", 13),\n    )\n).json()\n\nm = Map(\n    location=(47.9221313337365, -91.65432884883238),\n    zoom_start=r[\"maxzoom\"] - 1\n)\n\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\n\nm\n
    r = httpx.get( f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\", params = ( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", \"2024-06-20T00:00:00Z/2024-06-27T23:59:59Z\"), # We know that the HLS collection dataset is stored as File per Band # so we need to pass a `band_regex` option to assign `bands` to each URL (\"bands_regex\", \"B[0-9][0-9]\"), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"rasterio\"), # NDVI (\"expression\", \"(B05-B04)/(B05+B04)\"), # Need red (B04) and nir (B05) for NDVI (\"bands\", \"B05\"), (\"bands\", \"B04\"), # The data is in type of Uint16 so we need to apply some # rescaling/color_formula in order to create PNGs (\"colormap_name\", \"viridis\"), (\"rescale\", \"-1,1\"), # We need to set min/max zoom because we don't want to use lowerzoom level (e.g 0) # which will results in useless large scale query (\"minzoom\", 8), (\"maxzoom\", 13), ) ).json() m = Map( location=(47.9221313337365, -91.65432884883238), zoom_start=r[\"maxzoom\"] - 1 ) TileLayer( tiles=r[\"tiles\"][0], opacity=1, attr=\"NASA\", ).add_to(m) m Out[9]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[10]: Copied!
    geojson = {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"properties\": {},\n      \"geometry\": {\n        \"coordinates\": [\n          [\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ]\n          ]\n        ],\n        \"type\": \"Polygon\"\n      }\n    }\n  ]\n}\n
    geojson = { \"type\": \"FeatureCollection\", \"features\": [ { \"type\": \"Feature\", \"properties\": {}, \"geometry\": { \"coordinates\": [ [ [ -91.65432884883238, 47.9221313337365 ], [ -91.65432884883238, 47.86503396133904 ], [ -91.53842043960762, 47.86503396133904 ], [ -91.53842043960762, 47.9221313337365 ], [ -91.65432884883238, 47.9221313337365 ] ] ], \"type\": \"Polygon\" } } ] } In\u00a0[11]: Copied!
    import json\n\nr = httpx.post(\n    f\"{titiler_endpoint}/statistics\",\n    params=(\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", \"2024-07-01T00:00:00Z/2024-07-10T23:59:59Z\"),\n        # We know that the HLS collection dataset is stored as File per Band\n        # so we need to pass a `band_regex` option to assign `bands` to each URL\n        (\"bands_regex\", \"B[0-9][0-9]\"),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"rasterio\"),\n        # NDVI\n        (\"expression\", \"(B05-B04)/(B05+B04)\"),\n        # Need red (B04) and nir (B05) for NDVI\n        (\"bands\", \"B05\"),\n        (\"bands\", \"B04\"),\n    ),\n    json=geojson,\n    timeout=30,\n).json()\n\nprint(json.dumps(r, indent=2))\n
    import json r = httpx.post( f\"{titiler_endpoint}/statistics\", params=( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", \"2024-07-01T00:00:00Z/2024-07-10T23:59:59Z\"), # We know that the HLS collection dataset is stored as File per Band # so we need to pass a `band_regex` option to assign `bands` to each URL (\"bands_regex\", \"B[0-9][0-9]\"), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"rasterio\"), # NDVI (\"expression\", \"(B05-B04)/(B05+B04)\"), # Need red (B04) and nir (B05) for NDVI (\"bands\", \"B05\"), (\"bands\", \"B04\"), ), json=geojson, timeout=30, ).json() print(json.dumps(r, indent=2))
    {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [\n          [\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.86503396133904\n            ],\n            [\n              -91.53842043960762,\n              47.9221313337365\n            ],\n            [\n              -91.65432884883238,\n              47.9221313337365\n            ]\n          ]\n        ]\n      },\n      \"properties\": {\n        \"statistics\": {\n          \"(B05-B04)/(B05+B04)\": {\n            \"min\": -75.4,\n            \"max\": 26.6,\n            \"mean\": 0.5238783261952482,\n            \"count\": 57304.8046875,\n            \"sum\": 30020.745162633113,\n            \"std\": 0.6052277569586431,\n            \"median\": 0.6041512231282431,\n            \"majority\": 0.75,\n            \"minority\": -75.4,\n            \"unique\": 47613.0,\n            \"histogram\": [\n              [\n                1,\n                0,\n                2,\n                1,\n                0,\n                0,\n                16,\n                57764,\n                12,\n                2\n              ],\n              [\n                -75.4,\n                -65.2,\n                -55.00000000000001,\n                -44.80000000000001,\n                -34.60000000000001,\n                -24.400000000000006,\n                -14.20000000000001,\n                -4.000000000000014,\n                6.199999999999989,\n                16.39999999999999,\n                26.6\n              ]\n            ],\n            \"valid_percent\": 100.0,\n            \"masked_pixels\": 0.0,\n            \"valid_pixels\": 57798.0,\n            \"percentile_2\": 0.04382638010956595,\n            \"percentile_98\": 0.8685282140779523\n          }\n        }\n      }\n    }\n  ]\n}\n
    "},{"location":"examples/rasterio_backend_example/#rasterio-backend-example-hls","title":"rasterio backend example: HLS\u00b6","text":"

    The Harmonized Landsat Sentinel-2 dataset is available in two collections in CMR. This example will use data from the HLSL30.002 (Landsat) dataset.

    "},{"location":"examples/rasterio_backend_example/#requirements","title":"Requirements\u00b6","text":"

    To run some of the chunks in this notebook you will need to install a few packages:

    • earthaccess
    • folium
    • httpx

    !pip install folium httpx earthaccess

    "},{"location":"examples/rasterio_backend_example/#identify-the-dataset","title":"Identify the dataset\u00b6","text":"

    You can find the HLSL30.002 dataset using the earthaccess.search_datasets function.

    "},{"location":"examples/rasterio_backend_example/#examine-a-granule","title":"Examine a granule\u00b6","text":"

    Each granule contains the data for a single point in time for an MGRS tile.

    "},{"location":"examples/rasterio_backend_example/#demonstrate-assets_for_tile-method","title":"Demonstrate assets_for_tile method\u00b6","text":"

While rendering xyz tile images, titiler-cmr searches for assets using the assets_for_tile method, which converts the xyz tile extent into a bounding box.

    "},{"location":"examples/rasterio_backend_example/#titilercmr-api-documentation","title":"titiler.cmr API documentation\u00b6","text":""},{"location":"examples/rasterio_backend_example/#display-tiles-in-an-interactive-map","title":"Display tiles in an interactive map\u00b6","text":"

    The /tilejson.json endpoint will provide a parameterized xyz tile URL that can be added to an interactive map.

    "},{"location":"examples/rasterio_backend_example/#render-ndvi-using-the-expression-parameter","title":"Render NDVI using the expression parameter\u00b6","text":"

    The expression parameter can be used to render images from an expression of a combination of the individual bands.

    "},{"location":"examples/rasterio_backend_example/#geojson-statistics","title":"GeoJSON Statistics\u00b6","text":"

    The /statistics endpoint can be used to get summary statistics for a geojson Feature or FeatureCollection.

    "},{"location":"examples/time_series_example/","title":"time series API","text":"In\u00a0[1]: Copied!
    from IPython.display import IFrame\n\n# if running titiler-cmr in the docker network\n# titiler_endpoint = \"http://localhost:8081\"\n\n# titiler-cmr-staging deployment\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"\n\nIFrame(f\"{titiler_endpoint}/api.html#Timeseries\", 900, 500)\n
    from IPython.display import IFrame # if running titiler-cmr in the docker network # titiler_endpoint = \"http://localhost:8081\" # titiler-cmr-staging deployment titiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\" IFrame(f\"{titiler_endpoint}/api.html#Timeseries\", 900, 500) Out[1]: In\u00a0[2]: Copied!
    import json\nfrom datetime import datetime\n\nimport httpx\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom folium import LayerControl, Map, TileLayer\nfrom geojson_pydantic import Feature, Polygon\nfrom IPython.display import Image, display\n
    import json from datetime import datetime import httpx import matplotlib.pyplot as plt import numpy as np from folium import LayerControl, Map, TileLayer from geojson_pydantic import Feature, Polygon from IPython.display import Image, display In\u00a0[3]: Copied!
    concept_id = \"C2036881735-POCLOUD\"\n
    concept_id = \"C2036881735-POCLOUD\"

    The /timeseries GET endpoint is useful for demonstrating how the timeseries family of endpoints constructs sub-requests. It returns the list of titiler.cmr query parameters (datetime and concept_id) that will be used to generate the timeseries results.

    In\u00a0[4]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-05T00:00:01Z\",\n    },\n    timeout=None,\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-05T00:00:01Z\", }, timeout=None, ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-02T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-03T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-04T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-05T12:00:00+00:00\"\n  }\n]\n
    In\u00a0[5]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"point\",\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\", \"step\": \"P1W\", \"temporal_mode\": \"point\", } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-15T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-22T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-29T00:00:01+00:00\"\n  }\n]\n
    In\u00a0[6]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"interval\",\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \"2024-10-01T00:00:01Z/2024-10-30T00:00:01Z\", \"step\": \"P1W\", \"temporal_mode\": \"interval\", } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00/2024-10-08T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T00:00:01+00:00/2024-10-15T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-15T00:00:01+00:00/2024-10-22T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-22T00:00:01+00:00/2024-10-29T00:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-29T00:00:01+00:00/2024-10-30T00:00:01+00:00\"\n  }\n]\n
    In\u00a0[7]: Copied!
    response = httpx.get(\n    f\"{titiler_endpoint}/timeseries\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \",\".join(\n            [\"2024-10-01T00:00:01Z\", \"2024-10-07T00:00:01Z/2024-10-09T23:59:59Z\"]\n        ),\n    }\n).json()\n\nprint(json.dumps(response, indent=2))\n
    response = httpx.get( f\"{titiler_endpoint}/timeseries\", params={ \"concept_id\": concept_id, \"datetime\": \",\".join( [\"2024-10-01T00:00:01Z\", \"2024-10-07T00:00:01Z/2024-10-09T23:59:59Z\"] ), } ).json() print(json.dumps(response, indent=2))
    [\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-01T00:00:01+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-07T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-08T12:00:00+00:00\"\n  },\n  {\n    \"concept_id\": \"C2036881735-POCLOUD\",\n    \"datetime\": \"2024-10-09T12:00:00+00:00\"\n  }\n]\n
    In\u00a0[8]: Copied!
    minx, miny, maxx, maxy = -180, -90, 180, 90\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}.gif\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P2W\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n        \"colormap_name\": \"thermal\",\n        \"rescale\": [[273, 315]],\n    },\n    timeout=None,\n)\ndisplay(Image(request.content))\n
    minx, miny, maxx, maxy = -180, -90, 180, 90 request = httpx.get( f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}.gif\", params={ \"concept_id\": concept_id, \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P2W\", \"temporal_mode\": \"point\", \"variable\": \"analysed_sst\", \"backend\": \"xarray\", \"colormap_name\": \"thermal\", \"rescale\": [[273, 315]], }, timeout=None, ) display(Image(request.content)) In\u00a0[9]: Copied!
    minx, miny, maxx, maxy = -91.464,47.353,-90.466,47.974\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}/512x512.gif\",\n    params={\n        \"concept_id\": \"C2021957657-LPCLOUD\",\n        \"datetime\": \"2024-01-01T00:00:00Z/2024-11-30T00:00:00Z\",\n        \"step\": \"P1W\",\n        \"temporal_mode\": \"interval\",\n        \"backend\": \"rasterio\",\n        \"bands_regex\":  \"B[0-9][0-9]\",\n        \"bands\": [\"B04\", \"B03\", \"B02\"],\n        \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\",\n        \"fps\": 5,\n    },\n    timeout=None,\n)\ndisplay(Image(request.content))\n
    minx, miny, maxx, maxy = -91.464,47.353,-90.466,47.974 request = httpx.get( f\"{titiler_endpoint}/timeseries/bbox/{minx},{miny},{maxx},{maxy}/512x512.gif\", params={ \"concept_id\": \"C2021957657-LPCLOUD\", \"datetime\": \"2024-01-01T00:00:00Z/2024-11-30T00:00:00Z\", \"step\": \"P1W\", \"temporal_mode\": \"interval\", \"backend\": \"rasterio\", \"bands_regex\": \"B[0-9][0-9]\", \"bands\": [\"B04\", \"B03\", \"B02\"], \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\", \"fps\": 5, }, timeout=None, ) display(Image(request.content)) In\u00a0[10]: Copied!
    %%time\nminx, miny, maxx, maxy = -98.676, 18.857, -81.623, 31.097\ngeojson = Feature(\n    type=\"Feature\",\n    geometry=Polygon.from_bounds(minx, miny, maxx, maxy),\n    properties={},\n)\nrequest = httpx.post(\n    f\"{titiler_endpoint}/timeseries/statistics\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2022-02-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P1D\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"analysed_sst\",\n        \"backend\": \"xarray\",\n    },\n    json=geojson.model_dump(exclude_none=True),\n    timeout=None,\n)\nrequest.raise_for_status()\nresponse = request.json()\n
    %%time minx, miny, maxx, maxy = -98.676, 18.857, -81.623, 31.097 geojson = Feature( type=\"Feature\", geometry=Polygon.from_bounds(minx, miny, maxx, maxy), properties={}, ) request = httpx.post( f\"{titiler_endpoint}/timeseries/statistics\", params={ \"concept_id\": concept_id, \"datetime\": \"2022-02-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P1D\", \"temporal_mode\": \"point\", \"variable\": \"analysed_sst\", \"backend\": \"xarray\", }, json=geojson.model_dump(exclude_none=True), timeout=None, ) request.raise_for_status() response = request.json()
    CPU times: user 46.8 ms, sys: 4.22 ms, total: 51.1 ms\nWall time: 27.4 s\n

The /timeseries/statistics endpoint returns the input GeoJSON with summary statistics for each step in the time series embedded in its properties.

    In\u00a0[11]: Copied!
    stats = response[\"properties\"][\"statistics\"]\nprint(len(stats))\n\nstats_preview = {timestamp: sst_stats for i, (timestamp, sst_stats) in enumerate(stats.items()) if i < 2}\nprint(json.dumps(stats_preview, indent=2))\n
    stats = response[\"properties\"][\"statistics\"] print(len(stats)) stats_preview = {timestamp: sst_stats for i, (timestamp, sst_stats) in enumerate(stats.items()) if i < 2} print(json.dumps(stats_preview, indent=2))
    995\n{\n  \"2022-02-01T00:00:01+00:00\": {\n    \"analysed_sst\": {\n      \"min\": 285.27000000000004,\n      \"max\": 300.34000000000003,\n      \"mean\": 296.3800266967469,\n      \"count\": 2337.9599609375,\n      \"sum\": 692924.6356385816,\n      \"std\": 2.701563618833078,\n      \"median\": 296.83000000000004,\n      \"majority\": 300.16,\n      \"minority\": 285.27000000000004,\n      \"unique\": 819.0,\n      \"histogram\": [\n        [\n          14,\n          31,\n          40,\n          62,\n          88,\n          154,\n          321,\n          853,\n          378,\n          422\n        ],\n        [\n          285.27000000000004,\n          286.77700000000004,\n          288.28400000000005,\n          289.79100000000005,\n          291.29800000000006,\n          292.80500000000006,\n          294.312,\n          295.819,\n          297.326,\n          298.833,\n          300.34000000000003\n        ]\n      ],\n      \"valid_percent\": 68.49,\n      \"masked_pixels\": 1087.0,\n      \"valid_pixels\": 2363.0,\n      \"percentile_2\": 288.46000000000004,\n      \"percentile_98\": 300.20000000000005\n    }\n  },\n  \"2022-02-02T00:00:01+00:00\": {\n    \"analysed_sst\": {\n      \"min\": 285.45000000000005,\n      \"max\": 300.36,\n      \"mean\": 296.3582956145494,\n      \"count\": 2337.9599609375,\n      \"sum\": 692873.8292384959,\n      \"std\": 2.658495800828904,\n      \"median\": 296.79,\n      \"majority\": 296.59000000000003,\n      \"minority\": 285.45000000000005,\n      \"unique\": 827.0,\n      \"histogram\": [\n        [\n          14,\n          27,\n          51,\n          56,\n          90,\n          157,\n          332,\n          899,\n          329,\n          408\n        ],\n        [\n          285.45000000000005,\n          286.94100000000003,\n          288.432,\n          289.92300000000006,\n          291.41400000000004,\n          292.90500000000003,\n          294.396,\n          295.887,\n          297.37800000000004,\n          298.869,\n          300.36\n        ]\n      ],\n      \"valid_percent\": 68.49,\n      \"masked_pixels\": 1087.0,\n      \"valid_pixels\": 2363.0,\n      \"percentile_2\": 288.69000000000005,\n      \"percentile_98\": 300.15000000000003\n    }\n  }\n}\n

    The statistics output can be used to generate plots like this:

    In\u00a0[12]: Copied!
    data = response['properties']['statistics']\n\ndates = []\nmeans = []\nstds = []\n\nfor date_str, values in data.items():\n    dates.append(datetime.fromisoformat(date_str))\n    means.append(values[\"analysed_sst\"][\"mean\"])\n    stds.append(values[\"analysed_sst\"][\"std\"])\n\nplt.figure(figsize=(10, 6))\n\nplt.plot(dates, means, \"b-\", label=\"Mean\")\n\nplt.fill_between(\n    dates, \n    np.array(means) - np.array(stds),\n    np.array(means) + np.array(stds),\n    alpha=0.2,\n    color=\"b\",\n    label=\"Standard Deviation\",\n)\n\nplt.xlabel(\"Date\")\nplt.ylabel(\"Temperature (K)\")\nplt.title(\"Mean sea surface temperature in the Gulf of Mexico\")\nplt.legend()\n\nplt.xticks(rotation=45)\n\nplt.tight_layout()\n\nplt.show()\n
    data = response['properties']['statistics'] dates = [] means = [] stds = [] for date_str, values in data.items(): dates.append(datetime.fromisoformat(date_str)) means.append(values[\"analysed_sst\"][\"mean\"]) stds.append(values[\"analysed_sst\"][\"std\"]) plt.figure(figsize=(10, 6)) plt.plot(dates, means, \"b-\", label=\"Mean\") plt.fill_between( dates, np.array(means) - np.array(stds), np.array(means) + np.array(stds), alpha=0.2, color=\"b\", label=\"Standard Deviation\", ) plt.xlabel(\"Date\") plt.ylabel(\"Temperature (K)\") plt.title(\"Mean sea surface temperature in the Gulf of Mexico\") plt.legend() plt.xticks(rotation=45) plt.tight_layout() plt.show() In\u00a0[13]: Copied!
    minx, miny, maxx, maxy = -180, -90, 180, 90\nrequest = httpx.get(\n    f\"{titiler_endpoint}/timeseries/WebMercatorQuad/tilejson.json\",\n    params={\n        \"concept_id\": concept_id,\n        \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\",\n        \"step\": \"P1M\",\n        \"temporal_mode\": \"point\",\n        \"variable\": \"sea_ice_fraction\",\n        \"backend\": \"xarray\",\n        \"colormap_name\": \"blues_r\",\n        \"rescale\": [[0, 1]],\n    },\n    timeout=None,\n)\ntilejsons = request.json()\ntilejson_preview = {\n    timestamp: tilejson\n    for i, (timestamp, tilejson) in enumerate(tilejsons.items())\n    if i < 2\n}\nprint(json.dumps(tilejson_preview, indent=2))\n
    minx, miny, maxx, maxy = -180, -90, 180, 90 request = httpx.get( f\"{titiler_endpoint}/timeseries/WebMercatorQuad/tilejson.json\", params={ \"concept_id\": concept_id, \"datetime\": \"2023-11-01T00:00:01Z/2024-10-30T23:59:59Z\", \"step\": \"P1M\", \"temporal_mode\": \"point\", \"variable\": \"sea_ice_fraction\", \"backend\": \"xarray\", \"colormap_name\": \"blues_r\", \"rescale\": [[0, 1]], }, timeout=None, ) tilejsons = request.json() tilejson_preview = { timestamp: tilejson for i, (timestamp, tilejson) in enumerate(tilejsons.items()) if i < 2 } print(json.dumps(tilejson_preview, indent=2))
    {\n  \"2023-11-01T00:00:01+00:00\": {\n    \"tilejson\": \"2.2.0\",\n    \"version\": \"1.0.0\",\n    \"scheme\": \"xyz\",\n    \"tiles\": [\n      \"https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C2036881735-POCLOUD&variable=sea_ice_fraction&backend=xarray&colormap_name=blues_r&rescale=%5B0%2C+1%5D&concept_id=C2036881735-POCLOUD&datetime=2023-11-01T00%3A00%3A01%2B00%3A00\"\n    ],\n    \"minzoom\": 0,\n    \"maxzoom\": 24,\n    \"bounds\": [\n      -180.0,\n      -90.0,\n      180.0,\n      90.0\n    ],\n    \"center\": [\n      0.0,\n      0.0,\n      0\n    ]\n  },\n  \"2023-12-01T00:00:01+00:00\": {\n    \"tilejson\": \"2.2.0\",\n    \"version\": \"1.0.0\",\n    \"scheme\": \"xyz\",\n    \"tiles\": [\n      \"https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C2036881735-POCLOUD&variable=sea_ice_fraction&backend=xarray&colormap_name=blues_r&rescale=%5B0%2C+1%5D&concept_id=C2036881735-POCLOUD&datetime=2023-12-01T00%3A00%3A01%2B00%3A00\"\n    ],\n    \"minzoom\": 0,\n    \"maxzoom\": 24,\n    \"bounds\": [\n      -180.0,\n      -90.0,\n      180.0,\n      90.0\n    ],\n    \"center\": [\n      0.0,\n      0.0,\n      0\n    ]\n  }\n}\n
    In\u00a0[14]: Copied!
    m = Map(location=[0, 0], zoom_start=3, min_zoom=3)\nfor datetime_, tilejson in tilejsons.items():\n    label = datetime.fromisoformat(datetime_).strftime(\"%Y-%m\")\n    TileLayer(\n        tiles=tilejson[\"tiles\"][0],\n        attr=\"GAMSSA SST\",\n        overlay=True,\n        name=label,\n        show=False,\n    ).add_to(m)\nLayerControl(collapsed=False).add_to(m)\nm\n
    m = Map(location=[0, 0], zoom_start=3, min_zoom=3) for datetime_, tilejson in tilejsons.items(): label = datetime.fromisoformat(datetime_).strftime(\"%Y-%m\") TileLayer( tiles=tilejson[\"tiles\"][0], attr=\"GAMSSA SST\", overlay=True, name=label, show=False, ).add_to(m) LayerControl(collapsed=False).add_to(m) m Out[14]: Make this Notebook Trusted to load map: File -> Trust Notebook"},{"location":"examples/time_series_example/#time-series-api","title":"time series API\u00b6","text":"

The timeseries extension provides endpoints for requesting results for all points or intervals along a time series. The /timeseries family of endpoints works by converting the provided time series parameters (datetime, step, and temporal_mode) into a set of datetime query parameters for the corresponding lower-level endpoint, running asynchronous requests against that endpoint, then collecting the results and returning them in a coherent format to the user.

    The time series structure is defined by the datetime, step, and temporal_mode parameters.

The temporal_mode parameter controls whether CMR is queried for a particular point in time (temporal_mode=point) or over an entire interval (temporal_mode=interval). In general, it is best to use temporal_mode=point for datasets where granules overlap completely in space (e.g. daily sea surface temperature predictions) because the /timeseries endpoints will create a mosaic of all assets returned by the query and the first asset to cover a pixel will be used. For datasets that require granules from multiple timestamps to fully cover an AOI, temporal_mode=interval is appropriate. For example, you can get weekly composites of satellite imagery for visualization purposes with step=P1W and temporal_mode=interval.
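As a mental model, the expansion from time series parameters into individual datetime query values works roughly like the sketch below. This is a simplified illustration, not the actual titiler-cmr implementation; the function name and signature are invented for the example, and the one-second offset for interval ends follows the behavior described elsewhere in this document.

from datetime import datetime, timedelta

def expand_timeseries(start: datetime, end: datetime, step: timedelta, temporal_mode: str) -> list[str]:
    """Sketch: turn time series parameters into per-request datetime values."""
    points = []
    current = start
    while current <= end:
        points.append(current)
        current += step
    if temporal_mode == "point":
        # one lower-level request per timestamp
        return [point.isoformat() for point in points]
    # temporal_mode == "interval": one request per range; each range ends
    # one second before the next point in the series
    return [
        f"{s.isoformat()}/{(e - timedelta(seconds=1)).isoformat()}"
        for s, e in zip(points, points[1:])
    ]

# weekly intervals spanning October 2024:
expand_timeseries(datetime(2024, 10, 1), datetime(2024, 10, 29), timedelta(weeks=1), "interval")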

    "},{"location":"examples/time_series_example/#time-series-parameters","title":"time series parameters\u00b6","text":"

    The time series API makes it possible to return results for many points along a timeseries with a single request. The available parameters are:

• datetime (str): Either a date-time, an interval, or a comma-separated list of date-times or intervals. Date and time expressions adhere to the RFC 3339 format (e.g. '2020-06-01T09:00:00Z').
• step (str): width of individual time steps expressed as an ISO 8601 duration
    • temporal_mode (str): if \"point\", queries will be made for the individual timestamps along the timeseries. If \"interval\", queries will be made for the periods between each timestamp along the timeseries.

There are many ways to combine the parameters to produce a time series; the sketch after this list shows each combination expressed as request parameters.

1. Exact points in time from a start to an end datetime:
    • provide datetime={start_datetime}/{end_datetime}, step={step_width}, and temporal_mode=point where step_width is something like P1D for daily or P2W for bi-weekly.
    • provide datetime={start_datetime}/{end_datetime}, and temporal_mode=point without step to get a point for every unique timestamp in the granules between start_datetime and end_datetime.
2. Fixed-width intervals between a start and end datetime:
    • provide datetime={start_datetime}/{end_datetime}, step, and temporal_mode=interval
3. Specific datetimes
    • provide datetime=2024-10-01T00:00:01Z,2024-10-02T00:00:01Z
4. Specific datetime intervals
    • provide datetime=2024-10-01T00:00:01Z/2024-10-01T23:59:59Z,2024-10-05T00:00:01Z/2024-10-05T23:59:59Z
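As a concrete illustration, the four combinations above could be expressed as query parameter sets like these (hypothetical values; substitute a real concept_id and your own datetimes):

# 1. daily points between a start and an end datetime
{"datetime": "2024-10-01T00:00:00Z/2024-10-31T23:59:59Z", "step": "P1D", "temporal_mode": "point"}

# 2. week-long intervals between a start and an end datetime
{"datetime": "2024-10-01T00:00:00Z/2024-10-31T23:59:59Z", "step": "P1W", "temporal_mode": "interval"}

# 3. specific datetimes
{"datetime": "2024-10-01T00:00:01Z,2024-10-02T00:00:01Z"}

# 4. specific datetime intervals
{"datetime": "2024-10-01T00:00:01Z/2024-10-01T23:59:59Z,2024-10-05T00:00:01Z/2024-10-05T23:59:59Z"}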
    "},{"location":"examples/time_series_example/#how-to-use-the-timeseries-api-with-titilercmr","title":"How to use the timeseries API with titiler.cmr\u00b6","text":"

The /timeseries endpoints work by interpreting the time series parameters (e.g. datetime and step) and parameterizing a set of lower-level requests to the related endpoint. For example, a request to /timeseries/statistics for a set of four points in time, each one week apart, will fire off four requests to the /statistics endpoint, each with a particular value of the datetime parameter. The results are collected and returned in a coherent format that can be consumed in a table or a chart.
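For intuition, the fan-out behaves roughly like this client-side sketch (an illustration of the pattern, not the server's actual code; the helper names are made up):

import asyncio

import httpx

async def timeseries_statistics(endpoint: str, datetimes: list[str], params: dict, geojson: dict) -> dict:
    """Run one /statistics request per datetime and collect the results."""

    async def fetch(client: httpx.AsyncClient, datetime_: str):
        response = await client.post(
            f"{endpoint}/statistics",
            params={**params, "datetime": datetime_},
            json=geojson,
            timeout=None,
        )
        response.raise_for_status()
        return datetime_, response.json()

    async with httpx.AsyncClient() as client:
        # issue all lower-level requests concurrently, then gather the results
        results = await asyncio.gather(*(fetch(client, dt) for dt in datetimes))
    return dict(results)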

Every /timeseries request in titiler.cmr requires both a concept_id and a set of time series parameters. The GHRSST Level 4 GAMSSA_28km Global Foundation Sea Surface Temperature Analysis v1.0 dataset (GDS2) is a useful dataset for demo purposes because the granule assets are small (~1MB each).

See Time series API limits for details on the spatial and temporal limits for requests to a titiler-cmr deployment.

    "},{"location":"examples/time_series_example/#time-series-for-all-granules-between-a-startend-datetime","title":"Time series for all granules between a start/end datetime\u00b6","text":"

For datasets whose granules are regularly spaced in time (e.g. daily), it is useful to be able to quickly request a summary of all points in time between a start and end datetime. You can do that by simply providing a start and end datetime in the datetime parameter, without a step. The application will query CMR and produce a list of unique datetime values from the results of the granule search. If a granule represents a datetime range, the midpoint between the range's start and end is used.
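As a small example of that midpoint rule: the MUR SST granule shown later in this documentation covers 2024-10-11T21:00:00Z through 2024-10-12T21:00:00Z, so the timestamp used for it would be the midpoint of that range:

from datetime import datetime, timezone

start = datetime(2024, 10, 11, 21, tzinfo=timezone.utc)
end = datetime(2024, 10, 12, 21, tzinfo=timezone.utc)
midpoint = start + (end - start) / 2
print(midpoint.isoformat())  # 2024-10-12T09:00:00+00:00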

    "},{"location":"examples/time_series_example/#weekly-timeseries","title":"Weekly timeseries\u00b6","text":"

Sometimes you might be interested in a report with lower temporal resolution than the maximum available for a dataset. By setting step="P1W" and temporal_mode="point", you can get a weekly series.

    "},{"location":"examples/time_series_example/#periodic-timeseries","title":"Periodic timeseries\u00b6","text":"

Some datasets (like satellite imagery) may consist of granules that do not fully cover an arbitrary area of interest. In this case it is useful to construct a time series from a set of datetime ranges so that granules can be mosaicked to ensure each step has full coverage.

To create a set of non-overlapping week-long datetime ranges, you can modify the query to use temporal_mode="interval", which creates ranges that start on the weekly values returned by the previous query and extend to the second before the next value in the series.

    "},{"location":"examples/time_series_example/#custom-time-series","title":"Custom time series\u00b6","text":"

If you want to specify the exact datetime values for a time series and you either cannot or do not want to use the time series parameters, you can supply a set of comma-separated datetimes and/or datetime ranges in the datetime parameter.

    "},{"location":"examples/time_series_example/#example-sea-surface-temperature-gif","title":"Example: sea surface temperature GIF\u00b6","text":"

The /timeseries/bbox endpoint can be used to produce a GIF visualization of granules over time.

    The example below shows biweekly sea surface temperature estimates from the GAMSSA dataset for the period from November 2023 through October 2024.

    "},{"location":"examples/time_series_example/#example-hlsl30-gif","title":"Example: HLSL30 GIF\u00b6","text":"

    The example below shows a weekly mosaic of imagery from the Harmonized Landsat Sentinel L30 (HLSL30) collection for the period from January to November 2024.

    "},{"location":"examples/time_series_example/#example-sea-surface-temperature-statistics","title":"Example: sea surface temperature statistics\u00b6","text":"

    The /timeseries/statistics endpoint will produce summary statistics for an AOI for all points along a timeseries.

    The example below shows daily sea surface temperature summary statistics for the Gulf of Mexico from the GAMSSA dataset for the period from February 2022 through October 2024.

    "},{"location":"examples/time_series_example/#example-time-series-raster-tiles","title":"Example: Time series raster tiles\u00b6","text":"

It can be useful to allow users to select a time step in an interactive map. You can use the /timeseries/tilejson endpoint for that purpose. The following example shows how you could use it to add time series capability to an interactive map of sea ice cover.

    "},{"location":"examples/xarray_backend_example/","title":"xarray backend: MUR SST","text":"In\u00a0[1]: Copied!
    import json\nfrom datetime import datetime, timezone\n\nimport earthaccess\nimport httpx\nimport xarray as xr\nfrom folium import GeoJson, Map, TileLayer\n\n# titiler_endpoint = \"http://localhost:8081\"  # docker network endpoint\ntitiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\"  # deployed endpoint\n
    import json from datetime import datetime, timezone import earthaccess import httpx import xarray as xr from folium import GeoJson, Map, TileLayer # titiler_endpoint = \"http://localhost:8081\" # docker network endpoint titiler_endpoint = \"https://dev-titiler-cmr.delta-backend.com\" # deployed endpoint In\u00a0[2]: Copied!
    datasets = earthaccess.search_datasets(doi=\"10.5067/GHGMR-4FJ04\")\nds = datasets[0]\n\nconcept_id = ds[\"meta\"][\"concept-id\"]\nprint(\"Concept-Id: \", concept_id)\n\nprint(\"Abstract: \", ds[\"umm\"][\"Abstract\"])\n
    datasets = earthaccess.search_datasets(doi=\"10.5067/GHGMR-4FJ04\") ds = datasets[0] concept_id = ds[\"meta\"][\"concept-id\"] print(\"Concept-Id: \", concept_id) print(\"Abstract: \", ds[\"umm\"][\"Abstract\"])
    Concept-Id:  C1996881146-POCLOUD\nAbstract:  A Group for High Resolution Sea Surface Temperature (GHRSST) Level 4 sea surface temperature analysis produced as a retrospective dataset (four day latency) and near-real-time dataset (one day latency) at the JPL Physical Oceanography DAAC using wavelets as basis functions in an optimal interpolation approach on a global 0.01 degree grid. The version 4 Multiscale Ultrahigh Resolution (MUR) L4 analysis is based upon nighttime GHRSST L2P skin and subskin SST observations from several instruments including the NASA Advanced Microwave Scanning Radiometer-EOS (AMSR-E), the JAXA Advanced Microwave Scanning Radiometer 2 on GCOM-W1, the Moderate Resolution Imaging Spectroradiometers (MODIS) on the NASA Aqua and Terra platforms, the US Navy microwave WindSat radiometer, the Advanced Very High Resolution Radiometer (AVHRR) on several NOAA satellites, and in situ SST observations from the NOAA iQuam project. The ice concentration data are from the archives at the EUMETSAT Ocean and Sea Ice Satellite Application Facility (OSI SAF) High Latitude Processing Center and are also used for an improved SST parameterization for the high-latitudes.  The dataset also contains additional variables for some granules including a SST anomaly derived from a MUR climatology and the temporal distance to the nearest IR measurement for each pixel.This dataset is funded by the NASA MEaSUREs program ( http://earthdata.nasa.gov/our-community/community-data-system-programs/measures-projects ), and created by a team led by Dr. Toshio M. Chin from JPL. It adheres to the GHRSST Data Processing Specification (GDS) version 2 format specifications. Use the file global metadata \"history:\" attribute to determine if a granule is near-realtime or retrospective.\n
    In\u00a0[3]: Copied!
    results = earthaccess.search_data(\n    count=1,\n    concept_id=concept_id,\n    temporal=(\"2024-10-12\", \"2024-10-13\"),\n)\nprint(\"Granules:\")\nprint(results)\nprint()\nprint(\"Example of NetCDF URL: \")\nfor link in results[0].data_links(access=\"external\"):\n    print(link)\n
    results = earthaccess.search_data( count=1, concept_id=concept_id, temporal=(\"2024-10-12\", \"2024-10-13\"), ) print(\"Granules:\") print(results) print() print(\"Example of NetCDF URL: \") for link in results[0].data_links(access=\"external\"): print(link)
    Granules:\n[Collection: {'Version': '4.1', 'ShortName': 'MUR-JPL-L4-GLOB-v4.1'}\nSpatial coverage: {'HorizontalSpatialDomain': {'Geometry': {'BoundingRectangles': [{'WestBoundingCoordinate': -180, 'SouthBoundingCoordinate': -90, 'EastBoundingCoordinate': 180, 'NorthBoundingCoordinate': 90}]}}}\nTemporal coverage: {'RangeDateTime': {'EndingDateTime': '2024-10-12T21:00:00.000Z', 'BeginningDateTime': '2024-10-11T21:00:00.000Z'}}\nSize(MB): 707.340648651123\nData: ['https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20241012090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc']]\n\nExample of NetCDF URL: \nhttps://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20241012090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc\n
    In\u00a0[4]: Copied!
    fs = earthaccess.get_fsspec_https_session()\n\nds = xr.open_dataset(\n    fs.open(results[0].data_links(access=\"external\")[0]),\n    engine=\"h5netcdf\",\n)\nprint(\"Data Variables:\")\nfor var in ds.data_vars:\n    print(str(var))\n\ndisplay(ds)\n
    fs = earthaccess.get_fsspec_https_session() ds = xr.open_dataset( fs.open(results[0].data_links(access=\"external\")[0]), engine=\"h5netcdf\", ) print(\"Data Variables:\") for var in ds.data_vars: print(str(var)) display(ds)
    Data Variables:\nanalysed_sst\nanalysis_error\nmask\nsea_ice_fraction\ndt_1km_data\nsst_anomaly\n
    <xarray.Dataset> Size: 29GB\nDimensions:           (time: 1, lat: 17999, lon: 36000)\nCoordinates:\n  * time              (time) datetime64[ns] 8B 2024-10-12T09:00:00\n  * lat               (lat) float32 72kB -89.99 -89.98 -89.97 ... 89.98 89.99\n  * lon               (lon) float32 144kB -180.0 -180.0 -180.0 ... 180.0 180.0\nData variables:\n    analysed_sst      (time, lat, lon) float64 5GB ...\n    analysis_error    (time, lat, lon) float64 5GB ...\n    mask              (time, lat, lon) float32 3GB ...\n    sea_ice_fraction  (time, lat, lon) float64 5GB ...\n    dt_1km_data       (time, lat, lon) timedelta64[ns] 5GB ...\n    sst_anomaly       (time, lat, lon) float64 5GB ...\nAttributes: (12/47)\n    Conventions:                CF-1.7\n    title:                      Daily MUR SST, Final product\n    summary:                    A merged, multi-sensor L4 Foundation SST anal...\n    references:                 http://podaac.jpl.nasa.gov/Multi-scale_Ultra-...\n    institution:                Jet Propulsion Laboratory\n    history:                    created at nominal 4-day latency; replaced nr...\n    ...                         ...\n    project:                    NASA Making Earth Science Data Records for Us...\n    publisher_name:             GHRSST Project Office\n    publisher_url:              http://www.ghrsst.org\n    publisher_email:            ghrsst-po@nceo.ac.uk\n    processing_level:           L4\n    cdm_data_type:              grid
    xarray.Dataset
    • Dimensions:
      • time: 1
      • lat: 17999
      • lon: 36000
    • Coordinates: (3)
      • time(time)datetime64[ns]2024-10-12T09:00:00long_name :reference time of sst fieldstandard_name :timeaxis :Tcomment :Nominal time of analyzed fields
        array(['2024-10-12T09:00:00.000000000'], dtype='datetime64[ns]')
      • lat(lat)float32-89.99 -89.98 ... 89.98 89.99long_name :latitudestandard_name :latitudeaxis :Yunits :degrees_northvalid_min :-90.0valid_max :90.0comment :geolocations inherited from the input data without correction
        array([-89.99, -89.98, -89.97, ...,  89.97,  89.98,  89.99], dtype=float32)
      • lon(lon)float32-180.0 -180.0 ... 180.0 180.0long_name :longitudestandard_name :longitudeaxis :Xunits :degrees_eastvalid_min :-180.0valid_max :180.0comment :geolocations inherited from the input data without correction
        array([-179.99, -179.98, -179.97, ...,  179.98,  179.99,  180.  ],\n      dtype=float32)
    • Data variables: (6)
      • analysed_sst(time, lat, lon)float64...long_name :analysed sea surface temperaturestandard_name :sea_surface_foundation_temperatureunits :kelvinvalid_min :-32767valid_max :32767comment :\"Final\" version using Multi-Resolution Variational Analysis (MRVA) method for interpolationsource :MODIS_T-JPL, MODIS_A-JPL, AVHRRMTB_G-NAVO, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF
        [647964000 values with dtype=float64]
      • analysis_error(time, lat, lon)float64...long_name :estimated error standard deviation of analysed_sstunits :kelvinvalid_min :0valid_max :32767comment :uncertainty in \"analysed_sst\"
        [647964000 values with dtype=float64]
      • mask(time, lat, lon)float32...long_name :sea/land field composite maskvalid_min :1valid_max :31flag_masks :[ 1 2 4 8 16]flag_meanings :open_sea land open_lake open_sea_with_ice_in_the_grid open_lake_with_ice_in_the_gridcomment :mask can be used to further filter the data.source :GMT \"grdlandmask\", ice flag from sea_ice_fraction data
        [647964000 values with dtype=float32]
      • sea_ice_fraction(time, lat, lon)float64...long_name :sea ice area fractionstandard_name :sea_ice_area_fractionvalid_min :0valid_max :100source :EUMETSAT OSI-SAF, copyright EUMETSATcomment :ice fraction is a dimensionless quantity between 0 and 1; it has been interpolated by a nearest neighbor approach; EUMETSAT OSI-SAF files used: ice_conc_nh_polstere-100_multi_202410121200.nc, ice_conc_sh_polstere-100_multi_202410121200.nc.
        [647964000 values with dtype=float64]
      • dt_1km_data(time, lat, lon)timedelta64[ns]...long_name :time to most recent 1km datavalid_min :-127valid_max :127source :MODIS and VIIRS pixels ingested by MURcomment :The grid value is hours between the analysis time and the most recent MODIS or VIIRS 1km L2P datum within 0.01 degrees from the grid point. \"Fill value\" indicates absence of such 1km data at the grid point.
        [647964000 values with dtype=timedelta64[ns]]
      • sst_anomaly(time, lat, lon)float64...long_name :SST anomaly from a seasonal SST climatology based on the MUR data over 2003-2014 periodunits :kelvinvalid_min :-32767valid_max :32767comment :anomaly reference to the day-of-year average between 2003 and 2014
        [647964000 values with dtype=float64]
    • Indexes: (3)
      • timePandasIndex
        PandasIndex(DatetimeIndex(['2024-10-12 09:00:00'], dtype='datetime64[ns]', name='time', freq=None))
      • latPandasIndex
        PandasIndex(Index([-89.98999786376953,  -89.9800033569336, -89.97000122070312,\n       -89.95999908447266, -89.94999694824219, -89.94000244140625,\n       -89.93000030517578, -89.91999816894531, -89.91000366210938,\n        -89.9000015258789,\n       ...\n         89.9000015258789,  89.91000366210938,  89.91999816894531,\n        89.93000030517578,  89.94000244140625,  89.94999694824219,\n        89.95999908447266,  89.97000122070312,   89.9800033569336,\n        89.98999786376953],\n      dtype='float32', name='lat', length=17999))
      • lonPandasIndex
        PandasIndex(Index([-179.99000549316406, -179.97999572753906, -179.97000122070312,\n        -179.9600067138672,  -179.9499969482422, -179.94000244140625,\n       -179.92999267578125,  -179.9199981689453, -179.91000366210938,\n       -179.89999389648438,\n       ...\n        179.91000366210938,   179.9199981689453,  179.92999267578125,\n        179.94000244140625,   179.9499969482422,   179.9600067138672,\n        179.97000122070312,  179.97999572753906,  179.99000549316406,\n                     180.0],\n      dtype='float32', name='lon', length=36000))
    • Attributes: (47)Conventions :CF-1.7title :Daily MUR SST, Final productsummary :A merged, multi-sensor L4 Foundation SST analysis product from JPL.references :http://podaac.jpl.nasa.gov/Multi-scale_Ultra-high_Resolution_MUR-SSTinstitution :Jet Propulsion Laboratoryhistory :created at nominal 4-day latency; replaced nrt (1-day latency) version.comment :MUR = \"Multi-scale Ultra-high Resolution\"license :These data are available free of charge under data policy of JPL PO.DAAC.id :MUR-JPL-L4-GLOB-v04.1naming_authority :org.ghrsstproduct_version :04.1uuid :27665bc0-d5fc-11e1-9b23-0800200c9a66gds_version_id :2.0netcdf_version_id :4.1date_created :20241021T071625Zstart_time :20241012T090000Zstop_time :20241012T090000Ztime_coverage_start :20241011T210000Ztime_coverage_end :20241012T210000Zfile_quality_level :3source :MODIS_T-JPL, MODIS_A-JPL, AVHRRMTB_G-NAVO, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAFplatform :Terra, Aqua, MetOp-B, Buoys/Shipssensor :MODIS, AVHRR, in-situMetadata_Conventions :Unidata Observation Dataset v1.0metadata_link :http://podaac.jpl.nasa.gov/ws/metadata/dataset/?format=iso&shortName=MUR-JPL-L4-GLOB-v04.1keywords :Oceans > Ocean Temperature > Sea Surface Temperaturekeywords_vocabulary :NASA Global Change Master Directory (GCMD) Science Keywordsstandard_name_vocabulary :NetCDF Climate and Forecast (CF) Metadata Conventionsouthernmost_latitude :-90.0northernmost_latitude :90.0westernmost_longitude :-180.0easternmost_longitude :180.0spatial_resolution :0.01 degreesgeospatial_lat_units :degrees northgeospatial_lat_resolution :0.01geospatial_lon_units :degrees eastgeospatial_lon_resolution :0.01acknowledgment :Please acknowledge the use of these data with the following statement: These data were provided by JPL under support by NASA MEaSUREs program.creator_name :JPL MUR SST projectcreator_email :ghrsst@podaac.jpl.nasa.govcreator_url :http://mur.jpl.nasa.govproject :NASA Making Earth Science Data Records for Use in Research Environments (MEaSUREs) Programpublisher_name :GHRSST Project Officepublisher_url :http://www.ghrsst.orgpublisher_email :ghrsst-po@nceo.ac.ukprocessing_level :L4cdm_data_type :grid
    In\u00a0[5]: Copied!
    variable = \"sea_ice_fraction\"\ndatetime_ = datetime(2024, 10, 10, tzinfo=timezone.utc).isoformat()\n
    variable = \"sea_ice_fraction\" datetime_ = datetime(2024, 10, 10, tzinfo=timezone.utc).isoformat() In\u00a0[6]: Copied!
r = httpx.get(\n    f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\",\n    params = (\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", datetime_),\n        # titiler-cmr can work with both Zarr and COG datasets\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"xarray\"),\n        (\"variable\", variable),\n        # We need to set min/max zoom because we don't want to use low zoom levels (e.g. 0)\n        # which would result in uselessly large-scale queries\n        (\"minzoom\", 2),\n        (\"maxzoom\", 13),\n        (\"rescale\", \"0,1\"),\n        (\"colormap_name\", \"blues_r\"),\n    )\n).json()\n\nprint(r)\n
r = httpx.get( f\"{titiler_endpoint}/WebMercatorQuad/tilejson.json\", params = ( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", datetime_), # titiler-cmr can work with both Zarr and COG datasets # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"xarray\"), (\"variable\", variable), # We need to set min/max zoom because we don't want to use low zoom levels (e.g. 0) # which would result in uselessly large-scale queries (\"minzoom\", 2), (\"maxzoom\", 13), (\"rescale\", \"0,1\"), (\"colormap_name\", \"blues_r\"), ) ).json() print(r)
    {'tilejson': '2.2.0', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['https://dev-titiler-cmr.delta-backend.com/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?concept_id=C1996881146-POCLOUD&datetime=2024-10-10T00%3A00%3A00%2B00%3A00&backend=xarray&variable=sea_ice_fraction&rescale=0%2C1&colormap_name=blues_r'], 'minzoom': 2, 'maxzoom': 13, 'bounds': [-180.0, -90.0, 180.0, 90.0], 'center': [0.0, 0.0, 2]}\n
    In\u00a0[7]: Copied!
    bounds = r[\"bounds\"]\nm = Map(\n    location=(70, -40),\n    zoom_start=3\n)\n\nTileLayer(\n    tiles=r[\"tiles\"][0],\n    opacity=1,\n    attr=\"NASA\",\n).add_to(m)\nm\n
    bounds = r[\"bounds\"] m = Map( location=(70, -40), zoom_start=3 ) TileLayer( tiles=r[\"tiles\"][0], opacity=1, attr=\"NASA\", ).add_to(m) m Out[7]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[8]: Copied!
    geojson_dict = {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"properties\": {},\n      \"geometry\": {\n        \"coordinates\": [\n          [\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ]\n          ]\n        ],\n        \"type\": \"Polygon\"\n      }\n    }\n  ]\n}\n\nr = httpx.post(\n    f\"{titiler_endpoint}/statistics\",\n    params=(\n        (\"concept_id\", concept_id),\n        # Datetime in form of `start_date/end_date`\n        (\"datetime\", datetime_),\n        # titiler-cmr can work with both Zarr and COG dataset\n        # but we need to tell the endpoints in advance which backend\n        # to use\n        (\"backend\", \"xarray\"),\n        (\"variable\", variable),\n    ),\n    json=geojson_dict,\n    timeout=60,\n).json()\n\nprint(json.dumps(r, indent=2))\n
    geojson_dict = { \"type\": \"FeatureCollection\", \"features\": [ { \"type\": \"Feature\", \"properties\": {}, \"geometry\": { \"coordinates\": [ [ [ -20.79973248834736, 83.55979308678764 ], [ -20.79973248834736, 75.0115425216471 ], [ 14.483337068956956, 75.0115425216471 ], [ 14.483337068956956, 83.55979308678764 ], [ -20.79973248834736, 83.55979308678764 ] ] ], \"type\": \"Polygon\" } } ] } r = httpx.post( f\"{titiler_endpoint}/statistics\", params=( (\"concept_id\", concept_id), # Datetime in form of `start_date/end_date` (\"datetime\", datetime_), # titiler-cmr can work with both Zarr and COG dataset # but we need to tell the endpoints in advance which backend # to use (\"backend\", \"xarray\"), (\"variable\", variable), ), json=geojson_dict, timeout=60, ).json() print(json.dumps(r, indent=2))
    {\n  \"type\": \"FeatureCollection\",\n  \"features\": [\n    {\n      \"type\": \"Feature\",\n      \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [\n          [\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              75.0115425216471\n            ],\n            [\n              14.483337068956956,\n              83.55979308678764\n            ],\n            [\n              -20.79973248834736,\n              83.55979308678764\n            ]\n          ]\n        ]\n      },\n      \"properties\": {\n        \"statistics\": {\n          \"sea_ice_fraction\": {\n            \"min\": 0.3,\n            \"max\": 0.99,\n            \"mean\": 0.845157064600111,\n            \"count\": 1725290.875,\n            \"sum\": 1458141.771496357,\n            \"std\": 0.1559272507275522,\n            \"median\": 0.9,\n            \"majority\": 0.9500000000000001,\n            \"minority\": 0.36,\n            \"unique\": 70.0,\n            \"histogram\": [\n              [\n                34892,\n                39574,\n                38696,\n                37867,\n                44348,\n                72817,\n                110580,\n                200188,\n                472678,\n                675707\n              ],\n              [\n                0.3,\n                0.369,\n                0.43799999999999994,\n                0.5069999999999999,\n                0.576,\n                0.645,\n                0.714,\n                0.7829999999999999,\n                0.8519999999999999,\n                0.9209999999999998,\n                0.99\n              ]\n            ],\n            \"valid_percent\": 57.18,\n            \"masked_pixels\": 1293477.0,\n            \"valid_pixels\": 1727347.0,\n            \"percentile_2\": 0.36,\n            \"percentile_98\": 0.99\n          }\n        }\n      }\n    }\n  ]\n}\n
    In\u00a0[\u00a0]: Copied!
    \n
    "},{"location":"examples/xarray_backend_example/#xarray-backend-mur-sst","title":"xarray backend: MUR SST\u00b6","text":"

The MUR SST dataset has daily records for sea surface temperature and ice cover fraction. There is a NetCDF file for each record.

    To run the titiler-cmr service locally you can fire up the docker network with this command:

    docker compose up\n
    "},{"location":"examples/xarray_backend_example/#requirements","title":"Requirements\u00b6","text":"

To run some of the cells in this notebook you will need to install a few packages: earthaccess, folium, httpx, xarray
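For example, in a standard Jupyter environment you could install them from a notebook cell (assuming pip is available):

%pip install earthaccess folium httpx xarray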

    "},{"location":"examples/xarray_backend_example/#identify-the-dataset","title":"Identify the dataset\u00b6","text":"

    You can find the MUR SST dataset using the earthaccess.search_datasets function.

    "},{"location":"examples/xarray_backend_example/#examine-a-granule","title":"Examine a granule\u00b6","text":"

Each granule contains a single day's record for the entire globe and has a single data file.

    "},{"location":"examples/xarray_backend_example/#explore-the-available-variables","title":"Explore the available variables\u00b6","text":"

The NetCDF file can be opened with xarray using the h5netcdf engine. When running outside of the AWS us-west-2 region you will need to access the data using "external" HTTPS links (rather than "direct" S3 links). Those links require authentication, which is handled by earthaccess as long as your Earthdata credentials are stored in the ~/.netrc file.

    "},{"location":"examples/xarray_backend_example/#define-a-query-for-titiler-cmr","title":"Define a query for titiler-cmr\u00b6","text":"

    To use titiler-cmr's endpoints for a NetCDF dataset like this we need to define a date range for the CMR query and a variable to analyze.

    "},{"location":"examples/xarray_backend_example/#display-tiles-in-an-interactive-map","title":"Display tiles in an interactive map\u00b6","text":"

    The /tilejson.json endpoint will provide a parameterized xyz tile URL that can be added to an interactive map.

    "},{"location":"examples/xarray_backend_example/#geojson-statistics","title":"GeoJSON Statistics\u00b6","text":"

The /statistics endpoint can be used to get summary statistics for a GeoJSON Feature or FeatureCollection.

    "}]} \ No newline at end of file diff --git a/pr-previews/pr-42/sitemap.xml b/pr-previews/pr-42/sitemap.xml index fb64273..23d754a 100644 --- a/pr-previews/pr-42/sitemap.xml +++ b/pr-previews/pr-42/sitemap.xml @@ -2,30 +2,30 @@ https://developmentseed.org/titiler-cmr/ - 2024-12-20 + 2024-12-21 https://developmentseed.org/titiler-cmr/contributing/ - 2024-12-20 + 2024-12-21 https://developmentseed.org/titiler-cmr/release-notes/ - 2024-12-20 + 2024-12-21 - https://developmentseed.org/titiler-cmr/time_series_performance_benchmarks/ - 2024-12-20 + https://developmentseed.org/titiler-cmr/deployment/time_series_api_limits/ + 2024-12-21 https://developmentseed.org/titiler-cmr/examples/rasterio_backend_example/ - 2024-12-20 + 2024-12-21 https://developmentseed.org/titiler-cmr/examples/time_series_example/ - 2024-12-20 + 2024-12-21 https://developmentseed.org/titiler-cmr/examples/xarray_backend_example/ - 2024-12-20 + 2024-12-21 \ No newline at end of file diff --git a/pr-previews/pr-42/sitemap.xml.gz b/pr-previews/pr-42/sitemap.xml.gz index 0b7e683e4fde572e18a1781ab33bb894a26fc3ac..7ed4c935ff5e1e60160b8f6840681e84d49f3994 100644 GIT binary patch literal 280 zcmV+z0q6c7iwFn+1!iXg|8r?{Wo=<_E_iKh0L@gpZo@DP-18NNc3ZL0p)G7@>ld`M zp-4m`R4d*lC%!&E%2#pd zckKlo0T+2VP$$mJ>0Qck94oR6UL~p`o+RmWevn*2xv$|D$7MzAqv@M{^HSDL+0=Pt#qMhISMD#uItE62yl@U< zI~NEJ6hRpx18rx+KyvXmw=?WvAV3Q=sALCcz2yz9!K8e0IULO7*;X8mN`FHz!`gWQ eYmb>dkE=-`s@wm?wH*1Q=zjo+iYW$I1ONa<>47}} literal 282 zcmV+#0pQzd%l;X}%C95XTuP7Ra=JWJAjJdj+QGSA*cy-Q-v1drWPr472CSt!zU za043#2c8L_TbS6ev4E*|C`&n`pNAE(3te60#Ub4nX^}_B+Iy(UU%9^s>);tpd*v9$ zb}kSsD1y`n2HMUB8wU*m8lYwX{0``p*U+@_x%5uqWkYW;DL2@d4o325F*L2x7w}!P g-n@Xd=lovB)ua&B{r}=xmiSZDKiOis##saa0Iw^AfB*mh diff --git a/pr-previews/pr-42/time_series_performance_benchmarks/time_series_performance_benchmarks.ipynb b/pr-previews/pr-42/time_series_performance_benchmarks/time_series_performance_benchmarks.ipynb deleted file mode 100644 index 37d955a..0000000 --- a/pr-previews/pr-42/time_series_performance_benchmarks/time_series_performance_benchmarks.ipynb +++ /dev/null @@ -1,163 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0644081c-161f-43fa-8a61-7dc0efb26d08", - "metadata": {}, - "source": [ - "# Time series performance benchmarks\n", - "\n", - "The `titiler-cmr` API is deployed as a Lambda function in the SMCE VEDA AWS account. For small time series requests (<500 time points) you can expect a response from any of the endpoints within ~20 seconds. For larger time series requests, you run the risk of bumping into Lambda concurrency or timeout limits. This report shows some results from the `test_timeseries_benchmarks.py` script that sends many requests with varying time series lengths as well as several other parameters that affect runtime." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5678099f-dcd7-4c09-86e6-a1c2b4845d51", - "metadata": {}, - "outputs": [], - "source": [ - "import benchmark_analysis as ba" - ] - }, - { - "cell_type": "markdown", - "id": "a335bfb6-bdd9-4bfd-84a2-dd805f44ac63", - "metadata": {}, - "source": [ - "## xarray backend\n", - "The following tests use the following datasets to evaluate the limits of the `/timeseries` endpoints for the `xarray` backend \n", - "- [GAMSSA 28km SST](https://podaac.jpl.nasa.gov/dataset/GAMSSA_28km-ABOM-L4-GLOB-v01): a daily 0.25 degree (~28 km) resolution dataset with sea surface temperature and sea ice fraction variables\n", - "- [MUR SST](https://cmr.earthdata.nasa.gov/search/concepts/C1996881146-POCLOUD.html): a daily 0.01 degree (~1km) resolution dataset with sea surface temperature variables" - ] - }, - { - "cell_type": "markdown", - "id": "7e436954-d115-4c6a-8aee-40e276532aa0", - "metadata": {}, - "source": [ - "### statistics\n", - "\n", - "Under the current deployment configuration `statistics` endpoint can process time series requests with up to ~1000 points. Requests that involve more than 1000 points are likely to fail." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7777354a-8ff9-4e5a-a518-5e72969aeefe", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, df in ba.dfs[\"statistics\"].items():\n", - " fig = ba.plot_error_rate_heatmap(\n", - " df=df,\n", - " x=\"num_timepoints\",\n", - " y=\"bbox_dims\",\n", - " z=\"error_rate\",\n", - " labels={\"x\": \"number of time points\", \"y\": \"bbox dimensions\", \"color\": \"error rate\"},\n", - " title=f\"{dataset}: error rate by bbox size and number of time points\",\n", - " )\n", - " fig.show()" - ] - }, - { - "cell_type": "markdown", - "id": "4da8f9c2-fe34-4d16-a72a-04b50decd2c5", - "metadata": {}, - "source": [ - "In general, the size of the area you want to analyze will have minimal impact on the runtime! This is because `titiler.xarray` has to read the entire granule into memory before subsetting, so reducing the size of the AOI does not reduce the overall footprint of the computation." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f4f2e7fd-c6ac-4ddc-8226-757867a4e083", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, df in ba.dfs[\"statistics\"].items():\n", - " ba.plot_line_with_error_bars(\n", - " df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n", - " color=\"bbox_dims\",\n", - " title=f\"{dataset}: statistics runtime\",\n", - " ).show()\n" - ] - }, - { - "cell_type": "markdown", - "id": "e2d45d53-ebbc-4979-8d53-c8e904b0830a", - "metadata": {}, - "source": [ - "### bbox (animations)\n", - "\n", - "Under the current deployment configuration the `bbox` endpoint can reliably process time series requests with up to ~500 points. Requests that involve more than 500 points may fail if the area of interest is very large." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "68f76925-c623-4614-b36b-6a797b387ca9", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, df in ba.dfs[\"bbox\"].items():\n", - " for img_size in sorted(df[\"img_size\"].unique()):\n", - " img_size_df = df[df[\"img_size\"] == img_size]\n", - " img_dims = img_size_df[\"img_dims\"].unique()[0]\n", - " ba.plot_error_rate_heatmap(\n", - " df=img_size_df,\n", - " x=\"num_timepoints\",\n", - " y=\"bbox_dims\",\n", - " z=\"error_rate\",\n", - " labels={\"x\": \"number of time points\", \"y\": \"bbox dimensions\", \"color\": \"error rate\"},\n", - " title=f\"{dataset}: image size {img_dims}\",\n", - " ).show()" - ] - }, - { - "cell_type": "markdown", - "id": "ccfd3480-0092-4845-9b4a-c688dbfd4aa6", - "metadata": {}, - "source": [ - "The size of the area of interest increases the response time, especially for requests for higher resolution images." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "abd9e612-51f6-4ef5-8499-18b42a76ab28", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, df in ba.dfs[\"bbox\"].items():\n", - " ba.plot_line_with_error_bars(\n", - " df=df.sort_values([\"bbox_size\", \"num_timepoints\"]),\n", - " color=\"bbox_dims\",\n", - " facet_row=\"img_dims\",\n", - " title=f\"{dataset}: runtime by bbox size and image dimensions\"\n", - " ).show()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -}