diff --git a/.gitignore b/.gitignore index 013e8593..3b8405b0 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ _build/ .virtual_documents/ */.ipynb_checkpoints/ .ipynb_checkpoints/ +PRR/sen4gpp_catalog_full/ diff --git a/PRR/waposal.ipynb b/PRR/waposal.ipynb new file mode 100644 index 00000000..b1b18f18 --- /dev/null +++ b/PRR/waposal.ipynb @@ -0,0 +1,1965 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "adb37fbc-cd1b-4e63-81d1-103c5cc5b34d", + "metadata": {}, + "source": [ + "# WAPOSAL - Wave power density, zero-crossing wave period, and significant wave height product \n", + "\n", + "This notebook shows how to:\n", + "\n", + "1. Combine a number of NetCDF (.nc) files into Zarr stores.\n", + "\n", + "2. Generate a valid STAC collection, which is required for uploading research outcomes to the ESA Project Results Repository (PRR).\n", + "\n", + "The code below demonstrates how to perform the necessary steps using real data from the ESA project WAPOSAL. WAPOSAL focuses on analysing wave power density, zero-crossing wave period, and significant wave height products obtained along track from Sentinel-3 A/B and CryoSat-2 altimeter data processed with the SAMOSA+ retracker.\n", + "\n", + "Check the [EarthCODE documentation](https://earthcode.esa.int/) and the [PRR STAC introduction example](https://esa-earthcode.github.io/tutorials/prr-stac-introduction) for a more general introduction to STAC and the ESA PRR.\n", + "\n", + "\n", + "\n", + "🔗 Check the project website: [WAPOSAL – Website](https://waposal.tecnico.ulisboa.pt/~waposal.daemon/) \n", + "\n", + "\n", + "🔗 Check the eo4society page: [WAPOSAL – eo4society](https://eo4society.esa.int/projects/waposal/)\n", + "\n", + "\n", + "#### Acknowledgment \n", + "We gratefully acknowledge the **WAPOSAL** team for providing access to the data used in this example, as well as for their support in creating it.\n", + "\n", + "## 1. Combine the multiple trajectory files into Zarr stores"
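, + "\n", + "Before diving into the full pipeline below, here is a minimal sketch of the pad-and-concatenate pattern this section uses (the file names are placeholders, not actual WAPOSAL products):\n", + "\n", + "```python\n", + "import numpy as np\n", + "import xarray as xr\n", + "\n", + "# Open two hypothetical along-track files, rename the 'time' dimension to 'obs',\n", + "# pad the shorter track with NaN so both share the same 'obs' length, and stack\n", + "# them along a new 'trajectory' dimension before writing a single Zarr store.\n", + "paths = ['track_a.nc', 'track_b.nc']\n", + "tracks = [xr.open_dataset(p).rename_dims({'time': 'obs'}) for p in paths]\n", + "obs_len = max(t.sizes['obs'] for t in tracks)\n", + "padded = [t.pad(obs=(0, obs_len - t.sizes['obs']), constant_values=np.nan).expand_dims('trajectory') for t in tracks]\n", + "combined = xr.concat(padded, dim='trajectory')\n", + "combined.to_zarr('combined.zarr', mode='w')\n", + "```"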
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "5490e770-2e1f-472f-89af-cebf578a478b", + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "import numpy as np\n", + "import xarray as xr\n", + "import re\n", + "import os\n", + "from joblib import Parallel, delayed\n", + "import pickle" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "3b2dcca1-cc6d-45ad-ad7a-1d6ebc125bd9", + "metadata": {}, + "outputs": [], + "source": [ + "# define region information\n", + "bbox_ = {}\n", + "bbox_['UK'] = [-11.0,47.5,10.0,60.0]\n", + "bbox_['BN'] = [3.0,53.0,30.35,72.0]\n", + "bbox_['FF'] = [-10.0,43.0,-0.5,49.0]\n", + "bbox_['NS'] = [-10.0,42.0,-1.0,45.0]\n", + "bbox_['FP'] = [-155.0,-27.0,-134.0,-7.0]\n", + "bbox_['MT'] = [-6.0,30.1,41.8,47.4]\n", + "bbox_['FG'] = [-54.0,4.1,-51.5,6.0]\n", + "bbox_['CN'] = [-19.0,26.0,-10.0,31.0]\n", + "bbox_['PT'] = [-10.0,37.0,-8.0,42.0]\n", + "bbox_['MD'] = [-18.5,27.0,-13.0,30.0]\n", + "bbox_['AZ'] = [-32.0,36.5,-24.0,40.0]\n", + "\n", + "satellite_names = {'S3A': 'Sentinel-3', 'S3B': 'Sentinel-3','CS2': 'CryoSat-2'}\n", + "platform_id = {'S3A': 'A', 'S3B': 'B','CS2': ''}\n", + "sensor = {'S3A': 'SRAL', 'S3B': 'SRAL','CS2': 'SIRAL'}\n", + "zone = {'AZ': 'Azores', \n", + " 'FG': 'French Guiana', \n", + " 'FF': 'French Facade', \n", + " 'UK': 'UK', \n", + " 'FP': 'French Polynesia',\n", + " 'CN': 'Canaries',\n", + " 'PT': 'Portugal',\n", + " 'NS': 'North Spain',\n", + " 'BN': 'Baltic and Norway',\n", + " 'MT': 'Mediterranean',\n", + " 'MD': 'Madeira'}\n", + "\n", + "epoch_2000 = np.datetime64('2000-01-01T00:00:00')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "a9c6c206-33b4-458b-87c5-420269a9c257", + "metadata": {}, + "outputs": [], + "source": [ + "def return_temporal_size(f):\n", + " return xr.open_dataset(f, decode_timedelta=False).time.shape[0]\n", + "\n", + "\n", + "def combine_to_trajectory_zarr(region, satellite, files):\n", + "\n", + " # # define the coordinates shapes\n", + " # sizes = Parallel(n_jobs=-1)(delayed(return_temporal_size)(f) for f in files)\n", + " # max_obs = max(sizes)\n", + " # trajectories_len = len(files)\n", + " # obs_len = max_obs\n", + "\n", + " # read coordinate shapes\n", + " trajectories_len, obs_len = dimensions[f'{region}-{satellite}']\n", + " join_strategy = 'exact' if obs_len is not None else 'outer'\n", + " \n", + " datasets_list = []\n", + " \n", + " for filepath in files:\n", + " \n", + " # open file and drop dim\n", + " ds = xr.open_dataset(filepath, decode_times=False)\n", + " ds = ds.sel(dim=0)\n", + " \n", + " # rename the 'time' dimension to 'obs'\n", + " ds = ds.rename_dims({'time': 'obs'})\n", + " \n", + " # pad to maximum observations\n", + " ds = ds.pad(obs=(0, obs_len - ds.sizes['obs']), mode='constant', constant_values=np.nan)\n", + " \n", + " # expand trajectory dimension\n", + " ds = ds.expand_dims('trajectory')\n", + " \n", + " # Create the 'trajectory_info' variable\n", + " filename = os.path.basename(filepath)\n", + " ds['trajectory_info'] = xr.DataArray(\n", + " [filename], # Data is the filename\n", + " dims=['trajectory'], # Depends on the new dimension\n", + " attrs={'long_name': 'trajectory info'} # Add attributes\n", + " )\n", + " \n", + " # add to concat list\n", + " datasets_list.append(ds)\n", + "\n", + " # combine the datasets\n", + " combined_ds = xr.concat(\n", + " datasets_list, \n", + " dim='trajectory', \n", + " join=join_strategy, \n", + " fill_value=np.nan\n", + " )\n", + "\n", + " del datasets_list\n", + " 
\n", + " # # update metadata\n", + " combined_ds = combined_ds.chunk(trajectory=1000)\n", + " start_time = epoch_2000 + np.timedelta64(int(combined_ds.TAI_Time_20Hz.isel(trajectory=0).min(skipna=True).values), 's')\n", + " end_time = epoch_2000 + np.timedelta64(int(combined_ds.TAI_Time_20Hz.isel(trajectory=-1).max(skipna=True).values), 's')\n", + " \n", + " combined_ds.attrs.update({\n", + " \"Conventions\" : \"CF-1.6/CF-1.7\",\n", + " \"feature_type\" :\"trajectory\",\n", + " \"start_datetime\": str(start_time),\n", + " \"end_datetime\": str(end_time),\n", + " \"created\": '2025-11-14T00:00:00Z', \n", + " \"description\": f\"Wave power density, zero-crossing wave period, and significant wave height product obtained along-track from {satellite_names[satellite]} altimeter data processed by SAMOSA+ retracker over the {zone[region]} region.\",\n", + " \"zone\": zone[region], # region for example Azores \n", + " \"platform_name\": satellite,\n", + " \"institution\": 'CENTEC-IST-ID',\n", + " \"license\": \"CC-BY-SA-4.0\",\n", + " \"min_lon\": bbox_[region][0],\n", + " \"min_lat\": bbox_[region][1],\n", + " \"max_lon\": bbox_[region][2],\n", + " \"max_lat\": bbox_[region][3],\n", + " \"crs\": 'epsg:4326',\n", + " \"referencfe\": \"https://opensciencedata.esa.int/products/waposal-waves/collection\"\n", + " })\n", + "\n", + " return combined_ds" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "a1865f7f-1732-4113-9de6-0bbe6291e794", + "metadata": {}, + "outputs": [], + "source": [ + "rootpath = \"/mnt/d/data/waposal/extracted/\"\n", + "regions = ['AZ', 'BN', 'CN', 'FF', 'FG', 'FP', 'MD', 'MT', 'NS', 'PT', 'UK']\n", + "satellites = ['S3A', 'S3B', 'CS2']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "626e637e-4229-4b72-a337-89aad1dbf20d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a6f3491d-d3d9-48a4-b375-9a35f6ee11e9", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# define the dimensions of each zarr store.\n", + "# We need to know this ahead of time, to align all the observations\n", + "import gc\n", + "from joblib import Parallel, delayed\n", + "def return_temporal_size(f):\n", + " return xr.open_dataset(f, decode_timedelta=False).time.shape[0]\n", + "\n", + "dimensions = {}\n", + "\n", + "for region in regions[3:]:\n", + " for satellite in satellites:\n", + " print(region + '-' + satellite)\n", + " files = sorted(Path(f'{rootpath}/{region}/{region}/{satellite}/').glob(\"./*/**/*.nc\"))\n", + " if len(files) == 0: continue\n", + " sizes = Parallel(n_jobs=-1)(delayed(return_temporal_size)(f) for f in files)\n", + " obs_len = max(sizes)\n", + " trajectories_len = len(files)\n", + " dimensions[f'{region}-{satellite}'] = (trajectories_len, obs_len)\n", + " del sizes\n", + " gc.collect()\n", + "\n", + "with open('dimensions.pkl', 'wb') as f:\n", + " pickle.dump(dimensions, f)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "afc1ea0a-9228-46c5-9f2e-1367545224e0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c04614c3-298e-4a56-a21c-9a117adf51fb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "142411a1-2797-482e-a9d0-1bd7a753590e", + "metadata": {}, + "outputs": [], + "source": [ + "with open('dimensions.pkl', 'rb') as f:\n", + " dimensions = pickle.load(f)" + ] + }, + { + "cell_type": "code", + 
"execution_count": 8, + "id": "650fa477-e0ba-402d-b1eb-243f924781d0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'AZ-S3A': (1711, 1218),\n", + " 'AZ-S3B': (1019, 1227),\n", + " 'AZ-CS2': (1554, 573),\n", + " 'BN-S3A': (8719, 6882),\n", + " 'BN-S3B': (5302, 6927),\n", + " 'BN-CS2': (8632, 5309),\n", + " 'CN-S3A': (1998, 1735),\n", + " 'CN-S3B': (1259, 1750),\n", + " 'FF-S3A': (2274, 2095),\n", + " 'FF-S3B': (1608, 2112),\n", + " 'FF-CS2': (4533, 2245),\n", + " 'FG-S3A': (527, 658),\n", + " 'FG-S3B': (327, 658),\n", + " 'FP-S3A': (4864, 6885),\n", + " 'FP-S3B': (3046, 6956),\n", + " 'FP-CS2': (1668, 7019),\n", + " 'MD-S3A': (1248, 1041),\n", + " 'MD-S3B': (778, 1050),\n", + " 'MD-CS2': (4406, 2929),\n", + " 'MT-S3A': (10339, 6263),\n", + " 'MT-S3B': (6344, 6072),\n", + " 'MT-CS2': (21047, 5495),\n", + " 'NS-S3A': (1867, 1046),\n", + " 'NS-S3B': (1210, 1061),\n", + " 'NS-CS2': (3453, 1123),\n", + " 'PT-S3A': (625, 1740),\n", + " 'PT-S3B': (445, 1740),\n", + " 'PT-CS2': (4208, 3932),\n", + " 'UK-S3A': (5322, 4488),\n", + " 'UK-S3B': (3273, 4434),\n", + " 'UK-CS2': (9520, 4671)}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dimensions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d79e690-5935-4692-8e0d-19c7a25598ef", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "import gc\n", + "for region in regions:\n", + " for satellite in satellites:\n", + " print(region + '-' + satellite)\n", + " files = sorted(Path(f'{rootpath}/{region}/{region}/{satellite}/').glob(\"./*/**/*.nc\"))\n", + " if len(files) == 0:\n", + " continue\n", + " ds = combine_to_trajectory_zarr(region, satellite, files)\n", + " ds.to_zarr(f'{rootpath}combined/{region}-{satellite}.zarr', mode='w', write_empty_chunks=False, zarr_format=2)\n", + " del ds\n", + " gc.collect()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7a7ac4e-0b09-4f95-b595-f918e826bd76", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c2ab7e25-8754-4373-8a3d-ccc085f1de30", + "metadata": {}, + "outputs": [], + "source": [ + "## assert values are not changed between the originals and zarr store\n", + "for i, f in zip(np.arange(len(files)), files):\n", + "\n", + " true_values = xr.open_dataset(f, decode_times=False).Sigma0_20Hz.values.flatten()\n", + " \n", + " # check that all values are the same\n", + " assert np.isclose(\n", + " true_values, \n", + " ds.sel(trajectory=i).Sigma0_20Hz.values.flatten()[:true_values.shape[0]], \n", + " equal_nan=True\n", + " ).all()\n", + " \n", + " # check that all padding is NA\n", + " assert np.isnan(ds.sel(trajectory=i).Sigma0_20Hz.values.flatten()[true_values.shape[0]:]).all()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "87680f33-e43f-4195-a9f3-1d2b15490d6a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "40344118-eb2d-46e8-9fe0-3483dbc8529b", + "metadata": {}, + "source": [ + "## 2. Generate STAC collection for the new files" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "61f898b9-bdc4-4900-892f-f9ca42119788", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "
" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from pystac import Collection\n", + "import pystac\n", + "import xarray as xr\n", + "import shapely\n", + "import json\n", + "from datetime import datetime\n", + "from xstac import xarray_to_stac\n", + "from xstac._xstac import build_horizontal_dimension\n", + "\n", + "# define collection id, since it will be reused\n", + "collectionid = \"waposal\"\n", + "\n", + "# create the root collection using pystac.Collection\n", + "\n", + "collection = Collection.from_dict(\n", + " \n", + "{\n", + " \"type\": \"Collection\",\n", + " \"id\": \"waposal\",\n", + " \"stac_version\": \"1.1.0\",\n", + " \"description\": \"Wave power density, zero-crossing wave period, and significant wave height product obtained along track based on Sentinel-3 A/B and Cryosat-2 altimeter data processed by SAMOSA+ retracker. The products cover the Atlantic coast of Europe, Madeira, the French Polynesian archipelagos, the Azores, the Canary Islands, the Mediterranean and Baltic Seas, and the coastal zone of French Guiana.\",\n", + " \"links\": [],\n", + " \"stac_extensions\": [\n", + " \"https://stac-extensions.github.io/osc/v1.0.0/schema.json\",\n", + " \"https://stac-extensions.github.io/themes/v1.0.0/schema.json\",\n", + " \"https://stac-extensions.github.io/cf/v0.2.0/schema.json\"\n", + " ],\n", + " \"osc:project\": \"waposal\",\n", + " \"osc:status\": \"completed\",\n", + " \"osc:region\": \"severeal\",\n", + " \"osc:type\": \"product\",\n", + " \"created\": \"2025-10-29T11:40:41Z\",\n", + " \"updated\": \"2025-10-29T11:40:41Z\",\n", + " \"sci:doi\": \"https://doi.org/10.57780/s3d-83ad619\",\n", + " \"cf:parameter\": [\n", + " {\n", + " \"name\": \"wave-period\"\n", + " },\n", + " {\n", + " \"name\": \"wave-height\"\n", + " }\n", + " ],\n", + " \"themes\": [\n", + " {\n", + " \"scheme\": \"https://github.com/stac-extensions/osc#theme\",\n", + " \"concepts\": [\n", + " {\n", + " \"id\": \"oceans\"\n", + " }\n", + " ]\n", + " }\n", + " ],\n", + " \"osc:missions\": [\n", + " \"sentinel-3\",\n", + " \"cryosat-2\"\n", + " ],\n", + " \"osc:variables\": [\n", + " \"wave-period\",\n", + " \"wave-height\"\n", + " ],\n", + " \"title\": \"High-resolution WAPOSAL wave period and wave power density from Sentinel-3 A/B, CryoSat-2, and SAMOSA+ retracker\",\n", + " \"extent\": {\n", + " \"spatial\": {\n", + " \"bbox\": [\n", + " [\n", + " -155,\n", + " -27,\n", + " 41.8,\n", + " 72\n", + " ],\n", + " [\n", + " -32,\n", + " 36.499999999999986,\n", + " -24,\n", + " 40\n", + " ],\n", + " [\n", + " -10,\n", + " 42.00000000000003,\n", + " -1,\n", + " 45\n", + " ],\n", + " [\n", + " -10,\n", + " 43,\n", + " -0.5,\n", + " 49\n", + " ],\n", + " [\n", + " 3,\n", + " 53,\n", + " 30.35,\n", + " 72\n", + " ],\n", + " [\n", + " -11,\n", + " 47.5,\n", + " 10,\n", + " 60.00000000000003\n", + " ],\n", + " [\n", + " -155,\n", + " -27,\n", + " -134,\n", + " -6.999999999999986\n", + " ],\n", + " [\n", + " -54,\n", + " 4.1000000000000085,\n", + " -51.5,\n", + " 6\n", + " ],\n", + " [\n", + " -19,\n", + " 26,\n", + " -10,\n", + " 31\n", + " ],\n", + " [\n", + " -6,\n", + " 30.09999999999998,\n", + " 41.79999999999999,\n", + " 47.400000000000006\n", + " ],\n", + " [\n", + " -19,\n", + " 27,\n", + " -5,\n", + " 39\n", + " ],\n", + " [\n", + " -13.000000000000002,\n", + " 34,\n", + " -5.5,\n", + " 44.5\n", + " ]\n", + " ]\n", + " },\n", + " \"temporal\": {\n", + " \"interval\": [\n", + " [\n", + " 
\"2011-01-01T00:00:00Z\",\n", + " \"2023-12-31T23:59:59Z\"\n", + " ]\n", + " ]\n", + " }\n", + " },\n", + " \"license\": \"CC-BY-SA-4.0\",\n", + " \"keywords\": [\n", + " \"altimeter data\",\n", + " \"wave energy\",\n", + " \"wave power\",\n", + " \"wave frequency\",\n", + " \"wave\"\n", + " ]\n", + "}\n", + "\n", + ")\n", + "\n", + "collection" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3cd6a48e-407b-4bc7-ad9e-02fc1af6e514", + "metadata": {}, + "outputs": [], + "source": [ + "files = sorted(Path(f'./combined').glob(\"*.zarr\"))\n", + "\n", + "for f in files:\n", + " ds = xr.open_zarr(f,decode_times=False)\n", + "\n", + " # Describe the first file following the datacube stac extension standards.\n", + " # All data is extracted from the metadata / data already present in the file we only specify\n", + " # the template and what information is extracted\n", + "\n", + " bbox = [ds.attrs['min_lon'], ds.attrs['min_lat'], ds.attrs['max_lon'],ds.attrs['max_lat']]\n", + " geometry = json.loads(json.dumps(shapely.box(*bbox).__geo_interface__))\n", + " \n", + " template = {\n", + " \n", + " \"id\": f\"{collectionid}-{f.stem.lower()}\",\n", + " \"type\": \"Feature\",\n", + " \"stac_version\": \"1.1.0\",\n", + " \"description\": ds.attrs['description'],\n", + " \"title\": f.stem,\n", + " \"properties\": {\n", + " \"feature_type\" : \"trajectory\",\n", + " \"start_datetime\": ds.attrs['start_datetime'] + \"Z\", \n", + " \"end_datetime\": ds.attrs['end_datetime'] + \"Z\",\n", + " \"zone\" : ds.attrs['zone'],\n", + " \"platform_name\" : ds.attrs['platform_name'],\n", + " \"institution\" : ds.attrs['institution'],\n", + " \"license\" : ds.attrs['license'],\n", + " \"reference\": ds.attrs['referencfe']\n", + " },\n", + " \"geometry\": geometry,\n", + " \"bbox\": bbox,\n", + " \"assets\": {\n", + " \"data\": {\n", + " \"href\": f\"https://s3.waw4-1.cloudferro.com/EarthCODE/OSCAssets/waposal/{f.stem}.zarr\", # or local path\n", + " \"type\": \"application/vnd+zarr\",\n", + " \"roles\": [\"data\"],\n", + " \"title\": f.stem\n", + " }\n", + " }\n", + " }\n", + " \n", + " # 3. 
Generate the STAC Item\n", + " item = xarray_to_stac(\n", + " ds,\n", + " template,\n", + " temporal_dimension=False,\n", + " x_dimension=False,\n", + " y_dimension=False, \n", + "\n", + " reference_system=False\n", + " )\n", + "\n", + " # add the multiple dimensions\n", + " dims = item.properties['cube:dimensions']\n", + " for d in list(ds.dims):\n", + " r = build_horizontal_dimension(ds, d, None, None, None, None, False).to_dict()\n", + " r['type'] = \"trajectory\" if d == 'trajectory' else \"observation\"\n", + " for k, v in r.copy().items():\n", + " if v is None:\n", + " del r[k]\n", + " dims[d] = r\n", + "\n", + " item.validate()\n", + "\n", + " collection.add_item(item)\n", + "\n", + "collection.normalize_and_save(\n", + " root_href=f'./{collectionid}',\n", + " catalog_type=pystac.CatalogType.SELF_CONTAINED\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:micromamba-pangeo] *", + "language": "python", + "name": "conda-env-micromamba-pangeo-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/myst.yml b/myst.yml index f5bdaad2..36cdbeb5 100644 --- a/myst.yml +++ b/myst.yml @@ -24,8 +24,9 @@ project: - file: PRR/Creating STAC Catalog_from_PRR_example.ipynb - file: PRR/TCCAS_v2.ipynb - file: PRR/CareHeat_v2.ipynb - - file: PRR/PRR_STAC_download_example.ipynb - file: PRR/sen4amazonas.ipynb + - file: PRR/waposal.ipynb + - file: PRR/PRR_STAC_download_example.ipynb - title: Open Science Catalog (OSC) file: OSC/index.md