diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml new file mode 100644 index 00000000..56f9f521 --- /dev/null +++ b/.github/workflows/pypi-release.yml @@ -0,0 +1,74 @@ +name: Build and Upload PyDDA Release to PyPI +on: + release: + types: + - published + +jobs: + build-artifacts: + runs-on: ubuntu-latest + if: github.repository == 'openradar/PyDDA' + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-python@v4 + name: Install Python + with: + python-version: 3.11 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install setuptools setuptools-scm wheel twine check-manifest + - name: Build tarball and wheels + run: | + git clean -xdf + git restore -SW . + python -m build --sdist --wheel . + - name: Check built artifacts + run: | + python -m twine check dist/* + pwd + if [ -f dist/pydda-0.0.0.tar.gz ]; then + echo "❌ INVALID VERSION NUMBER" + exit 1 + else + echo "✅ Looks good" + fi + - uses: actions/upload-artifact@v3 + with: + name: releases + path: dist + + test-built-dist: + needs: build-artifacts + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v4 + name: Install Python + with: + python-version: "3.x" + - uses: actions/download-artifact@v3 + with: + name: releases + path: dist + - name: List contents of built dist + run: | + ls -ltrh + ls -ltrh dist + upload-to-pypi: + needs: test-built-dist + if: github.event_name == 'release' + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v3 + with: + name: releases + path: dist + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@v1.8.10 + with: + user: __token__ + password: ${{ secrets.PYPI_TOKEN }} + verbose: true diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml index 5e90df82..dca9abea 100644 --- a/.github/workflows/python-package-conda.yml +++ b/.github/workflows/python-package-conda.yml @@ -17,7 +17,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] os: [macOS, ubuntu] include: - os: macos-latest diff --git a/REQUIREMENTS.txt b/REQUIREMENTS.txt index 766b30d8..cb8a06ce 100644 --- a/REQUIREMENTS.txt +++ b/REQUIREMENTS.txt @@ -8,3 +8,5 @@ six pooch cmweather cdsapi +xarray +datatree diff --git a/doc/source/index.rst b/doc/source/index.rst index ce630041..01901a8e 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -139,12 +139,29 @@ just type in the following commands assuming you have the above dependencies ins Finally, PyDDA now supports using `Jax `_ and `TensorFlow `_ for solving the three dimensional wind field. PyDDA requires TensorFlow 2.6 and the -tensorflow-probability package for TensorFlow to be enabled. Both the Jax and -TensorFlow-based engines now use automatic differentiation to solve for the gradients -of each cost function. This therefore will create gradients that are less susceptible -to boundary artifacts and rounding errors. In addition, both of these packages can -utilize CUDA-enabled GPUs for much faster processing. These two +tensorflow-probability package for TensorFlow to be enabled. +In addition, both of these packages can utilize CUDA-enabled GPUs for much faster processing. These two dependencies are optional as the user can still use PyDDA with the SciPy ecosystem. +The Jax engine uses the same optimizer as SciPy (`L-BFGS-B `_).
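+
+The engine is selected with the ``engine`` keyword of
+:meth:`pydda.retrieval.get_dd_wind_field`. A minimal sketch, assuming two
+input grids named as in the examples later in this documentation:
+
+.. code-block:: python
+
+    new_grids, _ = pydda.retrieval.get_dd_wind_field(
+        [grid_kict, grid_ktlx], Co=1.0, Cm=256.0, engine="jax"
+    )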
+ + +Known issues +============ + +The TensorFlow engine uses the unbounded version of this optimizer (L-BFGS), which removes the +constraint that the wind magnitudes must be less than 100 m/s. The removal of this constraint can sometimes +result in numerical instability, so it is recommended that users test both the Jax and TensorFlow engines +if they desire GPU-accelerated retrievals. Contents: diff --git a/doc/source/user_guide/index.rst b/doc/source/user_guide/index.rst index fa9a52f7..35a0cd91 100644 --- a/doc/source/user_guide/index.rst +++ b/doc/source/user_guide/index.rst @@ -14,3 +14,4 @@ This is a place to include our user guide. retrieving_winds.rst optimizing_wind_retrieval.rst visualizing_winds.rst + nesting_wind_retrieval.rst diff --git a/doc/source/user_guide/optimizing_wind_retrieval.rst b/doc/source/user_guide/optimizing_wind_retrieval.rst index 8a0ced13..9b833a49 100644 --- a/doc/source/user_guide/optimizing_wind_retrieval.rst +++ b/doc/source/user_guide/optimizing_wind_retrieval.rst @@ -95,7 +95,8 @@ that we did in :ref:`retrieving_winds`. grid_shape=grid_shape, gatefilter=gatefilter_kict, grid_origin=(radar_kict.latitude['data'].filled(), radar_kict.longitude['data'].filled())) - + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') @@ -200,7 +201,8 @@ to the above retrieval. grid_shape=grid_shape, gatefilter=gatefilter_kict, grid_origin=(radar_kict.latitude['data'].filled(), radar_kict.longitude['data'].filled())) - + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') @@ -302,7 +304,8 @@ Let's see what happens when we increase the level of smoothing. grid_shape=grid_shape, gatefilter=gatefilter_kict, grid_origin=(radar_kict.latitude['data'].filled(), radar_kict.longitude['data'].filled())) - + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') @@ -405,7 +408,8 @@ artifact near the edge of the Dual Doppler lobe re-appears. grid_shape=grid_shape, gatefilter=gatefilter_kict, grid_origin=(radar_kict.latitude['data'].filled(), radar_kict.longitude['data'].filled())) - + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') diff --git a/doc/source/user_guide/retrieving_winds.rst b/doc/source/user_guide/retrieving_winds.rst index 4d851446..e6262fc4 100644 --- a/doc/source/user_guide/retrieving_winds.rst +++ b/doc/source/user_guide/retrieving_winds.rst @@ -38,8 +38,21 @@ point, or model data as a weak constraint in order to increase the chance that P provide a solution that converges to a physically realistic wind field. For this particular example, we are lucky enough to have model data from the Rapid Update Cycle that can be used as a constraint. -Therefore, let's first add the model data into our Grid objects so that PyDDA can use the model -data as a constraint. +------------------------ +Using PyDDA's data model +------------------------ + +As of PyDDA 2.0, PyDDA uses `xarray Datasets `_ +to represent the underlying data structure. This makes it easier to integrate PyDDA into xarray-based workflows +that are standard in the geoscientific Python community. Therefore, anyone using Py-ART Grids will need +to convert their grids to PyDDA Grids using the :meth:`pydda.io.read_from_pyart_grid` helper function. + +.. code-block:: python + + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) + +In addition, :meth:`pydda.io.read_grid` will read a CF-compliant radar grid into a PyDDA Grid. ---------------------------- Using models for constraints ---------------------------- @@ -50,10 +63,11 @@ provide a constraint on the horizontal winds. This helps to constrain the backgr area where there is suboptimal coverage from the radar network outside of the dual Doppler lobes. To add either a RUC or HRRR model time period as a constraint, we need to have the original model data in GRIB format and then use the following line to -load the model data into a PyART grid for processing. +load the model data into a PyDDA grid for processing. .. code-block:: python + # Add constraints grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') @@ -75,7 +89,7 @@ Retrieving your first wind field -------------------------------- We will then take a first attempt at retrieving a wind field using PyDDA. This is done using the -:code:`pydda.retrieval.get_dd_wind_field` function. This function takes in a minimum of one input, a list +:meth:`pydda.retrieval.get_dd_wind_field` function. This function requires at least one input, a list of input PyART Grids. We will also specify the constants for the constraints. In this case, we are using the mass continuity, radar observation, smoothness, and model constraints. @@ -176,7 +190,8 @@ key on the bottom right interior of the plot. grid_shape=grid_shape, gatefilter=gatefilter_kict, grid_origin=(radar_kict.latitude['data'].filled(), radar_kict.longitude['data'].filled())) - + grid_ktlx = pydda.io.read_from_pyart_grid(grid_ktlx) + grid_kict = pydda.io.read_from_pyart_grid(grid_kict) grid_kict = pydda.constraints.add_hrrr_constraint_to_grid(grid_kict, pydda.tests.get_sample_file('ruc2anl_130_20110520_0800_001.grb2'), method='linear') diff --git a/examples/plot_examples.py b/examples/plot_examples.py index 2b09ad19..6bd16b7d 100644 --- a/examples/plot_examples.py +++ b/examples/plot_examples.py @@ -9,19 +9,15 @@ """ -import pyart import pydda -import numpy as np from matplotlib import pyplot as plt - -berr_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) -cpol_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1) - +berr_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0) +cpol_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1) # Insert a constant zero wind field as the initialization -cpol_grid = pydda.initialization.make_constant_wind_field( - cpol_grid, (0.0, 0.0, 0.0), vel_field="corrected_velocity" +berr_grid = pydda.initialization.make_constant_wind_field( + berr_grid, (0.0, 0.0, 0.0), vel_field="corrected_velocity" ) # Start the wind retrieval.
This example only uses the mass continuity @@ -41,6 +37,7 @@ wind_tol=0.5, engine="scipy", ) + # Plot a horizontal cross section plt.figure(figsize=(9, 9)) pydda.vis.plot_horiz_xsection_barbs( @@ -50,9 +47,11 @@ w_vel_contours=[5, 10, 15], barb_spacing_x_km=5.0, barb_spacing_y_km=15.0, + vmin=0, + vmax=70, ) plt.show() -plt.savefig("Darwin_horiz.png") + # Plot a vertical X-Z cross section plt.figure(figsize=(9, 9)) pydda.vis.plot_xz_xsection_barbs( @@ -62,6 +61,8 @@ w_vel_contours=[5, 10, 15], barb_spacing_x_km=10.0, barb_spacing_z_km=2.0, + vmin=0, + vmax=70, ) plt.show() @@ -73,5 +74,7 @@ level=40, barb_spacing_y_km=10.0, barb_spacing_z_km=2.0, + vmin=0, + vmax=70, ) -plt.savefig("Darwin.png") +plt.show() diff --git a/examples/plot_fun_with_constraints.py b/examples/plot_fun_with_constraints.py deleted file mode 100644 index eccfe8ff..00000000 --- a/examples/plot_fun_with_constraints.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -Example on geographic plotting and constraint variation -------------------------------------------------------- - -In this example we show how to plot wind fields on a map and change -the default constraint coefficients using PyDDA. - -This shows how important it is to have the proper intitial state and -constraints when you derive your wind fields. In the first figure, -the sounding was used as the initial state, but for the latter -two examples we use a zero initial state which provides for more -questionable winds at the edges of the Dual Doppler Lobes. - -This shows that your initial state and background are key to -providing a physically realistic retrieval. Assuming a zero -background will likely result in false regions of convergence -and divergence that will generate artificial updrafts and downdrafts -at the edges of data coverage. - -""" - -import pydda -import pyart -import cartopy.crs as ccrs -import matplotlib.pyplot as plt - - -berr_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) -cpol_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1) - -# Load our radar data -sounding = pyart.io.read_arm_sonde(pydda.tests.SOUNDING_PATH) -berr_grid = pydda.initialization.make_constant_wind_field(berr_grid, (0.0, 0.0, 0.0)) - -# Let's provide an initial state from the sounding -u_back = sounding[1].u_wind -v_back = sounding[1].v_wind -z_back = sounding[1].height -cpol_grid = pydda.initialization.make_wind_field_from_profile(cpol_grid, sounding[1]) - -new_grids, _ = pydda.retrieval.get_dd_wind_field( - [cpol_grid, berr_grid], - u_back=u_back, - v_back=v_back, - z_back=z_back, - Co=1.0, - Cm=64.0, - frz=5000.0, - Cb=1e-5, - Cx=1e2, - Cy=1e2, - Cz=1e2, - mask_outside_opt=False, - wind_tol=0.1, - engine="tensorflow", -) -fig = plt.figure(figsize=(7, 7)) - -pydda.vis.plot_xz_xsection_streamlines( - new_grids, bg_grid_no=-1, level=50, w_vel_contours=[1, 3, 5, 8] -) -plt.show() - -# Let's see what happens when we use a zero initialization -# This causes there to be convergence in the cone of silence -# This is an artifact that we want to avoid! -# Prescribing winds inside the background through either a constraint -# Or through the initial state will help mitigate this issue. 
-cpol_grid = pydda.initialization.make_constant_wind_field(cpol_grid, (0.0, 0.0, 0.0)) -new_grids, _ = pydda.retrieval.get_dd_wind_field( - [cpol_grid, berr_grid], - u_back=u_back, - v_back=v_back, - z_back=z_back, - Co=1.0, - Cm=64.0, - frz=5000.0, - Cb=1e-5, - Cx=1e2, - Cy=1e2, - Cz=1e2, - mask_outside_opt=False, - wind_tol=0.5, - engine="scipy", -) - -fig = plt.figure(figsize=(7, 7)) - -pydda.vis.plot_xz_xsection_streamlines( - new_grids, bg_grid_no=-1, level=50, w_vel_contours=[1, 3, 5, 8] -) -plt.show() - -# Or, let's make the radar data more important! -cpol_grid = pydda.initialization.make_wind_field_from_profile(cpol_grid, sounding[1]) -new_grids, _ = pydda.retrieval.get_dd_wind_field( - [cpol_grid, berr_grid], - Co=10.0, - Cm=64.0, - frz=5000.0, - u_back=u_back, - v_back=v_back, - z_back=z_back, - Cb=1e-5, - Cx=1e2, - Cy=1e2, - Cz=1e2, - mask_outside_opt=False, - wind_tol=0.1, - engine="scipy", -) -fig = plt.figure(figsize=(7, 7)) - -pydda.vis.plot_xz_xsection_streamlines( - new_grids, bg_grid_no=-1, level=50, w_vel_contours=[1, 3, 5, 8] -) -plt.show() diff --git a/examples/plot_sydney_tornado.py b/examples/plot_sydney_tornado.py index 0b4fe3fa..2cda6475 100644 --- a/examples/plot_sydney_tornado.py +++ b/examples/plot_sydney_tornado.py @@ -10,29 +10,26 @@ much faster since the cost function is smoother and therefore less susceptible to finding a local minimum that is in noise. -The observational constraint is reduced to 0.01 from the usual 1because we are factoring in +The observational constraint is reduced to 0.01 from the usual 1 because we are factoring in many more data points as we are using 4 radars instead of the two in the Darwin example. This example uses pooch to download the data files. -.. image:: ../../sydney_tornado.png - """ -import pyart import pydda import matplotlib.pyplot as plt import numpy as np -import pooch + grid1_path = pydda.tests.get_sample_file("grid1_sydney.nc") grid2_path = pydda.tests.get_sample_file("grid2_sydney.nc") grid3_path = pydda.tests.get_sample_file("grid3_sydney.nc") grid4_path = pydda.tests.get_sample_file("grid4_sydney.nc") -grid1 = pyart.io.read_grid(grid1_path) -grid2 = pyart.io.read_grid(grid2_path) -grid3 = pyart.io.read_grid(grid3_path) -grid4 = pyart.io.read_grid(grid4_path) +grid1 = pydda.io.read_grid(grid1_path) +grid2 = pydda.io.read_grid(grid2_path) +grid3 = pydda.io.read_grid(grid3_path) +grid4 = pydda.io.read_grid(grid4_path) # Set initialization and do retrieval grid1 = pydda.initialization.make_constant_wind_field(grid1, vel_field="VRADH_corr") @@ -40,13 +37,13 @@ [grid1, grid2, grid3, grid4], Co=1e-2, Cm=256.0, - Cx=1e-4, - Cy=1e-4, - Cz=1e-4, + Cx=10, + Cy=10, + Cz=10, vel_name="VRADH_corr", refl_field="DBZH", mask_outside_opt=True, - wind_tol=0.1, + wind_tol=0.5, max_iterations=200, engine="scipy", ) diff --git a/pydda/__init__.py b/pydda/__init__.py index 02f276fd..67a9cc9e 100644 --- a/pydda/__init__.py +++ b/pydda/__init__.py @@ -10,8 +10,9 @@ from . import initialization from . import tests from . import constraints +from . 
import io -__version__ = "1.5.1" +__version__ = "2.0.0" print("Welcome to PyDDA %s" % __version__) print("If you are using PyDDA in your publications, please cite:") diff --git a/pydda/constraints/model_data.py b/pydda/constraints/model_data.py index ba811a69..2863175d 100644 --- a/pydda/constraints/model_data.py +++ b/pydda/constraints/model_data.py @@ -3,6 +3,7 @@ import gc import tempfile import os +import xarray as xr # We want cfgrib to be an optional dependency to ensure Windows compatibility try: @@ -70,10 +71,10 @@ def download_needed_era_data(Grid, start_date, end_date, file_name): ) # Round the latitude and longitude values to 2 decimal places - N = round(Grid.point_latitude["data"].max(), 2) - S = round(Grid.point_latitude["data"].min(), 2) - E = round(Grid.point_longitude["data"].max(), 2) - W = round(Grid.point_longitude["data"].min(), 2) + N = round(Grid["point_latitude"].values.max(), 2) + S = round(Grid["point_latitude"].values.min(), 2) + E = round(Grid["point_longitude"].values.max(), 2) + W = round(Grid["point_longitude"].values.min(), 2) # Generate a list of years within the date range years = [str(year) for year in range(start_date.year, end_date.year + 1)] @@ -223,7 +224,7 @@ def make_constraint_from_era5( """ if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") + vel_field = Grid.attrs["first_grid_name"] if ERA5_AVAILABLE is False and file_name is None: raise ( @@ -236,7 +237,7 @@ def make_constraint_from_era5( ) grid_time = datetime.strptime( - Grid.time["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" + Grid["time"].attrs["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" ) hour_rounded_to_nearest_1 = int(round(float(grid_time.hour))) @@ -270,10 +271,10 @@ def make_constraint_from_era5( # Retrieve u, v, w, and geopotential # Geopotential is needed to convert into height coordinates - N = round(Grid.point_latitude["data"].max(), 2) - S = round(Grid.point_latitude["data"].min(), 2) - E = round(Grid.point_longitude["data"].max(), 2) - W = round(Grid.point_longitude["data"].min(), 2) + N = round(Grid["point_latitude"].values.max(), 2) + S = round(Grid["point_latitude"].values.min(), 2) + E = round(Grid["point_longitude"].values.max(), 2) + W = round(Grid["point_longitude"].values.min(), 2) retrieve_dict = {} pname = "reanalysis-era5-pressure-levels" @@ -344,21 +345,17 @@ def make_constraint_from_era5( ERA_grid.variables["time"].units, "hours since %Y-%m-%d %H:%M:%S.%f" ) - time_seconds = ERA_grid.variables["time"][:] - our_time = np.array([base_time + timedelta(seconds=int(x)) for x in time_seconds]) time_step = np.argmin(np.abs(base_time - grid_time)) - analysis_grid_shape = Grid.fields[vel_field]["data"].shape - height_ERA = ERA_grid.variables["z"][:] u_ERA = ERA_grid.variables["u"][:] v_ERA = ERA_grid.variables["v"][:] w_ERA = ERA_grid.variables["w"][:] lon_ERA = ERA_grid.variables["longitude"][:] lat_ERA = ERA_grid.variables["latitude"][:] - radar_grid_lat = Grid.point_latitude["data"] - radar_grid_lon = Grid.point_longitude["data"] - radar_grid_alt = Grid.point_z["data"] + radar_grid_lat = Grid["point_latitude"].values + radar_grid_lon = Grid["point_longitude"].values + radar_grid_alt = Grid["point_z"].values u_flattened = u_ERA[time_step].flatten() v_flattened = v_ERA[time_step].flatten() w_flattened = w_ERA[time_step].flatten() @@ -371,7 +368,8 @@ def make_constraint_from_era5( lon_flattened = lon_mgrid.flatten() lat_flattened = lat_mgrid.flatten() height_flattened = height_ERA[time_step].flatten() - height_flattened -= 
Grid.radar_altitude["data"] + height_flattened -= Grid["radar_altitude"].values + if method == "nearest": u_interp = NearestNDInterpolator( (height_flattened, lat_flattened, lon_flattened), u_flattened, rescale=True @@ -394,7 +392,6 @@ def make_constraint_from_era5( ) else: raise NotImplementedError("%s interpolation method not implemented!" % method) - u_new = u_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) v_new = v_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) w_new = w_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) @@ -406,21 +403,24 @@ def make_constraint_from_era5( tfile.close() u_field = {} - u_field["data"] = u_new u_field["standard_name"] = "u_wind" u_field["long_name"] = "meridional component of wind velocity" v_field = {} - v_field["data"] = v_new v_field["standard_name"] = "v_wind" v_field["long_name"] = "zonal component of wind velocity" w_field = {} - w_field["data"] = w_new w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("U_era5", u_field, replace_existing=True) - temp_grid.add_field("V_era5", v_field, replace_existing=True) - temp_grid.add_field("W_era5", w_field, replace_existing=True) + temp_grid = Grid + temp_grid["U_era5"] = xr.DataArray( + np.expand_dims(u_new, 0), dims=("time", "z", "y", "x"), attrs=u_field + ) + temp_grid["V_era5"] = xr.DataArray( + np.expand_dims(v_new, 0), dims=("time", "z", "y", "x"), attrs=v_field + ) + temp_grid["W_era5"] = xr.DataArray( + np.expand_dims(w_new, 0), dims=("time", "z", "y", "x"), attrs=w_field + ) return temp_grid @@ -457,9 +457,9 @@ def make_constraint_from_wrf(Grid, file_path, wrf_time, radar_loc, vel_field=Non # Parse names of velocity field if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") + vel_field = Grid.attrs["first_grid_name"] - analysis_grid_shape = Grid.fields[vel_field]["data"].shape + analysis_grid_shape = Grid[vel_field].values.shape u = np.ones(analysis_grid_shape) v = np.ones(analysis_grid_shape) w = np.zeros(analysis_grid_shape) @@ -473,9 +473,9 @@ def make_constraint_from_wrf(Grid, file_path, wrf_time, radar_loc, vel_field=Non PHB_wrf = wrf_cdf.variables["PHB"][:] alt_wrf = (PH_wrf + PHB_wrf) / 9.81 - new_grid_x = Grid.point_x["data"] - new_grid_y = Grid.point_y["data"] - new_grid_z = Grid.point_z["data"] + new_grid_x = Grid["point_x"].values + new_grid_y = Grid["point_y"].values + new_grid_z = Grid["point_z"].values # Find timestep from datetime time_wrf = wrf_cdf.variables["Times"] @@ -517,12 +517,18 @@ def make_constraint_from_wrf(Grid, file_path, wrf_time, radar_loc, vel_field=Non u = griddata( (z, y, x_stag), U_wrf, (new_grid_z, new_grid_y, new_grid_x), fill_value=0.0 ) - u_dict = {"data": u, "long_name": "U from WRF", "units": "m/s"} - v_dict = {"data": v, "long_name": "V from WRF", "units": "m/s"} - w_dict = {"data": w, "long_name": "W from WRF", "units": "m/s"} - Grid.add_field("U_wrf", u_dict, replace_existing=True) - Grid.add_field("V_wrf", v_dict, replace_existing=True) - Grid.add_field("W_wrf", w_dict, replace_existing=True) + u_dict = {"long_name": "U from WRF", "units": "m/s"} + v_dict = {"long_name": "V from WRF", "units": "m/s"} + w_dict = {"long_name": "W from WRF", "units": "m/s"} + Grid["U_wrf"] = xr.DataArray( + np.expand_dims(u, 0), dims=("time", "z", "y", "x"), attrs=u_dict + ) + Grid["V_wrf"] = xr.DataArray( + np.expand_dims(v, 0), dims=("time", "z", "y", "x"), attrs=v_dict + ) + Grid["W_wrf"] = xr.DataArray( + np.expand_dims(w, 
0), dims=("time", "z", "y", "x"), attrs=w_dict + ) return Grid @@ -575,15 +581,15 @@ def add_hrrr_constraint_to_grid(Grid, file_path, method="nearest"): EARTH_MEAN_RADIUS = 6.3781e6 gh = gh.data[:, :, :] height = (EARTH_MEAN_RADIUS * gh) / (EARTH_MEAN_RADIUS - gh) - height = height - Grid.radar_altitude["data"] - - radar_grid_lat = Grid.point_latitude["data"] - radar_grid_lon = Grid.point_longitude["data"] - radar_grid_alt = Grid.point_z["data"] - lat_min = radar_grid_lat.min() - 1 - lat_max = radar_grid_lat.max() + 1 - lon_min = radar_grid_lon.min() - 1 - lon_max = radar_grid_lon.max() + 1 + height = height - Grid["radar_altitude"].values + + radar_grid_lat = Grid["point_latitude"].values + radar_grid_lon = Grid["point_longitude"].values + radar_grid_alt = Grid["point_z"].values + lat_min = radar_grid_lat.min() + lat_max = radar_grid_lat.max() + lon_min = radar_grid_lon.min() + lon_max = radar_grid_lon.max() lon_r = np.tile(lon, (height.shape[0], 1, 1)) lat_r = np.tile(lat, (height.shape[0], 1, 1)) lon_flattened = lon_r.flatten() @@ -645,15 +651,21 @@ def add_hrrr_constraint_to_grid(Grid, file_path, method="nearest"): ) w_new = w_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) - new_grid = deepcopy(Grid) + new_grid = Grid - u_dict = {"data": u_new, "long_name": "U from HRRR ", "units": "m/s"} - v_dict = {"data": v_new, "long_name": "V from HRRR ", "units": "m/s"} - w_dict = {"data": w_new, "long_name": "W from HRRR ", "units": "m/s"} + u_dict = {"long_name": "U from HRRR", "units": "m/s"} + v_dict = {"long_name": "V from HRRR", "units": "m/s"} + w_dict = {"long_name": "W from HRRR", "units": "m/s"} - new_grid.add_field("U_hrrr", u_dict, replace_existing=True) - new_grid.add_field("V_hrrr", v_dict, replace_existing=True) - new_grid.add_field("W_hrrr", w_dict, replace_existing=True) + new_grid["U_hrrr"] = xr.DataArray( + np.expand_dims(u_new, 0), dims=("time", "z", "y", "x"), attrs=u_dict + ) + new_grid["V_hrrr"] = xr.DataArray( + np.expand_dims(v_new, 0), dims=("time", "z", "y", "x"), attrs=v_dict + ) + new_grid["W_hrrr"] = xr.DataArray( + np.expand_dims(w_new, 0), dims=("time", "z", "y", "x"), attrs=w_dict + ) # Free up memory del grb_u, grb_v, grb_w, lat, lon diff --git a/pydda/constraints/station_data.py b/pydda/constraints/station_data.py index 4d52de14..793350c0 100644 --- a/pydda/constraints/station_data.py +++ b/pydda/constraints/station_data.py @@ -16,8 +16,8 @@ def get_iem_obs(Grid, window=60.0): """ - Returns all of the station observations from the Iowa Mesonet for a given Grid in the format - needed for PyDDA. + Returns all of the station observations from the Iowa Mesonet for a given + Grid in the format needed for PyDDA. 
Parameters ---------- @@ -64,10 +64,10 @@ def get_iem_obs(Grid, window=60.0): WA WI WV WY""" networks = ["AWOS"] - grid_lon_min = Grid.point_longitude["data"].min() - grid_lon_max = Grid.point_longitude["data"].max() - grid_lat_min = Grid.point_latitude["data"].min() - grid_lat_max = Grid.point_latitude["data"].max() + grid_lon_min = Grid["point_longitude"].values.min() + grid_lon_max = Grid["point_longitude"].values.max() + grid_lat_min = Grid["point_latitude"].values.min() + grid_lat_max = Grid["point_latitude"].values.max() for region in regions.split(): networks.append("%s_ASOS" % (region,)) @@ -95,7 +95,7 @@ def get_iem_obs(Grid, window=60.0): # Get the timestamp for each request grid_time = datetime.strptime( - Grid.time["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" + Grid["time"].attrs["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" ) start_time = grid_time - timedelta(minutes=window / 2.0) end_time = grid_time + timedelta(minutes=window / 2.0) @@ -128,12 +128,15 @@ def get_iem_obs(Grid, window=60.0): else: stat_dict["lat"] = my_df["lat"].values[0] stat_dict["lon"] = my_df["lon"].values[0] + projparams = Grid["projection"].attrs + stat_dict["x"], stat_dict["y"] = pyart.core.geographic_to_cartesian( - stat_dict["lon"], stat_dict["lat"], Grid.get_projparams() + stat_dict["lon"], stat_dict["lat"], projparams ) + stat_dict["x"] = stat_dict["x"][0] stat_dict["y"] = stat_dict["y"][0] - stat_dict["z"] = elevations - Grid.origin_altitude["data"][0] + stat_dict["z"] = elevations - Grid["origin_altitude"].values[0] if my_df["drct"].values[0] == "M": continue drct = float(my_df["drct"].values[0]) diff --git a/pydda/cost_functions/_cost_functions_numpy.py b/pydda/cost_functions/_cost_functions_numpy.py index 610b24a3..1ec6867d 100644 --- a/pydda/cost_functions/_cost_functions_numpy.py +++ b/pydda/cost_functions/_cost_functions_numpy.py @@ -494,8 +494,8 @@ def calculate_fall_speed(grid, refl_field=None, frz=4500.0): if refl_field is None: refl_field = pyart.config.get_field_name("reflectivity") - refl = grid.fields[refl_field]["data"] - grid_z = grid.point_z["data"] + refl = grid[refl_field].values + grid_z = grid["point_z"].values A = np.zeros(refl.shape) B = np.zeros(refl.shape) rho = np.exp(-grid_z / 10000.0) diff --git a/pydda/cost_functions/cost_functions.py b/pydda/cost_functions/cost_functions.py index 1db3bab2..a74230b9 100644 --- a/pydda/cost_functions/cost_functions.py +++ b/pydda/cost_functions/cost_functions.py @@ -13,7 +13,6 @@ config.update("jax_enable_x64", True) import jax.numpy as jnp - import jax JAX_AVAILABLE = True except ImportError: @@ -50,8 +49,6 @@ def J_function(winds, parameters): The value of the cost function """ if parameters.engine == "tensorflow": - # Implement bounding box constraints (-100, 100) - if not TENSORFLOW_AVAILABLE: raise ImportError( "Tensorflow 2.5 or greater is needed in order to use TensorFlow-based PyDDA!" @@ -338,7 +335,6 @@ def grad_J(winds, parameters): Gradient vector of cost function """ if parameters.engine == "tensorflow": - if not TENSORFLOW_AVAILABLE: raise ImportError( "Tensorflow 2.5 or greater is needed in order to use TensorFlow-based PyDDA!" 
@@ -447,6 +443,66 @@ roi=parameters.roi, upper_bc=parameters.upper_bc, ) + if parameters.const_boundary_cond is True: + grad = tf.reshape( + grad, + ( + 3, + parameters.grid_shape[0], + parameters.grid_shape[1], + parameters.grid_shape[2], + ), + ) + + grad = tf.concat( + [ + tf.zeros( + ( + 3, + parameters.grid_shape[0], + 1, + parameters.grid_shape[2], + ), + dtype=tf.float32, + ), + grad[:, :, 1:-1, :], + tf.zeros( + ( + 3, + parameters.grid_shape[0], + 1, + parameters.grid_shape[2], + ), + dtype=tf.float32, + ), + ], + axis=2, + ) + grad = tf.concat( + [ + tf.zeros( + ( + 3, + parameters.grid_shape[0], + parameters.grid_shape[1], + 1, + ), + dtype=tf.float32, + ), + grad[:, :, :, 1:-1], + tf.zeros( + ( + 3, + parameters.grid_shape[0], + parameters.grid_shape[1], + 1, + ), + dtype=tf.float32, + ), + ], + axis=3, + ) + grad = tf.reshape(grad, [-1]) elif parameters.engine == "scipy": winds = np.reshape( winds, @@ -547,65 +603,46 @@ roi=parameters.roi, upper_bc=parameters.upper_bc, ) + # Let's see if we need to enforce strong boundary conditions + if parameters.const_boundary_cond is True: + grad = np.reshape( + grad, + ( + 3, + parameters.grid_shape[0], + parameters.grid_shape[1], + parameters.grid_shape[2], + ), + ) + grad[:, :, 0, :] = 0 + grad[:, :, -1, :] = 0 + grad[:, :, :, 0] = 0 + grad[:, :, :, -1] = 0 + grad = grad.flatten() elif parameters.engine == "jax": - return grad_jax(winds, parameters) + grad = grad_jax(winds, parameters) + if parameters.const_boundary_cond is True: + grad = jnp.reshape( + grad, + ( + 3, + parameters.grid_shape[0], + parameters.grid_shape[1], + parameters.grid_shape[2], + ), + ) + grad = grad.at[:, :, 0, :].set(0) + grad = grad.at[:, :, -1, :].set(0) + grad = grad.at[:, :, :, 0].set(0) + grad = grad.at[:, :, :, -1].set(0) + grad = grad.flatten() + return grad if parameters.Nfeval % 10 == 0: print("The gradient of the cost functions is", str(np.linalg.norm(grad, 2))) return grad -def calculate_fall_speed(grid, refl_field=None, frz=4500.0): - """ - Estimates fall speed based on reflectivity. - - Uses methodology of Mike Biggerstaff and Dan Betten - - Parameters - ---------- - Grid: Py-ART Grid - Py-ART Grid containing reflectivity to calculate fall speed from - refl_field: str - String containing name of reflectivity field. None will automatically - determine the name. 
- frz: float - Height of freezing level in m - - Returns - ------- - 3D float array: - Float array of terminal velocities - - """ - # Parse names of velocity field - if refl_field is None: - refl_field = pyart.config.get_field_name("reflectivity") - - refl = grid.fields[refl_field]["data"] - grid_z = grid.point_z["data"] - np.zeros(refl.shape) - A = np.zeros(refl.shape) - B = np.zeros(refl.shape) - rho = np.exp(-grid_z / 10000.0) - A[np.logical_and(grid_z < frz, refl < 55)] = -2.6 - B[np.logical_and(grid_z < frz, refl < 55)] = 0.0107 - A[np.logical_and(grid_z < frz, np.logical_and(refl >= 55, refl < 60))] = -2.5 - B[np.logical_and(grid_z < frz, np.logical_and(refl >= 55, refl < 60))] = 0.013 - A[np.logical_and(grid_z < frz, refl > 60)] = -3.95 - B[np.logical_and(grid_z < frz, refl > 60)] = 0.0148 - A[np.logical_and(grid_z >= frz, refl < 33)] = -0.817 - B[np.logical_and(grid_z >= frz, refl < 33)] = 0.0063 - A[np.logical_and(grid_z >= frz, np.logical_and(refl >= 33, refl < 49))] = -2.5 - B[np.logical_and(grid_z >= frz, np.logical_and(refl >= 33, refl < 49))] = 0.013 - A[np.logical_and(grid_z >= frz, refl > 49)] = -3.95 - B[np.logical_and(grid_z >= frz, refl > 49)] = 0.0148 - - fallspeed = A * np.power(10, refl * B) * np.power(1.2 / rho, 0.4) - print(fallspeed.max()) - del A, B, rho - return np.ma.masked_invalid(fallspeed) - - def J_function_jax(winds, parameters): if not JAX_AVAILABLE: raise ImportError("Jax is needed in order to use the Jax-based PyDDA!") @@ -614,108 +651,108 @@ def J_function_jax(winds, parameters): winds, ( 3, - parameters["grid_shape"][0], - parameters["grid_shape"][1], - parameters["grid_shape"][2], + parameters.grid_shape[0], + parameters.grid_shape[1], + parameters.grid_shape[2], ), ) # Had to change to float because Jax returns device array (use np.float_()) Jvel = _cost_functions_jax.calculate_radial_vel_cost_function( - parameters["vrs"], - parameters["azs"], - parameters["els"], + parameters.vrs, + parameters.azs, + parameters.els, winds[0], winds[1], winds[2], - parameters["wts"], - rmsVr=parameters["rmsVr"], - weights=parameters["weights"], - coeff=parameters["Co"], + parameters.wts, + rmsVr=parameters.rmsVr, + weights=parameters.weights, + coeff=parameters.Co, ) - if parameters["Cm"] > 0: + if parameters.Cm > 0: # Had to change to float because Jax returns device array (use np.float_()) Jmass = _cost_functions_jax.calculate_mass_continuity( winds[0], winds[1], winds[2], - parameters["z"], - parameters["dx"], - parameters["dy"], - parameters["dz"], - coeff=parameters["Cm"], + parameters.z, + parameters.dx, + parameters.dy, + parameters.dz, + coeff=parameters.Cm, ) else: Jmass = 0 - if parameters["Cx"] > 0 or parameters["Cy"] > 0 or parameters["Cz"] > 0: + if parameters.Cx > 0 or parameters.Cy > 0 or parameters.Cz > 0: Jsmooth = _cost_functions_jax.calculate_smoothness_cost( winds[0], winds[1], winds[2], - parameters["dx"], - parameters["dy"], - parameters["dz"], - Cx=parameters["Cx"], - Cy=parameters["Cy"], - Cz=parameters["Cz"], + parameters.dx, + parameters.dy, + parameters.dz, + Cx=parameters.Cx, + Cy=parameters.Cy, + Cz=parameters.Cz, ) else: Jsmooth = 0 - if parameters["Cb"] > 0: + if parameters.Cb > 0: Jbackground = _cost_functions_jax.calculate_background_cost( winds[0], winds[1], winds[2], - parameters["bg_weights"], - parameters["u_back"], - parameters["v_back"], - parameters["Cb"], + parameters.bg_weights, + parameters.u_back, + parameters.v_back, + parameters.Cb, ) else: Jbackground = 0 - if parameters["Cv"] > 0: + if parameters.Cv > 0: # Had to 
change to float because Jax returns device array (use np.float_()) Jvorticity = _cost_functions_jax.calculate_vertical_vorticity_cost( winds[0], winds[1], winds[2], - parameters["dx"], - parameters["dy"], - parameters["dz"], - parameters["Ut"], - parameters["Vt"], - coeff=parameters["Cv"], + parameters.dx, + parameters.dy, + parameters.dz, + parameters.Ut, + parameters.Vt, + coeff=parameters.Cv, ) else: Jvorticity = 0 - if parameters["Cmod"] > 0: + if parameters.Cmod > 0: Jmod = _cost_functions_jax.calculate_model_cost( winds[0], winds[1], winds[2], - parameters["model_weights"], - parameters["u_model"], - parameters["v_model"], - parameters["w_model"], - coeff=parameters["Cmod"], + parameters.model_weights, + parameters.u_model, + parameters.v_model, + parameters.w_model, + coeff=parameters.Cmod, ) else: Jmod = 0 - if parameters["Cpoint"] > 0: + if parameters.Cpoint > 0: Jpoint = _cost_functions_jax.calculate_point_cost( winds[0], winds[1], - parameters["x"], - parameters["y"], - parameters["z"], - parameters["point_list"], - Cp=parameters["Cpoint"], - roi=parameters["roi"], + parameters.x, + parameters.y, + parameters.z, + parameters.point_list, + Cp=parameters.Cpoint, + roi=parameters.roi, ) else: Jpoint = 0 @@ -728,99 +765,150 @@ def grad_jax(winds, parameters): winds, ( 3, - parameters["grid_shape"][0], - parameters["grid_shape"][1], - parameters["grid_shape"][2], + parameters.grid_shape[0], + parameters.grid_shape[1], + parameters.grid_shape[2], ), ) grad = _cost_functions_jax.calculate_grad_radial_vel( - parameters["vrs"], - parameters["els"], - parameters["azs"], + parameters.vrs, + parameters.els, + parameters.azs, winds[0], winds[1], winds[2], - parameters["wts"], - parameters["weights"], - parameters["rmsVr"], - coeff=parameters["Co"], - upper_bc=parameters["upper_bc"], + parameters.wts, + parameters.weights, + parameters.rmsVr, + coeff=parameters.Co, + upper_bc=parameters.upper_bc, ) - if parameters["Cm"] > 0: + if parameters.Cm > 0: grad += _cost_functions_jax.calculate_mass_continuity_gradient( winds[0], winds[1], winds[2], - parameters["z"], - parameters["dx"], - parameters["dy"], - parameters["dz"], - coeff=parameters["Cm"], - upper_bc=parameters["upper_bc"], + parameters.z, + parameters.dx, + parameters.dy, + parameters.dz, + coeff=parameters.Cm, + upper_bc=parameters.upper_bc, ) - if parameters["Cx"] > 0 or parameters["Cy"] > 0 or parameters["Cz"] > 0: + if parameters.Cx > 0 or parameters.Cy > 0 or parameters.Cz > 0: grad += _cost_functions_jax.calculate_smoothness_gradient( winds[0], winds[1], winds[2], - parameters["dx"], - parameters["dy"], - parameters["dz"], - Cx=parameters["Cx"], - Cy=parameters["Cy"], - Cz=parameters["Cz"], - upper_bc=parameters["upper_bc"], + parameters.dx, + parameters.dy, + parameters.dz, + Cx=parameters.Cx, + Cy=parameters.Cy, + Cz=parameters.Cz, + upper_bc=parameters.upper_bc, ) - if parameters["Cb"] > 0: + if parameters.Cb > 0: grad += _cost_functions_jax.calculate_background_gradient( winds[0], winds[1], winds[2], - parameters["bg_weights"], - parameters["u_back"], - parameters["v_back"], - parameters["Cb"], + parameters.bg_weights, + parameters.u_back, + parameters.v_back, + parameters.Cb, ) - if parameters["Cv"] > 0: + if parameters.Cv > 0: grad += _cost_functions_jax.calculate_vertical_vorticity_gradient( winds[0], winds[1], winds[2], - parameters["dx"], - parameters["dy"], - parameters["dz"], - parameters["Ut"], - parameters["Vt"], - coeff=parameters["Cv"], - upper_bc=parameters["upper_bc"], + parameters.dx, + parameters.dy, + 
parameters.dz, + parameters.Ut, + parameters.Vt, + coeff=parameters.Cv, + upper_bc=parameters.upper_bc, ).numpy() - if parameters["Cmod"] > 0: + if parameters.Cmod > 0: grad += _cost_functions_jax.calculate_model_gradient( winds[0], winds[1], winds[2], - parameters["model_weights"], - parameters["u_model"], - parameters["v_model"], - parameters["w_model"], - coeff=parameters["Cmod"], + parameters.model_weights, + parameters.u_model, + parameters.v_model, + parameters.w_model, + coeff=parameters.Cmod, ) - if parameters["Cpoint"] > 0: + if parameters.Cpoint > 0: grad += _cost_functions_jax.calculate_point_gradient( winds[0], winds[1], - parameters["x"], - parameters["y"], - parameters["z"], - parameters["point_list"], - Cp=parameters["Cpoint"], - roi=parameters["roi"], - upper_bc=parameters["upper_bc"], + parameters.x, + parameters.y, + parameters.z, + parameters.point_list, + Cp=parameters.Cpoint, + roi=parameters.roi, + upper_bc=parameters.upper_bc, ) return grad + + +def calculate_fall_speed(grid, refl_field=None, frz=4500.0): + """ + Estimates fall speed based on reflectivity. + + Uses methodology of Mike Biggerstaff and Dan Betten + + Parameters + ---------- + Grid: Py-ART Grid + Py-ART Grid containing reflectivity to calculate fall speed from + refl_field: str + String containing name of reflectivity field. None will automatically + determine the name. + frz: float + Height of freezing level in m + + Returns + ------- + 3D float array: + Float array of terminal velocities + + """ + # Parse names of velocity field + if refl_field is None: + refl_field = pyart.config.get_field_name("reflectivity") + + refl = grid[refl_field].values + grid_z = grid["point_z"].values + np.zeros(refl.shape) + A = np.zeros(refl.shape) + B = np.zeros(refl.shape) + rho = np.exp(-grid_z / 10000.0) + A[np.logical_and(grid_z < frz, refl < 55)] = -2.6 + B[np.logical_and(grid_z < frz, refl < 55)] = 0.0107 + A[np.logical_and(grid_z < frz, np.logical_and(refl >= 55, refl < 60))] = -2.5 + B[np.logical_and(grid_z < frz, np.logical_and(refl >= 55, refl < 60))] = 0.013 + A[np.logical_and(grid_z < frz, refl > 60)] = -3.95 + B[np.logical_and(grid_z < frz, refl > 60)] = 0.0148 + A[np.logical_and(grid_z >= frz, refl < 33)] = -0.817 + B[np.logical_and(grid_z >= frz, refl < 33)] = 0.0063 + A[np.logical_and(grid_z >= frz, np.logical_and(refl >= 33, refl < 49))] = -2.5 + B[np.logical_and(grid_z >= frz, np.logical_and(refl >= 33, refl < 49))] = 0.013 + A[np.logical_and(grid_z >= frz, refl > 49)] = -3.95 + B[np.logical_and(grid_z >= frz, refl > 49)] = 0.0148 + + fallspeed = A * np.power(10, refl * B) * np.power(1.2 / rho, 0.4) + print(fallspeed.max()) + del A, B, rho + return np.ma.masked_invalid(fallspeed) diff --git a/pydda/initialization/wind_fields.py b/pydda/initialization/wind_fields.py index 189c4e35..e0b2f816 100644 --- a/pydda/initialization/wind_fields.py +++ b/pydda/initialization/wind_fields.py @@ -3,6 +3,7 @@ import gc import os import tempfile +import xarray as xr # We want cfgrib to be an optional dependency to ensure Windows compatibility try: @@ -16,7 +17,7 @@ from netCDF4 import Dataset from datetime import datetime, timedelta from scipy.interpolate import interp1d, griddata -from scipy.interpolate import NearestNDInterpolator +from scipy.interpolate import NearestNDInterpolator, LinearNDInterpolator from copy import deepcopy try: @@ -28,7 +29,11 @@ def make_initialization_from_era5( - Grid, file_name=None, vel_field=None, dest_era_file=None + Grid, + file_name=None, + vel_field=None, + dest_era_file=None, + 
method="nearest", ): """ Written by: Hamid Ali Syed and Bobby Jackson @@ -63,6 +68,9 @@ def make_initialization_from_era5( dest_era_file: If this is not None, PyDDA will save the interpolated grid into this file. + method: str + Interpolation method: 'nearest' for nearest neighbor, + 'linear' for linear. Returns ------- new_Grid: Py-ART Grid @@ -71,7 +79,7 @@ def make_initialization_from_era5( """ if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") + vel_field = Grid.attrs["first_grid_name"] if ERA5_AVAILABLE is False and file_name is None: raise ( @@ -84,7 +92,7 @@ def make_initialization_from_era5( ) grid_time = datetime.strptime( - Grid.time["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" + Grid["time"].attrs["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" ) hour_rounded_to_nearest_1 = int(round(float(grid_time.hour))) @@ -118,10 +126,10 @@ def make_initialization_from_era5( # Retrieve u, v, w, and geopotential # Geopotential is needed to convert into height coordinates - N = round(Grid.point_latitude["data"].max(), 2) - S = round(Grid.point_latitude["data"].min(), 2) - E = round(Grid.point_longitude["data"].max(), 2) - W = round(Grid.point_longitude["data"].min(), 2) + N = round(Grid["point_latitude"].values.max(), 2) + S = round(Grid["point_latitude"].values.min(), 2) + E = round(Grid["point_longitude"].values.max(), 2) + W = round(Grid["point_longitude"].values.min(), 2) retrieve_dict = {} pname = "reanalysis-era5-pressure-levels" @@ -192,21 +200,16 @@ def make_initialization_from_era5( ERA_grid.variables["time"].units, "hours since %Y-%m-%d %H:%M:%S.%f" ) - time_seconds = ERA_grid.variables["time"][:] - our_time = np.array([base_time + timedelta(seconds=int(x)) for x in time_seconds]) time_step = np.argmin(np.abs(base_time - grid_time)) - - analysis_grid_shape = Grid.fields[vel_field]["data"].shape - height_ERA = ERA_grid.variables["z"][:] u_ERA = ERA_grid.variables["u"][:] v_ERA = ERA_grid.variables["v"][:] w_ERA = ERA_grid.variables["w"][:] lon_ERA = ERA_grid.variables["longitude"][:] lat_ERA = ERA_grid.variables["latitude"][:] - radar_grid_lat = Grid.point_latitude["data"] - radar_grid_lon = Grid.point_longitude["data"] - radar_grid_alt = Grid.point_z["data"] + radar_grid_lat = Grid["point_latitude"].values + radar_grid_lon = Grid["point_longitude"].values + radar_grid_alt = Grid["point_z"].values u_flattened = u_ERA[time_step].flatten() v_flattened = v_ERA[time_step].flatten() w_flattened = w_ERA[time_step].flatten() @@ -219,17 +222,30 @@ def make_initialization_from_era5( lon_flattened = lon_mgrid.flatten() lat_flattened = lat_mgrid.flatten() height_flattened = height_ERA[time_step].flatten() - height_flattened -= Grid.radar_altitude["data"] + height_flattened -= Grid.radar_altitude.values - u_interp = NearestNDInterpolator( - (height_flattened, lat_flattened, lon_flattened), u_flattened, rescale=True - ) - v_interp = NearestNDInterpolator( - (height_flattened, lat_flattened, lon_flattened), v_flattened, rescale=True - ) - w_interp = NearestNDInterpolator( - (height_flattened, lat_flattened, lon_flattened), w_flattened, rescale=True - ) + if method == "nearest": + u_interp = NearestNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), u_flattened, rescale=True + ) + v_interp = NearestNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), v_flattened, rescale=True + ) + w_interp = NearestNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), w_flattened, rescale=True + ) + elif method == "linear": + u_interp = 
LinearNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), u_flattened, rescale=True + ) + v_interp = LinearNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), v_flattened, rescale=True + ) + w_interp = LinearNDInterpolator( + (height_flattened, lat_flattened, lon_flattened), w_flattened, rescale=True + ) + else: + raise NotImplementedError("%s interpolation method not implemented!" % method) u_new = u_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) v_new = v_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) w_new = w_interp(radar_grid_alt, radar_grid_lat, radar_grid_lon) @@ -241,22 +257,24 @@ tfile.close() u_field = {} - u_field["data"] = u_new u_field["standard_name"] = "u_wind" u_field["long_name"] = "zonal component of wind velocity" v_field = {} - v_field["data"] = v_new v_field["standard_name"] = "v_wind" v_field["long_name"] = "meridional component of wind velocity" w_field = {} - w_field["data"] = w_new w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - return temp_grid + Grid["u"] = xr.DataArray( + np.expand_dims(u_new, 0), dims=("time", "z", "y", "x"), attrs=u_field + ) + Grid["v"] = xr.DataArray( + np.expand_dims(v_new, 0), dims=("time", "z", "y", "x"), attrs=v_field + ) + Grid["w"] = xr.DataArray( + np.expand_dims(w_new, 0), dims=("time", "z", "y", "x"), attrs=w_field + ) + return Grid def make_constant_wind_field(Grid, wind=(0.0, 0.0, 0.0), vel_field=None): @@ -288,8 +306,8 @@ # Parse names of velocity field if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") - analysis_grid_shape = Grid.fields[vel_field]["data"].shape + vel_field = Grid.attrs["first_grid_name"] + analysis_grid_shape = Grid[vel_field].values.shape u = wind[0] * np.ones(analysis_grid_shape) v = wind[1] * np.ones(analysis_grid_shape) @@ -297,24 +315,21 @@ u = np.ma.filled(u, 0) v = np.ma.filled(v, 0) w = np.ma.filled(w, 0) + u_field = {} - u_field["data"] = u u_field["standard_name"] = "u_wind" u_field["long_name"] = "zonal component of wind velocity" v_field = {} - v_field["data"] = v v_field["standard_name"] = "v_wind" v_field["long_name"] = "meridional component of wind velocity" w_field = {} - w_field["data"] = w w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - return temp_grid + Grid["u"] = xr.DataArray(u, dims=("time", "z", "y", "x"), attrs=u_field) + Grid["v"] = xr.DataArray(v, dims=("time", "z", "y", "x"), attrs=v_field) + Grid["w"] = xr.DataArray(w, dims=("time", "z", "y", "x"), attrs=w_field) + return Grid def make_wind_field_from_profile(Grid, profile, vel_field=None): @@ -346,8 +361,8 @@ """ # Parse names of velocity field if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") - analysis_grid_shape = Grid.fields[vel_field]["data"].shape + vel_field = Grid.attrs["first_grid_name"] + analysis_grid_shape = Grid[vel_field].values.shape u = np.ones(analysis_grid_shape) v = np.ones(analysis_grid_shape) w = np.zeros(analysis_grid_shape) @@ -356,32 +371,29 @@ z_back = profile.height u_interp = interp1d(z_back, u_back, bounds_error=False, fill_value="extrapolate") v_interp = interp1d(z_back, v_back, bounds_error=False, fill_value="extrapolate") - u_back2 = u_interp(np.asarray(Grid.z["data"])) - v_back2 = v_interp(np.asarray(Grid.z["data"])) + u_back2 = u_interp(np.asarray(Grid["z"].values)) + v_back2 = v_interp(np.asarray(Grid["z"].values)) for i in range(analysis_grid_shape[1]): u[:, i] = u_back2[i] v[:, i] = v_back2[i] u = np.ma.filled(u, 0) v = np.ma.filled(v, 0) w = np.ma.filled(w, 0) + u_field = {} - u_field["data"] = u u_field["standard_name"] = "u_wind" u_field["long_name"] = "zonal component of wind velocity" v_field = {} - v_field["data"] = v v_field["standard_name"] = "v_wind" v_field["long_name"] = "meridional component of wind velocity" w_field = {} - w_field["data"] = w w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - return temp_grid + Grid["u"] = xr.DataArray(u, dims=("time", "z", "y", "x"), attrs=u_field) + Grid["v"] = xr.DataArray(v, dims=("time", "z", "y", "x"), attrs=v_field) + Grid["w"] = xr.DataArray(w, dims=("time", "z", "y", "x"), attrs=w_field) + return Grid def make_background_from_wrf(Grid, file_path, wrf_time, radar_loc, vel_field=None): @@ -415,9 +427,9 @@ """ # Parse names of velocity field if vel_field is None: - vel_field = pyart.config.get_field_name("corrected_velocity") + vel_field = Grid.attrs["first_grid_name"] - analysis_grid_shape = Grid.fields[vel_field]["data"].shape + analysis_grid_shape = Grid[vel_field].values.shape u = np.ones(analysis_grid_shape) v = np.ones(analysis_grid_shape) w = np.zeros(analysis_grid_shape) @@ -431,9 +443,9 @@ PHB_wrf = wrf_cdf.variables["PHB"][:] alt_wrf = (PH_wrf + PHB_wrf) / 9.81 - new_grid_x = Grid.point_x["data"] - new_grid_y = Grid.point_y["data"] - new_grid_z = Grid.point_z["data"] + new_grid_x = Grid["point_x"].values + new_grid_y = Grid["point_y"].values + new_grid_z = Grid["point_z"].values # Find timestep from datetime time_wrf = wrf_cdf.variables["Times"] @@ -477,25 +489,22 @@ ) u_field = {} - u_field["data"] = u u_field["standard_name"] = "u_wind" u_field["long_name"] = "zonal component of wind velocity" v_field = {} - v_field["data"] = v v_field["standard_name"] = "v_wind" v_field["long_name"] = "meridional component of wind velocity" w_field = {} - w_field["data"] = w w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - return temp_grid + + Grid["u"] = xr.DataArray(np.expand_dims(u, 0), dims=("time", "z", "y", "x"), attrs=u_field) + Grid["v"] = xr.DataArray(np.expand_dims(v, 0), dims=("time", "z", "y", "x"), attrs=v_field) + Grid["w"] = xr.DataArray(np.expand_dims(w, 0), dims=("time", "z", "y", "x"), attrs=w_field) + return Grid -def make_intialization_from_hrrr(Grid, file_path): +def make_intialization_from_hrrr(Grid, file_path, method="linear"): """ This function will read an HRRR GRIB2 file and return initial guess u, v, and w fields from the model @@ -507,6 +516,9 @@ will be interpolated to the Grid's specification and added as a field. file_path: string The path to the GRIB2 file to load. + method: str + Interpolation method: 'nearest' for nearest neighbor, + 'linear' for linear. Returns ------- @@ -542,11 +554,11 @@ EARTH_MEAN_RADIUS = 6.3781e6 gh = gh.data[:, :, :] height = (EARTH_MEAN_RADIUS * gh) / (EARTH_MEAN_RADIUS - gh) - height = height - Grid.radar_altitude["data"] + height = height - Grid["radar_altitude"].values - radar_grid_lat = Grid.point_latitude["data"] - radar_grid_lon = Grid.point_longitude["data"] - radar_grid_alt = Grid.point_z["data"] + radar_grid_lat = Grid["point_latitude"].values + radar_grid_lon = Grid["point_longitude"].values + radar_grid_alt = Grid["point_z"].values lat_min = radar_grid_lat.min() lat_max = radar_grid_lat.max() lon_min = radar_grid_lon.min() @@ -597,19 +609,16 @@ gc.collect() u_field = {} - u_field["data"] = u_new u_field["standard_name"] = "u_wind" u_field["long_name"] = "zonal component of wind velocity" v_field = {} - v_field["data"] = v_new v_field["standard_name"] = "v_wind" v_field["long_name"] = "meridional component of wind velocity" w_field = {} - w_field["data"] = w_new w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" - temp_grid = deepcopy(Grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - return temp_grid + + Grid["u"] = xr.DataArray(np.expand_dims(u_new, 0), dims=("time", "z", "y", "x"), attrs=u_field) + Grid["v"] = xr.DataArray(np.expand_dims(v_new, 0), dims=("time", "z", "y", "x"), attrs=v_field) + Grid["w"] = xr.DataArray(np.expand_dims(w_new, 0), dims=("time", "z", "y", "x"), attrs=w_field) + return Grid diff --git a/pydda/io/__init__.py b/pydda/io/__init__.py new file mode 100644 index 00000000..fb87d127 --- /dev/null +++ b/pydda/io/__init__.py @@ -0,0 +1,17 @@ +""" +================================= +pydda.io (pydda.io) +================================= + +.. currentmodule:: pydda.io + +This module handles the input/output of PyDDA. + +.. autosummary:: + :toctree: generated/ + + read_grid + read_from_pyart_grid +""" + +from .read_grid import read_grid, read_from_pyart_grid diff --git a/pydda/io/read_grid.py b/pydda/io/read_grid.py new file mode 100644 index 00000000..ed2b37ae --- /dev/null +++ b/pydda/io/read_grid.py @@ -0,0 +1,214 @@ +import xarray as xr +import xradar as xd +import numpy as np + +from glob import glob +from datatree import DataTree +from pyart.core.transforms import cartesian_to_geographic + +from ..retrieval.angles import add_azimuth_as_field, add_elevation_as_field + + +def read_grid(file_name, level_name="parent", **kwargs): + """ + Opens a CF-compliant netCDF grid object produced by a utility like + Py-ART or RadxGrid. This will add all variables PyDDA needs + from these grids to create a PyDDA Grid (based on the :class:`xarray.Dataset`). + This will open the grid as an xarray Dataset that can serve as the parent node of a DataTree structure.
+ + Parameters + ---------- + file_name: str or list + The name of the file to open. If a list of files is provided, the grids + are opened as a parent node with all grid datasets concatenated together. + level_name: str + The name of the nest level to put in the datatree. The specified grids will be placed under this node. + + Returns + ------- + root: xarray Dataset + The opened grid with the variables that PyDDA needs added. + """ + + root = xr.open_dataset(file_name, decode_times=False) + + # Find first grid field name + root.attrs["first_grid_name"] = "" + for vars in list(root.variables.keys()): + if root[vars].dims == ("time", "z", "y", "x"): + root.attrs["first_grid_name"] = vars + break + if root.attrs["first_grid_name"] == "": + raise IOError("NetCDF file does not contain any valid radar grid fields!") + + root["point_x"] = xr.DataArray(_point_data_factory(root, "x"), dims=("z", "y", "x")) + root["point_x"].attrs["units"] = root["x"].units + root["point_x"].attrs["long_name"] = "Point x location" + root["point_y"] = xr.DataArray(_point_data_factory(root, "y"), dims=("z", "y", "x")) + root["point_y"].attrs["units"] = root["y"].units + root["point_y"].attrs["long_name"] = "Point y location" + root["point_z"] = xr.DataArray(_point_data_factory(root, "z"), dims=("z", "y", "x")) + root["point_z"].attrs["units"] = root["z"].units + root["point_z"].attrs["long_name"] = "Point z location" + root["point_altitude"] = xr.DataArray( + _point_altitude_data_factory(root), dims=("z", "y", "x") + ) + root["point_altitude"].attrs["units"] = root["z"].units + root["point_altitude"].attrs["long_name"] = "Point altitude" + lon = _point_lon_lat_data_factory(root, 0) + lat = _point_lon_lat_data_factory(root, 1) + root["point_longitude"] = xr.DataArray(lon, dims=("z", "y", "x")) + root["point_longitude"].attrs["units"] = root["radar_longitude"].attrs["units"] + root["point_longitude"].attrs["long_name"] = "Point longitude" + root["point_latitude"] = xr.DataArray(lat, dims=("z", "y", "x")) + root["point_latitude"].attrs["units"] = root["radar_latitude"].attrs["units"] + root["point_latitude"].attrs["long_name"] = "Point latitude" + add_azimuth_as_field(root) + add_elevation_as_field(root) + + return root + + +def read_from_pyart_grid(Grid): + """ + + Converts a Py-ART Grid to a PyDDA Dataset with the necessary variables + + Parameters + ---------- + Grid: Py-ART Grid + The Py-ART Grid to convert to a PyDDA Grid + + Returns + ------- + new_grid: PyDDA Dataset + The xarray Dataset with the additional parameters PyDDA needs + """ + new_grid = Grid + radar_latitude = Grid.radar_latitude + radar_longitude = Grid.radar_longitude + radar_altitude = Grid.radar_altitude + origin_latitude = Grid.origin_latitude + origin_longitude = Grid.origin_longitude + origin_altitude = Grid.origin_altitude + + if len(list(Grid.fields.keys())) > 0: + first_grid_name = list(Grid.fields.keys())[0] + else: + first_grid_name = "" + projection = Grid.get_projparams() + new_grid = new_grid.to_xarray() + + new_grid["projection"] = xr.DataArray(1, dims=(), attrs=projection) + + if "lat_0" in projection.keys(): + new_grid["projection"].attrs["_include_lon_0_lat_0"] = "true" + else: + new_grid["projection"].attrs["_include_lon_0_lat_0"] = "false" + + if "units" not in new_grid["time"].attrs.keys(): + new_grid["time"].attrs["units"] = ( + "seconds since %s" + % new_grid["time"].dt.strftime("%Y-%m-%dT%H:%M:%SZ").values[0] + ) + new_grid.attrs["first_grid_name"] = first_grid_name + x = radar_latitude.pop("data").squeeze() + new_grid["radar_latitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=radar_latitude + ) + x = radar_longitude.pop("data").squeeze() + new_grid["radar_longitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=radar_longitude + ) + x = radar_altitude.pop("data").squeeze() + new_grid["radar_altitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=radar_altitude + ) + x = origin_latitude.pop("data").squeeze() + new_grid["origin_latitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=origin_latitude + ) + x = origin_longitude.pop("data").squeeze() + new_grid["origin_longitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=origin_longitude + ) + x = origin_altitude.pop("data").squeeze() + new_grid["origin_altitude"] = xr.DataArray( + np.atleast_1d(x), dims=("nradar"), attrs=origin_altitude + ) + new_grid["point_x"] = xr.DataArray( + _point_data_factory(new_grid, "x"), dims=("z", "y", "x") + ) + new_grid["point_x"].attrs["units"] = new_grid["x"].units + new_grid["point_x"].attrs["long_name"] = "Point x location" + new_grid["point_y"] = xr.DataArray( + _point_data_factory(new_grid, "y"), dims=("z", "y", "x") + ) + new_grid["point_y"].attrs["units"] = new_grid["y"].units + new_grid["point_y"].attrs["long_name"] = "Point y location" + new_grid["point_z"] = xr.DataArray( + _point_data_factory(new_grid, "z"), dims=("z", "y", "x") + ) + new_grid["point_z"].attrs["units"] = new_grid["z"].units + new_grid["point_z"].attrs["long_name"] = "Point z location" + new_grid["point_altitude"] = xr.DataArray( + _point_altitude_data_factory(new_grid), dims=("z", "y", "x") + ) + new_grid["point_altitude"].attrs["units"] = new_grid["z"].units + new_grid["point_altitude"].attrs["long_name"] = "Point altitude" + lon = _point_lon_lat_data_factory(new_grid, 0) + lat = _point_lon_lat_data_factory(new_grid, 1) + new_grid["point_longitude"] = xr.DataArray( + lon, + dims=("z", "y", "x"), + ) + if "units" in radar_longitude.keys(): + new_grid["point_longitude"].attrs["units"] = radar_longitude["units"] + new_grid["point_longitude"].attrs["long_name"] = "Point longitude" + new_grid["point_latitude"] = xr.DataArray(lat, dims=("z", "y", "x")) + if "units" in radar_latitude.keys(): + new_grid["point_latitude"].attrs["units"] = radar_latitude["units"] + new_grid["point_latitude"].attrs["long_name"] = "Point latitude" + add_azimuth_as_field(new_grid) + add_elevation_as_field(new_grid) + + return new_grid + + +def _point_data_factory(grid, coordinate): + """The function which returns the locations of all points.""" + reg_x = grid["x"].values + reg_y = grid["y"].values + reg_z = grid["z"].values + if coordinate == "x": + return np.tile(reg_x, (len(reg_z), len(reg_y), 1)) + elif coordinate == "y": + return np.tile(reg_y, (len(reg_z), len(reg_x), 1)).swapaxes(1, 2) + else: + assert coordinate == "z" + return np.tile(reg_z, (len(reg_x), len(reg_y), 1)).swapaxes(0, 2) + + +def _point_lon_lat_data_factory(grid, coordinate): + """The function which returns the geographic point locations.""" + x = grid["point_x"].values + y = grid["point_y"].values + projparams = grid["projection"].attrs + + if "_include_lon_0_lat_0" in projparams.keys(): + if projparams["_include_lon_0_lat_0"] == "true": + projparams["lon_0"] = grid["origin_longitude"].values + projparams["lat_0"] = grid["origin_latitude"].values + + geographic_coords = cartesian_to_geographic(x, y, projparams) + + return geographic_coords[coordinate] + + +def _point_altitude_data_factory(grid): + return grid["origin_altitude"].values[0] + grid["point_z"].values diff --git a/pydda/retrieval/__init__.py b/pydda/retrieval/__init__.py index e0b98088..a1f5c2da 100644 --- a/pydda/retrieval/__init__.py +++ b/pydda/retrieval/__init__.py @@ -18,8 +18,12 @@ 
get_dd_wind_field get_bca DDParameters + get_dd_wind_field_nested + make_initialization_from_other_grid """ from .wind_retrieve import get_dd_wind_field from .wind_retrieve import get_bca from .wind_retrieve import DDParameters +from .nesting import get_dd_wind_field_nested +from .nesting import make_initialization_from_other_grid diff --git a/pydda/retrieval/angles.py b/pydda/retrieval/angles.py index c796f649..3ff21232 100644 --- a/pydda/retrieval/angles.py +++ b/pydda/retrieval/angles.py @@ -1,4 +1,5 @@ import numpy as np +import xarray as xr def rsl_get_slantr_and_elev(gr, h): @@ -72,30 +73,29 @@ def _add_field_to_object( units="degrees from north", long_name="Azimuth", standard_name="Azimuth", - dz_name="DT", ): """ Adds an unmasked field to the Py-ART radar object. """ - field_dict = { - "data": np.ma.asanyarray(field), + attr_dict = { "units": units, "long_name": long_name, "standard_name": standard_name, - "missing_value": 1.0 * radar.fields[dz_name]["_FillValue"], } - radar.add_field(field_name, field_dict, replace_existing=True) + radar[field_name] = xr.DataArray( + np.expand_dims(field, 0), dims=("time", "z", "y", "x"), attrs=attr_dict + ) return radar -def add_azimuth_as_field(grid, dz_name="DT", az_name="AZ", bad=-32768): +def add_azimuth_as_field(grid, az_name="AZ", bad=-32768): """ - Add azimuth field to a Py-ART Grid object. The bearing to each gridpoint + Add azimuth field to an xarray Dataset. The bearing to each gridpoint is computed using the Haversine method and Great Circle approximation. Parameters ---------- - grid : Py-ART Grid object + grid : xarray Dataset Input Grid object for modification. dz_name : str Name of the reflectivity field in the Grid. @@ -106,22 +106,22 @@ def add_azimuth_as_field(grid, dz_name="DT", az_name="AZ", bad=-32768): Returns ------- - grid : Py-ART Grid object + grid : xarray Dataset Output Grid object with azimuth field added. """ az = gc_bear_array( - grid.radar_latitude["data"][0], - grid.radar_longitude["data"][0], - grid.point_latitude["data"], - grid.point_longitude["data"], + grid["radar_latitude"].values[0], + grid["radar_longitude"].values[0], + grid["point_latitude"].values, + grid["point_longitude"].values, ) - np.isfinite(az) + az = np.ma.masked_invalid(az) - grid = _add_field_to_object(grid, az, dz_name=dz_name, field_name=az_name) + grid = _add_field_to_object(grid, az, field_name=az_name) return grid -def add_elevation_as_field(grid, dz_name="DT", el_name="EL", bad=-32768): +def add_elevation_as_field(grid, el_name="EL"): """ Add elevation field to a Py-ART Grid object. The elevation to each gridpoint is computed using the standard radar beam propagation @@ -130,7 +130,7 @@ def add_elevation_as_field(grid, dz_name="DT", el_name="EL", bad=-32768): Parameters ---------- - grid : Py-ART Grid object + grid : xarray Dataset Input Grid object for modification. dz_name : str Name of the reflectivity field in the Grid. @@ -141,20 +141,19 @@ def add_elevation_as_field(grid, dz_name="DT", el_name="EL", bad=-32768): Returns ------- - grid : Py-ART Grid object + grid : xarray Dataset Output Grid object with elevation field added. 
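+
+    A short sketch, assuming ``grid`` is a PyDDA Dataset that already carries
+    the ``radar_*`` and ``point_*`` variables added by :func:`pydda.io.read_grid`::
+
+        grid = add_elevation_as_field(grid, el_name="EL")
+        print(grid["EL"].dims)  # ("time", "z", "y", "x")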
""" gr = gc_dist( - grid.radar_latitude["data"][0], - grid.radar_longitude["data"][0], - grid.point_latitude["data"], - grid.point_longitude["data"], + grid["radar_latitude"].values[0], + grid["radar_longitude"].values[0], + grid["point_latitude"].values, + grid["point_longitude"].values, ) h3 = 0.0 * gr - for i in range(len(grid.z["data"])): - h3[i, :, :] = grid.z["data"][i] - grid.radar_altitude["data"][0] + for i in range(len(grid.z.values)): + h3[i, :, :] = grid.z.values[i] - grid.radar_altitude.values[0] sr, el = rsl_get_slantr_and_elev(gr, h3 / 1000.0) - np.isfinite(el) el = np.ma.masked_invalid(el) - grid = _add_field_to_object(grid, el, dz_name=dz_name, field_name=el_name) + grid = _add_field_to_object(grid, el, field_name=el_name) return grid diff --git a/pydda/retrieval/nesting.py b/pydda/retrieval/nesting.py new file mode 100644 index 00000000..23874e3d --- /dev/null +++ b/pydda/retrieval/nesting.py @@ -0,0 +1,338 @@ +import pyart +import numpy as np +import xarray as xr + +from scipy.interpolate import RegularGridInterpolator +from .wind_retrieve import get_dd_wind_field +from datatree import DataTree + + +def get_dd_wind_field_nested(grid_tree: DataTree, **kwargs): + """ + Does a wind retrieval over nested grids. The nested grids are created using PyART's + :func:`pyart.map.grid_from_radars` function and then placed into a tree structure using + dictionaries. Each node of the tree has three parameters: + 'input_grids': The list of PyART grids for the given level of the grid + 'kwargs': The list of key word arguments for input to the get_dd_wind_field function for the set of grids. + If this is None, then the default keyword arguments are carried from the keyword arguments of this function. + 'children': The list of trees that are the children of this node. + + The function will output the same tree, with the list of output grids of each level output to the 'output_grids' + member of the tree structure. 
+ """ + + # Look for radars in current level + child_list = list(grid_tree.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(grid_tree[child].to_dataset()) + rad_names.append(child) + + if len(list(grid_tree.attrs.keys())) == 0 and len(grid_list) > 0: + output_grids, output_parameters = get_dd_wind_field(grid_list, **kwargs) + elif len(grid_list) > 0: + my_kwargs = grid_tree.attrs + output_grids, output_parameters = get_dd_wind_field(grid_list, **my_kwargs) + output_parameters = output_parameters.__dict__ + grid_tree["weights"] = xr.DataArray( + output_parameters.pop("weights"), dims=("nradar", "z", "y", "x") + ) + grid_tree["bg_weights"] = xr.DataArray( + output_parameters.pop("bg_weights"), dims=("z", "y", "x") + ) + grid_tree["model_weights"] = xr.DataArray( + output_parameters.pop("model_weights"), dims=("nmodel", "z", "y", "x") + ) + output_parameters.pop("u_model") + output_parameters.pop("v_model") + output_parameters.pop("w_model") + grid_tree["output_parameters"] = xr.DataArray([], attrs=output_parameters) + + grid_tree.__setitem__("u", output_grids[0]["u"]) + grid_tree.__setitem__("v", output_grids[0]["v"]) + grid_tree.__setitem__("w", output_grids[0]["w"]) + grid_tree["u"].attrs = output_grids[0]["u"].attrs + grid_tree["v"].attrs = output_grids[0]["v"].attrs + grid_tree["w"].attrs = output_grids[0]["w"].attrs + + if child_list == []: + return grid_tree + + nests = [] + for child in child_list: + if "radar_" not in child: + nests.append(child) + nests = sorted(nests) + for child in nests: + # Only update child initalization if we are not in parent node + if len(grid_list) > 0: + temp_src = grid_tree[rad_names[0]].to_dataset() + temp_src["u"] = grid_tree.ds["u"] + temp_src["v"] = grid_tree.ds["v"] + temp_src["w"] = grid_tree.ds["w"] + input_grids = make_initialization_from_other_grid( + temp_src, grid_tree.children[child][rad_names[0]].to_dataset() + ) + + if "u" not in grid_tree.children[child][rad_names[0]].ds.variables.keys(): + grid_tree.children[child][rad_names[0]].__setitem__( + "u", + xr.zeros_like( + grid_tree.children[child][rad_names[0]].ds[ + grid_tree.children[child][rad_names[0]].ds.attrs[ + "first_grid_name" + ] + ] + ), + ) + grid_tree.children[child][rad_names[0]]["u"].attrs = input_grids[ + "u" + ].attrs + + if "v" not in grid_tree.children[child][rad_names[0]].ds.variables.keys(): + grid_tree.children[child][rad_names[0]].__setitem__( + "v", + xr.zeros_like( + grid_tree.children[child][rad_names[0]].ds[ + grid_tree.children[child][rad_names[0]].ds.attrs[ + "first_grid_name" + ] + ] + ), + ) + grid_tree.children[child][rad_names[0]]["v"].attrs = input_grids[ + "v" + ].attrs + + if "w" not in grid_tree.children[child][rad_names[0]].ds.variables.keys(): + grid_tree.children[child][rad_names[0]].__setitem__( + "w", + xr.zeros_like( + grid_tree.children[child][rad_names[0]].ds[ + grid_tree.children[child][rad_names[0]].ds.attrs[ + "first_grid_name" + ] + ] + ), + ) + grid_tree.children[child][rad_names[0]]["w"].attrs = input_grids[ + "w" + ].attrs + + grid_tree.children[child][rad_names[0]].__setitem__("u", input_grids["u"]) + grid_tree.children[child][rad_names[0]].__setitem__("v", input_grids["v"]) + grid_tree.children[child][rad_names[0]].__setitem__("w", input_grids["w"]) + + grid_tree.children[child].parent.attrs["const_boundary_cond"] = True + + temp_tree = get_dd_wind_field_nested(grid_tree.children[child]) + grid_tree.children[child].__setitem__("u", temp_tree["u"]) + 
grid_tree.children[child].__setitem__("v", temp_tree["v"])
+        grid_tree.children[child].__setitem__("w", temp_tree["w"])
+        grid_tree.children[child]["u"].attrs = temp_tree.ds["u"].attrs
+        grid_tree.children[child]["v"].attrs = temp_tree.ds["v"].attrs
+        grid_tree.children[child]["w"].attrs = temp_tree.ds["w"].attrs
+
+    # Update parent grids from children
+    if len(rad_names) > 0:
+        for child in nests:
+            temp_src = grid_tree.children[child][rad_names[0]].to_dataset()
+            temp_src["u"] = grid_tree.children[child].ds["u"]
+            temp_src["v"] = grid_tree.children[child].ds["v"]
+            temp_src["w"] = grid_tree.children[child].ds["w"]
+            temp_dest = grid_tree[rad_names[0]].to_dataset()
+            temp_dest["u"] = grid_tree.ds["u"]
+            temp_dest["v"] = grid_tree.ds["v"]
+            temp_dest["w"] = grid_tree.ds["w"]
+            output_grids = make_initialization_from_other_grid(
+                temp_src,
+                temp_dest,
+            )
+            grid_tree.__setitem__("u", output_grids["u"])
+            grid_tree.__setitem__("v", output_grids["v"])
+            grid_tree.__setitem__("w", output_grids["w"])
+            grid_tree["u"].attrs = output_grids["u"].attrs
+            grid_tree["v"].attrs = output_grids["v"].attrs
+            grid_tree["w"].attrs = output_grids["w"].attrs
+    return grid_tree
+
+
+def make_initialization_from_other_grid(grid_src, grid_dest, method="linear"):
+    """
+    This function will create an initialization by interpolating a wind field
+    from a grid with a different specification than the analysis grid. This
+    allows, for example, for interpolating a coarser grid onto a finer grid
+    for further refinement of the retrieval. The source and destination grid
+    must have the same origin point.
+
+    Parameters
+    ----------
+    grid_src: Grid
+        The grid to interpolate.
+    grid_dest: Grid
+        The destination analysis grid to interpolate the source grid onto.
+    method: str
+        Interpolation method to use.
+
+    Returns
+    -------
+    grid: Grid
+        The grid with the u, v, and w from the source grid interpolated.
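+
+    A short sketch, assuming ``coarse_grid`` and ``fine_grid`` are PyDDA
+    Datasets that share the same origin, and that ``coarse_grid`` already
+    holds retrieved ``u``, ``v``, and ``w`` fields::
+
+        fine_grid = make_initialization_from_other_grid(
+            coarse_grid, fine_grid, method="linear"
+        )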
+ """ + if not np.all( + grid_src["origin_latitude"].values == grid_dest["origin_latitude"].values + ): + raise ValueError("Source and destination grid must have same lat/lon origin!") + + if not np.all( + grid_src["origin_longitude"].values == grid_dest["origin_longitude"].values + ): + raise ValueError("Source and destination grid must have same lat/lon origin!") + + if not np.all( + grid_src["origin_altitude"].values == grid_dest["origin_altitude"].values + ): + correction_factor = ( + grid_dest["origin_altitude"].values - grid_src["origin_altitude"].values + ) + else: + correction_factor = 0 + + u_src = grid_src["u"].values.squeeze() + v_src = grid_src["v"].values.squeeze() + w_src = grid_src["w"].values.squeeze() + x_src = grid_src["x"].values + y_src = grid_src["y"].values + z_src = grid_src["z"].values + + x_dst = grid_dest["point_x"].values + y_dst = grid_dest["point_y"].values + z_dst = grid_dest["point_z"].values - correction_factor + + x_dst_p = grid_dest["x"].values + y_dst_p = grid_dest["y"].values + z_dst_p = grid_dest["z"].values - correction_factor + + # Subset destination grid coordinates + x_src_min = x_src.min() + x_src_max = x_src.max() + y_src_min = y_src.min() + y_src_max = y_src.max() + z_src_min = z_src.min() + z_src_max = z_src.max() + subset_z = np.argwhere( + np.logical_and(z_dst_p >= z_src_min, z_dst_p <= z_src_max) + ).astype(int) + subset_y = np.argwhere( + np.logical_and(y_dst_p >= y_src_min, y_dst_p <= y_src_max) + ).astype(int) + subset_x = np.argwhere( + np.logical_and(x_dst_p >= x_src_min, x_dst_p <= x_src_max) + ).astype(int) + + u_interp = RegularGridInterpolator((z_src, y_src, x_src), u_src, method=method) + v_interp = RegularGridInterpolator((z_src, y_src, x_src), v_src, method=method) + w_interp = RegularGridInterpolator((z_src, y_src, x_src), w_src, method=method) + u_dest = u_interp( + ( + z_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + y_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + x_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + ) + ) + v_dest = v_interp( + ( + z_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + y_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + x_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + ) + ) + w_dest = w_interp( + ( + z_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + y_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + x_dst[ + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ], + ) + ) + + if "u" not in grid_dest.variables.keys(): + grid_dest["u"] = xr.DataArray( + np.expand_dims(u_dest, 0), + dims=("time", "z", "y", "x"), + attrs=grid_src["u"].attrs, + ) + else: + grid_dest["u"][ + :, + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + 
int(subset_x[0]) : int(subset_x[-1] + 1), + ] = np.expand_dims(u_dest, 0) + + grid_dest["u"].attrs = grid_src["u"].attrs + if "v" not in grid_dest.variables.keys(): + grid_dest["v"] = xr.DataArray( + np.expand_dims(v_dest, 0), + dims=("time", "z", "y", "x"), + attrs=grid_src["v"].attrs, + ) + else: + grid_dest["v"][ + :, + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ] = np.expand_dims(v_dest, 0) + grid_dest["v"].attrs = grid_src["v"].attrs + if "w" not in grid_dest.variables.keys(): + grid_dest["w"] = xr.DataArray( + np.expand_dims(w_dest, 0), + dims=("time", "z", "y", "x"), + attrs=grid_src["w"].attrs, + ) + else: + grid_dest["w"][ + :, + int(subset_z[0]) : int(subset_z[-1] + 1), + int(subset_y[0]) : int(subset_y[-1] + 1), + int(subset_x[0]) : int(subset_x[-1] + 1), + ] = w_dest + grid_dest["w"].attrs = grid_src["w"].attrs + return grid_dest diff --git a/pydda/retrieval/wind_retrieve.py b/pydda/retrieval/wind_retrieve.py index 8d3973f6..f88cf108 100644 --- a/pydda/retrieval/wind_retrieve.py +++ b/pydda/retrieval/wind_retrieve.py @@ -7,14 +7,14 @@ import pyart import numpy as np import time -import cartopy.crs as ccrs import math -import sys +import xarray as xr from scipy.interpolate import interp1d from scipy.optimize import fmin_l_bfgs_b from scipy.signal import savgol_filter from .auglag import auglag +from ..io import read_from_pyart_grid try: import tensorflow_probability as tfp @@ -182,6 +182,7 @@ def __init__(self): self.cvtol = 1e-2 self.gtol = 1e-2 self.Jveltol = 100.0 + self.const_boundary_cond = False def _get_dd_wind_field_scipy( @@ -228,6 +229,8 @@ def _get_dd_wind_field_scipy( roi=1000.0, wind_tol=0.1, tolerance=1e-8, + const_boundary_cond=False, + max_wind_mag=100.0, ): global _wcurrmax global _wprevmax @@ -250,20 +253,23 @@ def _get_dd_wind_field_scipy( parameters.Ut = Ut parameters.Vt = Vt parameters.engine = engine - + parameters.const_boundary_cond = const_boundary_cond + print(parameters.const_boundary_cond) # Ensure that all Grids are on the same coordinate system prev_grid = Grids[0] for g in Grids: - if not np.allclose(g.x["data"], prev_grid.x["data"], atol=10): + if not np.allclose(g["x"].values, prev_grid["x"].values, atol=10): raise ValueError("Grids do not have equal x coordinates!") - if not np.allclose(g.y["data"], prev_grid.y["data"], atol=10): + if not np.allclose(g["y"].values, prev_grid["y"].values, atol=10): raise ValueError("Grids do not have equal y coordinates!") - if not np.allclose(g.z["data"], prev_grid.z["data"], atol=10): + if not np.allclose(g["z"].values, prev_grid["z"].values, atol=10): raise ValueError("Grids do not have equal z coordinates!") - if not g.origin_latitude["data"] == prev_grid.origin_latitude["data"]: + if not np.allclose( + g["origin_latitude"].values, prev_grid["origin_latitude"].values + ): raise ValueError(("Grids have unequal origin lat/lons!")) prev_grid = g @@ -292,15 +298,15 @@ def _get_dd_wind_field_scipy( ) u_interp = interp1d(z_back[valid_inds], u_back[valid_inds], bounds_error=False) v_interp = interp1d(z_back[valid_inds], v_back[valid_inds], bounds_error=False) - if isinstance(Grids[0].z["data"], np.ma.MaskedArray): - parameters.u_back = u_interp(Grids[0].z["data"].filled(np.nan)) - parameters.v_back = v_interp(Grids[0].z["data"].filled(np.nan)) + if isinstance(Grids[0]["z"].values, np.ma.MaskedArray): + parameters.u_back = u_interp(Grids[0]["z"].values.filled(np.nan)) + parameters.v_back = 
v_interp(Grids[0]["z"].values.filled(np.nan)) else: - parameters.u_back = u_interp(Grids[0].z["data"]) - parameters.v_back = v_interp(Grids[0].z["data"]) + parameters.u_back = u_interp(Grids[0]["z"].values) + parameters.v_back = v_interp(Grids[0]["z"].values) print("Grid levels:") - print(Grids[0].z["data"]) + print(Grids[0]["z"].values) # Parse names of velocity field if refl_field is None: @@ -336,17 +342,16 @@ def _get_dd_wind_field_scipy( for i in range(len(Grids)): parameters.wts.append( np.ma.masked_invalid( - calculate_fall_speed(Grids[i], refl_field=refl_field, frz=frz) + calculate_fall_speed(Grids[i], refl_field=refl_field, frz=frz).squeeze() ) ) - add_azimuth_as_field(Grids[i], dz_name=refl_field) - add_elevation_as_field(Grids[i], dz_name=refl_field) - parameters.vrs.append(np.ma.masked_invalid(Grids[i].fields[vel_name]["data"])) + + parameters.vrs.append(np.ma.masked_invalid(Grids[i][vel_name].values.squeeze())) parameters.azs.append( - np.ma.masked_invalid(Grids[i].fields["AZ"]["data"] * np.pi / 180) + np.ma.masked_invalid(Grids[i]["AZ"].values.squeeze() * np.pi / 180) ) parameters.els.append( - np.ma.masked_invalid(Grids[i].fields["EL"]["data"] * np.pi / 180) + np.ma.masked_invalid(Grids[i]["EL"].values.squeeze() * np.pi / 180) ) if len(Grids) > 1: @@ -355,15 +360,7 @@ def _get_dd_wind_field_scipy( if i == j: continue print(("Calculating weights for radars " + str(i) + " and " + str(j))) - bca[i, j] = get_bca( - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].point_x["data"][0], - Grids[i].point_y["data"][0], - Grids[i].get_projparams(), - ) + bca[i, j] = get_bca(Grids[i], Grids[j]) for k in range(parameters.vrs[i].shape[0]): if weights_obs is None: @@ -513,47 +510,42 @@ def _get_dd_wind_field_scipy( winds = winds.flatten() print("Starting solver ") - parameters.dx = np.diff(Grids[0].x["data"], axis=0)[0] - parameters.dy = np.diff(Grids[0].y["data"], axis=0)[0] - parameters.dz = np.diff(Grids[0].z["data"], axis=0)[0] + parameters.dx = np.diff(Grids[0]["x"].values, axis=0)[0] + parameters.dy = np.diff(Grids[0]["y"].values, axis=0)[0] + parameters.dz = np.diff(Grids[0]["z"].values, axis=0)[0] print("rmsVR = " + str(parameters.rmsVr)) print("Total points: %d" % parameters.weights.sum()) - parameters.z = Grids[0].point_z["data"] - parameters.x = Grids[0].point_x["data"] - parameters.y = Grids[0].point_y["data"] + parameters.z = Grids[0]["point_z"].values + parameters.x = Grids[0]["point_x"].values + parameters.y = Grids[0]["point_y"].values bt = time.time() # First pass - no filter wcurrmax = w_init.max() print("The max of w_init is", wcurrmax) iterations = 0 - bounds = [(-x, x) for x in 100 * np.ones(winds.shape)] + bounds = [(-x, x) for x in max_wind_mag * np.ones(winds.shape)] if model_fields is not None: for i, the_field in enumerate(model_fields): u_field = "U_" + the_field v_field = "V_" + the_field w_field = "W_" + the_field - model_finite = np.logical_and.reduce( - ( - np.isfinite(Grids[0].fields[u_field]["data"]), - np.isfinite(Grids[0].fields[w_field]["data"]), - np.isfinite(Grids[0].fields[v_field]["data"]), - ) - ) - parameters.model_weights[i] = np.where( - model_finite, parameters.model_weights[i], 0 - ) - parameters.u_model.append( - np.nan_to_num(Grids[0].fields[u_field]["data"], 0) - ) - parameters.v_model.append( - np.nan_to_num(Grids[0].fields[v_field]["data"], 0) - ) - parameters.w_model.append( - np.nan_to_num(Grids[0].fields[w_field]["data"], 0) + 
parameters.u_model.append(np.nan_to_num(Grids[0][u_field].values.squeeze())) + parameters.v_model.append(np.nan_to_num(Grids[0][v_field].values.squeeze())) + parameters.w_model.append(np.nan_to_num(Grids[0][w_field].values.squeeze())) + + # Don't weigh in where model data unavailable + where_finite_u = np.isfinite(Grids[0][u_field].values.squeeze()) + where_finite_v = np.isfinite(Grids[0][v_field].values.squeeze()) + where_finite_w = np.isfinite(Grids[0][w_field].values.squeeze()) + parameters.model_weights[i, :, :, :] = np.where( + np.logical_and.reduce((where_finite_u, where_finite_v, where_finite_w)), + 1, + 0, ) + print("Total number of model points: %d" % np.sum(parameters.model_weights)) parameters.Co = Co parameters.Cm = Cm parameters.Cx = Cx @@ -622,14 +614,14 @@ def _vert_velocity_callback(x): else: def loss_and_gradient(x): - x_loss = J_function_jax(x["winds"], vars(parameters)) + x_loss = J_function_jax(x["winds"], parameters) x_grad = {} - x_grad["winds"] = grad_jax(x["winds"], vars(parameters)) + x_grad["winds"] = grad_jax(x["winds"], parameters) return x_loss, x_grad bounds = ( - {"winds": -100 * jnp.ones(winds.shape)}, - {"winds": 100 * jnp.ones(winds.shape)}, + {"winds": -max_wind_mag * jnp.ones(winds.shape)}, + {"winds": max_wind_mag * jnp.ones(winds.shape)}, ) winds = jnp.array(winds) solver = jaxopt.LBFGSB( @@ -673,10 +665,10 @@ def loss_and_gradient(x): parameters.weights = tf.constant(parameters.weights, dtype=tf.float32) parameters.bg_weights[parameters.bg_weights > 0] = 1 parameters.bg_weights = tf.constant(parameters.bg_weights, dtype=tf.float32) - parameters.z = tf.constant(Grids[0].point_z["data"], dtype=tf.float32) - parameters.x = tf.constant(Grids[0].point_x["data"], dtype=tf.float32) - parameters.y = tf.constant(Grids[0].point_y["data"], dtype=tf.float32) - bounds = [(-x, x) for x in 100 * np.ones(winds.shape, dtype="float32")] + parameters.z = tf.constant(Grids[0]["point_z"].values, dtype=tf.float32) + parameters.x = tf.constant(Grids[0]["point_x"].values, dtype=tf.float32) + parameters.y = tf.constant(Grids[0]["point_y"].values, dtype=tf.float32) + bounds = [(-x, x) for x in max_wind_mag * np.ones(winds.shape, dtype="float32")] winds = winds.astype("float32") winds, mult, AL_Filter, funcalls = auglag(winds, parameters, bounds) @@ -737,20 +729,17 @@ def loss_and_gradient(x): if mask_w_outside_opt is True: w = np.ma.masked_where(where_mask < 1, w) - u_field = deepcopy(Grids[0].fields[vel_name]) - u_field["data"] = u + u_field = {} u_field["standard_name"] = "u_wind" u_field["long_name"] = "meridional component of wind velocity" u_field["min_bca"] = min_bca u_field["max_bca"] = max_bca - v_field = deepcopy(Grids[0].fields[vel_name]) - v_field["data"] = v + v_field = {} v_field["standard_name"] = "v_wind" v_field["long_name"] = "zonal component of wind velocity" v_field["min_bca"] = min_bca v_field["max_bca"] = max_bca - w_field = deepcopy(Grids[0].fields[vel_name]) - w_field["data"] = w + w_field = {} w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" w_field["min_bca"] = min_bca @@ -759,11 +748,16 @@ def loss_and_gradient(x): new_grid_list = [] for grid in Grids: - temp_grid = deepcopy(grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - new_grid_list.append(temp_grid) + grid["u"] = xr.DataArray( + np.expand_dims(u, 0), dims=("time", "z", "y", "x"), attrs=u_field + ) + grid["v"] = 
xr.DataArray( + np.expand_dims(v, 0), dims=("time", "z", "y", "x"), attrs=v_field + ) + grid["w"] = xr.DataArray( + np.expand_dims(w, 0), dims=("time", "z", "y", "x"), attrs=w_field + ) + new_grid_list.append(grid) return new_grid_list, parameters @@ -810,6 +804,8 @@ def _get_dd_wind_field_tensorflow( parallel_iterations=1, wind_tol=0.1, tolerance=1e-8, + const_boundary_cond=False, + max_wind_mag=100.0, ): if not TENSORFLOW_AVAILABLE: raise ImportError( @@ -835,19 +831,23 @@ def _get_dd_wind_field_tensorflow( parameters.upper_bc = upper_bc parameters.lower_bc = lower_bc parameters.engine = "tensorflow" + parameters.const_boundary_cond = const_boundary_cond + # Ensure that all Grids are on the same coordinate system prev_grid = Grids[0] for g in Grids: - if not np.allclose(g.x["data"], prev_grid.x["data"], atol=10): + if not np.allclose(g["x"].values, prev_grid["x"].values, atol=10): raise ValueError("Grids do not have equal x coordinates!") - if not np.allclose(g.y["data"], prev_grid.y["data"], atol=10): + if not np.allclose(g["y"].values, prev_grid["y"].values, atol=10): raise ValueError("Grids do not have equal y coordinates!") - if not np.allclose(g.z["data"], prev_grid.z["data"], atol=10): + if not np.allclose(g["z"].values, prev_grid["z"].values, atol=10): raise ValueError("Grids do not have equal z coordinates!") - if not g.origin_latitude["data"] == prev_grid.origin_latitude["data"]: + if not np.allclose( + g["origin_latitude"].values, prev_grid["origin_latitude"].values + ): raise ValueError(("Grids have unequal origin lat/lons!")) prev_grid = g @@ -871,19 +871,19 @@ def _get_dd_wind_field_tensorflow( ) u_interp = interp1d(z_back[valid_inds], u_back[valid_inds], bounds_error=False) v_interp = interp1d(z_back[valid_inds], v_back[valid_inds], bounds_error=False) - if isinstance(Grids[0].z["data"], np.ma.MaskedArray): + if isinstance(Grids[0]["z"].values, np.ma.MaskedArray): parameters.u_back = tf.constant( - u_interp(Grids[0].z["data"].filled(np.nan)), dtype=tf.float32 + u_interp(Grids[0]["z"].values.filled(np.nan)), dtype=tf.float32 ) parameters.v_back = tf.constant( - v_interp(Grids[0].z["data"].filled(np.nan)), dtype=tf.float32 + v_interp(Grids[0]["z"].values.filled(np.nan)), dtype=tf.float32 ) else: parameters.u_back = tf.constant( - u_interp(Grids[0].z["data"]), dtype=tf.float32 + u_interp(Grids[0]["z"].values), dtype=tf.float32 ) parameters.v_back = tf.constant( - v_interp(Grids[0].z["data"]), dtype=tf.float32 + v_interp(Grids[0]["z"].values), dtype=tf.float32 ) print("Interpolated U field:") @@ -891,7 +891,7 @@ def _get_dd_wind_field_tensorflow( print("Interpolated V field:") print(parameters.v_back) print("Grid levels:") - print(Grids[0].z["data"]) + print(Grids[0]["z"].values) # Parse names of velocity field if refl_field is None: @@ -931,17 +931,15 @@ def _get_dd_wind_field_tensorflow( for i in range(len(Grids)): parameters.wts.append( np.ma.masked_invalid( - calculate_fall_speed(Grids[i], refl_field=refl_field, frz=frz) + calculate_fall_speed(Grids[i], refl_field=refl_field, frz=frz).squeeze() ) ) - add_azimuth_as_field(Grids[i], dz_name=refl_field) - add_elevation_as_field(Grids[i], dz_name=refl_field) - parameters.vrs.append(np.ma.masked_invalid(Grids[i].fields[vel_name]["data"])) + parameters.vrs.append(np.ma.masked_invalid(Grids[i][vel_name].values.squeeze())) parameters.azs.append( - np.ma.masked_invalid(Grids[i].fields["AZ"]["data"] * np.pi / 180) + np.ma.masked_invalid(Grids[i]["AZ"].values.squeeze() * np.pi / 180) ) parameters.els.append( - 
np.ma.masked_invalid(Grids[i].fields["EL"]["data"] * np.pi / 180) + np.ma.masked_invalid(Grids[i]["EL"].values.squeeze() * np.pi / 180) ) if len(Grids) > 1: @@ -950,15 +948,7 @@ def _get_dd_wind_field_tensorflow( if i == j: continue print(("Calculating weights for radars " + str(i) + " and " + str(j))) - bca[i, j] = get_bca( - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].point_x["data"][0], - Grids[i].point_y["data"][0], - Grids[i].get_projparams(), - ) + bca[i, j] = get_bca(Grids[i], Grids[j]) for k in range(parameters.vrs[i].shape[0]): if weights_obs is None: @@ -1127,14 +1117,14 @@ def _get_dd_wind_field_tensorflow( winds = tf.Variable(winds, name="winds") print("Starting solver ") - parameters.dx = np.diff(Grids[0].x["data"], axis=0)[0] - parameters.dy = np.diff(Grids[0].y["data"], axis=0)[0] - parameters.dz = np.diff(Grids[0].z["data"], axis=0)[0] + parameters.dx = np.diff(Grids[0]["x"].values, axis=0)[0] + parameters.dy = np.diff(Grids[0]["y"].values, axis=0)[0] + parameters.dz = np.diff(Grids[0]["z"].values, axis=0)[0] print("rmsVR = " + str(parameters.rmsVr)) print("Total points: %d" % tf.reduce_sum(parameters.weights)) - parameters.z = tf.constant(Grids[0].point_z["data"], dtype=tf.float32) - parameters.x = tf.constant(Grids[0].point_x["data"], dtype=tf.float32) - parameters.y = tf.constant(Grids[0].point_y["data"], dtype=tf.float32) + parameters.z = tf.constant(Grids[0]["point_z"].values, dtype=tf.float32) + parameters.x = tf.constant(Grids[0]["point_x"].values, dtype=tf.float32) + parameters.y = tf.constant(Grids[0]["point_y"].values, dtype=tf.float32) bt = time.time() # First pass - no filter @@ -1147,32 +1137,28 @@ def _get_dd_wind_field_tensorflow( u_field = "U_" + the_field v_field = "V_" + the_field w_field = "W_" + the_field - model_finite = np.logical_and.reduce( - ( - np.isfinite(Grids[0].fields[u_field]["data"]), - np.isfinite(Grids[0].fields[w_field]["data"]), - np.isfinite(Grids[0].fields[v_field]["data"]), - ) - ) - parameters.model_weights[i] = np.where( - model_finite, parameters.model_weights[i], 0 - ) parameters.u_model.append( - tf.constant( - np.nan_to_num(Grids[0].fields[u_field]["data"], 0), dtype=tf.float32 - ) + tf.constant(np.nan_to_num(Grids[0][u_field].values.squeeze())) ) parameters.v_model.append( - tf.constant( - np.nan_to_num(Grids[0].fields[v_field]["data"], 0), dtype=tf.float32 - ) + tf.constant(np.nan_to_num(Grids[0][v_field].values.squeeze())) ) parameters.w_model.append( - tf.constant( - np.nan_to_num(Grids[0].fields[w_field]["data"], 0), dtype=tf.float32 - ) + tf.constant(np.nan_to_num(Grids[0][w_field].values.squeeze())) + ) + + # Don't weigh in where model data unavailable + where_finite_u = np.isfinite(Grids[0][u_field].values.squeeze()) + where_finite_v = np.isfinite(Grids[0][v_field].values.squeeze()) + where_finite_w = np.isfinite(Grids[0][w_field].values.squeeze()) + parameters.model_weights[i, :, :, :] = np.where( + np.logical_and.reduce((where_finite_u, where_finite_v, where_finite_w)), + 1, + 0, ) + parameters.model_weights = tf.constant(parameters.model_weights, dtype=tf.float32) + parameters.Co = Co parameters.Cm = Cm parameters.Cx = Cx @@ -1265,20 +1251,17 @@ def _get_dd_wind_field_tensorflow( if mask_w_outside_opt is True: w = np.ma.masked_where(where_mask < 1, w) - u_field = deepcopy(Grids[0].fields[vel_name]) - u_field["data"] = u + u_field = {} u_field["standard_name"] = "u_wind" u_field["long_name"] = "meridional component 
of wind velocity" u_field["min_bca"] = min_bca u_field["max_bca"] = max_bca - v_field = deepcopy(Grids[0].fields[vel_name]) - v_field["data"] = v + v_field = {} v_field["standard_name"] = "v_wind" v_field["long_name"] = "zonal component of wind velocity" v_field["min_bca"] = min_bca v_field["max_bca"] = max_bca - w_field = deepcopy(Grids[0].fields[vel_name]) - w_field["data"] = w + w_field = {} w_field["standard_name"] = "w_wind" w_field["long_name"] = "vertical component of wind velocity" w_field["min_bca"] = min_bca @@ -1287,11 +1270,16 @@ def _get_dd_wind_field_tensorflow( new_grid_list = [] for grid in Grids: - temp_grid = deepcopy(grid) - temp_grid.add_field("u", u_field, replace_existing=True) - temp_grid.add_field("v", v_field, replace_existing=True) - temp_grid.add_field("w", w_field, replace_existing=True) - new_grid_list.append(temp_grid) + grid["u"] = xr.DataArray( + np.expand_dims(u, 0), dims=("time", "z", "y", "x"), attrs=u_field + ) + grid["v"] = xr.DataArray( + np.expand_dims(v, 0), dims=("time", "z", "y", "x"), attrs=v_field + ) + grid["w"] = xr.DataArray( + np.expand_dims(w, 0), dims=("time", "z", "y", "x"), attrs=w_field + ) + new_grid_list.append(grid) return new_grid_list, parameters @@ -1317,8 +1305,8 @@ def get_dd_wind_field( Parameters ========== - Grids: list of Py-ART Grids - The list of Py-ART grids to take in corresponding to each radar. + Grids: list of Py-ART/DDA Grids + The list of Py-ART or PyDDA grids to take in corresponding to each radar. All grids must have the same shape, x coordinates, y coordinates and z coordinates. u_init: 3D ndarray @@ -1336,7 +1324,7 @@ def get_dd_wind_field( engine: str (one of "scipy", "tensorflow", "jax") Setting this flag will use the solver based off of SciPy, TensorFlow, or Jax. Using Tensorflow or Jax expands PyDDA's capabiability to take advantage of GPU-based systems. - In addition, these two implementations use automatic differentation to calcualte the gradient + In addition, these two implementations use automatic differentation to calculate the gradient of the cost function in order to optimize the gradient calculation. TensorFlow 2.6 and tensorflow-probability are required for the TensorFlow-basedengine. The latest version of Jax is required for the Jax-based engine. @@ -1442,6 +1430,8 @@ def get_dd_wind_field( Stop iterations after maximum change in winds is less than this value. tolerance: float Tolerance for L2 norm of gradient before stopping. + max_wind_magnitude: float + Constrain the optimization to have :math:`|u|, :math:`|w|`, and :math:`|w| < x` m/s. Returns ======= @@ -1451,53 +1441,57 @@ def get_dd_wind_field( parameters: struct The parameters used in the generation of the Multi-Doppler wind field. """ - if u_init is None: - if isinstance(u_init, np.ma.MaskedArray): - u_init = Grids[0].fields["u"]["data"].filled(0) + + if isinstance(Grids, list): + if isinstance(Grids[0], pyart.core.Grid): + for x in Grids: + new_grids = [read_from_pyart_grid(x) for x in Grids] else: - u_init = Grids[0].fields["u"]["data"] + new_grids = Grids + elif isinstance(Grids, pyart.core.Grid): + new_grids = [read_from_pyart_grid(Grids)] + elif isinstance(Grids, xr.Dataset): + new_grids = [Grids] + else: + raise TypeError( + "Input grids must be an xarray Dataset, Py-ART Grid, or a list of those." 
+ ) + + if u_init is None: + u_init = new_grids[0]["u"].values.squeeze() + if v_init is None: - if isinstance(u_init, np.ma.MaskedArray): - v_init = Grids[0].fields["v"]["data"].filled(0) - else: - v_init = Grids[0].fields["v"]["data"] + v_init = new_grids[0]["v"].values.squeeze() + if w_init is None: - if isinstance(u_init, np.ma.MaskedArray): - w_init = Grids[0].fields["w"]["data"].filled(0) - else: - w_init = Grids[0].fields["w"]["data"] + w_init = new_grids[0]["w"].values.squeeze() + if ( engine.lower() == "scipy" or engine.lower() == "jax" or engine.lower() == "auglag" ): - return _get_dd_wind_field_scipy(Grids, u_init, v_init, w_init, engine, **kwargs) + return _get_dd_wind_field_scipy( + new_grids, u_init, v_init, w_init, engine, **kwargs + ) elif engine.lower() == "tensorflow": - return _get_dd_wind_field_tensorflow(Grids, u_init, v_init, w_init, **kwargs) + return _get_dd_wind_field_tensorflow( + new_grids, u_init, v_init, w_init, **kwargs + ) else: raise NotImplementedError("Engine %s is not supported." % engine) -def get_bca(rad1_lon, rad1_lat, rad2_lon, rad2_lat, x, y, projparams): +def get_bca(Grid1, Grid2): """ This function gets the beam crossing angle between two lat/lon pairs. Parameters ========== - rad1_lon: float - The longitude of the first radar. - rad1_lat: float - The latitude of the first radar. - rad2_lon: float - The longitude of the second radar. - rad2_lat: float - The latitude of the second radar. - x: nD float array - The Cartesian x coordinates of the grid - y: nD float array - The Cartesian y corrdinates of the grid - projparams: Py-ART projparams - The projection parameters of the Grid + Grid1: xarray (PyDDA) Dataset + The PyDDA Dataset storing the first radar's Grid. + Grid2: PyDDA Dataset + The PyDDA Dataset storing the second radar's Grid. Returns ======= @@ -1505,6 +1499,16 @@ def get_bca(rad1_lon, rad1_lat, rad2_lon, rad2_lat, x, y, projparams): The beam crossing angle between the two radars in radians. 
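+
+    A minimal sketch, assuming ``grid_a`` and ``grid_b`` are PyDDA Datasets
+    returned by :func:`pydda.io.read_grid`; the 20-160 degree window applied
+    below is a common dual-Doppler lobe criterion, not part of this function::
+
+        bca = get_bca(grid_a, grid_b)
+        in_lobes = np.logical_and(bca >= np.radians(20.0), bca <= np.radians(160.0))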
""" + rad1_lon = Grid1["radar_longitude"].values + rad1_lat = Grid1["radar_latitude"].values + rad2_lon = Grid2["radar_longitude"].values + rad2_lat = Grid2["radar_latitude"].values + x = Grid1["point_x"].values + y = Grid1["point_y"].values + projparams = Grid1["projection"].attrs + if projparams["_include_lon_0_lat_0"] == "true": + projparams["lat_0"] = Grid1["origin_latitude"].values + projparams["lon_0"] = Grid1["origin_longitude"].values rad1 = pyart.core.geographic_to_cartesian(rad1_lon, rad1_lat, projparams) rad2 = pyart.core.geographic_to_cartesian(rad2_lon, rad2_lat, projparams) @@ -1519,13 +1523,11 @@ def get_bca(rad1_lon, rad1_lat, rad2_lon, rad2_lat, x, y, projparams): inp_array1 = x / a inp_array1 = np.where(inp_array1 < -1, -1, inp_array1) inp_array1 = np.where(inp_array1 > 1, 1, inp_array1) - np.arccos(inp_array1) inp_array2 = (x - rad2[1]) / b inp_array2 = np.where(inp_array2 < -1, -1, inp_array2) inp_array2 = np.where(inp_array2 > 1, 1, inp_array2) - np.arccos(inp_array2) inp_array3 = (a * a + b * b - c * c) / (2 * a * b) inp_array3 = np.where(inp_array3 < -1, -1, inp_array3) inp_array3 = np.where(inp_array3 > 1, 1, inp_array3) - return np.ma.masked_invalid(np.arccos(inp_array3)) + return np.ma.masked_invalid(np.arccos(inp_array3))[0, :, :] diff --git a/pydda/tests/baseline/test_nested_retrieval.png b/pydda/tests/baseline/test_nested_retrieval.png new file mode 100644 index 00000000..28c7aa24 Binary files /dev/null and b/pydda/tests/baseline/test_nested_retrieval.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_barbs.png b/pydda/tests/baseline/test_plot_horiz_xsection_barbs.png index a0581eda..09f58d23 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xsection_barbs.png and b/pydda/tests/baseline/test_plot_horiz_xsection_barbs.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_barbs_map.png b/pydda/tests/baseline/test_plot_horiz_xsection_barbs_map.png new file mode 100644 index 00000000..d79ecd01 Binary files /dev/null and b/pydda/tests/baseline/test_plot_horiz_xsection_barbs_map.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_quiver.png b/pydda/tests/baseline/test_plot_horiz_xsection_quiver.png index 91c2b9d8..1b854f01 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xsection_quiver.png and b/pydda/tests/baseline/test_plot_horiz_xsection_quiver.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_quiver_map.png b/pydda/tests/baseline/test_plot_horiz_xsection_quiver_map.png new file mode 100644 index 00000000..7ac9624f Binary files /dev/null and b/pydda/tests/baseline/test_plot_horiz_xsection_quiver_map.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_streamlines.png b/pydda/tests/baseline/test_plot_horiz_xsection_streamlines.png index 049f3a52..88798ca7 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xsection_streamlines.png and b/pydda/tests/baseline/test_plot_horiz_xsection_streamlines.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xsection_streamlines_map.png b/pydda/tests/baseline/test_plot_horiz_xsection_streamlines_map.png new file mode 100644 index 00000000..583b63ad Binary files /dev/null and b/pydda/tests/baseline/test_plot_horiz_xsection_streamlines_map.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xz_xsection_barbs.png b/pydda/tests/baseline/test_plot_horiz_xz_xsection_barbs.png index 7d225b61..36c5130f 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xz_xsection_barbs.png and 
b/pydda/tests/baseline/test_plot_horiz_xz_xsection_barbs.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xz_xsection_quiver.png b/pydda/tests/baseline/test_plot_horiz_xz_xsection_quiver.png index c9c53d78..97ecf2c1 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xz_xsection_quiver.png and b/pydda/tests/baseline/test_plot_horiz_xz_xsection_quiver.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_xz_xsection_streamlines.png b/pydda/tests/baseline/test_plot_horiz_xz_xsection_streamlines.png index 36827c25..356b17a6 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_xz_xsection_streamlines.png and b/pydda/tests/baseline/test_plot_horiz_xz_xsection_streamlines.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_yz_xsection_barbs.png b/pydda/tests/baseline/test_plot_horiz_yz_xsection_barbs.png index 525b9bb6..3cb6f8a6 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_yz_xsection_barbs.png and b/pydda/tests/baseline/test_plot_horiz_yz_xsection_barbs.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_yz_xsection_quiver.png b/pydda/tests/baseline/test_plot_horiz_yz_xsection_quiver.png index e86a87f5..811f2213 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_yz_xsection_quiver.png and b/pydda/tests/baseline/test_plot_horiz_yz_xsection_quiver.png differ diff --git a/pydda/tests/baseline/test_plot_horiz_yz_xsection_streamlines.png b/pydda/tests/baseline/test_plot_horiz_yz_xsection_streamlines.png index 13346da7..ac81cb4f 100644 Binary files a/pydda/tests/baseline/test_plot_horiz_yz_xsection_streamlines.png and b/pydda/tests/baseline/test_plot_horiz_yz_xsection_streamlines.png differ diff --git a/pydda/tests/procedures.py b/pydda/tests/procedures.py index 8c06833b..1509dd82 100644 --- a/pydda/tests/procedures.py +++ b/pydda/tests/procedures.py @@ -1,5 +1,4 @@ import numpy as np -import pyart def make_test_divergence_field( @@ -36,9 +35,9 @@ def make_test_divergence_field( Initial U, V, W field """ - x = Grid.point_x["data"] - y = Grid.point_y["data"] - z = Grid.point_z["data"] + x = Grid["point_x"].values + y = Grid["point_y"].values + z = Grid["point_z"].values theta = np.arctan2(x - x_center, y - y_center) phi = np.pi * ((z - z_ground) / (z_top - z_ground)) diff --git a/pydda/tests/sample_files.py b/pydda/tests/sample_files.py index f5afd43b..73501af9 100644 --- a/pydda/tests/sample_files.py +++ b/pydda/tests/sample_files.py @@ -14,7 +14,7 @@ LTX_GRID MHX_GRID """ -import os + import pooch @@ -39,7 +39,12 @@ "twpsondewnpnC3.b1.20060119.050300.cdf": "7bbef7f91de8a4c6e5ab083ce60b7b61d5dbbff05ef2434885b89cbd3ac733ef", "twpsondewnpnC3.b1.20060119.112000.cdf": "102b5bf7221a5b80a102803f5a4339ca1614d23e6daf8180c064e134f3aa0830", "twpsondewnpnC3.b1.20060119.163300.cdf": "8343687d880fec71a4ad4f07bc56a6dce433ad38885fd91266868fd069f1eaa5", + "test_coarse0.nc": "692547ded6f8695191706f33b055dbd006341421eac0b1c2a3bf21909508937f", + "test_coarse1.nc": "127153acd0eca35726c62dd0072661424b765214571c98f3c4aa77171602bdae", + "test_fine0.nc": "a700d3a33dfe8b66c3fe7a475e2a706e88dc2f96b135ff1c76a02550eeb1b403", + "test_fine1.nc": "222aa5a1247382d935b2e6a2df94cf58c4a3621dace00a2728e974b955d9c82b", } + fido = pooch.create( path=pooch.os_cache("pydda"), base_url="https://github.com/rcjackson/pydda-sample-data/raw/main/pydda-sample-data/", diff --git a/pydda/tests/test_cost_functions.py b/pydda/tests/test_cost_functions.py index 9ba6ae02..2b96537b 100644 --- a/pydda/tests/test_cost_functions.py +++ b/pydda/tests/test_cost_functions.py @@ -3,6 +3,7 
@@ import pyart import numpy as np import pytest +import xarray as xr try: import tensorflow as tf @@ -21,20 +22,18 @@ def test_calculate_rad_velocity_cost(): - """Test with a zero velocity field radar""" Grid = pyart.testing.make_empty_grid( (20, 20, 20), ((0, 10000), (-10000, 10000), (-10000, 10000)) ) # a zero field fdata3 = np.zeros((20, 20, 20)) - Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + Grid.fields["zero_field"] = {"data": fdata3, "units": "m/s"} + Grid = pydda.io.read_from_pyart_grid(Grid) vel_field = "zero_field" - pydda.retrieval.angles.add_azimuth_as_field(Grid, dz_name="zero_field") - pydda.retrieval.angles.add_elevation_as_field(Grid, dz_name="zero_field") - vrs = [np.ma.array(Grid.fields[vel_field]["data"])] - azs = [Grid.fields["AZ"]["data"]] - els = [Grid.fields["EL"]["data"]] + vrs = [np.ma.array(Grid[vel_field].values)] + azs = [Grid["AZ"].values] + els = [Grid["EL"].values] u = np.zeros((20, 20, 20)) v = np.zeros((20, 20, 20)) w = np.zeros((20, 20, 20)) @@ -60,14 +59,13 @@ def test_calculate_rad_velocity_cost_jax(): ) # a zero field - fdata3 = jnp.zeros((20, 20, 20)) - Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + fdata3 = np.zeros((20, 20, 20)) + Grid.fields["zero_field"] = {"data": fdata3, "units": "m/s"} + Grid = pydda.io.read_from_pyart_grid(Grid) vel_field = "zero_field" - pydda.retrieval.angles.add_azimuth_as_field(Grid, dz_name="zero_field") - pydda.retrieval.angles.add_elevation_as_field(Grid, dz_name="zero_field") - vrs = [jnp.array(Grid.fields[vel_field]["data"])] - azs = [Grid.fields["AZ"]["data"].filled()] - els = [Grid.fields["EL"]["data"].filled()] + vrs = [np.array(Grid[vel_field].values)] + azs = [Grid["AZ"].values] + els = [Grid["EL"].values] u = jnp.zeros((20, 20, 20)) v = jnp.zeros((20, 20, 20)) w = jnp.zeros((20, 20, 20)) @@ -93,14 +91,13 @@ def test_calculate_rad_velocity_cost_tf(): ) # a zero field - fdata3 = tf.zeros((20, 20, 20), dtype=tf.float32) - Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + fdata3 = np.zeros((20, 20, 20)) + Grid.fields["zero_field"] = {"data": fdata3, "units": "m/s"} + Grid = pydda.io.read_from_pyart_grid(Grid) vel_field = "zero_field" - pydda.retrieval.angles.add_azimuth_as_field(Grid, dz_name="zero_field") - pydda.retrieval.angles.add_elevation_as_field(Grid, dz_name="zero_field") - vrs = [tf.constant(Grid.fields[vel_field]["data"], dtype=tf.float32)] - azs = [tf.constant(Grid.fields["AZ"]["data"], dtype=tf.float32)] - els = [tf.constant(Grid.fields["EL"]["data"], dtype=tf.float32)] + vrs = [tf.constant(Grid[vel_field].values.squeeze(), dtype=tf.float32)] + azs = [tf.constant(Grid["AZ"].values.squeeze(), dtype=tf.float32)] + els = [tf.constant(Grid["EL"].values.squeeze(), dtype=tf.float32)] u = tf.zeros((20, 20, 20), dtype=tf.float32) v = tf.zeros((20, 20, 20), dtype=tf.float32) w = tf.zeros((20, 20, 20), dtype=tf.float32) @@ -126,11 +123,14 @@ def test_calculate_fall_speed(): grid = pyart.testing.make_empty_grid(grid_shape, grid_limits) field_dic = {"data": ref_field, "long_name": "reflectivity", "units": "dBZ"} grid.fields = {"reflectivity": field_dic} + grid.radar_latitude = {"data": np.array([0])} + grid.radar_longitude = {"data": np.array([0])} + grid.radar_altitude = {"data": np.array([0])} fall_speed = pydda.cost_functions.calculate_fall_speed( - grid, refl_field="reflectivity" + pydda.io.read_from_pyart_grid(grid), refl_field="reflectivity" ) - assert fall_speed.shape == (10, 100, 100) - assert fall_speed[1, 1, 1] < -3 + assert 
fall_speed.shape == (1, 10, 100, 100) + assert fall_speed[0, 1, 1, 1] < -3 def test_calculate_mass_continuity(): diff --git a/pydda/tests/test_era5.py b/pydda/tests/test_era5.py index 612a6763..2d6e06a7 100644 --- a/pydda/tests/test_era5.py +++ b/pydda/tests/test_era5.py @@ -8,12 +8,12 @@ def test_add_era_5_field(): - Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) + Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0) Grid0 = pydda.constraints.make_constraint_from_era5( Grid0, pydda.tests.sample_files.ERA_PATH, vel_field="corrected_velocity" ) grid_time = datetime.strptime( - Grid0.time["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" + Grid0["time"].attrs["units"], "seconds since %Y-%m-%dT%H:%M:%SZ" ) era_dataset = Dataset(pydda.tests.sample_files.ERA_PATH) @@ -27,14 +27,14 @@ def test_add_era_5_field(): time_step = np.argmin(np.abs(base_time - grid_time)) lat_inds = np.where( np.logical_and( - lat >= Grid0.point_latitude["data"].min(), - lat <= Grid0.point_latitude["data"].max(), + lat >= Grid0["point_latitude"].values.min(), + lat <= Grid0["point_latitude"].values.max(), ) ) lon_inds = np.where( np.logical_and( - lon >= Grid0.point_longitude["data"].min(), - lon <= Grid0.point_longitude["data"].max(), + lon >= Grid0["point_longitude"].values.min(), + lon <= Grid0["point_longitude"].values.max(), ) ) @@ -50,25 +50,25 @@ def test_add_era_5_field(): u_interp = interp1d(z, u, kind="nearest") u_new_gridded = u_interp( - np.asarray(Grid0.point_z["data"] + Grid0.radar_altitude["data"]) + np.asarray(Grid0["point_z"].values + Grid0["radar_altitude"].values) ) u_vertical = np.mean(u_new_gridded, axis=1).mean(axis=1) - u_grid = np.mean(Grid0.fields["U_era5"]["data"], axis=1).mean(axis=1) + u_grid = np.mean(Grid0["U_era5"].values.squeeze(), axis=1).mean(axis=1) np.testing.assert_allclose(u_grid, u_vertical, atol=4) def test_era_initialization(): - Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) + Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0) Grid0 = pydda.constraints.make_constraint_from_era5( Grid0, pydda.tests.sample_files.ERA_PATH, vel_field="corrected_velocity" ) igrid = pydda.initialization.make_initialization_from_era5( Grid0, pydda.tests.sample_files.ERA_PATH, vel_field="corrected_velocity" ) - u = igrid.fields["u"]["data"] - v = igrid.fields["v"]["data"] - w = igrid.fields["w"]["data"] - np.testing.assert_allclose(u, Grid0.fields["U_era5"]["data"], atol=1e-2) - np.testing.assert_allclose(v, Grid0.fields["V_era5"]["data"], atol=1e-2) - np.testing.assert_allclose(w, Grid0.fields["W_era5"]["data"], atol=1e-2) + u = igrid["u"].values + v = igrid["v"].values + w = igrid["w"].values + np.testing.assert_allclose(u, Grid0["U_era5"].values, atol=1e-2) + np.testing.assert_allclose(v, Grid0["V_era5"].values, atol=1e-2) + np.testing.assert_allclose(w, Grid0["W_era5"].values, atol=1e-2) diff --git a/pydda/tests/test_initialization.py b/pydda/tests/test_initialization.py index 785ece0c..2970cbc6 100644 --- a/pydda/tests/test_initialization.py +++ b/pydda/tests/test_initialization.py @@ -20,14 +20,15 @@ def test_make_const_wind_field(): # a zero field fdata3 = np.zeros((20, 20, 20)) Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + Grid = pydda.io.read_from_pyart_grid(Grid) Grid = pydda.initialization.make_constant_wind_field( Grid, wind=(2.0, 3.0, 4.0), vel_field="zero_field" ) - assert np.all(Grid.fields["u"]["data"] == 2.0) - assert np.all(Grid.fields["v"]["data"] == 3.0) - assert np.all(Grid.fields["w"]["data"] == 4.0) + assert np.all(Grid["u"].values == 
2.0) + assert np.all(Grid["v"].values == 3.0) + assert np.all(Grid["w"].values == 4.0) def test_make_wind_field_from_profile(): @@ -38,7 +39,7 @@ def test_make_wind_field_from_profile(): # a zero field fdata3 = np.zeros((20, 20, 20)) Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) - + Grid = pydda.io.read_from_pyart_grid(Grid) height = np.arange(0, 10000, 100) u_sound = np.ones(height.shape) v_sound = np.ones(height.shape) @@ -49,15 +50,34 @@ def test_make_wind_field_from_profile(): Grid, profile, vel_field="zero_field" ) - assert np.all(np.round(Grid.fields["u"]["data"]) == 1) - assert np.all(np.round(Grid.fields["v"]["data"]) == 1) - assert np.all(Grid.fields["w"]["data"] == 0.0) + assert np.all(np.round(Grid["u"].values) == 1) + assert np.all(np.round(Grid["v"].values) == 1) + assert np.all(Grid["w"].values == 0.0) def test_get_iem_data(): Grid = pyart.testing.make_empty_grid( (20, 20, 20), ((0, 100000.0), (-100000.0, 100000.0), (-100000.0, 100000.0)) ) + fdata3 = np.zeros((20, 20, 20)) + Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + Grid = pydda.io.read_from_pyart_grid(Grid) station_obs = pydda.constraints.get_iem_obs(Grid) names = [x["site_id"] for x in station_obs] assert names == ["P28", "WLD", "WDG", "SWO", "END"] + + +def test_hrrr_data(): + Grid = pyart.testing.make_empty_grid( + (20, 20, 20), ((0, 100000.0), (-100000.0, 100000.0), (-100000.0, 100000.0)) + ) + fdata3 = np.zeros((20, 20, 20)) + Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0}) + Grid = pydda.io.read_from_pyart_grid(Grid) + Grid = pydda.constraints.add_hrrr_constraint_to_grid( + Grid, pydda.tests.get_sample_file("ruc2anl_130_20110520_0800_001.grb2") + ) + + assert Grid["U_hrrr"].max() > 15 + assert Grid["V_hrrr"].max() > 15 + assert Grid["W_hrrr"].max() > 0 diff --git a/pydda/tests/test_plots.py b/pydda/tests/test_plots.py index 11c2bc0e..e8a36398 100644 --- a/pydda/tests/test_plots.py +++ b/pydda/tests/test_plots.py @@ -3,23 +3,16 @@ use("agg") import pydda -import pyart import pytest import matplotlib.pyplot as plt - -try: - import cartopy.crs as ccrs - - CARTOPY_AVAILABLE = True -except ImportError: - CARTOPY_AVAILABLE = False +import cartopy.crs as ccrs @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_xsection_barbs(): Grids = [ - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(7, 7)) pydda.vis.plot_horiz_xsection_barbs( @@ -30,6 +23,8 @@ def test_plot_horiz_xsection_barbs(): w_vel_contours=[3, 6, 9], barb_spacing_x_km=5.0, barb_spacing_y_km=15.0, + vmin=0, + vmax=70, ) return fig @@ -37,8 +32,8 @@ def test_plot_horiz_xsection_barbs(): @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_xz_xsection_barbs(): Grids = [ - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(9, 5)) pydda.vis.plot_xz_xsection_barbs( @@ -49,6 +44,8 @@ def test_plot_horiz_xz_xsection_barbs(): w_vel_contours=[3, 6, 9], barb_spacing_x_km=10.0, barb_spacing_z_km=2.0, + vmin=0, + vmax=70, ) return fig @@ -56,8 +53,8 @@ def test_plot_horiz_xz_xsection_barbs(): @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_yz_xsection_barbs(): Grids = [ - 
pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(9, 5)) pydda.vis.plot_yz_xsection_barbs( @@ -68,15 +65,16 @@ def test_plot_horiz_yz_xsection_barbs(): w_vel_contours=[1, 3, 5, 7], barb_spacing_y_km=10.0, barb_spacing_z_km=2.0, + vmin=0, + vmax=70, ) return fig -@pytest.mark.skipif(CARTOPY_AVAILABLE=False) @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_xsection_barbs_map(): - berr_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) - cpol_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1) + berr_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0) + cpol_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1) fig = plt.figure(figsize=(9, 9)) ax = plt.axes(projection=ccrs.PlateCarree()) @@ -87,6 +85,8 @@ def test_plot_horiz_xsection_barbs_map(): bg_grid_no=-1, level=7, w_vel_contours=[3, 5, 8], + vmin=0, + vmax=70, ) return fig @@ -94,26 +94,37 @@ def test_plot_horiz_xsection_barbs_map(): @pytest.mark.mpl_image_compare(tolerance=60) def test_plot_horiz_xsection_streamlines(): Grids = [ - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(7, 7)) pydda.vis.plot_horiz_xsection_streamlines( - Grids, None, "reflectivity", level=6, w_vel_contours=[3, 6, 9] + Grids, + None, + "reflectivity", + level=6, + w_vel_contours=[3, 6, 9], + vmin=0, + vmax=70, ) return fig -@pytest.mark.skipif(CARTOPY_AVAILABLE=False) @pytest.mark.mpl_image_compare(tolerance=60) def test_plot_horiz_xsection_streamlines_map(): - berr_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0) - cpol_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1) + berr_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0) + cpol_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1) fig = plt.figure(figsize=(9, 9)) ax = plt.axes(projection=ccrs.PlateCarree()) pydda.vis.plot_horiz_xsection_streamlines_map( - [cpol_grid, berr_grid], ax=ax, bg_grid_no=-1, level=7, w_vel_contours=[3, 5, 8] + [cpol_grid, berr_grid], + ax=ax, + bg_grid_no=-1, + level=7, + w_vel_contours=[3, 5, 8], + vmin=0, + vmax=70, ) return fig @@ -121,12 +132,18 @@ def test_plot_horiz_xsection_streamlines_map(): @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_xz_xsection_streamlines(): Grids = [ - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(9, 5)) pydda.vis.plot_xz_xsection_streamlines( - Grids, None, "reflectivity", level=40, w_vel_contours=[3, 6, 9] + Grids, + None, + "reflectivity", + level=40, + w_vel_contours=[3, 6, 9], + vmin=0, + vmax=70, ) plt.ylim([0, 15]) return fig @@ -135,12 +152,18 @@ def test_plot_horiz_xz_xsection_streamlines(): @pytest.mark.mpl_image_compare(tolerance=50) def test_plot_horiz_yz_xsection_streamlines(): Grids = [ - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0), - pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0), + pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1), ] fig = plt.figure(figsize=(9, 5)) pydda.vis.plot_yz_xsection_streamlines( - Grids, None, "reflectivity", level=40, w_vel_contours=[1, 3, 5, 7] + Grids, + None, + "reflectivity", 
level=40, w_vel_contours=[1, 3, 5, 7]
+        Grids,
+        None,
+        "reflectivity",
+        level=40,
+        w_vel_contours=[1, 3, 5, 7],
+        vmin=0,
+        vmax=70,
     )
     return fig
@@ -148,8 +171,8 @@
 @pytest.mark.mpl_image_compare(tolerance=50)
 def test_plot_horiz_xsection_quiver():
     Grids = [
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
     ]
     fig = plt.figure(figsize=(7, 7))
     pydda.vis.plot_horiz_xsection_quiver(
@@ -162,15 +185,16 @@
         quiver_spacing_y_km=5.0,
         quiver_width=0.005,
         quiverkey_len=10.0,
+        vmin=0,
+        vmax=70,
     )
     return fig

-@pytest.mark.skipif(CARTOPY_AVAILABLE=False)
 @pytest.mark.mpl_image_compare(tolerance=50)
 def test_plot_horiz_xsection_quiver_map():
-    berr_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
-    cpol_grid = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
+    berr_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
+    cpol_grid = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
     fig = plt.figure(figsize=(9, 9))
     ax = plt.axes(projection=ccrs.PlateCarree())
@@ -182,6 +206,8 @@
         w_vel_contours=[3, 5, 8],
         quiver_width=0.005,
         quiverkey_len=10.0,
+        vmin=0,
+        vmax=70,
     )
     return fig
@@ -189,8 +215,8 @@
 @pytest.mark.mpl_image_compare(tolerance=50)
 def test_plot_horiz_xz_xsection_quiver():
     Grids = [
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
     ]
     fig = plt.figure(figsize=(9, 5))
     pydda.vis.plot_xz_xsection_quiver(
@@ -203,6 +229,8 @@
         quiver_spacing_z_km=1.0,
         quiver_width=0.005,
         quiverkey_len=10.0,
+        vmin=0,
+        vmax=70,
     )
     plt.ylim([0, 15])
     return fig
@@ -211,8 +239,8 @@
 @pytest.mark.mpl_image_compare(tolerance=50)
 def test_plot_horiz_yz_xsection_quiver():
     Grids = [
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
-        pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0),
+        pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1),
     ]
     fig = plt.figure(figsize=(9, 5))
     pydda.vis.plot_yz_xsection_quiver(
@@ -225,5 +253,7 @@
         quiver_spacing_z_km=1.0,
         quiver_width=0.005,
         quiverkey_len=10.0,
+        vmin=0,
+        vmax=70,
     )
     return fig
diff --git a/pydda/tests/test_retrieval.py b/pydda/tests/test_retrieval.py
index 37db456f..15a1ff5d 100644
--- a/pydda/tests/test_retrieval.py
+++ b/pydda/tests/test_retrieval.py
@@ -10,6 +10,10 @@
 import pyart
 import pytest
 import numpy as np
+import xarray as xr
+import matplotlib.pyplot as plt
+
+from datatree import DataTree

 try:
     import tensorflow as tf
@@ -50,6 +54,7 @@ def test_make_updraft_from_convergence_field():
     back_v = 10.0
     x_center = 0.0
     y_center = 0.0
+    Grid = pydda.io.read_from_pyart_grid(Grid)
     u, v, w = pydda.tests.make_test_divergence_field(
         Grid, wind_vel, z_ground, z_top, radius, back_u, back_v, x_center, y_center
     )
@@ -67,7 +72,7 @@
         vel_name="one_field",
         refl_field="one_field",
     )
-    new_w = new_grids[0].fields["w"]["data"]
+    new_w = new_grids[0]["w"].values

     # We should have a pretty strong updraft in the retrieval!
-    assert np.ma.max(new_w > 3)
+    assert np.ma.max(new_w) > 3
@@ -76,12 +81,12 @@
 @pytest.mark.skipif(not JAX_AVAILABLE, reason="Jax not installed")
 def test_twpice_case_jax():
     """Use a test case from TWP-ICE"""
-    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
-    Grid1 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
+    Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
+    Grid1 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
     sounding = pyart.io.read_arm_sonde(pydda.tests.SOUNDING_PATH)
-    Grid1 = pydda.initialization.make_wind_field_from_profile(
-        Grid1, sounding[1], vel_field="corrected_velocity"
+    Grid0 = pydda.initialization.make_wind_field_from_profile(
+        Grid0, sounding[1], vel_field="corrected_velocity"
     )
     Grids, _ = pydda.retrieval.get_dd_wind_field(
@@ -102,24 +107,24 @@
     # In this test grid, we expect the mean flow to be to the southeast
-    # Maximum updrafts should be at least 10 m/s
-    u_mean = np.nanmean(Grids[0].fields["u"]["data"])
-    v_mean = np.nanmean(Grids[0].fields["v"]["data"])
-    w_max = np.max(Grids[0].fields["v"]["data"])
+    # Maximum updrafts should be at least 5 m/s
+    u_mean = np.nanmean(Grids[0]["u"].values)
+    v_mean = np.nanmean(Grids[0]["v"].values)
+    w_max = np.nanmax(Grids[0]["w"].values)
     assert u_mean > 0
     assert v_mean < 0
-    assert w_max > 10
+    assert w_max > 5

 @pytest.mark.skipif(not TF_AVAILABLE, reason="TensorFlow not installed")
 def test_twpice_case_tensorflow():
     """Use a test case from TWP-ICE"""
-    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
-    Grid1 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
+    Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
+    Grid1 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
     sounding = pyart.io.read_arm_sonde(pydda.tests.SOUNDING_PATH)
-    pydda.initialization.make_wind_field_from_profile(
-        Grid1, sounding[1], vel_field="corrected_velocity"
+    Grid0 = pydda.initialization.make_wind_field_from_profile(
+        Grid0, sounding[1], vel_field="corrected_velocity"
     )
     Grids, _ = pydda.retrieval.get_dd_wind_field(
         [Grid0, Grid1],
@@ -139,23 +144,23 @@
     # In this test grid, we expect the mean flow to be to the southeast
-    # Maximum updrafts should be at least 10 m/s
-    u_mean = np.nanmean(Grids[0].fields["u"]["data"])
-    v_mean = np.nanmean(Grids[0].fields["v"]["data"])
-    w_max = np.max(Grids[0].fields["v"]["data"])
+    # Maximum updrafts should be at least 5 m/s
+    u_mean = np.nanmean(Grids[0]["u"].values)
+    v_mean = np.nanmean(Grids[0]["v"].values)
+    w_max = np.nanmax(Grids[0]["w"].values)
     assert u_mean > 0
     assert v_mean < 0
-    assert w_max > 10
+    assert w_max > 5

 def test_twpice_case():
     """Use a test case from TWP-ICE"""
-    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
-    Grid1 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
+    Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
+    Grid1 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR1)
     sounding = pyart.io.read_arm_sonde(pydda.tests.SOUNDING_PATH)
-    pydda.initialization.make_wind_field_from_profile(
-        Grid1, sounding[1], vel_field="corrected_velocity"
+    Grid0 = pydda.initialization.make_wind_field_from_profile(
+        Grid0, sounding[1], vel_field="corrected_velocity"
     )
     Grids, _ = pydda.retrieval.get_dd_wind_field(
         [Grid0, Grid1],
@@ -175,13 +180,13 @@
     # In this test grid, we expect the mean flow to be to the southeast
-    # Maximum updrafts should be at least 10 m/s
-    u_mean = np.nanmean(Grids[0].fields["u"]["data"])
-    v_mean = np.nanmean(Grids[0].fields["v"]["data"])
-    w_max = np.max(Grids[0].fields["v"]["data"])
+    # Maximum updrafts should be at least 5 m/s
+    u_mean = np.nanmean(Grids[0]["u"].values)
+    v_mean = np.nanmean(Grids[0]["v"].values)
+    w_max = np.nanmax(Grids[0]["w"].values)
     assert u_mean > 0
     assert v_mean < 0
-    assert w_max > 10
+    assert w_max > 5

 def test_smoothing():
@@ -196,7 +201,7 @@
     Grid.add_field("zero_field", {"data": fdata3, "_FillValue": -9999.0})
     odata3 = np.ma.ones((20, 40, 40))
     Grid.add_field("one_field", {"data": odata3, "_FillValue": -9999.0})
-
+    Grid = pydda.io.read_from_pyart_grid(Grid)
     u = np.random.random((20, 40, 40))
     v = np.random.random((20, 40, 40))
     w = np.zeros((20, 40, 40))
@@ -214,8 +219,8 @@
         vel_name="one_field",
         refl_field="one_field",
     )
-    new_u = new_grids[0].fields["u"]["data"]
-    new_v = new_grids[0].fields["v"]["data"]
+    new_u = new_grids[0]["u"].values
+    new_v = new_grids[0]["v"].values
     assert new_u.std() < u.std()
     assert new_v.std() < v.std()
@@ -223,26 +228,16 @@
 def test_model_constraint():
     """A retrieval with just the model constraint should converge to the model constraint."""
-    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
+    Grid0 = pydda.io.read_grid(pydda.tests.EXAMPLE_RADAR0)

     """ Make fake model grid of just U = 1 m/s everywhere"""
-    Grid0.fields["U_fakemodel"] = deepcopy(Grid0.fields["corrected_velocity"])
-    Grid0.fields["V_fakemodel"] = deepcopy(Grid0.fields["corrected_velocity"])
-    Grid0.fields["W_fakemodel"] = deepcopy(Grid0.fields["corrected_velocity"])
-
-    Grid0.fields["U_fakemodel"]["data"] = np.ones(
-        Grid0.fields["U_fakemodel"]["data"].shape
-    )
-    Grid0.fields["V_fakemodel"]["data"] = np.zeros(
-        Grid0.fields["V_fakemodel"]["data"].shape
-    )
-    Grid0.fields["W_fakemodel"]["data"] = np.zeros(
-        Grid0.fields["W_fakemodel"]["data"].shape
-    )
+    Grid0["U_fakemodel"] = xr.ones_like(Grid0["corrected_velocity"])
+    Grid0["V_fakemodel"] = xr.zeros_like(Grid0["corrected_velocity"])
+    Grid0["W_fakemodel"] = xr.zeros_like(Grid0["corrected_velocity"])

-    u_init = np.zeros(Grid0.fields["U_fakemodel"]["data"].shape)
-    v_init = np.zeros(Grid0.fields["V_fakemodel"]["data"].shape)
-    w_init = np.zeros(Grid0.fields["W_fakemodel"]["data"].shape)
+    u_init = np.zeros_like(Grid0["U_fakemodel"].values).squeeze()
+    v_init = np.zeros_like(Grid0["U_fakemodel"].values).squeeze()
+    w_init = np.zeros_like(Grid0["U_fakemodel"].values).squeeze()

     new_grids, _ = pydda.retrieval.get_dd_wind_field(
         [Grid0],
@@ -261,11 +256,80 @@
     )

     np.testing.assert_allclose(
-        new_grids[0].fields["u"]["data"], Grid0.fields["U_fakemodel"]["data"], atol=1e-2
+        new_grids[0]["u"].values, Grid0["U_fakemodel"].values, atol=1e-2
     )
     np.testing.assert_allclose(
-        new_grids[0].fields["v"]["data"], Grid0.fields["V_fakemodel"]["data"], atol=1e-2
+        new_grids[0]["v"].values, Grid0["V_fakemodel"].values, atol=1e-2
     )
-    np.testing.assert_allclose(
-        new_grids[0].fields["w"]["data"], Grid0.fields["W_fakemodel"]["data"], atol=1e-2
+
+
+@pytest.mark.mpl_image_compare(tolerance=50)
+def test_nested_retrieval():
+    test_coarse0 = pydda.io.read_grid(pydda.tests.get_sample_file("test_coarse0.nc"))
+    test_coarse1 = pydda.io.read_grid(pydda.tests.get_sample_file("test_coarse1.nc"))
+    test_fine0 = pydda.io.read_grid(pydda.tests.get_sample_file("test_fine0.nc"))
+    test_fine1 = pydda.io.read_grid(pydda.tests.get_sample_file("test_fine1.nc"))
+
+    test_coarse0 = pydda.initialization.make_constant_wind_field(
+        test_coarse0, (0.0, 0.0, 0.0)
+    )
+
+    kwargs_dict = dict(
+        Cm=256.0,
+        Co=1e-2,
+        Cx=150.0,
+        Cy=150.0,
+        Cz=150.0,
+        Cmod=1e-5,
+        model_fields=["hrrr"],
+        refl_field="DBZ",
+        wind_tol=0.5,
+        max_iterations=50,
+
filter_order=3, + engine="scipy", + ) + + tree_dict = { + "/coarse/radar_ktlx": test_coarse0, + "/coarse/radar_kict": test_coarse1, + "/coarse/fine/radar_ktlx": test_fine0, + "/coarse/fine/radar_kict": test_fine1, + } + + tree = DataTree.from_dict(tree_dict) + tree["/coarse/"].attrs = kwargs_dict + tree["/coarse/fine"].attrs = kwargs_dict + + grid_tree = pydda.retrieval.get_dd_wind_field_nested(tree) + fig, ax = plt.subplots(1, 2, figsize=(10, 5)) + pydda.vis.plot_horiz_xsection_quiver( + grid_tree["coarse"], + ax=ax[0], + level=5, + cmap="ChaseSpectral", + vmin=-10, + vmax=80, + quiverkey_len=10.0, + background_field="DBZ", + bg_grid_no=1, + w_vel_contours=[1, 2, 5, 10], + quiver_spacing_x_km=50.0, + quiver_spacing_y_km=50.0, + quiverkey_loc="bottom_right", + ) + pydda.vis.plot_horiz_xsection_quiver( + grid_tree["coarse/fine"], + ax=ax[1], + level=5, + cmap="ChaseSpectral", + vmin=-10, + vmax=80, + quiverkey_len=10.0, + background_field="DBZ", + bg_grid_no=1, + w_vel_contours=[1, 2, 5, 10], + quiver_spacing_x_km=50.0, + quiver_spacing_y_km=50.0, + quiverkey_loc="bottom_right", ) + return fig diff --git a/pydda/vis/barb_plot.py b/pydda/vis/barb_plot.py index fb47bd99..d410719d 100644 --- a/pydda/vis/barb_plot.py +++ b/pydda/vis/barb_plot.py @@ -8,6 +8,7 @@ from .. import retrieval from matplotlib.axes import Axes +from datatree import DataTree try: from cartopy.mpl.geoaxes import GeoAxes @@ -25,8 +26,8 @@ def plot_horiz_xsection_barbs( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -50,8 +51,8 @@ def plot_horiz_xsection_barbs( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. 
@@ -110,11 +111,27 @@ def plot_horiz_xsection_barbs( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[rad_names[0]][u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[rad_names[0]][u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) if vmin is None: @@ -123,14 +140,14 @@ def plot_horiz_xsection_barbs( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dy = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if isinstance(u, np.ma.MaskedArray): u = u.filled(np.nan) @@ -162,9 +179,9 @@ def plot_horiz_xsection_barbs( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -233,22 +250,11 @@ def plot_horiz_xsection_barbs( if colorbar_contour_flag is True: plt.colorbar(cs, ax=ax, label="|V| [m/s]") - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: - bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), - ) + bca = retrieval.get_bca(grid_list[i], grid_list[j]) ax.contour( grid_x[level, :, :], @@ -276,8 +282,8 @@ def plot_horiz_xsection_barbs_map( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -302,8 +308,8 @@ def plot_horiz_xsection_barbs_map( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle (with cartopy ccrs) The axis handle to place the plot on. Set to None to create a new map. 
Note: the axis needs to be in a PlateCarree() projection. @@ -364,6 +370,20 @@ def plot_horiz_xsection_barbs_map( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) if not CARTOPY_AVAILABLE: raise ModuleNotFoundError( @@ -371,9 +391,11 @@ def plot_horiz_xsection_barbs_map( ) if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) if vmin is None: @@ -382,17 +404,22 @@ def plot_horiz_xsection_barbs_map( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 - grid_lat = Grids[0].point_latitude["data"][level] - grid_lon = Grids[0].point_longitude["data"][level] + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 + grid_lat = grid_list[0].point_latitude.values[level] + grid_lon = grid_list[0].point_longitude.values[level] dx = np.diff(grid_x, axis=2)[0, 0, 0] dy = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() transform = ccrs.PlateCarree() if ax is None: @@ -421,9 +448,9 @@ def plot_horiz_xsection_barbs_map( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -558,22 +585,11 @@ def plot_horiz_xsection_barbs_map( RuntimeWarning, ) - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: - bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), - ) + bca = retrieval.get_bca(grid_list[i], grid_list[j]) ax.contour( grid_lon[::, ::], @@ -612,8 +628,8 @@ def plot_xz_xsection_barbs( background_field="reflectivity", level=1, cmap="ChaseSpectral", - 
vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -635,8 +651,8 @@ def plot_xz_xsection_barbs( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. @@ -694,15 +710,27 @@ def plot_xz_xsection_barbs( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids if not CARTOPY_AVAILABLE: raise ModuleNotFoundError( "Cartopy needs to be installed in order to use plotting module!" ) if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) if vmin is None: @@ -711,14 +739,19 @@ def plot_xz_xsection_barbs( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dz = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if ax is None: ax = plt.gca() @@ -741,9 +774,9 @@ def plot_xz_xsection_barbs( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -845,8 +878,8 @@ def plot_yz_xsection_barbs( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -868,8 +901,8 @@ def plot_yz_xsection_barbs( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. 
@@ -926,11 +959,23 @@ def plot_yz_xsection_barbs( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) if vmin is None: @@ -939,14 +984,19 @@ def plot_yz_xsection_barbs( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dz = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if ax is None: ax = plt.gca() @@ -969,9 +1019,9 @@ def plot_yz_xsection_barbs( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) diff --git a/pydda/vis/quiver_plot.py b/pydda/vis/quiver_plot.py index c6730799..17a4ed06 100644 --- a/pydda/vis/quiver_plot.py +++ b/pydda/vis/quiver_plot.py @@ -7,6 +7,7 @@ from .. import retrieval from matplotlib.axes import Axes +from datatree import DataTree try: from cartopy.mpl.geoaxes import GeoAxes @@ -24,8 +25,8 @@ def plot_horiz_xsection_quiver( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -54,8 +55,8 @@ def plot_horiz_xsection_quiver( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. @@ -154,8 +155,23 @@ def plot_horiz_xsection_quiver( ax: Matplotlib axis handle The matplotlib axis handle associated with the plot. 
""" + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() + grid_bg = np.ma.masked_invalid(grid_bg) if not CARTOPY_AVAILABLE: raise ModuleNotFoundError( "Cartopy needs to be installed in order to use plotting module!" @@ -166,14 +182,19 @@ def plot_horiz_xsection_quiver( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dy = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() qloc_x, qloc_y = _parse_quiverkey_string( quiverkey_loc, grid_h[level], grid_x[level], grid_y[level], grid_bg[level] ) @@ -219,9 +240,9 @@ def plot_horiz_xsection_quiver( fontproperties=quiver_font, ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -281,21 +302,13 @@ def plot_horiz_xsection_quiver( if colorbar_contour_flag is True: plt.colorbar(cs, ax=ax, label="|V| [m/s]") - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), + grid_list[i], + grid_list[j], ) ax.contour( @@ -324,8 +337,8 @@ def plot_horiz_xsection_quiver_map( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -355,8 +368,8 @@ def plot_horiz_xsection_quiver_map( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle (with cartopy ccrs) The axis handle to place the plot on. Set to None to create a new map. 
Note: the axis needs to be in a PlateCarree() projection. Support for @@ -456,48 +469,69 @@ def plot_horiz_xsection_quiver_map( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) + if not CARTOPY_AVAILABLE: raise ModuleNotFoundError( "Cartopy needs to be installed in order to use plotting module!" ) if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) - + grid_bg = np.ma.masked_invalid(grid_bg) if vmin is None: vmin = grid_bg.min() if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 - grid_lat = Grids[0].point_latitude["data"][level] - grid_lon = Grids[0].point_longitude["data"][level] + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 + grid_lat = grid_list[0].point_latitude.values[level] + grid_lon = grid_list[0].point_longitude.values[level] qloc_x, qloc_y = _parse_quiverkey_string( quiverkey_loc, grid_h[level], grid_x[level], grid_y[level], grid_bg[level] ) dx = np.diff(grid_x, axis=2)[0, 0, 0] dy = np.diff(grid_y, axis=1)[0, 0, 0] - - if np.ma.isMaskedArray(Grids[0].fields[u_field]["data"]): - u = Grids[0].fields[u_field]["data"].filled(fill_value=np.nan) + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() else: - u = Grids[0].fields[u_field]["data"] + if np.ma.isMaskedArray(grid_list[0][u_field].values.squeeze()): + u = grid_list[0][u_field].values.squeeze().filled(fill_value=np.nan) + else: + u = grid_list[0][u_field].values.squeeze() - if np.ma.isMaskedArray(Grids[0].fields[v_field]["data"]): - v = Grids[0].fields[v_field]["data"].filled(fill_value=np.nan) - else: - v = Grids[0].fields[v_field]["data"] + if np.ma.isMaskedArray(grid_list[0][v_field].values): + v = grid_list[0][v_field].values.squeeze().filled(fill_value=np.nan) + else: + v = grid_list[0][v_field].values.squeeze() - if np.ma.isMaskedArray(Grids[0].fields[u_field]["data"]): - w = Grids[0].fields[w_field]["data"].filled(fill_value=np.nan) - else: - w = Grids[0].fields[w_field]["data"] + if np.ma.isMaskedArray(grid_list[0][u_field].values): + w = grid_list[0][w_field].values.squeeze().filled(fill_value=np.nan) + else: + w = grid_list[0][w_field].values.squeeze() transform = ccrs.PlateCarree() if ax is None: @@ -544,9 +578,9 @@ def plot_horiz_xsection_quiver_map( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + 
cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -684,22 +718,11 @@ def plot_horiz_xsection_quiver_map( RuntimeWarning, ) - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: - bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), - ) + bca = retrieval.get_bca(grid_list[i], grid_list[j]) ax.contour( grid_lon[:, :], @@ -739,8 +762,8 @@ def plot_xz_xsection_quiver( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -767,8 +790,8 @@ def plot_xz_xsection_quiver( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. @@ -863,23 +886,37 @@ def plot_xz_xsection_quiver( ax: matplotlib axis Axis handle to output axis """ - - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] - + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() + grid_bg = np.ma.masked_invalid(grid_bg) if vmin is None: vmin = grid_bg.min() if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dz = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() qloc_x, qloc_y = _parse_quiverkey_string( quiverkey_loc, grid_h[:, level, :], @@ -929,9 +966,9 @@ def plot_xz_xsection_quiver( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -1024,8 +1061,8 @@ def plot_yz_xsection_quiver( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -1052,8 +1089,8 @@ def plot_yz_xsection_quiver( Parameters ---------- - Grids: list - 
List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. @@ -1145,22 +1182,33 @@ def plot_yz_xsection_quiver( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() + grid_bg = np.ma.masked_invalid(grid_bg) if vmin is None: vmin = grid_bg.min() if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 dx = np.diff(grid_x, axis=2)[0, 0, 0] dz = np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() qloc_x, qloc_y = _parse_quiverkey_string( quiverkey_loc, grid_h[:, :, level], @@ -1213,9 +1261,9 @@ def plot_yz_xsection_quiver( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) diff --git a/pydda/vis/streamline_plot.py b/pydda/vis/streamline_plot.py index 1e8d9afc..a189ac57 100644 --- a/pydda/vis/streamline_plot.py +++ b/pydda/vis/streamline_plot.py @@ -2,13 +2,13 @@ import warnings import cartopy.crs as ccrs -import cartopy import matplotlib.pyplot as plt import numpy as np import pyart from .. import retrieval from matplotlib.axes import Axes +from datatree import DataTree try: from cartopy.mpl.geoaxes import GeoAxes @@ -26,8 +26,8 @@ def plot_horiz_xsection_streamlines( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -52,8 +52,8 @@ def plot_horiz_xsection_streamlines( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. 
@@ -112,7 +112,22 @@ def plot_horiz_xsection_streamlines( Axis handle to output axis """ - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[rad_names[0]][u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[rad_names[0]][u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) + + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() if vmin is None: vmin = grid_bg.min() @@ -120,14 +135,14 @@ def plot_horiz_xsection_streamlines( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 np.diff(grid_x, axis=2)[0, 0, 0] np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if isinstance(u, np.ma.MaskedArray): u = u.filled(np.nan) @@ -162,9 +177,9 @@ def plot_horiz_xsection_streamlines( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -224,21 +239,13 @@ def plot_horiz_xsection_streamlines( if colorbar_contour_flag is True: plt.colorbar(cs, ax=ax, label="|V| [m/s]") - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), + grid_list[i], + grid_list[j], ) ax.contour( @@ -267,8 +274,8 @@ def plot_horiz_xsection_streamlines_map( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -295,8 +302,8 @@ def plot_horiz_xsection_streamlines_map( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle (with cartopy ccrs) The axis handle to place the plot on. Set to None to create a new map. Note: the axis needs to be in a PlateCarree() projection. 
@@ -359,15 +366,32 @@ def plot_horiz_xsection_streamlines_map( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + bca_min = math.radians(Grids[rad_names[0]][u_field].attrs["min_bca"]) + bca_max = math.radians(Grids[rad_names[0]][u_field].attrs["max_bca"]) + else: + grid_list = Grids + bca_min = math.radians(grid_list[0][u_field].attrs["min_bca"]) + bca_max = math.radians(grid_list[0][u_field].attrs["max_bca"]) + if not CARTOPY_AVAILABLE: raise ModuleNotFoundError( "Cartopy needs to be installed in order to use plotting module!" ) if bg_grid_no > -1: - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() else: - grid_array = np.ma.stack([x.fields[background_field]["data"] for x in Grids]) + grid_array = np.ma.stack( + [x[background_field].values.squeeze() for x in grid_list] + ) grid_bg = grid_array.max(axis=0) if vmin is None: @@ -376,17 +400,22 @@ def plot_horiz_xsection_streamlines_map( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 - grid_lat = Grids[0].point_latitude["data"][level] - grid_lon = Grids[0].point_longitude["data"][level] + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 + grid_lat = grid_list[0].point_latitude.values[level] + grid_lon = grid_list[0].point_longitude.values[level] np.diff(grid_x, axis=2)[0, 0, 0] np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if isinstance(u, np.ma.MaskedArray): u = u.filled(np.nan) @@ -426,9 +455,9 @@ def plot_horiz_xsection_streamlines_map( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -567,22 +596,11 @@ def plot_horiz_xsection_streamlines_map( RuntimeWarning, ) - bca_min = math.radians(Grids[0].fields[u_field]["min_bca"]) - bca_max = math.radians(Grids[0].fields[u_field]["max_bca"]) - if show_lobes is True: - for i in range(len(Grids)): - for j in range(len(Grids)): + for i in range(len(grid_list)): + for j in range(len(grid_list)): if i != j: - bca = retrieval.get_bca( - Grids[j].radar_longitude["data"], - Grids[j].radar_latitude["data"], - Grids[i].radar_longitude["data"], - Grids[i].radar_latitude["data"], - Grids[j].point_x["data"][0], - Grids[j].point_y["data"][0], - Grids[j].get_projparams(), - ) + bca = retrieval.get_bca(grid_list[i], grid_list[j]) ax.contour( grid_lon[:, :], @@ -622,8 +640,8 @@ def plot_xz_xsection_streamlines( background_field="reflectivity", level=1, 
cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -647,8 +665,8 @@ def plot_xz_xsection_streamlines( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. @@ -707,8 +725,18 @@ def plot_xz_xsection_streamlines( ax: matplotlib axis Axis handle to output axis """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() if vmin is None: vmin = grid_bg.min() @@ -716,14 +744,17 @@ def plot_xz_xsection_streamlines( if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 - np.diff(grid_x, axis=2)[0, 0, 0] - np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if isinstance(u, np.ma.MaskedArray): u = u.filled(np.nan) @@ -745,7 +776,7 @@ def plot_xz_xsection_streamlines( vmin=vmin, vmax=vmax, ) - np.ma.sqrt(u**2 + w**2) + ax.streamplot( grid_x[:, level, :], grid_h[:, level, :], @@ -757,9 +788,9 @@ def plot_xz_xsection_streamlines( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) @@ -852,8 +883,8 @@ def plot_yz_xsection_streamlines( background_field="reflectivity", level=1, cmap="ChaseSpectral", - vmin=None, - vmax=None, + vmin=0, + vmax=70, u_vel_contours=None, v_vel_contours=None, w_vel_contours=None, @@ -877,8 +908,8 @@ def plot_yz_xsection_streamlines( Parameters ---------- - Grids: list - List of Py-ART Grids to visualize + Grids: list or DataTree + List of Py-DDA Grids to visualize ax: matplotlib axis handle The axis handle to place the plot on. Set to None to plot on the current axis. 
@@ -937,22 +968,37 @@ def plot_yz_xsection_streamlines( ax: Matplotlib axis handle The matplotlib axis handle corresponding to the plot """ + if isinstance(Grids, DataTree): + child_list = list(Grids.children.keys()) + grid_list = [] + rad_names = [] + for child in child_list: + if "radar" in child: + grid_list.append(Grids[child].to_dataset()) + rad_names.append(child) + else: + grid_list = Grids - grid_bg = Grids[bg_grid_no].fields[background_field]["data"] + grid_bg = grid_list[bg_grid_no][background_field].values.squeeze() if vmin is None: vmin = grid_bg.min() if vmax is None: vmax = grid_bg.max() - grid_h = Grids[0].point_altitude["data"] / 1e3 - grid_x = Grids[0].point_x["data"] / 1e3 - grid_y = Grids[0].point_y["data"] / 1e3 + grid_h = grid_list[0]["point_altitude"].values / 1e3 + grid_x = grid_list[0]["point_x"].values / 1e3 + grid_y = grid_list[0]["point_y"].values / 1e3 np.diff(grid_x, axis=2)[0, 0, 0] np.diff(grid_y, axis=1)[0, 0, 0] - u = Grids[0].fields[u_field]["data"] - v = Grids[0].fields[v_field]["data"] - w = Grids[0].fields[w_field]["data"] + if isinstance(Grids, DataTree): + u = Grids[u_field].values.squeeze() + v = Grids[v_field].values.squeeze() + w = Grids[w_field].values.squeeze() + else: + u = grid_list[0][u_field].values.squeeze() + v = grid_list[0][v_field].values.squeeze() + w = grid_list[0][w_field].values.squeeze() if isinstance(u, np.ma.MaskedArray): u = u.filled(np.nan) @@ -974,7 +1020,7 @@ def plot_yz_xsection_streamlines( vmin=vmin, vmax=vmax, ) - np.ma.sqrt(v**2 + w**2) + ax.streamplot( grid_y[:, :, level], grid_h[:, :, level], @@ -987,9 +1033,9 @@ def plot_yz_xsection_streamlines( ) if colorbar_flag is True: - cp = Grids[bg_grid_no].fields[background_field]["long_name"] + cp = grid_list[bg_grid_no][background_field].attrs["long_name"] cp.replace(" ", "_") - cp = cp + " [" + Grids[bg_grid_no].fields[background_field]["units"] + cp = cp + " [" + grid_list[bg_grid_no][background_field].attrs["units"] cp = cp + "]" plt.colorbar(the_mesh, ax=ax, label=(cp)) diff --git a/setup.py b/setup.py index 43f8b54a..fb25dc22 100644 --- a/setup.py +++ b/setup.py @@ -41,9 +41,9 @@ LONG_DESCRIPTION = "\n".join(DOCLINES[2:]) LICENSE = "BSD" PLATFORMS = "Linux, Windows, OSX" -MAJOR = 1 -MINOR = 5 -MICRO = 1 +MAJOR = 2 +MINOR = 0 +MICRO = 0 # SCRIPTS = glob.glob('scripts/*') # TEST_SUITE = 'nose.collector'
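For reference, here is a minimal sketch of the xarray/DataTree workflow that the updated tests above exercise. It is illustrative only, not part of the diff: it assumes PyDDA 2.0 with the new datatree dependency installed, and it reuses the sample files (test_coarse0.nc, test_coarse1.nc), node names ("/coarse/radar_ktlx", "/coarse/radar_kict"), and keyword arguments from test_nested_retrieval; any other constants are arbitrary.

import pydda
from datatree import DataTree

# Grids are now xarray Datasets: pydda.io.read_grid replaces pyart.io.read_grid,
# and pydda.io.read_from_pyart_grid converts an existing Py-ART Grid.
coarse0 = pydda.io.read_grid(pydda.tests.get_sample_file("test_coarse0.nc"))
coarse1 = pydda.io.read_grid(pydda.tests.get_sample_file("test_coarse1.nc"))

# Seed the retrieval with a zero initial wind field on one of the grids.
coarse0 = pydda.initialization.make_constant_wind_field(coarse0, (0.0, 0.0, 0.0))

# One DataTree node per retrieval level, one child Dataset per radar;
# the retrieval keyword arguments travel as the level node's attrs.
tree = DataTree.from_dict(
    {
        "/coarse/radar_ktlx": coarse0,
        "/coarse/radar_kict": coarse1,
    }
)
tree["/coarse/"].attrs = dict(
    Co=1e-2,
    Cm=256.0,
    refl_field="DBZ",
    wind_tol=0.5,
    max_iterations=50,
    engine="scipy",
)

# Retrieve winds over the whole tree. As in the DataTree branch of the
# plotting code above, retrieved fields are read with ["u"].values rather
# than the 1.x Grid.fields["u"]["data"] access pattern.
grid_tree = pydda.retrieval.get_dd_wind_field_nested(tree)
print(grid_tree["coarse"]["u"].values.shape)

Because the plotting functions now accept either a list of Datasets or a DataTree node, grid_tree["coarse"] can be passed straight to, e.g., pydda.vis.plot_horiz_xsection_quiver, exactly as test_nested_retrieval does.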