diff --git a/config/processing/mals_emm_intercomp.txt b/config/processing/mals_emm_intercomp.txt index b1c3e20..5ae07a9 100755 --- a/config/processing/mals_emm_intercomp.txt +++ b/config/processing/mals_emm_intercomp.txt @@ -34,7 +34,7 @@ loadbasepath STRARR 2 loadname STRARR 2 mals_emm_dataquality - rad4alp_PLA + rad4alp_PLA_avg # Base path for colocated gates data colocgatespath STRING /data/pyrad_products/mals_emm_dataquality/colocated_gates/ diff --git a/config/processing/paradiso_fvj_vol.txt b/config/processing/paradiso_fvj_vol.txt index 7c8c36f..c0ad413 100755 --- a/config/processing/paradiso_fvj_vol.txt +++ b/config/processing/paradiso_fvj_vol.txt @@ -9,23 +9,17 @@ name STRING paradiso_vol_fvj # Base path of the raw data. # Note: Must have a trailing '/' # ('///.') -datapath STRARR 1 - /data/DX50/rawdata/ +datapath STRING /data/DX50/rawdata/ # Base path for cosmo data -cosmopath STRARR 1 - /data/cosmo/ +cosmopath STRING /data/cosmo/ # Base path for dem data -dempath STRARR 1 - /data/DEM/DX50_PAY/ +dempath STRING /data/DEM/DX50_PARADISO/ # Loading of saved datasets -loadbasepath STRARR 1 - /data/pyrad_examples/ - -loadname STRARR 1 - paradiso_vol_fvj +loadbasepath STRING /data/pyrad_examples/ +loadname STRING paradiso_vol_fvj # ------------------------------------------------------------ # Auxiliary data pathes @@ -42,8 +36,7 @@ disdropath STRING /data/scattering/dsd_ts/ saveimgbasepath STRING /data/pyrad_examples/ saveimg INT 1 # Supported formats: eps, jpeg, jpg, pdf, pgf, png, ps, raw, rgba, svg, svgz, tif, tiff -imgformat STRARR 1 - png +imgformat STRING png # -------------------------------------------------------- # configuration files pathes diff --git a/config/processing/paradiso_fvj_vol_intercomp.txt b/config/processing/paradiso_fvj_vol_intercomp.txt index b105f91..a526954 100755 --- a/config/processing/paradiso_fvj_vol_intercomp.txt +++ b/config/processing/paradiso_fvj_vol_intercomp.txt @@ -1,6 +1,6 @@ # Name of this configuration: # Will be used in product generation save path -name STRING paradiso_vol_fvj +name STRING paradiso_vol_intercomp # --------------------------------------------------------- # Raw data pathes diff --git a/config/processing/paradiso_fvj_vol_intercomp_prod.txt b/config/processing/paradiso_fvj_vol_intercomp_prod.txt index c542878..af55612 100755 --- a/config/processing/paradiso_fvj_vol_intercomp_prod.txt +++ b/config/processing/paradiso_fvj_vol_intercomp_prod.txt @@ -4,30 +4,33 @@ # List of datasets to generate. # The detailed specification of each dataset is given below. 
-dataSetList STRARR 1 - DX50_PLD_dBZ_avg_intercomp +dataSetList STRARR 2 + dBZ_DX50 + dBZ_PLD # ===================================== # Raw data # ===================================== -#VIS STRUCT 3 -# type STRING RAW -# datatype STRING RADAR001:DEM:VIS -# products STRUCT 3 -# EL004_0 STRUCT 3 -# type STRING PPI_IMAGE -# anglenr INT 0 -# voltype STRING VIS -# EL006_0 STRUCT 3 -# type STRING PPI_IMAGE -# anglenr INT 1 -# voltype STRING VIS -# EL025_0 STRUCT 3 -# type STRING PPI_IMAGE -# anglenr INT 2 -# voltype STRING VIS -# +dBZ_DX50 STRUCT 3 + type STRING RAW + datatype STRARR 1 + RADAR001:RAINBOW:dBZ + products STRUCT 1 + EL004_0 STRUCT 3 + type STRING PPI_IMAGE + anglenr INT 0 + voltype STRING dBZ + +dBZ_PLD STRUCT 3 + type STRING RAW + datatype STRARR 1 + RADAR002:RAD4ALP:dBZ + products STRUCT 1 + EL001_0 STRUCT 3 + type STRING PPI_IMAGE + anglenr INT 2 + voltype STRING dBZ # ========================================================================================== # colocated gates @@ -109,37 +112,37 @@ dataSetList STRARR 1 # DX50_PLD_DATA STRUCT 2 # type STRING WRITE_INTERCOMP # voltype STRING dBZc - -DX50_PLD_dBZ_avg_intercomp STRUCT 12 - type STRING INTERCOMP_TIME_AVG - datatype STRARR 6 - RADAR001:CFRADIAL:dBZc,dBZ_avg,SAVEVOL - RADAR001:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL - RADAR001:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL - RADAR002:CFRADIAL:dBZc,dBZ_avg,SAVEVOL - RADAR002:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL - RADAR002:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL - coloc_data_dir STRING DX50_PLD_DATA # must be the same as product WRITE_INTERCOMP - coloc_radars_name STRING DX50_PLD - ele_tol FLOAT 0.5 - azi_tol FLOAT 0.5 - rng_tol FLOAT 100. - clt_max INT 0 - phi_excess_max INT 100 - non_rain_max INT 100 - phi_avg_max FLOAT 600. - products STRUCT 3 - DX50_PLD_PLOT STRUCT 3 - type STRING PLOT_SCATTER_INTERCOMP - voltype STRING dBZc - step FLOAT 0.5 - DX50_PLD_DATA STRUCT 2 - type STRING WRITE_INTERCOMP_TIME_AVG - voltype STRING dBZc - DX50_PLD_INTERCOMP_TS STRUCT 3 - type STRING PLOT_AND_WRITE_INTERCOMP_TS - step FLOAT 0.5 - voltype STRING dBZc +# +#DX50_PLD_dBZ_avg_intercomp STRUCT 12 +# type STRING INTERCOMP_TIME_AVG +# datatype STRARR 6 +# RADAR001:CFRADIAL:dBZc,dBZ_avg,SAVEVOL +# RADAR001:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL +# RADAR001:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL +# RADAR002:CFRADIAL:dBZc,dBZ_avg,SAVEVOL +# RADAR002:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL +# RADAR002:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL +# coloc_data_dir STRING DX50_PLD_DATA # must be the same as product WRITE_INTERCOMP +# coloc_radars_name STRING DX50_PLD +# ele_tol FLOAT 0.5 +# azi_tol FLOAT 0.5 +# rng_tol FLOAT 100. +# clt_max INT 0 +# phi_excess_max INT 100 +# non_rain_max INT 100 +# phi_avg_max FLOAT 600. +# products STRUCT 3 +# DX50_PLD_PLOT STRUCT 3 +# type STRING PLOT_SCATTER_INTERCOMP +# voltype STRING dBZc +# step FLOAT 0.5 +# DX50_PLD_DATA STRUCT 2 +# type STRING WRITE_INTERCOMP_TIME_AVG +# voltype STRING dBZc +# DX50_PLD_INTERCOMP_TS STRUCT 3 +# type STRING PLOT_AND_WRITE_INTERCOMP_TS +# step FLOAT 0.5 +# voltype STRING dBZc diff --git a/config/processing/paradiso_fvj_vol_loc.txt b/config/processing/paradiso_fvj_vol_loc.txt index 58fa624..143cd90 100755 --- a/config/processing/paradiso_fvj_vol_loc.txt +++ b/config/processing/paradiso_fvj_vol_loc.txt @@ -8,8 +8,7 @@ NumRadars INT 1 TimeTol FLOAT 3600. 
# Radar Name -RadarName STRARR 1 - DX50 +RadarName STRING DX50 # Names of the radar scans in the specified sector # NOTE: Must have a trailing '/' diff --git a/config/processing/paradiso_fvj_vol_prod.txt b/config/processing/paradiso_fvj_vol_prod.txt index 9b3f653..f101204 100755 --- a/config/processing/paradiso_fvj_vol_prod.txt +++ b/config/processing/paradiso_fvj_vol_prod.txt @@ -4,130 +4,178 @@ # List of datasets to generate. # The detailed specification of each dataset is given below. -dataSetList STRARR 7 - l0:echoID - l1:echoFilter - l2:Att_ZPhi - l3:hydroclass - l4:dBZ_avg - l4:PhiDP_avg - l4:flag_avg +dataSetList STRARR 1 + Nh -# ========================================================================================== -# echo identification -# ========================================================================================== -echoID STRUCT 3 - type STRING SAN - datatype STRARR 4 - dBZ - ZDR - uPhiDP - RhoHV - MAKE_GLOBAL INT 1 - - -# ========================================================================================== -# clutter and noise suppression -# ========================================================================================== -# echo type 3 : precip, 2 : clutter, 1 : noise -echoFilter STRUCT 4 - type STRING ECHO_FILTER - datatype STRARR 6 - PROC:echoID - dBZ - ZDR - RhoHV - KDP - PhiDP - echo_type INT 3 - MAKE_GLOBAL INT 1 - +# l0:echoID +# l1:echoFilter +# l2:Att_ZPhi +# l3:hydroclass +# l4:dBZ_avg +# l4:PhiDP_avg +# l4:flag_avg # ========================================================================================== -# Attenuation -# ========================================================================================== -Att_ZPhi STRUCT 4 - type STRING ATTENUATION - datatype STRARR 4 - PROC:dBZc - PROC:ZDRc - PROC:PhiDPc - COSMO:TEMP - MAKE_GLOBAL INT 1 - ATT_METHOD STRING ZPhi - - -# ========================================================================================== -# hydrometeor classification products -# ========================================================================================== -hydroclass STRUCT 6 - type STRING HYDROCLASS - datatype STRARR 5 - PROC:dBZc - PROC:ZDRc - PROC:RhoHVc - PROC:KDPc - COSMO:TEMP - HYDRO_METHOD STRING SEMISUPERVISED - RADARCENTROIDS STRING DX50 - MAKE_GLOBAL INT 1 - products STRUCT 1 - EL004_0 STRUCT 3 - type STRING PPI_IMAGE - voltype STRING hydro - anglenr INT 0 - - +# raw data # ========================================================================================== -# temporal average -# ========================================================================================== -dBZ_avg STRUCT 6 - type STRING TIME_AVG - datatype STRARR 1 - PROC:dBZc - start_average FLOAT 0. - period FLOAT 3600. 
- lin_trans INT 1 - products STRUCT 2 +#dBZ STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# dBZ +# products STRUCT 2 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING dBZ +# SAVEVOL_dBZ STRUCT 2 +# type STRING SAVEVOL +# voltype STRING dBZ +# +#TEMP STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# COSMO:TEMP +# products STRUCT 1 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING TEMP +# +#VIS STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# DEM:VIS +# products STRUCT 1 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING VIS +# +Nh STRUCT 3 + type STRING RAW + datatype STRARR 1 + Nh + products STRUCT 1 EL004_0 STRUCT 3 - type STRING PPI_IMAGE + type STRING PPI_IMAGE anglenr INT 0 - voltype STRING dBZc - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING dBZc + voltype STRING Nh -PhiDP_avg STRUCT 5 - type STRING WEIGHTED_TIME_AVG - datatype STRARR 2 - PROC:dBZc - PROC:PhiDPc - start_average FLOAT 0. - period FLOAT 3600. - products STRUCT 2 - EL004_0 STRUCT 3 - type STRING PPI_IMAGE - anglenr INT 0 - voltype STRING PhiDPc - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING PhiDPc - -flag_avg STRUCT 6 - type STRING FLAG_TIME_AVG - datatype STRARR 3 - PROC:PhiDPc - PROC:echoID - PROC:hydro - start_average FLOAT 0. - period FLOAT 3600. - phidpmax FLOAT 60. - products STRUCT 2 - EL004_0 STRUCT 3 - type STRING PPI_IMAGE - anglenr INT 0 - voltype STRING time_avg_flag - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING time_avg_flag +## ========================================================================================== +## echo identification +## ========================================================================================== +#echoID STRUCT 3 +# type STRING SAN +# datatype STRARR 4 +# dBZ +# ZDR +# uPhiDP +# RhoHV +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## clutter and noise suppression +## ========================================================================================== +## echo type 3 : precip, 2 : clutter, 1 : noise +#echoFilter STRUCT 4 +# type STRING ECHO_FILTER +# datatype STRARR 6 +# PROC:echoID +# dBZ +# ZDR +# RhoHV +# KDP +# PhiDP +# echo_type INT 3 +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## Attenuation +## ========================================================================================== +#Att_ZPhi STRUCT 4 +# type STRING ATTENUATION +# datatype STRARR 4 +# PROC:dBZc +# PROC:ZDRc +# PROC:PhiDPc +# COSMO:TEMP +# MAKE_GLOBAL INT 1 +# ATT_METHOD STRING ZPhi +# +# +## ========================================================================================== +## hydrometeor classification products +## ========================================================================================== +#hydroclass STRUCT 6 +# type STRING HYDROCLASS +# datatype STRARR 5 +# PROC:dBZc +# PROC:ZDRc +# PROC:RhoHVc +# PROC:KDPc +# COSMO:TEMP +# HYDRO_METHOD STRING SEMISUPERVISED +# RADARCENTROIDS STRING DX50 +# MAKE_GLOBAL INT 1 +# products STRUCT 1 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# voltype STRING hydro +# anglenr INT 0 +# +# +## ========================================================================================== +## temporal average +## ========================================================================================== +#dBZ_avg STRUCT 6 +# type STRING TIME_AVG +# datatype 
STRARR 1 +# PROC:dBZc +# start_average FLOAT 0. +# period FLOAT 3600. +# lin_trans INT 1 +# products STRUCT 2 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING dBZc +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING dBZc +# +#PhiDP_avg STRUCT 5 +# type STRING WEIGHTED_TIME_AVG +# datatype STRARR 2 +# PROC:dBZc +# PROC:PhiDPc +# start_average FLOAT 0. +# period FLOAT 3600. +# products STRUCT 2 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING PhiDPc +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING PhiDPc +# +#flag_avg STRUCT 6 +# type STRING FLAG_TIME_AVG +# datatype STRARR 3 +# PROC:PhiDPc +# PROC:echoID +# PROC:hydro +# start_average FLOAT 0. +# period FLOAT 3600. +# phidpmax FLOAT 60. +# products STRUCT 2 +# EL004_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 0 +# voltype STRING time_avg_flag +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING time_avg_flag \ No newline at end of file diff --git a/config/processing/rad4alp_PLD.txt b/config/processing/rad4alp_PLD.txt index 8201f12..220ceb4 100755 --- a/config/processing/rad4alp_PLD.txt +++ b/config/processing/rad4alp_PLD.txt @@ -10,6 +10,7 @@ name STRING rad4alp_PLD # Note: Must have a trailing '/' # ('///.') datapath STRING /data/rad4alp/rawdata/ +path_convention STRING MCH # Base path for cosmo data cosmopath STRING /data/cosmo/ @@ -17,6 +18,10 @@ cosmopath STRING /data/cosmo/ # Base path for dem data dempath STRING /data/rad4alp/visibRad4Alp/ +# Loading of saved datasets +loadbasepath STRING /data/pyrad_examples/ +loadname STRING rad4alp_PLD + # ------------------------------------------------------------ # Auxiliary data pathes diff --git a/config/processing/rad4alp_PLD_prod.txt b/config/processing/rad4alp_PLD_prod.txt index eb743ea..ec17eb0 100755 --- a/config/processing/rad4alp_PLD_prod.txt +++ b/config/processing/rad4alp_PLD_prod.txt @@ -4,203 +4,251 @@ # List of datasets to generate. # The detailed specification of each dataset is given below. 
-dataSetList STRARR 12 - l0:SNRh - l1:RhoHV - l2:echoID - l3:echoFilter - l4:SNRFilter - l5:PhiDPc_smooth2w - l6:KDPc - l7:Att_ZPhi - l8:hydroclass - l9:dBZ_avg - l9:PhiDP_avg - l9:flag_avg - - -# ========================================================================================== -# secondary moments products -# ========================================================================================== -SNRh STRUCT 4 - type STRING SNR - datatype STRARR 2 - RAD4ALP:dBZ - RAD4ALP:Nh - output_type STRING SNRh - MAKE_GLOBAL INT 1 - +dataSetList STRARR 1 + Nh -RhoHV STRUCT 3 - type STRING RHOHV_CORRECTION - datatype STRARR 5 - RAD4ALP:uRhoHV - RAD4ALP:ZDR - RAD4ALP:Nh - RAD4ALP:Nv - PROC:SNRh - MAKE_GLOBAL INT 1 - - -# ========================================================================================== -# echo identification -# ========================================================================================== -echoID STRUCT 3 - type STRING SAN - datatype STRARR 4 - RAD4ALP:dBZ - RAD4ALP:ZDR - RAD4ALP:uPhiDP - PROC:RhoHV - MAKE_GLOBAL INT 1 +# l0:SNRh +# l1:RhoHV +# l2:echoID +# l3:echoFilter +# l4:SNRFilter +# l5:PhiDPc_smooth2w +# l6:KDPc +# l7:Att_ZPhi +# l8:hydroclass +# l9:dBZ_avg +# l9:PhiDP_avg +# l9:flag_avg # ========================================================================================== -# clutter and noise suppression -# ========================================================================================== -# echo type 3 : precip, 2 : clutter, 1 : noise -echoFilter STRUCT 4 - type STRING ECHO_FILTER - datatype STRARR 5 - PROC:echoID - RAD4ALP:dBZ - RAD4ALP:ZDR - PROC:RhoHV - RAD4ALP:uPhiDP - echo_type INT 3 - MAKE_GLOBAL INT 1 - - +# raw data # ========================================================================================== -# filtration based on SNR -# ========================================================================================== -SNRFilter STRUCT 4 - type STRING SNR_FILTER - datatype STRARR 2 - PROC:SNRh - PROC:PhiDPc - SNRmin FLOAT 10. - MAKE_GLOBAL INT 1 - - -# ========================================================================================== -# PHIDP processing -# ========================================================================================== -PhiDPc_smooth2w STRUCT 11 - type STRING PHIDP_SMOOTH_2W - datatype STRARR 2 - PROC:PhiDPc - PROC:dBZc - rmin FLOAT 1000. - rmax FLOAT 50000. - rcell FLOAT 1000. - Zmin FLOAT 20. - Zmax FLOAT 40. - rwinds FLOAT 1000. - rwindl FLOAT 3000. - Zthr FLOAT 40. - MAKE_GLOBAL INT 1 - - -# ========================================================================================== -# KDP processing -# ========================================================================================== -KDPc STRUCT 6 - type STRING KDP_LEASTSQUARE_2W - datatype STRARR 2 - PROC:PhiDPc - PROC:dBZc - rwinds FLOAT 1000. - rwindl FLOAT 3000. - Zthr FLOAT 40. 
- MAKE_GLOBAL INT 1 - - -# ========================================================================================== -# Attenuation -# ========================================================================================== -Att_ZPhi STRUCT 4 - type STRING ATTENUATION - datatype STRARR 4 - PROC:dBZc - PROC:ZDRc - PROC:PhiDPc - RAD4ALPCOSMO:TEMP - MAKE_GLOBAL INT 1 - ATT_METHOD STRING ZPhi - - -# ========================================================================================== -# hydrometeor classification products -# ========================================================================================== -hydroclass STRUCT 6 - type STRING HYDROCLASS - datatype STRARR 5 - PROC:dBZc - PROC:ZDRc - PROC:RhoHVc - PROC:KDPc - RAD4ALPCOSMO:TEMP - HYDRO_METHOD STRING SEMISUPERVISED - RADARCENTROIDS STRING A - MAKE_GLOBAL INT 1 - products STRUCT 1 - EL001_0 STRUCT 3 - type STRING PPI_IMAGE - voltype STRING hydro - anglenr INT 2 - - -# ========================================================================================== -# temporal average -# ========================================================================================== -dBZ_avg STRUCT 6 - type STRING TIME_AVG - datatype STRARR 1 - PROC:dBZc - start_average FLOAT 0. - period FLOAT 3600. - lin_trans INT 1 - products STRUCT 2 +#dBZ STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# CFRADIAL:dBZ,dBZ,SAVEVOL_dBZ +# products STRUCT 1 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING dBZ +## SAVEVOL_dBZ STRUCT 2 +## type STRING SAVEVOL +## voltype STRING dBZ +# +#TEMP STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# RAD4ALPCOSMO:TEMP +# products STRUCT 1 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING TEMP +# +#VIS STRUCT 3 +# type STRING RAW +# datatype STRARR 1 +# RAD4ALPDEM:VIS +# products STRUCT 1 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING VIS +# +Nh STRUCT 3 + type STRING RAW + datatype STRARR 1 + RAD4ALP:Nh + products STRUCT 1 EL001_0 STRUCT 3 - type STRING PPI_IMAGE + type STRING PPI_IMAGE anglenr INT 2 - voltype STRING dBZc - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING dBZc - -PhiDP_avg STRUCT 5 - type STRING WEIGHTED_TIME_AVG - datatype STRARR 2 - PROC:dBZc - PROC:PhiDPc - start_average FLOAT 0. - period FLOAT 3600. - products STRUCT 2 - EL001_0 STRUCT 3 - type STRING PPI_IMAGE - anglenr INT 2 - voltype STRING PhiDPc - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING PhiDPc + voltype STRING Nh -flag_avg STRUCT 6 - type STRING FLAG_TIME_AVG - datatype STRARR 3 - PROC:PhiDPc - PROC:echoID - PROC:hydro - start_average FLOAT 0. - period FLOAT 3600. - phidpmax FLOAT 60. 
- products STRUCT 2 - EL001_0 STRUCT 3 - type STRING PPI_IMAGE - anglenr INT 2 - voltype STRING time_avg_flag - SAVEVOL STRUCT 2 - type STRING SAVEVOL - voltype STRING time_avg_flag +## ========================================================================================== +## secondary moments products +## ========================================================================================== +#SNRh STRUCT 4 +# type STRING SNR +# datatype STRARR 2 +# RAD4ALP:dBZ +# RAD4ALP:Nh +# output_type STRING SNRh +# MAKE_GLOBAL INT 1 +# +# +#RhoHV STRUCT 3 +# type STRING RHOHV_CORRECTION +# datatype STRARR 5 +# RAD4ALP:uRhoHV +# RAD4ALP:ZDR +# RAD4ALP:Nh +# RAD4ALP:Nv +# PROC:SNRh +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## echo identification +## ========================================================================================== +#echoID STRUCT 3 +# type STRING SAN +# datatype STRARR 4 +# RAD4ALP:dBZ +# RAD4ALP:ZDR +# RAD4ALP:uPhiDP +# PROC:RhoHV +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## clutter and noise suppression +## ========================================================================================== +## echo type 3 : precip, 2 : clutter, 1 : noise +#echoFilter STRUCT 4 +# type STRING ECHO_FILTER +# datatype STRARR 5 +# PROC:echoID +# RAD4ALP:dBZ +# RAD4ALP:ZDR +# PROC:RhoHV +# RAD4ALP:uPhiDP +# echo_type INT 3 +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## filtration based on SNR +## ========================================================================================== +#SNRFilter STRUCT 4 +# type STRING SNR_FILTER +# datatype STRARR 2 +# PROC:SNRh +# PROC:PhiDPc +# SNRmin FLOAT 10. +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## PHIDP processing +## ========================================================================================== +#PhiDPc_smooth2w STRUCT 11 +# type STRING PHIDP_SMOOTH_2W +# datatype STRARR 2 +# PROC:PhiDPc +# PROC:dBZc +# rmin FLOAT 1000. +# rmax FLOAT 50000. +# rcell FLOAT 1000. +# Zmin FLOAT 20. +# Zmax FLOAT 40. +# rwinds FLOAT 1000. +# rwindl FLOAT 3000. +# Zthr FLOAT 40. +# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## KDP processing +## ========================================================================================== +#KDPc STRUCT 6 +# type STRING KDP_LEASTSQUARE_2W +# datatype STRARR 2 +# PROC:PhiDPc +# PROC:dBZc +# rwinds FLOAT 1000. +# rwindl FLOAT 3000. +# Zthr FLOAT 40. 
+# MAKE_GLOBAL INT 1 +# +# +## ========================================================================================== +## Attenuation +## ========================================================================================== +#Att_ZPhi STRUCT 4 +# type STRING ATTENUATION +# datatype STRARR 4 +# PROC:dBZc +# PROC:ZDRc +# PROC:PhiDPc +# RAD4ALPCOSMO:TEMP +# MAKE_GLOBAL INT 1 +# ATT_METHOD STRING ZPhi +# +# +## ========================================================================================== +## hydrometeor classification products +## ========================================================================================== +#hydroclass STRUCT 6 +# type STRING HYDROCLASS +# datatype STRARR 5 +# PROC:dBZc +# PROC:ZDRc +# PROC:RhoHVc +# PROC:KDPc +# RAD4ALPCOSMO:TEMP +# HYDRO_METHOD STRING SEMISUPERVISED +# RADARCENTROIDS STRING A +# MAKE_GLOBAL INT 1 +# products STRUCT 1 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# voltype STRING hydro +# anglenr INT 2 +# +# +## ========================================================================================== +## temporal average +## ========================================================================================== +#dBZ_avg STRUCT 6 +# type STRING TIME_AVG +# datatype STRARR 1 +# PROC:dBZc +# start_average FLOAT 0. +# period FLOAT 3600. +# lin_trans INT 1 +# products STRUCT 2 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING dBZc +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING dBZc +# +#PhiDP_avg STRUCT 5 +# type STRING WEIGHTED_TIME_AVG +# datatype STRARR 2 +# PROC:dBZc +# PROC:PhiDPc +# start_average FLOAT 0. +# period FLOAT 3600. +# products STRUCT 2 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING PhiDPc +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING PhiDPc +# +#flag_avg STRUCT 6 +# type STRING FLAG_TIME_AVG +# datatype STRARR 3 +# PROC:PhiDPc +# PROC:echoID +# PROC:hydro +# start_average FLOAT 0. +# period FLOAT 3600. +# phidpmax FLOAT 60. 
+# products STRUCT 2 +# EL001_0 STRUCT 3 +# type STRING PPI_IMAGE +# anglenr INT 2 +# voltype STRING time_avg_flag +# SAVEVOL STRUCT 2 +# type STRING SAVEVOL +# voltype STRING time_avg_flag \ No newline at end of file diff --git a/doc/pyrad_user_manual.docx b/doc/pyrad_user_manual.docx index d67bbe1..0c51dff 100755 Binary files a/doc/pyrad_user_manual.docx and b/doc/pyrad_user_manual.docx differ diff --git a/doc/pyrad_user_manual.pdf b/doc/pyrad_user_manual.pdf index ae88cca..ed14ab5 100755 Binary files a/doc/pyrad_user_manual.pdf and b/doc/pyrad_user_manual.pdf differ diff --git a/src/pyrad_proc/pyrad/flow/flow_control.py b/src/pyrad_proc/pyrad/flow/flow_control.py index d2509ed..e0c7720 100755 --- a/src/pyrad_proc/pyrad/flow/flow_control.py +++ b/src/pyrad_proc/pyrad/flow/flow_control.py @@ -31,6 +31,7 @@ import atexit import inspect import gc +import numpy as np from ..io.config import read_config from ..io.read_data_radar import get_data @@ -160,13 +161,24 @@ def main(cfgfile, starttime, endtime, infostr="", trajfile=""): master_voltime+timedelta(seconds=cfg['TimeTol']), datacfg, scan_list=cfg['ScanList']) - if len(filelist_ref) == 0: + nfiles_ref = len(filelist_ref) + if nfiles_ref == 0: warn("ERROR: Could not find any valid volume for reference " + "time " + master_voltime.strftime('%Y-%m-%d %H:%M:%S') + ' and radar RADAR'+'{:03d}'.format(i+1)) radar_list.append(None) + elif nfiles_ref == 1: + voltime_ref = get_datetime( + filelist_ref[ind], datatypedescr_ref) + radar_list.append( + get_data(voltime_ref, datatypesdescr_list[i], datacfg)) else: - voltime_ref = get_datetime(filelist_ref[0], datatypedescr_ref) + voltime_ref_list = [] + for j in range(nfiles_ref): + voltime_ref_list.append(get_datetime( + filelist_ref[j], datatypedescr_ref)) + voltime_ref = min( + voltime_ref_list, key=lambda x: abs(x-master_voltime)) radar_list.append( get_data(voltime_ref, datatypesdescr_list[i], datacfg)) @@ -325,6 +337,8 @@ def _create_cfg_dict(cfgfile): cfg.update({'ScanList': get_scan_list(cfg['ScanList'])}) if 'datapath' not in cfg: cfg.update({'datapath': None}) + if 'path_convention' not in cfg: + cfg.update({'path_convention': 'MCH'}) if 'cosmopath' not in cfg: cfg.update({'cosmopath': None}) if 'psrpath' not in cfg: diff --git a/src/pyrad_proc/pyrad/io/io_aux.py b/src/pyrad_proc/pyrad/io/io_aux.py index 88ec27f..2d96b74 100755 --- a/src/pyrad_proc/pyrad/io/io_aux.py +++ b/src/pyrad_proc/pyrad/io/io_aux.py @@ -425,7 +425,10 @@ def get_file_list(datadescriptor, starttime, endtime, cfg, scan=None): radar object """ - ndays = int(np.ceil(((endtime-starttime).total_seconds())/(3600.*24.))) + startdate = starttime.replace(hour=0, minute=0, second=0, microsecond=0) + enddate = endtime.replace(hour=0, minute=0, second=0, microsecond=0) + ndays = int((enddate-startdate).days)+1 + radarnr, datagroup, datatype, dataset, product = get_datatype_fields( datadescriptor) ind_rad = int(radarnr[5:8])-1 @@ -457,13 +460,13 @@ def get_file_list(datadescriptor, starttime, endtime, cfg, scan=None): basename = ('P'+cfg['RadarRes'][ind_rad] + cfg['RadarName'][ind_rad]+dayinfo) if cfg['path_convention'] == 'LTE': - yy = dayinfo[0:2] + yy = dayinfo[0:2] dy = dayinfo[2:] subf = ('P' + cfg['RadarRes'][ind_rad] + cfg['RadarName'][ind_rad] + yy + 'hdf' + dy) datapath = cfg['datapath'][ind_rad] + subf + '/' else: - datapath = cfg['datapath'][ind_rad] + dayinfo + '/' + basename + '/' + datapath = cfg['datapath'][ind_rad]+dayinfo+'/'+basename+'/' if (not os.path.isdir(datapath)): warn("WARNING: Unknown datapath '%s'" % 
datapath) continue @@ -491,12 +494,22 @@ def get_file_list(datadescriptor, starttime, endtime, cfg, scan=None): sub1 = str(starttime.year) sub2 = starttime.strftime('%m') sub3 = starttime.strftime('%d') - datapath = cfg['datapath'][ind_rad] + '/' + sub1 + '/' + sub2 + '/' + sub3 + '/' - basename = 'MXPol-polar-' + starttime.strftime('%Y%m%d') + '-*-' + scan + '*' + datapath = (cfg['datapath'][ind_rad]+'/'+sub1+'/'+sub2+'/' + + sub3+'/') + basename = ('MXPol-polar-'+starttime.strftime('%Y%m%d')+'-*-' + + scan+'*') dayfilelist = glob.glob(datapath+basename) else: - warn("insert MCH convention here") - return None + daydir = ( + starttime+datetime.timedelta(days=i)).strftime('%Y-%m-%d') + dayinfo = ( + starttime+datetime.timedelta(days=i)).strftime('%Y%m%d') + datapath = cfg['datapath'][ind_rad]+scan+'/'+daydir+'/' + if (not os.path.isdir(datapath)): + warn("WARNING: Unknown datapath '%s'" % datapath) + continue + dayfilelist = glob.glob( + datapath+'MXPol-polar-'+dayinfo+'-*-'+scan+'.nc') for filename in dayfilelist: t_filelist.append(filename) filelist = [] @@ -594,7 +607,7 @@ def get_datatype_fields(datadescriptor): elif datagroup == 'MXPOL': datatype = descrfields[2] dataset = None - product = None + product = None else: datatype = descrfields[2] dataset = None @@ -676,7 +689,7 @@ def get_datetime(fname, datadescriptor): datetimestr = bfile[3:12] fdatetime = datetime.datetime.strptime(datetimestr, '%y%j%H%M') elif datagroup == 'MXPOL': - datetimestr = re.findall(r"([0-9]{8}-[0-9]{6})",bfile)[0] + datetimestr = re.findall(r"([0-9]{8}-[0-9]{6})", bfile)[0] fdatetime = datetime.datetime.strptime(datetimestr, '%Y%m%d-%H%M%S') else: warn('unknown data group') diff --git a/src/pyrad_proc/pyrad/io/mxpol_config.py b/src/pyrad_proc/pyrad/io/mxpol_config.py index b5bb765..98cf36e 100644 --- a/src/pyrad_proc/pyrad/io/mxpol_config.py +++ b/src/pyrad_proc/pyrad/io/mxpol_config.py @@ -4,69 +4,71 @@ @author: fvanden -Configuration file for mxpol pyart.core.Radar class. Some information may be +Configuration file for mxpol pyart.core.Radar class. Some information may be redundant because this file is a copy from the ProfileLab toolkit. -Functions to retrieve data from this file may be found in pyrad.io.read_data_mxpol -under the utilities section +Functions to retrieve data from this file may be found in + pyrad.io.read_data_mxpol under the utilities section """ -# radar information +# radar information + +MCH_elev = [-0.2, 0.4, 1., 1.6, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 11., + 13., 16., 20., 25., 30., 35., 40.] 
+NYQUIST_VEL = [8.3, 9.6, 8.3, 12.4, 11.0, 12.4, 13.8, 12.4, 13.8, 16.5, 16.5, + 16.5, 20.6, 20.6, 20.6, 20.6, 20.6, 20.6, 20.6, 20.6] -MCH_elev=[-0.2,0.4,1,1.6,2.5,3.5,4.5,5.5,6.5,7.5,8.5,9.5,11,13,16,20,25,30,35,40] -NYQUIST_VEL=[8.3, 9.6,8.3,12.4,11.0,12.4,13.8,12.4,13.8,16.5,16.5,16.5,20.6,20.6,20.6,20.6,20.6,20.6,20.6,20.6] - RADAR_INFO = { - 'coordinates' : { - 'ALB' : [47.284,8.512], - 'DOL' : [46.425,6.099], - 'PPM' : [46.371,7.487], - 'MLE' : [46.041,8.833], - 'DX50' : [46.8425,6.9184], - 'MXPOL' : [46.8133,6.9428] + 'coordinates': { + 'ALB': [47.284, 8.512], + 'DOL': [46.425, 6.099], + 'PPM': [46.371, 7.487], + 'MLE': [46.041, 8.833], + 'DX50': [46.8425, 6.9184], + 'MXPOL': [46.8133, 6.9428] }, 'altitude': { - 'ALB' : 938, - 'DOL' : 1682, - 'PPM' : 2937, - 'MLE' : 1626, + 'ALB': 938, + 'DOL': 1682, + 'PPM': 2937, + 'MLE': 1626, 'DX50': 451, 'MXPOL': 489 }, - 'searchkey' : { - 'ALB' : 'PHA*hdf*', - 'DOL' : 'PHD*hdf*', - 'PPM' : 'PHP*hdf*', - 'MLE' : 'PHL*hdf*', - 'DX50' : None, - 'MXPOL' : None + 'searchkey': { + 'ALB': 'PHA*hdf*', + 'DOL': 'PHD*hdf*', + 'PPM': 'PHP*hdf*', + 'MLE': 'PHL*hdf*', + 'DX50': None, + 'MXPOL': None }, - 'radarID' : { - 'ALB' : 'ALB', - 'A':'ALB', - 'DOL' : 'DOL', - 'D':'DOL', - 'PPM' : 'PPM', - 'P':'PPM', - 'MLE' : 'MLE', - 'M':'MLE', - 'DX50' : 'DX50', - 'MXPOL' : 'MXPOL' + 'radarID': { + 'ALB': 'ALB', + 'A': 'ALB', + 'DOL': 'DOL', + 'D': 'DOL', + 'PPM': 'PPM', + 'P': 'PPM', + 'MLE': 'MLE', + 'M': 'MLE', + 'DX50': 'DX50', + 'MXPOL': 'MXPOL' }, - 'dbbeam' : { - 'ALB' : 1., - 'DOL' : 1., - 'PPM' : 1., - 'MLE' : 1., - 'MXPOL' : 1.4, - 'DX50' : 1.27 + 'dbbeam': { + 'ALB': 1., + 'DOL': 1., + 'PPM': 1., + 'MLE': 1., + 'MXPOL': 1.4, + 'DX50': 1.27 }, 'elevations': { 'ALB': MCH_elev, 'DOL': MCH_elev, 'PPM': MCH_elev, - 'MLE' : MCH_elev, + 'MLE': MCH_elev, 'DX50': None, 'MXPOL': None } @@ -74,42 +76,48 @@ MY_METADATA = { - 'nyq_vel' : NYQUIST_VEL, + 'nyq_vel': NYQUIST_VEL, # Metadata for instrument tables - 'Radar_info' : { - 'searchkey' : None, - 'coordinates' : None, - 'altitude' : None, - 'dbbeam' : None, - 'radarID' : None}, - - 'Polvar' : { - 'units' : None, - 'standard_name' : None, - 'short_name' : None, - 'long_name' : None, - 'valid_min': None, + 'Radar_info': { + 'searchkey': None, + 'coordinates': None, + 'altitude': None, + 'dbbeam': None, + 'radarID': None}, + + 'Polvar': { + 'units': None, + 'standard_name': None, + 'short_name': None, + 'long_name': None, + 'valid_min': None, 'valid_max': None, - 'plot_interval' : None}, + 'plot_interval': None}, } - -MY_POLARNAMES = { - - # Metadata for polarimetric short and long names - 'Zh' : ['reflectivity','reflectivity','dBZ', 0., 55.,1.], - 'Zdr' : ['differential_reflectivity','Differential reflectivity', 'dB', -1., 5.,0.1], - 'Kdp' : ['specific_differential_phase','Specific differential phase','deg/km',-2., 7., 0.1], - 'Phidp' : ['uncorrected_differential_phase','Differential phase', 'deg',0., 150.,1.], - 'Rhohv' : ['uncorrected_cross_correlation_ratio','Copolar correlation coefficient', '-',0.57, 1., 0.05], - 'ZhCorr' : ['corrected_unfiltered_reflectivity','Attenuation corrected reflectivity', 'dBZ', 0., 55.,1.], - 'ZdrCorr' : ['corrected_differential_reflectivity','Attenuation corrected differential reflectivity','dB', 0., 3., 0.1], - 'RVel' : ['velocity','Mean doppler velocity','m/s', -15., 15.,0.5], - 'Sw' : ['spectrum_width','Spectral Width','m2/s2', 0., 3., 0.1], - 'Zv' : ['reflectivity_vv', 'Vertical reflectivity','dBZ', 0., 45., 1.], - 'Clut' : ['clutter', 'Output clutter 
algorithm','-',0.,100.,10.], - 'corrected_Z' : ['corrected_reflectivity', 'Clutter filtered reflectivity', 'dBZ', 0., 55., 1.] +# Metadata for polarimetric short and long names +MY_POLARNAMES = { + 'Zh': ['reflectivity', 'reflectivity', 'dBZ', 0., 55., 1.], + 'Zdr': ['differential_reflectivity', 'Differential reflectivity', 'dB', + -1., 5., 0.1], + 'Kdp': ['specific_differential_phase', 'Specific differential phase', + 'deg/km', -2., 7., 0.1], + 'Phidp': ['uncorrected_differential_phase', 'Differential phase', 'deg', + 0., 150., 1.], + 'Rhohv': ['uncorrected_cross_correlation_ratio', + 'Copolar correlation coefficient', '-', 0.57, 1., 0.05], + 'ZhCorr': ['corrected_unfiltered_reflectivity', + 'Attenuation corrected reflectivity', 'dBZ', 0., 55., 1.], + 'ZdrCorr': ['corrected_differential_reflectivity', + 'Attenuation corrected differential reflectivity', 'dB', 0., + 3., 0.1], + 'RVel': ['velocity', 'Mean doppler velocity', 'm/s', -15., 15., 0.5], + 'Sw': ['spectrum_width', 'Spectral Width', 'm2/s2', 0., 3., 0.1], + 'Zv': ['reflectivity_vv', 'Vertical reflectivity', 'dBZ', 0., 45., 1.], + 'Clut': ['clutter', 'Output clutter algorithm', '-', 0., 100., 10.], + 'corrected_Z': ['corrected_reflectivity', 'Clutter filtered reflectivity', + 'dBZ', 0., 55., 1.] - } \ No newline at end of file + } diff --git a/src/pyrad_proc/pyrad/io/read_data_mxpol.py b/src/pyrad_proc/pyrad/io/read_data_mxpol.py index 6a1baef..5b6733b 100644 --- a/src/pyrad_proc/pyrad/io/read_data_mxpol.py +++ b/src/pyrad_proc/pyrad/io/read_data_mxpol.py @@ -6,7 +6,7 @@ .. autosummary:: :toctree: generated/ - + classes - MXPOL: pyrad_MXPOL classes - MCH: @@ -24,7 +24,7 @@ generate_radar_table generate_polvar_metadata convert_polvar_name - + """ import pyart @@ -39,69 +39,64 @@ from copy import deepcopy import warnings -########################### classes - MXPOL ################################## +# -------------------------- classes - MXPOL ------------------------------ # class pyrad_MXPOL(pyart.core.Radar): - def __init__(self,filename, field_names = None, max_range=np.Inf,min_range=10000): - - + def __init__(self, filename, field_names=None, max_range=np.Inf, + min_range=10000): + # find information based on filename - all_files = [filename] - - fname_basename=os.path.basename(filename) - + fname_basename = os.path.basename(filename) + if 'PPI' in fname_basename: - scan_type='ppi' + scan_type = 'ppi' elif 'RHI' in fname_basename: - scan_type='rhi' - + scan_type = 'rhi' + strdate = re.findall(r"([0-9]{8}-[0-9]{6})", fname_basename)[0] date = datetime.datetime.strptime(strdate, '%Y%m%d-%H%M%S') - + # if field name is None, take all available fields - + if field_names is None: - field_names = ['Zh','Zdr','Kdp','Phidp','Rhohv','ZhCorr','ZdrCorr','RVel','Sw','SNRh','SNRv','Psidp'] - + field_names = ['Zh', 'Zdr', 'Kdp', 'Phidp', 'Rhohv', 'ZhCorr', + 'ZdrCorr', 'RVel', 'Sw', 'SNRh', 'SNRv', 'Psidp'] + # convert fieldname if necessary - varnames = [] for fieldname in field_names: newname = convert_polvar_name('LTE', fieldname) varnames.append(newname) - - + # get labels, units etc - long_names = [] standard_names = [] units = [] vmin = [] vmax = [] - + for varname in varnames: metadata = generate_polvar_metadata(varname) - standard_names.append( metadata['standard_name'] ) - long_names.append( metadata['long_name'] ) - units.append( metadata['units'] ) - vmin.append( metadata['valid_min'] ) - vmax.append( metadata['valid_max'] ) - + standard_names.append(metadata['standard_name']) + long_names.append(metadata['long_name']) + 
units.append(metadata['units']) + vmin.append(metadata['valid_min']) + vmax.append(metadata['valid_max']) + # initiate empty vectors - N_sweeps = len(all_files) fields = {} fixed_angle = {} fixed_angle['data'] = np.zeros(N_sweeps, ) - + sweep_start_ray_index = {} sweep_start_ray_index['data'] = [] sweep_stop_ray_index = {} sweep_stop_ray_index['data'] = [] - - for i,k in enumerate(varnames): + + for i, k in enumerate(varnames): fields[k] = {} fields[k]['data'] = [] fields[k]['long_name'] = long_names[i] @@ -109,145 +104,152 @@ def __init__(self,filename, field_names = None, max_range=np.Inf,min_range=10000 fields[k]['units'] = units[i] fields[k]['valid_min'] = vmin[i] fields[k]['valid_max'] = vmax[i] - + idx_start = 0 idx_stop = 0 elevations = [] azimuths = [] nyquist = [] - - # read data and create dictionaries + # read data and create dictionaries for i in range(N_sweeps): - metadata, data = readMXPOLRadData(all_files[i], varnames, max_range) + metadata, data = readMXPOLRadData( + all_files[i], varnames, max_range) if scan_type == 'rhi': - fixed_angle['data'][i] = np.round( np.mean(data['azimuth']) ) + fixed_angle['data'][i] = np.round(np.mean(data['azimuth'])) elif scan_type == 'ppi': - fixed_angle['data'][i] = np.round( np.mean(data['elevation']) ) - - [N_az,N_ranges] = data[varnames[0]].shape + fixed_angle['data'][i] = np.round(np.mean(data['elevation'])) + + [N_az, N_ranges] = data[varnames[0]].shape idx_stop = idx_start + N_az - 1 sweep_start_ray_index['data'].append(idx_start) sweep_stop_ray_index['data'].append(idx_stop) - idx_start=idx_stop + 1 - elevations.extend( list(data['elevation']) ) - nyquist.extend( [data['nyquist_vel']]*N_az ) - azimuths.extend( list(data['azimuth']) ) - - for j,v in enumerate(varnames): + idx_start = idx_stop + 1 + elevations.extend(list(data['elevation'])) + nyquist.extend([data['nyquist_vel']]*N_az) + azimuths.extend(list(data['azimuth'])) + + for j, v in enumerate(varnames): if v in data.keys(): if not len(fields[v]['data']): fields[v]['data'] = data[v] else: - fields[v]['data'] = row_stack( fields[v]['data'],data[v] ) + fields[v]['data'] = row_stack( + fields[v]['data'], data[v]) else: print('Variable '+v+' was not found in file!') - + # mask NaNs - + for v in varnames: - fields[v]['data'] = np.ma.masked_equal(fields[v]['data'], -99900.0) - - [a,N_ranges] = fields[varnames[0]]['data'].shape - - # create dictionaries according to pyART standard - - latitude = {'data' : np.array([data['latitude']]), 'units' : data['lat_units']} - longitude = {'data' :np.array([data['longitude']]), 'units' : data['lon_units']} - altitude = {'data' : np.array([data['altitude']]), 'units' : data['alt_units']} - sweep_number = {'data' : np.arange(0,len(all_files))} - sweep_mode = {'data' : [scan_type]*N_sweeps} - instrument_parameters = {'nyquist_velocity': {'data':np.array(nyquist)}} - azimuth = {'data' : np.array(azimuths), 'units' : data['azim_units']} - rrange = {'data':np.arange(N_ranges)*data['resolution'], 'units' : data['range_units']} - elevation = {'data' :np.array(elevations), 'units' : data['elev_units']} - + fields[v]['data'] = np.ma.masked_equal( + fields[v]['data'], -99900.0) + + [a, N_ranges] = fields[varnames[0]]['data'].shape + + # create dictionaries according to pyART standard + latitude = {'data': np.asarray([data['latitude']]), + 'units': data['lat_units']} + longitude = {'data': np.asarray([data['longitude']]), + 'units': data['lon_units']} + altitude = {'data': np.asarray([data['altitude']]), + 'units': data['alt_units']} + sweep_number = {'data': 
np.arange(0, len(all_files))} + sweep_mode = {'data': np.asarray([scan_type]*N_sweeps)} + instrument_parameters = { + 'nyquist_velocity': {'data': np.asarray(nyquist)}} + azimuth = {'data': np.asarray(azimuths), 'units': data['azim_units']} + rrange = {'data': np.arange(N_ranges)*data['resolution'], + 'units': data['range_units']} + elevation = {'data': np.asarray(elevations), + 'units': data['elev_units']} + sweep_start_ray_index['data'] = np.asarray( + sweep_start_ray_index['data']) + sweep_stop_ray_index['data'] = np.asarray( + sweep_stop_ray_index['data']) + time_units = 'seconds since ' + str(date) - time = {'data' : data['time'],'units': time_units} - + time = {'data': data['time'], 'units': time_units} + # change keys to match pyART metranet keys fields_copy = deepcopy(fields) for keys in fields_copy: newkey = fields[keys]['standard_name'] fields[newkey] = fields.pop(keys) - + # Create PyART instance - pyart.core.Radar.__init__(self,time,rrange,fields,metadata,scan_type,latitude,longitude,altitude,sweep_number,sweep_mode,fixed_angle,\ - sweep_start_ray_index,sweep_stop_ray_index,azimuth,elevation,instrument_parameters=instrument_parameters) - - -############################ classes - MCH ################################### - + pyart.core.Radar.__init__( + self, time, rrange, fields, metadata, scan_type, latitude, + longitude, altitude, sweep_number, sweep_mode, fixed_angle, + sweep_start_ray_index, sweep_stop_ray_index, azimuth, elevation, + instrument_parameters=instrument_parameters) + + +# -------------------------- classes - MCH --------------------------- # + class pyrad_MCH(pyart.core.Radar): - def __init__(self,filename, field_names = None, max_range=np.Inf): - - + def __init__(self, filename, field_names=None, max_range=np.Inf): + # find information based on filename - all_files = [filename] N_sweeps = len(all_files) - + fname_basename = os.path.basename(filename) - + # Get name of radar index_letter = fname_basename[2] - + radar_info = generate_radar_table(index_letter) radar_name = radar_info['radarID'] - - + # Get radar resolution - if fname_basename[1] == 'L': rres = 500. 
else: rres = 83.3 - + scan_type = 'ppi' - - scandate = datetime.datetime.strptime(fname_basename[3:12], '%y%j%H%M') + + scandate = datetime.datetime.strptime( + fname_basename[3:12], '%y%j%H%M') self.scan_date = scandate.timetuple() - + # if field name is None, take all available fields - if field_names is None: - field_names = ['Z','ZDR','ZV','V','W','RHO','CLUT','PHIDP'] - + field_names = ['Z', 'ZDR', 'ZV', 'V', 'W', 'RHO', 'CLUT', 'PHIDP'] + # convert fieldname if necessary - varnames = [] for fieldname in field_names: newname = convert_polvar_name('MCH', fieldname) varnames.append(newname) - + # get labels, units etc - long_names = [] standard_names = [] units = [] vmin = [] vmax = [] - + for varname in varnames: metadata = generate_polvar_metadata(varname) - standard_names.append( metadata['standard_name'] ) - long_names.append( metadata['long_name'] ) - units.append( metadata['units'] ) - vmin.append( metadata['valid_min'] ) - vmax.append( metadata['valid_max'] ) - + standard_names.append(metadata['standard_name']) + long_names.append(metadata['long_name']) + units.append(metadata['units']) + vmin.append(metadata['valid_min']) + vmax.append(metadata['valid_max']) + # initiate empty vectors - fields = {} fixed_angle = {} fixed_angle['data'] = np.zeros(N_sweeps, ) - + sweep_start_ray_index = {} sweep_start_ray_index['data'] = [] sweep_stop_ray_index = {} sweep_stop_ray_index['data'] = [] - - for i,k in enumerate(varnames): + + for i, k in enumerate(varnames): fields[k] = {} fields[k]['data'] = [] fields[k]['long_name'] = long_names[i] @@ -255,7 +257,7 @@ def __init__(self,filename, field_names = None, max_range=np.Inf): fields[k]['units'] = units[i] fields[k]['valid_min'] = vmin[i] fields[k]['valid_max'] = vmax[i] - + # Initialize idx_start = 0 idx_stop = 0 @@ -263,250 +265,273 @@ def __init__(self,filename, field_names = None, max_range=np.Inf): azimuths = [] nyquist = [] time_lapse = [] - + # read and organise data - for i in range(N_sweeps): - data = readCHRadData(all_files[i],radar_name, varnames,rres,max_range) + data = readCHRadData( + all_files[i], radar_name, varnames, rres, max_range) fixed_angle['data'][i] = data['elevation'] - [N_ranges,N_az] = data[varnames[0]].shape + [N_ranges, N_az] = data[varnames[0]].shape idx_stop = idx_start + N_az - 1 sweep_start_ray_index['data'].append(idx_start) sweep_stop_ray_index['data'].append(idx_stop) idx_start = idx_stop + 1 - elevations.extend( [data['elevation']]*N_az ) - nyquist.extend( [data['nyquist_vel']]*N_az ) - azimuths.extend( list(data['azimuth']) ) + elevations.extend([data['elevation']]*N_az) + nyquist.extend([data['nyquist_vel']]*N_az) + azimuths.extend(list(data['azimuth'])) # create list of times at the center of each ray - sweep_rank = int(re.findall(r"\.([0-9]{3})\.",all_files[i])[0]) + sweep_rank = int(re.findall(r"\.([0-9]{3})\.", all_files[i])[0]) starttime, endtime = findTimes(sweep_rank) - interval = ( (endtime-starttime)/len(list(data['azimuth']))) - time_lapse.extend( np.arange(starttime+(0.5*interval), endtime,interval) ) - for j,v in enumerate(varnames): + interval = ((endtime-starttime)/len(list(data['azimuth']))) + time_lapse.extend(np.arange( + starttime+(0.5*interval), endtime, interval)) + for j, v in enumerate(varnames): if not len(fields[v]['data']): - fields[v]['data'] = data[v].T + fields[v]['data'] = data[v].T else: - fields[v]['data'] = row_stack(fields[v]['data'],data[v].T) - - # mask nans + fields[v]['data'] = row_stack( + fields[v]['data'], data[v].T) + + # mask nans for v in varnames: - 
fields[v]['data'] = np.ma.array(fields[v]['data'],mask=np.isnan(fields[v]['data'])) - - sweep_start_ray_index['data'] = np.asarray(sweep_start_ray_index['data']) - sweep_stop_ray_index['data'] = np.asarray(sweep_stop_ray_index['data']) + fields[v]['data'] = np.ma.array( + fields[v]['data'], mask=np.isnan(fields[v]['data'])) + + sweep_start_ray_index['data'] = np.asarray( + sweep_start_ray_index['data']) + sweep_stop_ray_index['data'] = np.asarray( + sweep_stop_ray_index['data']) metadata = {} - - [a,N_ranges] = fields[varnames[0]]['data'].shape - - latitude = {'data' : np.array([radar_info['coordinates'][0]]), 'units' : "DegreesNorth" } - longitude = {'data' : np.array([radar_info['coordinates'][1]]), 'units' : "DegreesEast"} - altitude = {'data' : np.array([radar_info['altitude']]), 'units' : "MetersAboveSeaLevel"} - sweep_number = {'data' : np.arange(0,len(all_files))} - sweep_mode = {'data' : np.asarray(['ppi']*N_sweeps)} - instrument_parameters = {'nyquist_velocity': {'data':np.array(nyquist)}} - - metadata['Source'] = "Operational radar data processed at MeteoSwiss Locarno-Monti" - metadata['Institution'] = "MeteoSwiss, MDR, Locarno-Monti, Switzerland" - metadata['History'] = ["created: %s, " % time.ctime(os.path.getctime(filename)) + "last modified: %s" % time.ctime(os.path.getmtime(filename))] + + [a, N_ranges] = fields[varnames[0]]['data'].shape + + latitude = {'data': np.array([radar_info['coordinates'][0]]), + 'units': "DegreesNorth"} + longitude = {'data': np.array([radar_info['coordinates'][1]]), + 'units': "DegreesEast"} + altitude = {'data': np.array([radar_info['altitude']]), + 'units': "MetersAboveSeaLevel"} + sweep_number = {'data': np.arange(0, len(all_files))} + sweep_mode = {'data': np.asarray(['ppi']*N_sweeps)} + instrument_parameters = { + 'nyquist_velocity': {'data': np.array(nyquist)}} + + metadata['Source'] = ( + "Operational radar data processed at MeteoSwiss Locarno-Monti") + metadata['Institution'] = ( + "MeteoSwiss, MDR, Locarno-Monti, Switzerland") + metadata['History'] = [ + "created: %s, " % time.ctime(os.path.getctime(filename)) + + "last modified: %s" % time.ctime(os.path.getmtime(filename))] metadata['ContactInformation'] = "marc.schneebeli@meteosvizzera.ch" - - azimuth = {'data' : np.array(azimuths), 'units' : "Degrees"} - rrange = {'data':np.arange(N_ranges)*data['resolution'], 'units' : "Meters"} - elevation = {'data' :np.array(elevations), 'units' : "Degrees"} - + + azimuth = {'data': np.array(azimuths), 'units': "Degrees"} + rrange = {'data': np.arange(N_ranges)*data['resolution'], + 'units': "Meters"} + elevation = {'data': np.array(elevations), 'units': "Degrees"} + time_units = 'seconds since '+str(scandate) time_lapse = np.asarray(time_lapse) - scantime = {'data' : time_lapse,'units': time_units} - + scantime = {'data': time_lapse, 'units': time_units} + # change keys to match pyART metranet keys fields_copy = deepcopy(fields) for keys in fields_copy: newkey = fields[keys]['standard_name'] fields[newkey] = fields.pop(keys) - + # Create PyART instance - pyart.core.Radar.__init__(self,scantime,rrange,fields,metadata,scan_type,latitude,longitude,altitude,sweep_number,sweep_mode,fixed_angle,\ - sweep_start_ray_index,sweep_stop_ray_index,azimuth, elevation,instrument_parameters=instrument_parameters) - - - -########################## utilities - read ################################## - -def row_stack(a1,a2): + pyart.core.Radar.__init__( + self, scantime, rrange, fields, metadata, scan_type, latitude, + longitude, altitude, sweep_number, sweep_mode, 
fixed_angle, + sweep_start_ray_index, sweep_stop_ray_index, azimuth, elevation, + instrument_parameters=instrument_parameters) + + +# ----------------------- utilities - read --------------------- # + +def row_stack(a1, a2): """ Stacks data from subsequent sweeps, while padding "empty" columns from subsequent sweeps. - + Inputs ------ a1: np.array destination array - + a2: np.array array which is added onto the first array - + Returns ------- out: np.array stacked destination and additional array, with uniform shape - + """ - [N1,M1] = a1.shape - [N2,M2] = a2.shape - - if M1>M2: - a2 = np.pad(a2,((0,0),(0,M1-M2)),mode='constant',constant_values=-9999999) - elif M2<M1: - a1 = np.pad(a2,((0,0),(0,M2-M1)),mode='constant',constant_values=-9999999) - - out = np.vstack((a1,a2)) + [N1, M1] = a1.shape + [N2, M2] = a2.shape + + if M1 > M2: + a2 = np.pad(a2, ((0, 0), (0, M1-M2)), mode='constant', + constant_values=-9999999) + elif M2 < M1: + a1 = np.pad(a2, ((0, 0), (0, M2-M1)), mode='constant', + constant_values=-9999999) + + out = np.vstack((a1, a2)) out[out == -9999999] = np.nan - + return out - + + def findTimes(num_sweep): """ Finds the times at the beginning and at the end of each sweep. Information comes from the elapsed time since the beginning of the volume scan, from the Rad4Alp: Specifications/ Request for Proposal (RFP) document. - + Inputs ------ num_sweep: int rank of the sweep - + Returns ------- elapsed_times[num_sweep][0]: float - the elapsed time since the beginning of the volume scan at the beginning - of the sweep - + the elapsed time since the beginning of the volume scan at the + beginning of the sweep + elapsed_times[num_sweep][1]: float - the elapsed time since the beginning of the volume scan at the end of + the elapsed time since the beginning of the volume scan at the end of the sweep - + """ - - elapsed_times = {9 : [0, 11.4], - 7 : [11.4, 22.8], - 5 : [22.8, 39.2], - 3 : [39.3, 60.5], - 1 : [60.5, 84.7], - 19: [84.7, 97.2], - 17: [97.2, 109.6], - 15: [109.6, 121.6], - 13: [121.6, 133.1], - 11: [133.1, 144.4], - 10: [144.4, 155.8], - 8: [155.8, 172.2], - 6: [172.2, 188.6], - 4: [188.6, 204.9], - 2: [204.9, 229.4], - 20: [229.4, 241.9], - 18: [241.9, 254.4], - 16: [254.4, 266.6], - 14: [266.6, 278.3], - 12: [278.3, 289.9]} - + + elapsed_times = {9: [0, 11.4], + 7: [11.4, 22.8], + 5: [22.8, 39.2], + 3: [39.3, 60.5], + 1: [60.5, 84.7], + 19: [84.7, 97.2], + 17: [97.2, 109.6], + 15: [109.6, 121.6], + 13: [121.6, 133.1], + 11: [133.1, 144.4], + 10: [144.4, 155.8], + 8: [155.8, 172.2], + 6: [172.2, 188.6], + 4: [188.6, 204.9], + 2: [204.9, 229.4], + 20: [229.4, 241.9], + 18: [241.9, 254.4], + 16: [254.4, 266.6], + 14: [266.6, 278.3], + 12: [278.3, 289.9]} + return elapsed_times[num_sweep][0], elapsed_times[num_sweep][1] - + + def int2float_radar(data, varname, index_angle): """ Converts radar moments from bit to float - + Inputs ------ data: np.array moment data as loaded from h5 file - + varname: str name of the moment (i.e. 
'ZH') - + index_angle: int rank of the sweep-1 (converted to base 0) - + Returns ------- output: np.array - moment data converted to float - + moment data converted to float + """ varname = convert_polvar_name('metranet', varname) NYQUIST_VEL = get_mymetadata('nyq_vel') - - - output=np.zeros(data.shape) - if varname in ['ZH','ZV', 'Z']: - output[data!=0] = (data[data!=0]-64)*0.5 - output[data==0] = float('nan') + + output = np.zeros(data.shape) + if varname in ['ZH', 'ZV', 'Z']: + output[data != 0] = (data[data != 0]-64)*0.5 + output[data == 0] = float('nan') elif varname == 'VEL': - output[data!=0] = (data[data!=0]-128)/127*NYQUIST_VEL[index_angle] - output[data==0] = float('nan') + output[data != 0] = (data[data != 0]-128)/127*NYQUIST_VEL[index_angle] + output[data == 0] = float('nan') elif varname == 'WID': output = data/255*NYQUIST_VEL[index_angle] elif varname == 'ZDR': - output[data!=0] = data[data!=0]*1.0/16.1259842 - 7.9375 - output[data==0] = float('nan') + output[data != 0] = data[data != 0]*1.0/16.1259842 - 7.9375 + output[data == 0] = float('nan') elif varname == 'RHO': - output[data!=0] = 1.003-10**(-(data[data!=0]-1.0)/100) - output[data==0] = float('nan') + output[data != 0] = 1.003-10**(-(data[data != 0]-1.0)/100) + output[data == 0] = float('nan') elif varname == 'PHI': - output[data!=0] = (data[data!=0]-32768)/32767*180 - output[data==0] = float('nan') + output[data != 0] = (data[data != 0]-32768)/32767*180 + output[data == 0] = float('nan') elif varname == 'CLUT': - output=data + output = data else: - warnings.warn( ("Warning, %s was not found and could not be converted") %(varname) ) + warnings.warn( + ("Warning, %s was not found and could not be converted") + % (varname)) return output - -def readMXPOLRadData(filename, variableList, max_range=np.Inf,min_range=0): + + +def readMXPOLRadData(filename, variableList, max_range=np.Inf, min_range=0): """ Reads a netcdf containing processed radar data in polar coordinates - + Parameters ---------- filename: str complete path of the file - + variableList: list list of variables to be read - + Returns ------- varPol: dict dictionary containing the variables, the azimuth and the range - + metadata: dict dictionary containing the metadata of the file - + """ varPol = {} metadata = {} ncid = netCDF4.Dataset(filename) - + time = ncid.variables['Time'] - time -= time[0] # To get time in seconds from beginning of scan + time -= time[0] # To get time in seconds from beginning of scan rrange = ncid.variables['Range'][:] - + # Get indexes between min_range and max_range - idx2keep = np.where(np.logical_and(rrange<max_range,rrange>min_range))[0] + idx2keep = np.where(np.logical_and( + rrange < max_range, rrange > min_range))[0] rrange = rrange[idx2keep] - + # Get variables in polar coordinates for varname in variableList: try: varPol[varname] = ncid.variables[varname][:].T except: pass - + varPol['resolution'] = ncid.__dict__['RangeResolution-value'] varPol['range'] = rrange varPol['range_units'] = ncid.__dict__['RangeResolution-unit'] varPol['azimuth'] = ncid.variables['Azimuth'][:] try: - varPol['azim_units'] = ncid.__dict__['Azimuth-unit'] + varPol['azim_units'] = ncid.__dict__['Azimuth-unit'] except KeyError: varPol['azim_units'] = ncid.variables['Azimuth'].Units varPol['elevation'] = ncid.variables['Elevation'][:] @@ -517,46 +542,48 @@ def readMXPOLRadData(filename, variableList, max_range=np.Inf,min_range=0): varPol['nyquist_vel'] = ncid.__dict__['NyquistVelocity-value'] varPol['longitude'] = ncid.__dict__['Longitude-value'] varPol['lon_units'] = 
-    varPol['latitude'] = ncid.__dict__['Latitude-value']
+    varPol['latitude'] = ncid.__dict__['Latitude-value']
     varPol['lat_units'] = ncid.__dict__['Latitude-unit']
     varPol['altitude'] = ncid.__dict__['Altitude-value']
     varPol['alt_units'] = ncid.__dict__['Altitude-unit']
     varPol['time'] = time
-
+
     metadata['Source'] = ncid.__dict__['Source']
     metadata['Institution'] = ncid.__dict__['Institution']
     metadata['History'] = ncid.__dict__['History']
     metadata['ContactInformation'] = ncid.__dict__['ContactInformation']
-
+
     # Close netcdf
     ncid.close()
     return metadata, varPol
-
-def readCHRadData(filename, radar_name, variableList, radial_resolution, max_range=np.Inf, min_range=0):
-    """
+
+
+def readCHRadData(filename, radar_name, variableList, radial_resolution,
+                  max_range=np.Inf, min_range=0):
+    """
     Reads a HDF5 file containing processed radar data in polar coordinates
-
+
     Parameters
     ----------
     filename: str
         complete path of the file
-
+
     radar_name: str
         name of MCH radar
-
+
     variableList: list
         list of variables to be read
-
+
     radial_resolution: float
         resolution of the radar in metres (i.e. high: 83.3, low: 500.)
-
+
     max_range: float
         maximum range upto which to read data
-
+
     min_range: float
         mimimum range from which to read data
-
+
     Returns
     -------
     varPol: dict
@@ -564,164 +591,168 @@ def readCHRadData(filename, radar_name, variableList, radial_resolution, max_ran
     """
     varPol = {}
-    h5id = h5py.File(filename,'r')
-
+    h5id = h5py.File(filename, 'r')
+
     ELEVATION_ANGLES = get_elevation_metadata(radar_name)
     radar_info = generate_radar_table(radar_name)
     ANG_RES = radar_info['dbbeam']
     NYQUIST_VEL = get_mymetadata('nyq_vel')
-
     # Get dimensions
     siz = h5id['moments']['Z'].shape
-    range = np.arange(0,siz[1])*radial_resolution
-    idx2keep = np.where(np.logical_and(range<max_range,range>min_range))[0]
+    range = np.arange(0, siz[1])*radial_resolution
+    idx2keep = np.where(np.logical_and(
+        range < max_range, range > min_range))[0]
     range = range[idx2keep]
-    azimuth = np.arange(0,siz[0])*ANG_RES
-    index_angle = int(re.findall(r"\.([0-9]{3})\.",filename)[0])-1
-    elevation = ELEVATION_ANGLES[index_angle]
+    azimuth = np.arange(0, siz[0])*ANG_RES
+    index_angle = int(re.findall(r"\.([0-9]{3})\.", filename)[0])-1
+    elevation = ELEVATION_ANGLES[index_angle]
     # Get variables in polar coordinates
     for varname in variableList:
         varname = convert_polvar_name('MCH', varname)
         data = []
-        data = h5id['moments'][varname][:].T
+        data = h5id['moments'][varname][:].T
         data = np.asarray(data)
         data = data.astype(float)
-        clut = h5id['moments']['CLUT'][:].T
-        data[clut>=100] = float('nan') # Remove clutter
-        data = data[idx2keep,:]
-        varPol[varname] = int2float_radar(data,varname,index_angle)
-
+        clut = h5id['moments']['CLUT'][:].T
+        data[clut >= 100] = float('nan')  # Remove clutter
+        data = data[idx2keep, :]
+        varPol[varname] = int2float_radar(data, varname, index_angle)
+
     varPol['resolution'] = range[3]-range[2]
     varPol['range'] = range
     varPol['azimuth'] = azimuth
     varPol['elevation'] = elevation
-    varPol['nyquist_vel'] = NYQUIST_VEL[index_angle]
+    varPol['nyquist_vel'] = NYQUIST_VEL[index_angle]
     # Close netcdf
     h5id.close()
     return varPol
-######################### utilities - config #################################
+# ------------------------ utilities - config ------------------------- #
 _dirname = os.path.dirname(__file__)
 _DEFAULT_CONFIG_FILE = os.path.join(_dirname, 'mxpol_config.py')
-def load_myconfig(filename = None):
+
+def load_myconfig(filename=None):
     """
     Load configuration from a config file.
- + Parameters ---------- filename: str Filename of the configuration file. If None the default configuration file is loaded from the directory. - + Returns ------- _DEFAULT_METADATA: dict Dictionary with metadata - + """ - + if filename is None: filename = _DEFAULT_CONFIG_FILE - + # private: - + global cfile global _DEFAULT_POLARNAMES global _DEFAULT_METADATA global _DEFAULT_RADAR_INFO - + cfile = imp.load_source('metadata_config', filename) _DEFAULT_METADATA = cfile.MY_METADATA _DEFAULT_POLARNAMES = cfile.MY_POLARNAMES _DEFAULT_RADAR_INFO = cfile.RADAR_INFO return _DEFAULT_METADATA - -def get_mymetadata(p, filename = None): + + +def get_mymetadata(p, filename=None): """ Return a dictionary of metadata for a given parameter, p. An empty dictionary will be returned if no metadata dictionary exists for parameter p. - + Parameters ---------- p: str parameter name (i.e. Polvar) for which to return metadata - + filename: str Filename of the configuration file. If None the default configuration file is loaded from the directory. - + Returns ------- _DEFAULT_METADATA[p].copy(): dict a copy of the parameter of interest from the metadata dictionary - + """ - load_myconfig(filename = filename) - + load_myconfig(filename=filename) + if p in _DEFAULT_METADATA: return _DEFAULT_METADATA[p].copy() else: return {} - -def get_elevation_metadata(radarname, filename = None): + + +def get_elevation_metadata(radarname, filename=None): """ Gets the elevation angles for each sweep from the configuration file - + Inputs ------ radarname: str name of the radar for which to retrieve elevation angles - + filename: str name of the configuration file, if None, the default configuration file is used - + Returns ------- _DEFAULT_RADAR_INFO['elevations'][radarname]: list list of elevation angles in degrees - + or None if not available """ load_myconfig(filename=filename) - + if radarname in _DEFAULT_RADAR_INFO['elevations']: return _DEFAULT_RADAR_INFO['elevations'][radarname] else: - print( ("no elevation angles in configfile for radar %s") %(radarname) ) - -def generate_radar_table(radarname, filename = None): + print(("no elevation angles in configfile for radar %s") % (radarname)) + + +def generate_radar_table(radarname, filename=None): """ Generates a table with basic radar info, based on the given (or default) configfile - + Parameters ---------- radarname: str name of the radar (i.e. 'ALB' or 'A', 'MXPOL' etc) - + filename: str path and name of the configfile, if None, the default configfile is used - + Returns ------- radar_table: dict table containing basic radar info """ - load_myconfig(filename = filename) - + load_myconfig(filename=filename) + if radarname in _DEFAULT_RADAR_INFO['radarID']: radarname = _DEFAULT_RADAR_INFO['radarID'][radarname] - radar_table = get_mymetadata('Radar_info', filename = filename) + radar_table = get_mymetadata('Radar_info', filename=filename) for key in radar_table: if key in _DEFAULT_RADAR_INFO: radar_table[key] = _DEFAULT_RADAR_INFO[key][radarname] @@ -731,33 +762,36 @@ def generate_radar_table(radarname, filename = None): else: return None -def generate_polvar_metadata(polvar, filename = None): + +def generate_polvar_metadata(polvar, filename=None): """ Generates a dictionary with metadata for a polarimetric variable - + Parameters ---------- polvar: str polatimetric variable of interest - + filename: str Filename of the configuration file. If None the default configuration file is loaded from the directory. 
- + Returns ------- polvar_metadata: dict dictionary with metatdata for polarimetric variable of interest - + """ - load_myconfig(filename = filename) + load_myconfig(filename=filename) polvar = convert_polvar_name('LTE', polvar) - + if polvar in _DEFAULT_POLARNAMES: - standard_name, long_name, units, valid_min, valid_max, plot_interval = _DEFAULT_POLARNAMES[polvar] + (standard_name, long_name, units, valid_min, valid_max, + plot_interval) = _DEFAULT_POLARNAMES[polvar] else: - standard_name, long_name, units, valid_min, valid_max, plot_interval = None, None, None, None, None, None - + (standard_name, long_name, units, valid_min, valid_max, + plot_interval) = None, None, None, None, None, None + polvar_metadata = get_mymetadata('Polvar', filename) polvar_metadata['units'] = units polvar_metadata['standard_name'] = standard_name @@ -766,58 +800,59 @@ def generate_polvar_metadata(polvar, filename = None): polvar_metadata['valid_min'] = valid_min polvar_metadata['valid_max'] = valid_max polvar_metadata['plot_interval'] = plot_interval - + return polvar_metadata + def convert_polvar_name(convention, polvar): """ - Finds the correct variable name for a given convention (MXPOL, MCH) and - a given variable name which was spelled with a different case or - according to a different convention. For example, MXPOL convention uses - 'Z' for the reflectivity variable, but if a user inserted 'Zh' this - function will convert it to 'Z'. - + Finds the correct variable name for a given convention (MXPOL, MCH) and + a given variable name which was spelled with a different case or + according to a different convention. For example, MXPOL convention uses + 'Z' for the reflectivity variable, but if a user inserted 'Zh' this + function will convert it to 'Z'. + Parameters ---------- - convention : str, destination convention; either MCH or LTE - + convention : str, destination convention; either MCH or LTE + polvar : str, key of polarimetric variable to be converted - + Returns ------- mykey : str, polarimertric variable key as used within the ProfileLab toolbox context - + """ # Generate dictionary for the conversion - - metranet = ['ZH','ZV','ZDR','PHI','VEL','WID', 'RHO','CLUT', 'MPH','STA1', 'STA2', 'WBN'] - MCH = ['Z','ZV','ZDR','PHIDP','V','W','RHO','CLUT', 'MPH','STA1', 'STA2', 'WBN'] - LTE = ['Zh','Zv','Zdr','Phidp','RVel','Sw','Rhohv','Clut', 'mph','sta1', 'sta2', 'wbn'] + metranet = ['ZH', 'ZV', 'ZDR', 'PHI', 'VEL', 'WID', 'RHO', 'CLUT', 'MPH', + 'STA1', 'STA2', 'WBN'] + MCH = ['Z', 'ZV', 'ZDR', 'PHIDP', 'V', 'W', 'RHO', 'CLUT', 'MPH', 'STA1', + 'STA2', 'WBN'] + LTE = ['Zh', 'Zv', 'Zdr', 'Phidp', 'RVel', 'Sw', 'Rhohv', 'Clut', 'mph', + 'sta1', 'sta2', 'wbn'] - convertkeys = {} convertkeys['MCH'] = {} convertkeys['LTE'] = {} convertkeys['metranet'] = {} - - for i in range(0,len(MCH)): + + for i in range(0, len(MCH)): convertkeys['MCH'][MCH[i]] = [LTE[i], metranet[i]] - + convertkeys['LTE'] = {} - for i in range(0,len(LTE)): + for i in range(0, len(LTE)): convertkeys['LTE'][LTE[i]] = [MCH[i], metranet[i]] - - for i in range(0,len(metranet)): - convertkeys['metranet'][metranet[i]] = [MCH[i], LTE[i]] - + + for i in range(0, len(metranet)): + convertkeys['metranet'][metranet[i]] = [MCH[i], LTE[i]] + # translate between conventions mykey = polvar - + for key, value in convertkeys[convention].items(): if polvar in value: mykey = key break - + return mykey - \ No newline at end of file diff --git a/src/pyrad_proc/pyrad/io/read_data_radar.py b/src/pyrad_proc/pyrad/io/read_data_radar.py index d9994bb..ca6ee89 
100755 --- a/src/pyrad_proc/pyrad/io/read_data_radar.py +++ b/src/pyrad_proc/pyrad/io/read_data_radar.py @@ -48,7 +48,6 @@ from .io_aux import find_cosmo_file, find_rad4alpcosmo_file - def get_data(voltime, datatypesdescr, cfg): """ Reads pyrad input data. @@ -100,7 +99,7 @@ def get_data(voltime, datatypesdescr, cfg): datatype_rad4alpdem.append(datatype) elif datagroup == 'MXPOL': datatype_mxpol.append(datatype) - + ind_rad = int(radarnr[5:8])-1 ndatatypes_rainbow = len(datatype_rainbow) @@ -129,10 +128,10 @@ def get_data(voltime, datatypesdescr, cfg): cfg['loadbasepath'][ind_rad], cfg['loadname'][ind_rad], voltime, datatype_cfradial, dataset_cfradial, product_cfradial) radar = add_field(radar, radar_aux) - + if ndatatypes_mxpol > 0: - radar = merge_scans_mxpol(cfg['datapath'][ind_rad], cfg['ScanList'][ind_rad], - cfg['RadarName'][ind_rad], cfg['RadarRes'][ind_rad], voltime, + radar = merge_scans_mxpol( + cfg['datapath'][ind_rad], cfg['ScanList'][ind_rad], voltime, datatype_mxpol, cfg, ind_rad=ind_rad) # add COSMO files to the radar field @@ -330,10 +329,9 @@ def merge_scans_rad4alp(basepath, scan_list, radar_name, radar_res, voltime, timeinfo = voltime.strftime('%H%M') basename = 'P'+radar_res+radar_name+dayinfo if cfg['path_convention'] == 'LTE': - yy = dayinfo[0:2] + yy = dayinfo[0:2] dy = dayinfo[2:] - subf = ('P'+radar_res + - radar_name+ yy + 'hdf'+dy) + subf = 'P'+radar_res+radar_name+yy+'hdf'+dy datapath = basepath+subf+'/' else: datapath = basepath+dayinfo+'/'+basename+'/' @@ -347,7 +345,8 @@ def merge_scans_rad4alp(basepath, scan_list, radar_name, radar_res, voltime, nelevs = len(scan_list) # merge the elevations into a single radar instance for i in range(1, nelevs): - filename = glob.glob(datapath+basename+timeinfo+'*.'+scan_list[i] + '*') + filename = glob.glob( + datapath+basename+timeinfo+'*.'+scan_list[i]+'*') if not filename: warn('No file found in '+datapath+basename+timeinfo+'*.' + scan_list[i]) @@ -361,9 +360,10 @@ def merge_scans_rad4alp(basepath, scan_list, radar_name, radar_res, voltime, radar = pyart.util.radar_utils.join_radar(radar, radar_aux) return radar - -def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, - datatype_list, cfg, ind_rad=0): + + +def merge_scans_mxpol(basepath, scan_list, voltime, datatype_list, cfg, + ind_rad=0): """ merge rad4alp data. @@ -372,14 +372,9 @@ def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, basepath : str base path of mxpol radar data scan_list : list - list of scans, in the case of mxpol, the elevation or azimuth denoted + list of scans, in the case of mxpol, the elevation or azimuth denoted as 005 or 090 (for 5 or 90 degrees elevation) or 330 (for 330 degrees azimuth respectively) - radar_name : str - radar_name (MXPol) - radar_res : str - not applicable, in this case, set to 'polar' - (could we use RHI/PPI here?) voltime: datetime object reference time of the scan datatype_list : list @@ -395,11 +390,6 @@ def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, radar object """ - if (radar_name is None) or (radar_res is None): - raise ValueError( - 'ERROR: Radar Name and Resolution not specified in config file.' 
+ - ' Unable to load MXPol data') - radar = None if cfg['path_convention'] == 'LTE': sub1 = str(voltime.year) @@ -407,15 +397,22 @@ def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, sub3 = voltime.strftime('%d') dayinfo = voltime.strftime('%Y%m%d') timeinfo = voltime.strftime('%H%M') - datapath = cfg['datapath'][ind_rad] + '/' + sub1 + '/' + sub2 + '/' + sub3 + '/' - scanname = 'MXPol-polar-' + dayinfo + '-' + timeinfo + '*-' - filename = glob.glob(datapath + scanname + scan_list[0] + '*') + datapath = cfg['datapath'][ind_rad]+'/'+sub1+'/'+sub2+'/'+sub3+'/' + scanname = 'MXPol-polar-'+dayinfo+'-'+timeinfo+'*-' + filename = glob.glob(datapath+scanname+scan_list[0]+'*') else: - warn("insert MCH convention here") - filename = [] - return filename + daydir = voltime.strftime('%Y-%m-%d') + dayinfo = voltime.strftime('%Y%m%d') + timeinfo = voltime.strftime('%H%M') + datapath = cfg['datapath'][ind_rad]+scan_list[0]+'/'+daydir+'/' + if (not os.path.isdir(datapath)): + warn("WARNING: Unknown datapath '%s'" % datapath) + return None + filename = glob.glob( + datapath+'MXPol-polar-'+dayinfo+'-'+timeinfo+'*-' + + scan_list[0]+'.nc') if not filename: - warn('No file found matching '+ datapath + scanname + scan_list[0] + '*') + warn('No file found matching '+datapath+scanname+scan_list[0]+'*') else: radar = get_data_mxpol( filename[0], datatype_list, scan_list[0], cfg, ind_rad=ind_rad) @@ -423,7 +420,26 @@ def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, nelevs = len(scan_list) # merge the elevations into a single radar instance for i in range(1, nelevs): - filename = glob.glob(datapath+scanname+scan_list[i]) + if cfg['path_convention'] == 'LTE': + sub1 = str(voltime.year) + sub2 = voltime.strftime('%m') + sub3 = voltime.strftime('%d') + dayinfo = voltime.strftime('%Y%m%d') + timeinfo = voltime.strftime('%H%M') + datapath = cfg['datapath'][ind_rad]+'/'+sub1+'/'+sub2+'/'+sub3+'/' + scanname = 'MXPol-polar-'+dayinfo+'-'+timeinfo+'*-' + filename = glob.glob(datapath+scanname+scan_list[i]+'*') + else: + daydir = voltime.strftime('%Y-%m-%d') + dayinfo = voltime.strftime('%Y%m%d') + timeinfo = voltime.strftime('%H%M') + datapath = cfg['datapath'][ind_rad]+scan_list[i]+'/'+daydir+'/' + if (not os.path.isdir(datapath)): + warn("WARNING: Unknown datapath '%s'" % datapath) + return None + filename = glob.glob( + datapath+'MXPol-polar-'+dayinfo+'-'+timeinfo+'*-' + + scan_list[i]+'.nc') if not filename: warn('No file found in '+datapath+scanname+scan_list[i]) else: @@ -436,7 +452,6 @@ def merge_scans_mxpol(basepath, scan_list, radar_name, radar_res, voltime, radar = pyart.util.radar_utils.join_radar(radar, radar_aux) return radar - def merge_scans_cosmo(voltime, datatype_list, cfg, ind_rad=0): @@ -892,7 +907,7 @@ def get_data_rainbow(filename, datatype): single_slice = True common_slice_info = rbf['volume']['scan']['slice'] - if datatype[0] == 'Nh': + if datatype == 'Nh': noisedBZ1km_h = float(common_slice_info['noise_power_dbz']) noisedBZ_h = pyart.retrieve.compute_noisedBZ( radar.nrays, noisedBZ1km_h, radar.range['data'], 1., @@ -937,7 +952,7 @@ def get_data_rad4alp(filename, datatype_list, scan_name, cfg, ind_rad=0): for datatype in datatype_list: if (datatype != 'Nh') and (datatype != 'Nv'): metranet_field_names.update(get_datatype_metranet(datatype)) - + if cfg['path_convention'] == 'LTE': radar = pyrad_MCH(filename, field_names=metranet_field_names) else: @@ -986,7 +1001,8 @@ def get_data_rad4alp(filename, datatype_list, scan_name, cfg, ind_rad=0): 
radar.add_field('noisedBZ_vv', noisedBZ_v) return radar - + + def get_data_mxpol(filename, datatype_list, scan_name, cfg, ind_rad=0): """ gets MXPol radar data @@ -998,7 +1014,7 @@ def get_data_mxpol(filename, datatype_list, scan_name, cfg, ind_rad=0): datatype_list : list of strings list of data fields to get scan_name : list - list of scans, in the case of mxpol, the elevation or azimuth denoted + list of scans, in the case of mxpol, the elevation or azimuth denoted as 005 or 090 (for 5 or 90 degrees elevation) or 330 (for 330 degrees azimuth respectively) cfg : dict @@ -1016,13 +1032,12 @@ def get_data_mxpol(filename, datatype_list, scan_name, cfg, ind_rad=0): for datatype in datatype_list: if (datatype != 'Nh') and (datatype != 'Nv'): field_names.update(get_datatype_metranet(datatype)) - radar = pyrad_MXPOL(filename, field_names=field_names) - + # create secondary moments (TODO) if ('Nh' in datatype_list) or ('Nv' in datatype_list): pass - + return radar diff --git a/src/pyrad_proc/pyrad/prod/process_product.py b/src/pyrad_proc/pyrad/prod/process_product.py index ef8223e..dc0bda0 100755 --- a/src/pyrad_proc/pyrad/prod/process_product.py +++ b/src/pyrad_proc/pyrad/prod/process_product.py @@ -1452,4 +1452,4 @@ def generate_monitoring_products(dataset, prdcfg): else: warn(' Unsupported product type: ' + prdcfg['type']) - return None \ No newline at end of file + return None
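For reference, the non-LTE branch added to merge_scans_mxpol() above looks up MXPOL volumes laid out as <datapath>/<scan>/<YYYY-MM-DD>/MXPol-polar-<YYYYMMDD>-<HHMM>*-<scan>.nc. The lines below are a minimal standalone sketch of that lookup under those assumptions; they are not part of the patch, the base path '/data/MXPOL/rawdata/' and the scan name '005' are made-up examples, and example_mxpol_filelist() is a hypothetical helper rather than a pyrad function.

import datetime
import glob
import os
from warnings import warn


def example_mxpol_filelist(datapath, scan, voltime):
    """Hypothetical sketch of the MXPOL file lookup used by the non-LTE branch."""
    daydir = voltime.strftime('%Y-%m-%d')
    dayinfo = voltime.strftime('%Y%m%d')
    timeinfo = voltime.strftime('%H%M')
    scandir = datapath+scan+'/'+daydir+'/'
    if not os.path.isdir(scandir):
        warn("WARNING: Unknown datapath '%s'" % scandir)
        return []
    # one file per sweep is expected to match the glob pattern
    return glob.glob(
        scandir+'MXPol-polar-'+dayinfo+'-'+timeinfo+'*-'+scan+'.nc')


# e.g. example_mxpol_filelist('/data/MXPOL/rawdata/', '005',
#                             datetime.datetime(2017, 6, 14, 12, 0))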