diff --git a/config/processing/mals_emm_intercomp.txt b/config/processing/mals_emm_intercomp.txt index ffaff64..4eebaff 100755 --- a/config/processing/mals_emm_intercomp.txt +++ b/config/processing/mals_emm_intercomp.txt @@ -44,7 +44,7 @@ dempath STRARR 2 # -------------------------------------------- # Output data # ----------------------------------------------- -saveimgbasepath STRING /data/pyrad_products/ +saveimgbasepath STRING /data/pyrad_examples/ saveimg INT 1 imgformat STRING png diff --git a/config/processing/paradiso_fvj_vol_intercomp_prod.txt b/config/processing/paradiso_fvj_vol_intercomp_prod.txt index bdfbd0a..907c844 100755 --- a/config/processing/paradiso_fvj_vol_intercomp_prod.txt +++ b/config/processing/paradiso_fvj_vol_intercomp_prod.txt @@ -5,7 +5,7 @@ # List of datasets to generate. # The detailed specification of each dataset is given below. dataSetList STRARR 1 - DX50_PLD_intercomp + DX50_PLD_dBZ_avg_intercomp # ========================================================================================== # colocated gates @@ -65,24 +65,51 @@ dataSetList STRARR 1 # ========================================================================================== # intercomparison # ========================================================================================== -DX50_PLD_intercomp STRUCT 8 - type STRING INTERCOMP - datatype STRARR 2 +#DX50_PLD_intercomp STRUCT 8 +# type STRING INTERCOMP +# datatype STRARR 2 +# RADAR001:CFRADIAL:dBZc,dBZ_avg,SAVEVOL +# RADAR002:CFRADIAL:dBZc,dBZ_avg,SAVEVOL +# coloc_data_dir STRING DX50_PLD_DATA # must be the same as product WRITE_INTERCOMP +# coloc_radars_name STRING DX50_PLD +# ele_tol FLOAT 0.5 +# azi_tol FLOAT 0.5 +# rng_tol FLOAT 100. 
+# products STRUCT 2 +# DX50_PLD_PLOT STRUCT 3 +# type STRING PLOT_SCATTER_INTERCOMP +# voltype STRING dBZc +# step FLOAT 0.5 +# DX50_PLD_DATA STRUCT 2 +# type STRING WRITE_INTERCOMP +# voltype STRING dBZc + +DX50_PLD_dBZ_avg_intercomp STRUCT 12 + type STRING INTERCOMP_TIME_AVG + datatype STRARR 6 RADAR001:CFRADIAL:dBZc,dBZ_avg,SAVEVOL + RADAR001:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL + RADAR001:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL RADAR002:CFRADIAL:dBZc,dBZ_avg,SAVEVOL + RADAR002:CFRADIAL:PhiDPc,PhiDP_avg,SAVEVOL + RADAR002:CFRADIAL:time_avg_flag,flag_avg,SAVEVOL coloc_data_dir STRING DX50_PLD_DATA # must be the same as product WRITE_INTERCOMP coloc_radars_name STRING DX50_PLD ele_tol FLOAT 0.5 azi_tol FLOAT 0.5 rng_tol FLOAT 100. - products STRUCT 1 + clt_max INT 0 + phi_excess_max INT 0 + non_rain_max INT 100 + phi_avg_max FLOAT 20. + products STRUCT 2 DX50_PLD_PLOT STRUCT 3 type STRING PLOT_SCATTER_INTERCOMP voltype STRING dBZc step FLOAT 0.5 -# DX50_PLD_DATA STRUCT 2 -# type STRING WRITE_INTERCOMP -# voltype STRING dBZc + DX50_PLD_DATA STRUCT 2 + type STRING WRITE_INTERCOMP_TIME_AVG + voltype STRING dBZc diff --git a/doc/TODO_pyrad.txt b/doc/TODO_pyrad.txt index 72daf5e..c80f2be 100755 --- a/doc/TODO_pyrad.txt +++ b/doc/TODO_pyrad.txt @@ -1,7 +1,6 @@ Pending modifications for current code in Pyrad ----------------------------------------------- - In function process_echo_id: Add condition on differential reflectivity at low reflectivity values. Allow user to select the various thresholds. Add a speckle filter -- In selfconsistency functions: get tables as a function of elevation, allow user to define the tresholds - Functions for PhiDP and KDP processing consume too much memory up to the point that they cannot handle rad4alp PH data. It is probably due to the rolling window creating a monstrous matrix. We should try to reduce the memory consumption. 
@@ -26,9 +25,6 @@ Things to check before making it operational: - Check if transmission losses have to be subtracted from received power - Check if conversion from solar flux to received power is done properly - Polarimetric data monitoring: - - Add metadata in the density and histogram plot - - Check what happens to data that is out of the range of the histogram + - Add metadata in the density and histogram plot - Rainfall rate estimation: - - Distinguish between the various rainfall rate estimators (no single datatype RR) -- Intercomparison: - - Add metadata in the plot \ No newline at end of file + - Distinguish between the various rainfall rate estimators (no single datatype RR) \ No newline at end of file diff --git a/doc/pyrad_user_manual.docx b/doc/pyrad_user_manual.docx index 11cab43..d67bbe1 100755 Binary files a/doc/pyrad_user_manual.docx and b/doc/pyrad_user_manual.docx differ diff --git a/doc/pyrad_user_manual.pdf b/doc/pyrad_user_manual.pdf index 56c25ba..ae88cca 100755 Binary files a/doc/pyrad_user_manual.pdf and b/doc/pyrad_user_manual.pdf differ diff --git a/src/pyart b/src/pyart index 8ad4849..aed8ead 160000 --- a/src/pyart +++ b/src/pyart @@ -1 +1 @@ -Subproject commit 8ad4849e606db8ebbe620e8fe6679691286114b1 +Subproject commit aed8eadb59305f7e05e053514b3338752fba5c98 diff --git a/src/pyrad_proc/pyrad/flow/flow_control.py b/src/pyrad_proc/pyrad/flow/flow_control.py index 5ee2f36..6552d57 100755 --- a/src/pyrad_proc/pyrad/flow/flow_control.py +++ b/src/pyrad_proc/pyrad/flow/flow_control.py @@ -521,14 +521,14 @@ def _get_datatype_list(cfg, radarnr='RADAR001'): proclevel, dataset = get_dataset_fields(datasetdescr) if 'datatype' in cfg[dataset]: if isinstance(cfg[dataset]['datatype'], str): - (radarnr_descr, datagroup, datatype, dataset_save, + (radarnr_descr, datagroup, datatype_aux, dataset_save, product_save) = ( get_datatype_fields(cfg[dataset]['datatype'])) if datagroup != 'PROC' and radarnr_descr == radarnr: if ((dataset_save is None) and 
(product_save is None)): - datatypesdescr.add(datagroup+":"+datatype) + datatypesdescr.add(datagroup+":"+datatype_aux) else: - datatypesdescr.add(datagroup + ":" + datatype + + datatypesdescr.add(datagroup + ":" + datatype_aux + "," + dataset_save + "," + product_save) else: @@ -540,7 +540,7 @@ def _get_datatype_list(cfg, radarnr='RADAR001'): if ((dataset_save is None) and (product_save is None)): datatypesdescr.add(datagroup+":"+datatype_aux) else: - datatypesdescr.add(datagroup + ":" + datatype + + datatypesdescr.add(datagroup + ":" + datatype_aux + "," + dataset_save + "," + product_save) @@ -605,8 +605,7 @@ def _get_masterfile_list(datatypesdescr, starttime, endtime, datacfg, radarnr, datagroup, datatype, dataset, product = get_datatype_fields( datatypedescr) if ((datagroup != 'COSMO') and (datagroup != 'RAD4ALPCOSMO') and - (datagroup != 'DEM') and (datagroup != 'RAD4ALPDEM') and - (datagroup != 'SAVED')): + (datagroup != 'DEM') and (datagroup != 'RAD4ALPDEM')): masterdatatypedescr = datatypedescr if scan_list is not None: masterscan = scan_list[int(radarnr[5:8])-1][0] @@ -617,7 +616,7 @@ def _get_masterfile_list(datatypesdescr, starttime, endtime, datacfg, for datatypedescr in datatypesdescr: radarnr, datagroup, datatype, dataset, product = ( get_datatype_fields(datatypedescr)) - if ((datagroup == 'COSMO') or (datagroup == 'SAVED')): + if datagroup == 'COSMO': masterdatatypedescr = radarnr+':RAINBOW:dBZ' if scan_list is not None: masterscan = scan_list[int(radarnr[5:8])-1][0] diff --git a/src/pyrad_proc/pyrad/graph/plots.py b/src/pyrad_proc/pyrad/graph/plots.py index 9ecb356..588e54e 100755 --- a/src/pyrad_proc/pyrad/graph/plots.py +++ b/src/pyrad_proc/pyrad/graph/plots.py @@ -475,7 +475,7 @@ def plot_density(hist_obj, hist_type, field_name, ind_sweep, prdcfg, def plot_scatter(bins1, bins2, hist_2d, field_name1, field_name2, fname_list, - prdcfg, metadata=None): + prdcfg, metadata=None, lin_regr=None, lin_regr_slope1=None): """ 2D histogram @@ -523,6 +523,13 
@@ def plot_scatter(bins1, bins2, hist_2d, field_name1, field_name2, fname_list, # plot reference plt.plot(bins1, bins2, 'k--') + + # plot linear regression + if lin_regr is not None: + plt.plot(bins1, lin_regr[0]*bins1+lin_regr[1], 'r') + if lin_regr_slope1 is not None: + plt.plot(bins1, bins1+lin_regr_slope1, 'g') + plt.autoscale(enable=True, axis='both', tight=True) plt.xlabel(labelx) diff --git a/src/pyrad_proc/pyrad/io/__init__.py b/src/pyrad_proc/pyrad/io/__init__.py index cc4e9ec..189f380 100755 --- a/src/pyrad_proc/pyrad/io/__init__.py +++ b/src/pyrad_proc/pyrad/io/__init__.py @@ -53,6 +53,7 @@ write_colocated_gates write_colocated_data + write_colocated_data_time_avg write_timeseries write_ts_polar_data write_monitoring_ts @@ -109,6 +110,7 @@ from .write_data import write_monitoring_ts from .write_data import write_sun_hits, write_sun_retrieval from .write_data import write_colocated_gates, write_colocated_data +from .write_data import write_colocated_data_time_avg from .io_aux import get_save_dir, make_filename from .io_aux import get_datetime, get_dataset_fields diff --git a/src/pyrad_proc/pyrad/io/read_data_other.py b/src/pyrad_proc/pyrad/io/read_data_other.py index 16e8fa5..5e89b19 100755 --- a/src/pyrad_proc/pyrad/io/read_data_other.py +++ b/src/pyrad_proc/pyrad/io/read_data_other.py @@ -12,6 +12,7 @@ read_rad4alp_vis read_colocated_gates read_colocated_data + read_colocated_data_time_avg read_timeseries read_monitoring_ts read_sun_hits_multiple_days @@ -287,6 +288,71 @@ def read_colocated_data(fname): return None, None, None, None, None, None, None, None +def read_colocated_data_time_avg(fname): + """ + Reads a csv files containing time averaged colocated data + + Parameters + ---------- + fname : str + path of time series file + + Returns + ------- + rad1_ele , rad1_azi, rad1_rng, rad1_val, rad2_ele, rad2_azi, rad2_rng, + rad2_val : tupple + A tupple with the data read. 
None otherwise + + """ + try: + with open(fname, 'r', newline='') as csvfile: + # first count the lines + reader = csv.DictReader( + row for row in csvfile if not row.startswith('#')) + nrows = sum(1 for row in reader) + rad1_ele = np.empty(nrows, dtype=float) + rad1_azi = np.empty(nrows, dtype=float) + rad1_rng = np.empty(nrows, dtype=float) + rad1_dBZavg = np.empty(nrows, dtype=float) + rad1_PhiDPavg = np.empty(nrows, dtype=float) + rad1_Flagavg = np.empty(nrows, dtype=float) + rad2_ele = np.empty(nrows, dtype=float) + rad2_azi = np.empty(nrows, dtype=float) + rad2_rng = np.empty(nrows, dtype=float) + rad2_dBZavg = np.empty(nrows, dtype=float) + rad2_PhiDPavg = np.empty(nrows, dtype=float) + rad2_Flagavg = np.empty(nrows, dtype=float) + + # now read the data + csvfile.seek(0) + reader = csv.DictReader( + row for row in csvfile if not row.startswith('#')) + i = 0 + for row in reader: + rad1_ele[i] = float(row['rad1_ele']) + rad1_azi[i] = float(row['rad1_azi']) + rad1_rng[i] = float(row['rad1_rng']) + rad1_dBZavg[i] = float(row['rad1_dBZavg']) + rad1_PhiDPavg[i] = float(row['rad1_PhiDPavg']) + rad1_Flagavg[i] = float(row['rad1_Flagavg']) + rad2_ele[i] = float(row['rad2_ele']) + rad2_azi[i] = float(row['rad2_azi']) + rad2_rng[i] = float(row['rad2_rng']) + rad2_dBZavg[i] = float(row['rad2_dBZavg']) + rad2_PhiDPavg[i] = float(row['rad2_PhiDPavg']) + rad2_Flagavg[i] = float(row['rad2_Flagavg']) + i += 1 + + return (rad1_ele, rad1_azi, rad1_rng, + rad1_dBZavg, rad1_PhiDPavg, rad1_Flagavg, + rad2_ele, rad2_azi, rad2_rng, + rad2_dBZavg, rad2_PhiDPavg, rad2_Flagavg) + except EnvironmentError: + warn('Unable to read file '+fname) + return (None, None, None, None, None, None, None, None, None, None, + None, None) + + def read_timeseries(fname): """ Reads a time series contained in a csv file diff --git a/src/pyrad_proc/pyrad/io/write_data.py b/src/pyrad_proc/pyrad/io/write_data.py index 3642afd..70fb849 100755 --- a/src/pyrad_proc/pyrad/io/write_data.py +++ 
b/src/pyrad_proc/pyrad/io/write_data.py @@ -11,6 +12,7 @@ write_monitoring_ts write_colocated_gates write_colocated_data + write_colocated_data_time_avg write_sun_hits write_sun_retrieval generate_field_name_str @@ -204,7 +205,7 @@ def write_colocated_gates(coloc_gates, fname): def write_colocated_data(coloc_data, fname): """ - Writes the position of gates colocated with two radars + Writes the data of gates colocated with two radars Parameters ---------- @@ -264,6 +265,80 @@ def write_colocated_data(coloc_data, fname): return fname +def write_colocated_data_time_avg(coloc_data, fname): + """ + Writes the time averaged data of gates colocated with two radars + + Parameters + ---------- + coloc_data : dict + dictionary containing the colocated data parameters + fname : str + file name where to store the data + + Returns + ------- + fname : str + the name of the file where data has been written + + """ + filelist = glob.glob(fname) + ngates = len(coloc_data['rad1_ele']) + if len(filelist) == 0: + with open(fname, 'w', newline='') as csvfile: + csvfile.write('# Colocated radar gates data file\n') + csvfile.write('# Comment lines are preceded by "#"\n') + csvfile.write('#\n') + + fieldnames = [ + 'rad1_ele', 'rad1_azi', 'rad1_rng', + 'rad1_dBZavg', 'rad1_PhiDPavg', 'rad1_Flagavg', + 'rad2_ele', 'rad2_azi', 'rad2_rng', + 'rad2_dBZavg', 'rad2_PhiDPavg', 'rad2_Flagavg'] + writer = csv.DictWriter(csvfile, fieldnames) + writer.writeheader() + for i in range(ngates): + writer.writerow( + {'rad1_ele': coloc_data['rad1_ele'][i], + 'rad1_azi': coloc_data['rad1_azi'][i], + 'rad1_rng': coloc_data['rad1_rng'][i], + 'rad1_dBZavg': coloc_data['rad1_dBZavg'][i], + 'rad1_PhiDPavg': coloc_data['rad1_PhiDPavg'][i], + 'rad1_Flagavg': coloc_data['rad1_Flagavg'][i], + 'rad2_ele': coloc_data['rad2_ele'][i], + 'rad2_azi': coloc_data['rad2_azi'][i], + 'rad2_rng': coloc_data['rad2_rng'][i], + 'rad2_dBZavg': coloc_data['rad2_dBZavg'][i], + 'rad2_PhiDPavg': coloc_data['rad2_PhiDPavg'][i], + 'rad2_Flagavg': coloc_data['rad2_Flagavg'][i]}) + csvfile.close() + else: + with open(fname, 'a', newline='') as csvfile: + fieldnames = [ + 'rad1_ele', 'rad1_azi', 'rad1_rng', + 'rad1_dBZavg', 'rad1_PhiDPavg', 'rad1_Flagavg', + 'rad2_ele', 'rad2_azi', 'rad2_rng', + 'rad2_dBZavg', 'rad2_PhiDPavg', 'rad2_Flagavg'] + writer = csv.DictWriter(csvfile, fieldnames) + for i in range(ngates): + writer.writerow( + {'rad1_ele': coloc_data['rad1_ele'][i], + 'rad1_azi': coloc_data['rad1_azi'][i], + 'rad1_rng': coloc_data['rad1_rng'][i], + 'rad1_dBZavg': coloc_data['rad1_dBZavg'][i], + 'rad1_PhiDPavg': coloc_data['rad1_PhiDPavg'][i], + 'rad1_Flagavg': coloc_data['rad1_Flagavg'][i], + 'rad2_ele': coloc_data['rad2_ele'][i], + 'rad2_azi': coloc_data['rad2_azi'][i], + 'rad2_rng': coloc_data['rad2_rng'][i], + 'rad2_dBZavg': coloc_data['rad2_dBZavg'][i], + 'rad2_PhiDPavg': coloc_data['rad2_PhiDPavg'][i], + 'rad2_Flagavg': coloc_data['rad2_Flagavg'][i]}) + csvfile.close() + + return fname + + def write_sun_hits(sun_hits, fname): """ Writes sun hits data. 
diff --git a/src/pyrad_proc/pyrad/proc/__init__.py b/src/pyrad_proc/pyrad/proc/__init__.py index bbc11ec..113ddb4 100755 --- a/src/pyrad_proc/pyrad/proc/__init__.py +++ b/src/pyrad_proc/pyrad/proc/__init__.py @@ -68,6 +68,7 @@ process_time_avg_flag process_colocated_gates process_intercomp + process_intercomp_time_avg Retrievals ========== @@ -108,6 +109,7 @@ from .process_calib import process_time_avg, process_weighted_time_avg from .process_calib import process_time_avg_flag from .process_calib import process_colocated_gates, process_intercomp +from .process_calib import process_intercomp_time_avg from .process_calib import process_sun_hits from .process_retrieve import process_signal_power, process_snr diff --git a/src/pyrad_proc/pyrad/proc/process_aux.py b/src/pyrad_proc/pyrad/proc/process_aux.py index 069364c..5a3643e 100755 --- a/src/pyrad_proc/pyrad/proc/process_aux.py +++ b/src/pyrad_proc/pyrad/proc/process_aux.py @@ -120,6 +120,9 @@ def get_process_func(dataset_type, dsname): elif dataset_type == 'INTERCOMP': func_name = 'process_intercomp' dsformat = 'INTERCOMP' + elif dataset_type == 'INTERCOMP_TIME_AVG': + func_name = 'process_intercomp_time_avg' + dsformat = 'INTERCOMP' elif dataset_type == 'MONITORING': func_name = 'process_monitoring' dsformat = 'MONITORING' @@ -501,7 +504,7 @@ def process_traj_atplane(procstatus, dscfg, radar_list=None, trajectory=None): traj_time_vec = date2num(trajectory.time_vector, radar.time['units'], radar.time['calendar']) - + for tind in np.nditer(traj_ind): az = rad_traj.azimuth_vec[tind] el = rad_traj.elevation_vec[tind] diff --git a/src/pyrad_proc/pyrad/proc/process_calib.py b/src/pyrad_proc/pyrad/proc/process_calib.py index 1b5fe61..25da872 100755 --- a/src/pyrad_proc/pyrad/proc/process_calib.py +++ b/src/pyrad_proc/pyrad/proc/process_calib.py @@ -20,6 +20,7 @@ process_time_avg_flag process_colocated_gates process_intercomp + process_intercomp_time_avg process_sun_hits """ @@ -37,6 +38,7 @@ from ..io.read_data_other 
import read_selfconsistency, read_colocated_gates from ..io.read_data_other import read_sun_hits_multiple_days, read_solar_flux from ..io.read_data_other import read_colocated_data +from ..io.read_data_other import read_colocated_data_time_avg from ..io.read_data_radar import interpol_field from ..util.radar_utils import get_closest_solar_flux, get_histogram_bins @@ -1735,14 +1737,25 @@ def process_intercomp(procstatus, dscfg, radar_list=None): datatype : list of string. Dataset keyword The input data types + coloc_data_dir : string. Dataset keyword + name of the directory containing the csv file with colocated data + coloc_radars_name : string. Dataset keyword + string identifying the radar names + azi_tol : float. Dataset keyword + azimuth tolerance between the two radars. Default 0.5 deg + ele_tol : float. Dataset keyword + elevation tolerance between the two radars. Default 0.5 deg + rng_tol : float. Dataset keyword + range tolerance between the two radars. Default 50 m radar_list : list of Radar objects Optional. 
list of radar objects Returns ------- - sun_hits_dict : dict - dictionary containing a radar object, a sun_hits dict and a - sun_retrieval dictionary + new_dataset : dict + dictionary containing a dictionary with intercomparison data and the + key "final" which contains a boolean that is true when all volumes + have been processed ind_rad : int radar index @@ -1925,10 +1938,359 @@ def process_intercomp(procstatus, dscfg, radar_list=None): 'rad1_azi': coloc_data[1], 'rad1_rng': coloc_data[2], 'rad1_val': coloc_data[3], - 'rad2_ele': coloc_data[4], - 'rad2_azi': coloc_data[5], - 'rad2_rng': coloc_data[6], - 'rad2_val': coloc_data[7]} + 'rad2_ele': coloc_data[6], + 'rad2_azi': coloc_data[7], + 'rad2_rng': coloc_data[8], + 'rad2_val': coloc_data[9]} + + new_dataset = {'intercomp_dict': intercomp_dict, + 'timeinfo': dscfg['global_data']['timeinfo'], + 'final': True} + + return new_dataset, None + + +def process_intercomp_time_avg(procstatus, dscfg, radar_list=None): + """ + intercomparison between the average reflectivity of two radars + + Parameters + ---------- + procstatus : int + Processing status: 0 initializing, 1 processing volume, + 2 post-processing + dscfg : dictionary of dictionaries + data set configuration. Accepted Configuration Keywords:: + + datatype : list of string. Dataset keyword + The input data types + coloc_data_dir : string. Dataset keyword + name of the directory containing the csv file with colocated data + coloc_radars_name : string. Dataset keyword + string identifying the radar names + azi_tol : float. Dataset keyword + azimuth tolerance between the two radars. Default 0.5 deg + ele_tol : float. Dataset keyword + elevation tolerance between the two radars. Default 0.5 deg + rng_tol : float. Dataset keyword + range tolerance between the two radars. Default 50 m + clt_max : int. Dataset keyword + maximum number of samples that can be clutter contaminated. + Default 100 i.e. all + phi_excess_max : int. 
Dataset keyword + maximum number of samples that can have excess instantaneous + PhiDP. Default 100 i.e. all + non_rain_max : int. Dataset keyword + maximum number of samples that can be no rain. Default 100 i.e. all + phi_avg_max : float. Dataset keyword + maximum average PhiDP allowed. Default 600 deg i.e. any + + radar_list : list of Radar objects + Optional. list of radar objects + + Returns + ------- + new_dataset : dict + dictionary containing a dictionary with intercomparison data and the + key "final" which contains a boolean that is true when all volumes + have been processed + ind_rad : int + radar index + + """ + if procstatus == 0: + savedir = dscfg['colocgatespath']+dscfg['coloc_radars_name']+'/' + + fname = make_filename( + 'info', 'COLOCATED_GATES', dscfg['coloc_radars_name'], ['csv'], + timeinfo=None) + + (rad1_ele, rad1_azi, rad1_rng, + rad2_ele, rad2_azi, rad2_rng) = read_colocated_gates(savedir+fname[0]) + + if rad1_ele is None: + raise ValueError('Unable to intercompare radars. 
' + + 'Missing colocated gates file') + + dscfg['global_data'] = { + 'rad1_ele': rad1_ele, + 'rad1_azi': rad1_azi, + 'rad1_rng': rad1_rng, + 'rad2_ele': rad2_ele, + 'rad2_azi': rad2_azi, + 'rad2_rng': rad2_rng} + + return None, None + + if procstatus == 1: + # check how many radars are there + ind_radar_list = set() + for datatypedescr in dscfg['datatype']: + radarnr = datatypedescr.split(':')[0] + ind_radar_list.add(int(radarnr[5:8])-1) + + ind_radar_list = list(ind_radar_list) + + if (len(ind_radar_list) != 2) or (len(radar_list) < 2): + warn('Intercomparison requires data from two different radars') + return None, None + + radarnr_list = ['RADAR'+'{:03d}'.format(ind_radar_list[0]+1), + 'RADAR'+'{:03d}'.format(ind_radar_list[1]+1)] + + # get field names + for datatypedescr in dscfg['datatype']: + radarnr, datagroup, datatype, dataset, product = ( + get_datatype_fields(datatypedescr)) + if radarnr == radarnr_list[0]: + if ((datatype == 'dBZ') or (datatype == 'dBZc') or + (datatype == 'dBuZ') or (datatype == 'dBZv') or + (datatype == 'dBZvc') or (datatype == 'dBuZv')): + rad1_refl_field = get_fieldname_pyart(datatype) + elif (datatype == 'PhiDP') or (datatype == 'PhiDPc'): + rad1_phidp_field = get_fieldname_pyart(datatype) + elif datatype == 'time_avg_flag': + rad1_flag_field = get_fieldname_pyart(datatype) + elif radarnr == radarnr_list[1]: + if ((datatype == 'dBZ') or (datatype == 'dBZc') or + (datatype == 'dBuZ') or (datatype == 'dBZv') or + (datatype == 'dBZvc') or (datatype == 'dBuZv')): + rad2_refl_field = get_fieldname_pyart(datatype) + elif (datatype == 'PhiDP') or (datatype == 'PhiDPc'): + rad2_phidp_field = get_fieldname_pyart(datatype) + elif datatype == 'time_avg_flag': + rad2_flag_field = get_fieldname_pyart(datatype) + + radar1 = radar_list[ind_radar_list[0]] + radar2 = radar_list[ind_radar_list[1]] + + if ((rad1_refl_field not in radar1.fields) or + (rad1_phidp_field not in radar1.fields) or + (rad1_flag_field not in radar1.fields) or + 
(rad2_refl_field not in radar2.fields) or + (rad2_phidp_field not in radar2.fields) or + (rad2_flag_field not in radar2.fields)): + warn('Unable to compare radar time avg fields. ' + + 'Fields missing') + return None, None + + if not dscfg['initialized']: + dscfg['global_data'].update({'timeinfo': dscfg['timeinfo']}) + dscfg['initialized'] = 1 + + refl1 = radar1.fields[rad1_refl_field]['data'] + refl2 = radar2.fields[rad2_refl_field]['data'] + + phidp1 = radar1.fields[rad1_phidp_field]['data'] + phidp2 = radar2.fields[rad2_phidp_field]['data'] + + flag1 = radar1.fields[rad1_flag_field]['data'] + flag2 = radar2.fields[rad2_flag_field]['data'] + + azi_tol = 0.5 + ele_tol = 0.5 + rng_tol = 50. + + if 'azi_tol' in dscfg: + azi_tol = dscfg['azi_tol'] + if 'ele_tol' in dscfg: + ele_tol = dscfg['ele_tol'] + if 'rng_tol' in dscfg: + rng_tol = dscfg['rng_tol'] + + intercomp_dict = { + 'rad1_ele': [], + 'rad1_azi': [], + 'rad1_rng': [], + 'rad1_dBZavg': [], + 'rad1_PhiDPavg': [], + 'rad1_Flagavg': [], + 'rad2_ele': [], + 'rad2_azi': [], + 'rad2_rng': [], + 'rad2_dBZavg': [], + 'rad2_PhiDPavg': [], + 'rad2_Flagavg': []} + + # determine if radar data has to be averaged + avg_rad1, avg_rad2, avg_rad_lim = get_range_bins_to_avg( + radar1.range['data'], radar2.range['data']) + + for i in range(len(dscfg['global_data']['rad1_ele'])): + ind_ray_rad1 = find_ray_index( + radar1.elevation['data'], radar1.azimuth['data'], + dscfg['global_data']['rad1_ele'][i], + dscfg['global_data']['rad1_azi'][i], + ele_tol=ele_tol, azi_tol=azi_tol) + if ind_ray_rad1 is None: + continue + ind_rng_rad1 = find_rng_index( + radar1.range['data'], dscfg['global_data']['rad1_rng'][i], + rng_tol=rng_tol) + if ind_rng_rad1 is None: + continue + + ind_ray_rad2 = find_ray_index( + radar2.elevation['data'], radar2.azimuth['data'], + dscfg['global_data']['rad2_ele'][i], + dscfg['global_data']['rad2_azi'][i], + ele_tol=ele_tol, azi_tol=azi_tol) + if ind_ray_rad2 is None: + continue + ind_rng_rad2 = find_rng_index( 
+ radar2.range['data'], dscfg['global_data']['rad2_rng'][i], + rng_tol=rng_tol) + if ind_rng_rad2 is None: + continue + + refl1_val = np.ma.asarray(refl1[ind_ray_rad1[0], ind_rng_rad1[0]]) + refl2_val = np.ma.asarray(refl2[ind_ray_rad2[0], ind_rng_rad2[0]]) + + phidp1_val = np.ma.asarray( + phidp1[ind_ray_rad1[0], ind_rng_rad1[0]]) + phidp2_val = np.ma.asarray( + phidp2[ind_ray_rad2[0], ind_rng_rad2[0]]) + + flag1_val = flag1[ind_ray_rad1[0], ind_rng_rad1[0]] + flag2_val = flag2[ind_ray_rad2[0], ind_rng_rad2[0]] + + if avg_rad1: + if (ind_rng_rad1[0]+avg_rad_lim[1] >= radar1.ngates or + ind_rng_rad1[0]+avg_rad_lim[0] < 0): + continue + ind_rng = list(range( + ind_rng_rad1[0]+avg_rad_lim[0], + ind_rng_rad1[0]+avg_rad_lim[1]+1)) + refl1_val = np.ma.asarray(np.ma.mean( + refl1[ind_ray_rad1[0], ind_rng])) + phidp1_val = np.ma.asarray(np.ma.mean( + phidp1[ind_ray_rad1[0], ind_rng])) + + rad1_flag = flag1[ind_ray_rad1[0], ind_rng_rad1[0]] + + rad1_excess_phi = rad1_flag % 100 + rad1_clt = ((rad1_flag-rad1_excess_phi) % 10000) / 100 + rad1_prec = ( + ((rad1_flag-rad1_clt*100-rad1_excess_phi) % 1000000) / + 10000) + + flag1_val = int( + 10000*np.max(rad1_prec)+100*np.max(rad1_clt) + + np.max(rad1_excess_phi)) + + elif avg_rad2: + if (ind_rng_rad2[0]+avg_rad_lim[1] >= radar2.ngates or + ind_rng_rad2[0]+avg_rad_lim[0] < 0): + continue + ind_rng = list(range( + ind_rng_rad2[0]+avg_rad_lim[0], + ind_rng_rad2[0]+avg_rad_lim[1]+1)) + refl2_val = np.ma.asarray(np.ma.mean( + refl2[ind_ray_rad2[0], ind_rng])) + phidp2_val = np.ma.asarray(np.ma.mean( + phidp2[ind_ray_rad2[0], ind_rng])) + + rad2_flag = flag2[ind_ray_rad2[0], ind_rng_rad2[0]] + + rad2_excess_phi = rad2_flag % 100 + rad2_clt = ((rad2_flag-rad2_excess_phi) % 10000) / 100 + rad2_prec = ( + ((rad2_flag-rad2_clt*100-rad2_excess_phi) % 1000000) / + 10000) + + flag2_val = int( + 10000*np.max(rad2_prec)+100*np.max(rad2_clt) + + np.max(rad2_excess_phi)) + + if (refl1_val.mask or refl2_val.mask or phidp1_val.mask or + 
phidp2_val.mask): + continue + + intercomp_dict['rad1_ele'].append( + radar1.elevation['data'][ind_ray_rad1[0]]) + intercomp_dict['rad1_azi'].append( + radar1.azimuth['data'][ind_ray_rad1[0]]) + intercomp_dict['rad1_rng'].append( + radar1.range['data'][ind_rng_rad1[0]]) + intercomp_dict['rad1_dBZavg'].append(refl1_val) + intercomp_dict['rad1_PhiDPavg'].append(phidp1_val) + intercomp_dict['rad1_Flagavg'].append(flag1_val) + + intercomp_dict['rad2_ele'].append( + radar2.elevation['data'][ind_ray_rad2[0]]) + intercomp_dict['rad2_azi'].append( + radar2.azimuth['data'][ind_ray_rad2[0]]) + intercomp_dict['rad2_rng'].append( + radar2.range['data'][ind_rng_rad2[0]]) + intercomp_dict['rad2_dBZavg'].append(refl2_val) + intercomp_dict['rad2_PhiDPavg'].append(phidp2_val) + intercomp_dict['rad2_Flagavg'].append(flag2_val) + + new_dataset = {'intercomp_dict': intercomp_dict, + 'final': False} + return new_dataset, None + + if procstatus == 2: + savedir = get_save_dir( + dscfg['basepath'], dscfg['procname'], dscfg['dsname'], + dscfg['coloc_data_dir'], + timeinfo=dscfg['global_data']['timeinfo'], create_dir=False) + + fname = make_filename( + 'colocated_data', dscfg['type'], 'dBZc', ['csv'], + timeinfo=dscfg['global_data']['timeinfo'], timeformat='%Y%m%d') + + fname = savedir+fname[0] + + (rad1_ele, rad1_azi, rad1_rng, rad1_dBZ, rad1_phi, rad1_flag, + rad2_ele, rad2_azi, rad2_rng, rad2_dBZ, rad2_phi, rad2_flag) = ( + read_colocated_data_time_avg(fname)) + + rad1_excess_phi = (rad1_flag % 100).astype(int) + rad2_excess_phi = (rad2_flag % 100).astype(int) + + rad1_clt = (((rad1_flag-rad1_excess_phi) % 10000) / 100).astype(int) + rad2_clt = (((rad2_flag-rad2_excess_phi) % 10000) / 100).astype(int) + + rad1_non_rain = ( + ((rad1_flag-rad1_clt*100-rad1_excess_phi) % 1000000) / + 10000).astype(int) + rad2_non_rain = ( + ((rad2_flag-rad2_clt*100-rad2_excess_phi) % 1000000) / + 10000).astype(int) + + clt_max = 100 + phi_excess_max = 100 + non_rain_max = 100 + phi_avg_max = 600. 
+ + if 'clt_max' in dscfg: + clt_max = dscfg['clt_max'] + if 'phi_excess_max' in dscfg: + phi_excess_max = dscfg['phi_excess_max'] + if 'non_rain_max' in dscfg: + non_rain_max = dscfg['non_rain_max'] + if 'phi_avg_max' in dscfg: + phi_avg_max = dscfg['phi_avg_max'] + + # filter out invalid data + ind_val = np.where( + np.logical_and.reduce(( + rad1_clt <= clt_max, rad2_clt <= clt_max, + rad1_excess_phi <= phi_excess_max, + rad2_excess_phi <= phi_excess_max, + rad1_non_rain <= non_rain_max, rad2_non_rain <= non_rain_max, + rad1_phi <= phi_avg_max, rad2_phi <= phi_avg_max)))[0] + + intercomp_dict = { + 'rad1_ele': rad1_ele[ind_val], + 'rad1_azi': rad1_azi[ind_val], + 'rad1_rng': rad1_rng[ind_val], + 'rad1_val': rad1_dBZ[ind_val], + 'rad2_ele': rad2_ele[ind_val], + 'rad2_azi': rad2_azi[ind_val], + 'rad2_rng': rad2_rng[ind_val], + 'rad2_val': rad2_dBZ[ind_val]} new_dataset = {'intercomp_dict': intercomp_dict, 'timeinfo': dscfg['global_data']['timeinfo'], diff --git a/src/pyrad_proc/pyrad/prod/process_product.py b/src/pyrad_proc/pyrad/prod/process_product.py index da83a3b..eeda756 100755 --- a/src/pyrad_proc/pyrad/prod/process_product.py +++ b/src/pyrad_proc/pyrad/prod/process_product.py @@ -34,6 +34,7 @@ from ..io.write_data import write_ts_polar_data, write_monitoring_ts from ..io.write_data import write_sun_hits, write_sun_retrieval from ..io.write_data import write_colocated_gates, write_colocated_data +from ..io.write_data import write_colocated_data_time_avg from ..graph.plots import plot_ppi, plot_rhi, plot_cappi, plot_bscope from ..graph.plots import plot_timeseries, plot_timeseries_comp @@ -285,6 +286,26 @@ def generate_intercomp_products(dataset, prdcfg): print('saved colocated data file: '+fname) + return fname + if prdcfg['type'] == 'WRITE_INTERCOMP_TIME_AVG': + if dataset['final']: + return None + + savedir = get_save_dir( + prdcfg['basepath'], prdcfg['procname'], prdcfg['dsname'], + prdcfg['prdname'], timeinfo=prdcfg['timeinfo']) + + fname = make_filename( 
+ 'colocated_data', prdcfg['dstype'], prdcfg['voltype'], + ['csv'], timeinfo=prdcfg['timeinfo'], + timeformat='%Y%m%d') + + fname = savedir+fname[0] + + write_colocated_data_time_avg(dataset['intercomp_dict'], fname) + + print('saved colocated time averaged data file: '+fname) + return fname elif prdcfg['type'] == 'PLOT_SCATTER_INTERCOMP': if not dataset['final']: @@ -311,16 +332,23 @@ def generate_intercomp_products(dataset, prdcfg): np.ma.asarray(dataset['intercomp_dict']['rad1_val']), np.ma.asarray(dataset['intercomp_dict']['rad2_val']), field_name, field_name, step1=step, step2=step) + if hist_2d is None: + return None metadata = ( 'npoints: '+str(stats['npoints'])+'\n' + 'mode bias: '+str(stats['modebias'])+'\n' + 'median bias: '+str(stats['medianbias'])+'\n' + 'mean bias: '+str(stats['meanbias'])+'\n' + - 'corr: '+str(stats['npoints'])+'\n') + 'corr: '+str(stats['corr'])+'\n' + + 'slope: '+str(stats['slope'])+'\n' + + 'intercep: '+str(stats['intercep'])+'\n' + + 'intercep slope 1: '+str(stats['intercep_slope_1'])+'\n') plot_scatter(bins1, bins2, np.ma.asarray(hist_2d), field_name, - field_name, fname, prdcfg, metadata=metadata) + field_name, fname, prdcfg, metadata=metadata, + lin_regr=[stats['slope'], stats['intercep']], + lin_regr_slope1=stats['intercep_slope_1']) print('saved figures: '+' '.join(fname)) diff --git a/src/pyrad_proc/pyrad/util/radar_utils.py b/src/pyrad_proc/pyrad/util/radar_utils.py index 91729cc..9bc9985 100755 --- a/src/pyrad_proc/pyrad/util/radar_utils.py +++ b/src/pyrad_proc/pyrad/util/radar_utils.py @@ -29,6 +29,7 @@ import datetime import numpy as np +import scipy import pyart @@ -493,6 +494,10 @@ def compute_2d_stats(field1, field2, field_name1, field_name2, step1=None, values at each bin """ + if len(field1) == 0 or len(field2) == 0: + warn('Unable to compute 2D histogram. 
Empty field') + return None, None, None, None + hist_2d, bins1, bins2 = compute_2d_hist( field1, field2, field_name1, field_name2, step1=step1, step2=step2) npoints = len(field1) @@ -502,12 +507,19 @@ def compute_2d_stats(field1, field2, field_name1, field_name2, step1=None, medianbias = np.ma.median(field2-field1) ind_max_val1, ind_max_val2 = np.where(hist_2d == np.ma.amax(hist_2d)) modebias = bins2[ind_max_val2[0]]-bins1[ind_max_val1[0]] + slope, intercep, corr, pval, stderr = scipy.stats.linregress( + field1, y=field2) + intercep_slope_1 = np.ma.mean(field2-field1) stats = { 'npoints': npoints, 'meanbias': meanbias, 'medianbias': medianbias, - 'modebias': modebias + 'modebias': modebias, + 'corr': corr, + 'slope': slope, + 'intercep': intercep, + 'intercep_slope_1': intercep_slope_1 } return hist_2d, bins1, bins2, stats