From 120d072e1ac162bef93a8131980c042c145b129c Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Wed, 12 Oct 2016 17:26:35 -0600 Subject: [PATCH 1/6] update for ocn 62 level support and hi-res testing --- averager/pp_tests/control_ocn_series.py | 88 +++++++++++++++++++ averager/pp_tests/runAvg_ocn_mpi.sh | 38 ++++++++ .../pyAverager/pyaverager/climAverager.py | 4 + 3 files changed, 130 insertions(+) create mode 100755 averager/pp_tests/control_ocn_series.py create mode 100755 averager/pp_tests/runAvg_ocn_mpi.sh diff --git a/averager/pp_tests/control_ocn_series.py b/averager/pp_tests/control_ocn_series.py new file mode 100755 index 00000000..030fd706 --- /dev/null +++ b/averager/pp_tests/control_ocn_series.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +from __future__ import print_function +import sys + +# check the system python version and require 2.7.x or greater +if sys.hexversion < 0x02070000: + print(70 * '*') + print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) + print('It appears that you are running python {0}'.format( + '.'.join(str(x) for x in sys.version_info[0:3]))) + print(70 * '*') + sys.exit(1) + +import os + +# +# check the POSTPROCESS_PATH which must be set +# +try: + os.environ["POSTPROCESS_PATH"] +except KeyError: + err_msg = ('create_postprocess ERROR: please set the POSTPROCESS_PATH environment variable.' \ + ' For example on yellowstone: setenv POSTPROCESS_PATH /glade/p/cesm/postprocessing') + raise OSError(err_msg) + +cesm_pp_path = os.environ["POSTPROCESS_PATH"] + +# +# activate the virtual environment that was created by create_python_env +# +if not os.path.isfile('{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path)): + err_msg = ('create_postprocess ERROR: the virtual environment cesm-env2 does not exist.' 
\ + ' Please run $POSTPROCESS_PATH/create_python_env -machine [machine name]') + raise OSError(err_msg) + +execfile('{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path), dict(__file__='{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path))) + +from pyaverager import PyAverager, specification + +#### User modify #### + +in_dir='/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tseries/monthly' +out_dir= '/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tavg.2041.2050' +pref= 'BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001.pop.h' +htype= 'series' +average = ['tavg:2041:2050','mavg:2041:2050'] +wght= False +ncfrmt = 'netcdfLarge' +serial=False + +##var_list = ['TEMP', 'SALT'] +var_list = ['TEMP','SALT','PD','UVEL','VVEL','WVEL','IAGE','TAUX','TAUY','SSH','HMXL','HBLT','SFWF','PREC_F','MELT_F','MELTH_F','SHF','SHF_QSW','SENH_F','QFLUX','SNOW_F','SALT_F','EVAP_F','ROFF_F','LWUP_F','LWDN_F'] +mean_diff_rms_obs_dir = '/glade/p/cesm/omwg/timeseries_obs/' +region_nc_var = 'REGION_MASK' +regions={1:'Sou',2:'Pac',3:'Ind',6:'Atl',8:'Lab',9:'Gin',10:'Arc',11:'Hud',0:'Glo'} +region_wgt_var = 'TAREA' +obs_dir = '/glade/p/cesm/' +obs_file = 'obs.nc' +reg_obs_file_suffix = '_hor_mean_obs.nc' + +clobber = False +suffix = 'nc' +date_pattern= 'yyyymm-yyyymm' + +#### End user modify #### + +pyAveSpecifier = specification.create_specifier(in_directory=in_dir, + out_directory=out_dir, + prefix=pref, + suffix=suffix, + date_pattern=date_pattern, + hist_type=htype, + avg_list=average, + weighted=wght, + ncformat=ncfrmt, + varlist=var_list, + serial=serial, + clobber=clobber, + mean_diff_rms_obs_dir=mean_diff_rms_obs_dir, + region_nc_var=region_nc_var, + regions=regions, + region_wgt_var=region_wgt_var, + obs_dir=obs_dir, + obs_file=obs_file, + reg_obs_file_suffix=reg_obs_file_suffix) +PyAverager.run_pyAverager(pyAveSpecifier) + diff --git a/averager/pp_tests/runAvg_ocn_mpi.sh b/averager/pp_tests/runAvg_ocn_mpi.sh new file mode 
100755 index 00000000..4b99de3c --- /dev/null +++ b/averager/pp_tests/runAvg_ocn_mpi.sh @@ -0,0 +1,38 @@ +#! /usr/bin/env bash + +#BSUB -n 6 +#BSUB -q geyser +#BSUB -N +#BSUB -W 06:00 +#BSUB -R "span[ptile=1]" +#BSUB -P P93300606 +#BSUB -o pyAve.%J.out # output file name in which %J is replaced by the job ID +#BSUB -e pyAve.%J.err # error file name in which %J is replaced by the job ID + +. /glade/apps/opt/lmod/lmod/init/bash + +module restore system +module load python/2.7.7 + +cd /glade/p/work/aliceb/sandboxes/dev/postprocessing/cesm-env2/bin +pwd +. activate + +module load python/2.7.7 +module load numpy/1.8.1 +module load scipy/0.15.1 +module load mpi4py/2.0.0 +module load pynio/1.4.1 +module load matplotlib/1.4.3 +module load intel/12.1.5 +module load netcdf/4.3.0 +module load nco/4.4.4 +module use /glade/apps/contrib/ncl-nightly/modules +module load ncltest-intel + +export POSTPROCESS_PATH=/glade/p/work/aliceb/sandboxes/dev/postprocessing + +mpirun.lsf /glade/p/work/aliceb/sandboxes/dev/postprocessing/averager/pp_tests/control_ocn_series.py + +deactivate + diff --git a/averager/pyAverager/pyaverager/climAverager.py b/averager/pyAverager/pyaverager/climAverager.py index 652dfe60..92d28e8b 100644 --- a/averager/pyAverager/pyaverager/climAverager.py +++ b/averager/pyAverager/pyaverager/climAverager.py @@ -358,6 +358,8 @@ def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist weights = MA.expand_dims(slev_weights, axis=0) if var_val.ndim > 2: for lev in range(1,60): +## for 62 level ocn testing +## for lev in range(1,62): new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) @@ -449,6 +451,8 @@ def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dic region_mask = MA.expand_dims(slev_mask, axis=0) weights = MA.expand_dims(slev_weights, axis=0) for lev in range(1,60): +## for 62 level ocn testing +## for lev in 
range(1,62): new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) From 56cf23efd87d6da88affa51f8acabbfd7e303560 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 17 Oct 2016 11:41:01 -0600 Subject: [PATCH 2/6] bug fixes to ocn diag exception handling messages --- .../ocn/Plots/zonal_average_3d_fields.py | 8 ++++---- .../diagnostics/ocn/model_vs_control.py | 18 +++++++++--------- diagnostics/diagnostics/ocn/model_vs_obs.py | 10 +++++----- .../diagnostics/ocn/model_vs_obs_ecosys.py | 2 +- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py index 883538e3..afab8cfe 100644 --- a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py +++ b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py @@ -75,7 +75,7 @@ def check_prerequisites(self, env): # call ncks to extract the UAREA variable try: subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ], env=env) - except CalledProcessError as e: + except subprocess.CalledProcessError as e: print('ERROR: {0} call to ncks failed with error:'.format(self.name())) print(' {0} - {1}'.format(e.cmd, e.output)) sys.exit(1) @@ -88,7 +88,7 @@ def check_prerequisites(self, env): try: subprocess.check_output( [zaCommand,'-O','-time_const','-grid_file',env['GRIDFILE'],'{0}_tmp'.format(env['TOBSFILE']) ], env=env) - except CalledProcessError as e: + except subprocess.CalledProcessError as e: print('ERROR: {0} call to {1} failed with error:'.format(self.name(), zaCommand)) print(' {0} - {1}'.format(e.cmd, e.output)) sys.exit(1) @@ -118,7 +118,7 @@ def check_prerequisites(self, env): # call ncks to extract the UAREA variable try: subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['SOBSFILE']) ], env=env) 
- except CalledProcessError as e: + except subprocess.CalledProcessError as e: print('ERROR: {0} call to ncks failed with error:'.format(self.name())) print(' {0} - {1}'.format(e.cmd, e.output)) sys.exit(1) @@ -131,7 +131,7 @@ def check_prerequisites(self, env): try: subprocess.check_output( [zaCommand,'-O','-time_const','-grid_file',env['GRIDFILE'],'{0}_tmp'.format(env['SOBSFILE']) ], env=env) - except CalledProcessError as e: + except subprocess.CalledProcessError as e: print('ERROR: {0} call to {1} failed with error:'.format(self.name(), zaCommand)) print(' {0} - {1}'.format(e.cmd, e.output)) sys.exit(1) diff --git a/diagnostics/diagnostics/ocn/model_vs_control.py b/diagnostics/diagnostics/ocn/model_vs_control.py index 7c80ab47..63ee233c 100755 --- a/diagnostics/diagnostics/ocn/model_vs_control.py +++ b/diagnostics/diagnostics/ocn/model_vs_control.py @@ -69,14 +69,14 @@ def check_prerequisites(self, env): diagUtilsLib.create_plot_dat(env['WORKDIR'], env['XYRANGE'], env['DEPTHS']) # setup prerequisites for the model - # setup the gridfile based on the resolution -## os.environ['gridfile'] = '{0}/tool_lib/zon_avg/grids/{1}_grid_info.nc'.format(env['DIAGROOTPATH'],env['RESOLUTION']) + # setup the gridfile based on the resolution and levels os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) if env['VERTICAL'] == '42': -## os.environ['gridfile'] = '{0}/tool_lib/zon_avg/grids/{1}_42lev_grid_info.nc'.format(env['DIAGROOTPATH'],env['RESOLUTION']) - ## this file doesn't exist! 
- not sure if this even works now or not os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_42lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) + if env['VERTICAL'] == '62': + os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_62lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) + # check if gridfile exists and is readable rc, err_msg = cesmEnvLib.checkFile(os.environ['gridfile'], 'read') if not rc: @@ -97,14 +97,14 @@ def check_prerequisites(self, env): env['CNTRL_MAVGFILE'], env['CNTRL_TAVGFILE'] = diagUtilsLib.createLinks(env['CNTRLYEAR0'], env['CNTRLYEAR1'], env['CNTRLTAVGDIR'], env['WORKDIR'], env['CNTRLCASE'], control) env['CNTRLFILE'] = env['CNTRL_TAVGFILE'] - # setup the gridfile based on the resolution -## os.environ['gridfilecntrl'] = '{0}/tool_lib/zon_avg/grids/{1}_grid_info.nc'.format(env['DIAGROOTPATH'],env['CNTRLRESOLUTION']) + # setup the gridfile based on the resolution and vertical levels os.environ['gridfilecntrl'] = '{0}/omwg/za_grids/{1}_grid_info.nc'.format(env['DIAGOBSROOT'],env['CNTRLRESOLUTION']) if env['VERTICAL'] == '42': -## os.environ['gridfilecntrl'] = '{0}/tool_lib/zon_avg/grids/{1}_42lev_grid_info.nc'.format(env['DIAGROOTPATH'],env['CNTRLRESOLUTION']) - ## this file doesn't exist! - not sure if this even works now or not os.environ['gridfilecntrl'] = '{0}/omwg/za_grids/{1}_42lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['CNTRLRESOLUTION']) + if env['VERTICAL'] == '62': + os.environ['gridfilecntrl'] = '{0}/omwg/za_grids/{1}_62lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['CNTRLRESOLUTION']) + # check if gridfile exists and is readable rc, err_msg = cesmEnvLib.checkFile(os.environ['gridfilecntrl'], 'read') if not rc: @@ -195,7 +195,7 @@ def run_diagnostics(self, env, scomm): except RuntimeError as e: # unrecoverable error, bail! 
- print("Skipped '{0}' and continuing!".format(request_plot)) + print("Skipped '{0}' and continuing!".format(requested_plot)) print(e) scomm.sync() diff --git a/diagnostics/diagnostics/ocn/model_vs_obs.py b/diagnostics/diagnostics/ocn/model_vs_obs.py index 3ac86057..cf844b73 100644 --- a/diagnostics/diagnostics/ocn/model_vs_obs.py +++ b/diagnostics/diagnostics/ocn/model_vs_obs.py @@ -67,14 +67,14 @@ def check_prerequisites(self, env): # create the plot.dat file in the workdir used by all NCL plotting routines diagUtilsLib.create_plot_dat(env['WORKDIR'], env['XYRANGE'], env['DEPTHS']) - # setup the gridfile based on the resolution -## os.environ['gridfile'] = '{0}/tool_lib/zon_avg/grids/{1}_grid_info.nc'.format(env['DIAGROOTPATH'],env['RESOLUTION']) + # setup the gridfile based on the resolution and vertical levels os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) if env['VERTICAL'] == '42': -## os.environ['gridfile'] = '{0}/tool_lib/zon_avg/grids/{1}_42lev_grid_info.nc'.format(env['DIAGROOTPATH'],env['RESOLUTION']) - ## this file doesn't exist! - not sure if this even works now or not os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_42lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) + if env['VERTICAL'] == '62': + os.environ['gridfile'] = '{0}/omwg/za_grids/{1}_62lev_grid_info.nc'.format(env['DIAGOBSROOT'],env['RESOLUTION']) + # check if gridfile exists and is readable rc, err_msg = cesmEnvLib.checkFile(os.environ['gridfile'], 'read') if not rc: @@ -164,7 +164,7 @@ def run_diagnostics(self, env, scomm): except RuntimeError as e: print(e) - print("model vs. obs - Skipped '{0}' and continuing!".format(request_plot)) + print("model vs. 
obs - Skipped '{0}' and continuing!".format(requested_plot)) scomm.sync() diff --git a/diagnostics/diagnostics/ocn/model_vs_obs_ecosys.py b/diagnostics/diagnostics/ocn/model_vs_obs_ecosys.py index 51f93730..eff8c03c 100755 --- a/diagnostics/diagnostics/ocn/model_vs_obs_ecosys.py +++ b/diagnostics/diagnostics/ocn/model_vs_obs_ecosys.py @@ -179,7 +179,7 @@ def run_diagnostics(self, env, scomm): except RecoverableError as e: # catch all recoverable errors, print a message and continue. print(e) - print("model vs. obs ecosys - Skipped '{0}' and continuing!".format(request_plot)) + print("model vs. obs ecosys - Skipped '{0}' and continuing!".format(requested_plot)) except RuntimeError as e: # unrecoverable error, bail! print(e) From 3ad96d7d2df0c1ba7b6fc2924b66eb5bfa6cde2d Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Mon, 7 Nov 2016 16:49:55 -0700 Subject: [PATCH 3/6] updates for ocn hi-res diags, add vertical_levels to pyAverager, and bugfix to atm and lnd CLEANUP_FILES option in model_vs_model and model_vs_obs python class files --- Machines/machine_postprocess.xml | 1 + Machines/yellowstone_modules | 1 + Templates/postprocess.tmpl | 4 +- averager/pp_tests/control_ocn_series.py | 89 ++++++++++++ averager/pp_tests/runAvg_ocn_mpi.sh | 38 +++++ averager/pyAverager/pyaverager/PyAverager.py | 4 +- .../pyAverager/pyaverager/climAverager.py | 21 ++- .../pyAverager/pyaverager/specification.py | 8 +- cesm_utils/cesm_utils/create_postprocess | 12 +- diagnostics/diagnostics/atm/model_vs_model.py | 2 +- diagnostics/diagnostics/atm/model_vs_obs.py | 2 +- diagnostics/diagnostics/lnd/model_vs_model.py | 6 +- diagnostics/diagnostics/lnd/model_vs_obs.py | 4 +- .../ocn/Config/config_diags_ocn.xml | 16 +- .../ocn/Plots/zonal_average_3d_fields.py | 12 +- .../diagnostics/ocn/ocn_avg_generator.py | 34 +++-- ocn_diag/ncl_lib/compute_rho.ncl | 9 +- ocn_diag/ncl_lib/dwbc.ncl | 121 ++++++++++------ ocn_diag/ncl_lib/mld.ncl | 53 ++++--- ocn_diag/ncl_lib/mld_diff.ncl | 4 + 
ocn_diag/ncl_lib/sfcflx_za.ncl | 137 +++++++++++------- .../timeseries/cesm_tseries_generator.py | 5 +- 22 files changed, 433 insertions(+), 150 deletions(-) create mode 100755 averager/pp_tests/control_ocn_series.py create mode 100755 averager/pp_tests/runAvg_ocn_mpi.sh diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 2a280193..46f5eee8 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -28,6 +28,7 @@ module load intel/12.1.5 module load netcdf/4.3.0 module load nco/4.4.4 + module load netcdf4python/1.1.1 module use /glade/apps/contrib/ncl-nightly/modules module load ncltest-intel diff --git a/Machines/yellowstone_modules b/Machines/yellowstone_modules index d943e098..75218ac1 100755 --- a/Machines/yellowstone_modules +++ b/Machines/yellowstone_modules @@ -15,6 +15,7 @@ module load intel/12.1.5 module load netcdf/4.3.0 module load nco/4.4.4 module load ncl/6.3.0 +module load netcdf4python/1.1.1 # prepend the virtualenv into the PATH PATH=/glade/apps/contrib/virtualenv/12.0.7:${PATH} diff --git a/Templates/postprocess.tmpl b/Templates/postprocess.tmpl index f184c938..e2bf4e9b 100644 --- a/Templates/postprocess.tmpl +++ b/Templates/postprocess.tmpl @@ -37,9 +37,9 @@ echo "Start {{ processName }} generation $(date)" echo "******************************************" {% if standalone %} -{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} --standalone >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 +{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ completechunk }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} --standalone >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 {% else %} -{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 +{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ 
completechunk }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 {% endif %} echo "******************************************" diff --git a/averager/pp_tests/control_ocn_series.py b/averager/pp_tests/control_ocn_series.py new file mode 100755 index 00000000..de03de20 --- /dev/null +++ b/averager/pp_tests/control_ocn_series.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +from __future__ import print_function +import sys + +# check the system python version and require 2.7.x or greater +if sys.hexversion < 0x02070000: + print(70 * '*') + print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) + print('It appears that you are running python {0}'.format( + '.'.join(str(x) for x in sys.version_info[0:3]))) + print(70 * '*') + sys.exit(1) + +import os + +# +# check the POSTPROCESS_PATH which must be set +# +try: + os.environ["POSTPROCESS_PATH"] +except KeyError: + err_msg = ('create_postprocess ERROR: please set the POSTPROCESS_PATH environment variable.' \ + ' For example on yellowstone: setenv POSTPROCESS_PATH /glade/p/cesm/postprocessing') + raise OSError(err_msg) + +cesm_pp_path = os.environ["POSTPROCESS_PATH"] + +# +# activate the virtual environment that was created by create_python_env +# +if not os.path.isfile('{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path)): + err_msg = ('create_postprocess ERROR: the virtual environment cesm-env2 does not exist.' 
\ + ' Please run $POSTPROCESS_PATH/create_python_env -machine [machine name]') + raise OSError(err_msg) + +execfile('{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path), dict(__file__='{0}/cesm-env2/bin/activate_this.py'.format(cesm_pp_path))) + +from pyaverager import PyAverager, specification + +#### User modify #### + +in_dir='/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tseries/monthly' +out_dir= '/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tavg.2006.2006' +pref= 'BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001.pop.h' +htype= 'series' +average = ['hor.meanConcat:2006:2006'] +wght= False +ncfrmt = 'netcdfLarge' +serial=False + +var_list = ['TEMP', 'SALT'] +mean_diff_rms_obs_dir = '/glade/p/cesm/omwg/timeseries_obs_tx0.1v2_62lev/' +region_nc_var = 'REGION_MASK' +regions={1:'Sou',2:'Pac',3:'Ind',6:'Atl',8:'Lab',9:'Gin',10:'Arc',11:'Hud',0:'Glo'} +region_wgt_var = 'TAREA' +obs_dir = '/glade/p/cesm/omwg/timeseries_obs_tx0.1v2_62lev/' +obs_file = 'obs.nc' +reg_obs_file_suffix = '_hor_mean_obs.nc' +vertical_levels = 62 + +clobber = False +suffix = 'nc' +date_pattern= 'yyyymm-yyyymm' + +#### End user modify #### + +pyAveSpecifier = specification.create_specifier(in_directory=in_dir, + out_directory=out_dir, + prefix=pref, + suffix=suffix, + date_pattern=date_pattern, + hist_type=htype, + avg_list=average, + weighted=wght, + ncformat=ncfrmt, + varlist=var_list, + serial=serial, + clobber=clobber, + mean_diff_rms_obs_dir=mean_diff_rms_obs_dir, + region_nc_var=region_nc_var, + regions=regions, + region_wgt_var=region_wgt_var, + obs_dir=obs_dir, + obs_file=obs_file, + reg_obs_file_suffix=reg_obs_file_suffix, + vertical_levels=vertical_levels) +PyAverager.run_pyAverager(pyAveSpecifier) + diff --git a/averager/pp_tests/runAvg_ocn_mpi.sh b/averager/pp_tests/runAvg_ocn_mpi.sh new file mode 100755 index 00000000..85fd17f1 --- /dev/null +++ b/averager/pp_tests/runAvg_ocn_mpi.sh @@ -0,0 +1,38 @@ +#! 
/usr/bin/env bash + +#BSUB -n 2 +#BSUB -q geyser +#BSUB -N +#BSUB -W 06:00 +#BSUB -R "span[ptile=1]" +#BSUB -P P93300606 +#BSUB -o pyAve.%J.out # output file name in which %J is replaced by the job ID +#BSUB -e pyAve.%J.err # error file name in which %J is replaced by the job ID + +. /glade/apps/opt/lmod/lmod/init/bash + +module restore system +module load python/2.7.7 + +cd /glade/p/work/aliceb/sandboxes/dev/postprocessing/cesm-env2/bin +pwd +. activate + +module load python/2.7.7 +module load numpy/1.8.1 +module load scipy/0.15.1 +module load mpi4py/2.0.0 +module load pynio/1.4.1 +module load matplotlib/1.4.3 +module load intel/12.1.5 +module load netcdf/4.3.0 +module load nco/4.4.4 +module use /glade/apps/contrib/ncl-nightly/modules +module load ncltest-intel + +export POSTPROCESS_PATH=/glade/p/work/aliceb/sandboxes/dev/postprocessing + +mpirun.lsf /glade/p/work/aliceb/sandboxes/dev/postprocessing/averager/pp_tests/control_ocn_series.py + +deactivate + diff --git a/averager/pyAverager/pyaverager/PyAverager.py b/averager/pyAverager/pyaverager/PyAverager.py index ff4da025..104710b5 100755 --- a/averager/pyAverager/pyaverager/PyAverager.py +++ b/averager/pyAverager/pyaverager/PyAverager.py @@ -256,6 +256,8 @@ def compute_averages(self,spec): region_name = spec.regions[int(region_num)] # Remove the region number as part of the average name ave_descr[0] = ave_name_split[0] + # get the number of vertical levels + nlev = spec.vertical_levels else: region_name = 'null' region_num = -99 @@ -386,7 +388,7 @@ def compute_averages(self,spec): # The mean diff rsm function will send the variables once they are created var_avg_results,var_DIFF_results,var_RMS_results = climAverager.mean_diff_rms(var,region_name,region_num,spec.region_nc_var, spec.region_wgt_var,years,hist_dict,ave_t.average_types[ave_descr[0]],file_dict,obs_file, - reg_obs_file,inter_comm,spec.serial,VNAME_TAG,AVE_TAG) + reg_obs_file,inter_comm,spec.serial,VNAME_TAG,AVE_TAG,nlev) else: if ('__metaChar' in 
orig_var): # Handle special meta diff --git a/averager/pyAverager/pyaverager/climAverager.py b/averager/pyAverager/pyaverager/climAverager.py index 652dfe60..50954eec 100644 --- a/averager/pyAverager/pyaverager/climAverager.py +++ b/averager/pyAverager/pyaverager/climAverager.py @@ -317,7 +317,7 @@ def weighted_avg_var_missing(var,years,hist_dict,ave_info,file_dict,ave_type,fil return var_Ave -def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict): +def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict,nlev): ''' Computes the weighted hor mean rms diff for a year @@ -336,6 +336,8 @@ def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist @param hist_dict A dictionary that holds file references for all years/months. + @param nlev Number of ocean vertical levels + @param ave_info A dictionary of the type of average that is to be done. Includes: type, months_to_average, fn, and weights (weights are not used in this function/average) @@ -344,6 +346,7 @@ def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist are needed by this average calculation. @return var_Ave The averaged results for this variable across the designated time frame. 
+ ''' # Get correct data slice from the yearly average file @@ -357,7 +360,7 @@ def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist region_mask = MA.expand_dims(slev_mask, axis=0) weights = MA.expand_dims(slev_weights, axis=0) if var_val.ndim > 2: - for lev in range(1,60): + for lev in range(1,nlev): new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) @@ -404,7 +407,7 @@ def diff_var(var, avg_test_slice, obs_file): return var_Avg_diff -def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict,avg_test_slice,obs_file): +def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict,avg_test_slice,obs_file,nlev): ''' Computes the weighted rms for a year @@ -434,6 +437,8 @@ def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dic @param obs_file Observation file that contains the values to be used in the caluculation. + @param nlev Number of ocean vertical levels + @return nrms The normalized rms results for this variable. 
''' @@ -448,7 +453,7 @@ def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dic # Since weights and region mask are only one level, we need to expand them to all levels region_mask = MA.expand_dims(slev_mask, axis=0) weights = MA.expand_dims(slev_weights, axis=0) - for lev in range(1,60): + for lev in range(1,nlev): new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) @@ -474,7 +479,7 @@ def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dic return nrms -def mean_diff_rms(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict,obs_file,reg_obs_file,simplecomm,serial,MPI_TAG,AVE_TAG): +def mean_diff_rms(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info,file_dict,obs_file,reg_obs_file,simplecomm,serial,MPI_TAG,AVE_TAG,nlev): ''' Computes the weighted hor mean rms diff for a year @@ -510,6 +515,8 @@ def mean_diff_rms(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info, @MPI_TAG Integer tag used to communicate message numbers. + @param nlev Number of ocean vertical levels + @return var_Ave The averaged results for this variable. @return var_DIFF The difference results for this variable. 
@@ -523,7 +530,7 @@ def mean_diff_rms(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info, var_rms = var+'_RMS' ## Get the masked regional average - var_Avg = weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year[0],hist_dict,ave_info,file_dict) + var_Avg = weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year[0],hist_dict,ave_info,file_dict,nlev) ## Send var_Avg results to local root to write if (not serial): #md_message_v = {'name':var,'shape':var_Avg.shape,'dtype':var_Avg.dtype,'average':var_Avg} @@ -541,7 +548,7 @@ def mean_diff_rms(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dict,ave_info, ## Get the RMS from the obs diff var_slice = rover.fetch_slice(hist_dict,year[0],0,var,file_dict) temp_diff = diff_var(var, var_slice, obs_file) - var_RMS = weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year[0],hist_dict,ave_info,file_dict,temp_diff,obs_file) + var_RMS = weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year[0],hist_dict,ave_info,file_dict,temp_diff,obs_file,nlev) ## Send var_RMS results to local root to write if (not serial): #md_message = {'name':var_rms,'shape':var_RMS.shape,'dtype':var_RMS.dtype,'average':var_RMS} diff --git a/averager/pyAverager/pyaverager/specification.py b/averager/pyAverager/pyaverager/specification.py index 6460636a..7081bfae 100644 --- a/averager/pyAverager/pyaverager/specification.py +++ b/averager/pyAverager/pyaverager/specification.py @@ -83,7 +83,8 @@ def __init__(self,in_directory, ncl_location='null', year0=-99, year1=-99, - collapse_dim=''): + collapse_dim='', + vertical_levels=60): ''' Initializes the internal data with optional arguments @@ -153,6 +154,8 @@ def __init__(self,in_directory, @param year1 The last year - only used to create the cice pre_proc file. @param collapse_dims Used to collapse/average over one dim. 
+ + @param vertical_levels Number of ocean vertical levels ''' # Where the input is located @@ -266,3 +269,6 @@ def __init__(self,in_directory, # Used to collapse/average over one dim. self.collapse_dim = collapse_dim + + # Used to specify the number of ocean vertical levels + self.vertical_levels = vertical_levels diff --git a/cesm_utils/cesm_utils/create_postprocess b/cesm_utils/cesm_utils/create_postprocess index 979f78ce..fe62a63e 100755 --- a/cesm_utils/cesm_utils/create_postprocess +++ b/cesm_utils/cesm_utils/create_postprocess @@ -131,6 +131,11 @@ def commandline_options(): parser.add_argument('-cesmtag', '--cesmtag', nargs=1, required=False, help='CESM repository tag (optional)') + parser.add_argument('-completechunk', '--completechunk', nargs=1, required=False, type=int, default=1, + help='timeseries control - 1: do not create incomplete chunks, '\ + ' 0: create an incomplete chunk.' \ + ' 1 is default.') + options = parser.parse_args() return options @@ -380,6 +385,10 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile if options.backtrace: backtrace = '--backtrace' + completechunk = '' + if postProcessCmd == 'cesm_tseries_generator.py': + completechunk = '--completechunk {0}'.format(options.completechunk) + # all files look good so start parsing the template file starting with the batchTmpl templateLoader = jinja2.FileSystemLoader( searchpath='{0}/Templates'.format(ppDir) ) templateEnv = jinja2.Environment( loader=templateLoader ) @@ -404,6 +413,7 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile 'pythonpath': pythonpath, 'processName' : processName, 'postProcessCmd' : postProcessCmd, + 'completechunk' : completechunk, 'caseRoot' : caseRoot, 'virtualEnvDir' : virtualEnvDir, 'debug' : debug, @@ -569,7 +579,7 @@ def main(options): run_tmpl = 'postprocess.tmpl' # generate the timeseries batch submit script from template files - postProcessCmd = 'cesm_tseries_generator.py --completechunk 1 ' + 
postProcessCmd = 'cesm_tseries_generator.py' processName = 'timeseries' outFile = '{0}/{1}'.format(envDict['PP_CASE_PATH'],processName) diff --git a/diagnostics/diagnostics/atm/model_vs_model.py b/diagnostics/diagnostics/atm/model_vs_model.py index 07c5a8ed..ebde2473 100644 --- a/diagnostics/diagnostics/atm/model_vs_model.py +++ b/diagnostics/diagnostics/atm/model_vs_model.py @@ -284,7 +284,7 @@ def run_diagnostics(self, env, scomm): elif env['CLEANUP_FILES'].lower() in ['t','true']: # delete all the files in the diag_path directory - for root, dirs, files in os.walk('diag_path'): + for root, dirs, files in os.walk(diag_path): for f in files: os.unlink(os.path.join(root, f)) for d in dirs: diff --git a/diagnostics/diagnostics/atm/model_vs_obs.py b/diagnostics/diagnostics/atm/model_vs_obs.py index b1885f07..67335909 100644 --- a/diagnostics/diagnostics/atm/model_vs_obs.py +++ b/diagnostics/diagnostics/atm/model_vs_obs.py @@ -274,7 +274,7 @@ def run_diagnostics(self, env, scomm): elif env['CLEANUP_FILES'].lower() in ['t','true']: # delete all the files in the diag_path directory - for root, dirs, files in os.walk('diag_path'): + for root, dirs, files in os.walk(diag_path): for f in files: os.unlink(os.path.join(root, f)) for d in dirs: diff --git a/diagnostics/diagnostics/lnd/model_vs_model.py b/diagnostics/diagnostics/lnd/model_vs_model.py index 6e99592d..590d1380 100644 --- a/diagnostics/diagnostics/lnd/model_vs_model.py +++ b/diagnostics/diagnostics/lnd/model_vs_model.py @@ -66,7 +66,6 @@ def check_prerequisites(self, env, scomm): env['PLOTTYPE'] = env['p_type'] env['OBS_DATA'] = env['OBS_HOME'] env['INPUT_FILES'] = env['POSTPROCESS_PATH']+'/lnd_diag/inputFiles/' -## env['DIAG_RESOURCES'] = env['POSTPROCESS_PATH']+'/lnd_diag/resources/' env['DIAG_RESOURCES'] = env['DIAGOBSROOT']+'/resources/' env['RUNTYPE'] = 'model1-model2' @@ -277,7 +276,7 @@ def run_diagnostics(self, env, scomm): elif env['CLEANUP_FILES'].lower() in ['t','true']: # delete all the files in the 
diag_path directory - for root, dirs, files in os.walk('diag_path'): + for root, dirs, files in os.walk(diag_path): for f in files: os.unlink(os.path.join(root, f)) for d in dirs: @@ -296,7 +295,7 @@ def run_diagnostics(self, env, scomm): except OSError as e: print ('WARNING: Error renaming %s to %s: %s' % (web_dir, diag_path, e)) diag_path = web_dir - + print('DEBUG: model vs. model web_dir = {0}'.format(web_dir)) print('DEBUG: model vs. model diag_path = {0}'.format(diag_path)) @@ -304,7 +303,6 @@ def run_diagnostics(self, env, scomm): env_file = '{0}/env_diags_lnd.xml'.format(env['PP_CASE_PATH']) key = 'LNDDIAG_WEBDIR_{0}'.format(self._name) value = diag_path - ##web_file = '{0}/web_dirs/{1}.{2}-{3}'.format(env['PP_CASE_PATH'], key, scomm.get_size(), scomm.get_rank() ) web_file = '{0}/web_dirs/{1}.{2}'.format(env['PP_CASE_PATH'], key, datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')) try: diagUtilsLib.write_web_file(web_file, 'lnd', key, value) diff --git a/diagnostics/diagnostics/lnd/model_vs_obs.py b/diagnostics/diagnostics/lnd/model_vs_obs.py index 040762d7..15839292 100644 --- a/diagnostics/diagnostics/lnd/model_vs_obs.py +++ b/diagnostics/diagnostics/lnd/model_vs_obs.py @@ -69,7 +69,6 @@ def check_prerequisites(self, env, scomm): env['PLOTTYPE'] = env['p_type'] env['OBS_DATA'] = env['OBS_HOME'] env['INPUT_FILES'] = env['POSTPROCESS_PATH']+'/lnd_diag/inputFiles/' -## env['DIAG_RESOURCES'] = env['POSTPROCESS_PATH']+'/lnd_diag/resources/' env['DIAG_RESOURCES'] = env['DIAGOBSROOT']+'/resources/' env['RUNTYPE'] = 'model-obs' @@ -229,7 +228,7 @@ def run_diagnostics(self, env, scomm): elif env['CLEANUP_FILES'].lower() in ['t','true']: # delete all the files in the diag_path directory - for root, dirs, files in os.walk('diag_path'): + for root, dirs, files in os.walk(diag_path): for f in files: os.unlink(os.path.join(root, f)) for d in dirs: @@ -256,7 +255,6 @@ def run_diagnostics(self, env, scomm): env_file = '{0}/env_diags_lnd.xml'.format(env['PP_CASE_PATH']) 
key = 'LNDDIAG_WEBDIR_{0}'.format(self._name) value = diag_path - ##web_file = '{0}/web_dirs/{1}.{2}-{3}'.format(env['PP_CASE_PATH'], key, scomm.get_size(), scomm.get_rank() ) web_file = '{0}/web_dirs/{1}.{2}'.format(env['PP_CASE_PATH'], key, datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')) try: diagUtilsLib.write_web_file(web_file, 'lnd', key, value) diff --git a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml index 623d0768..93cbb240 100644 --- a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml +++ b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml @@ -170,7 +170,7 @@ Applies to both model and control cases." valid_values="60,62,42" value="60" group="observations" - desc="Specifies number of vertical levels used to calculate averages as follows: 62 if using tx0.1v2, 42 if using gx3v7, and 60 otherwise. NOTE: VERTICAL only matters if the vertical levels equals 42; otherwise, it's not used" + desc="Specifies number of vertical levels used to calculate averages as follows: 62 if using tx0.1v2, 42 if using gx3v7, and 60 otherwise." > + + @@ -304,7 +312,7 @@ Applies to both model and control cases." 
diff --git a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py index afab8cfe..f8b376a6 100644 --- a/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py +++ b/diagnostics/diagnostics/ocn/Plots/zonal_average_3d_fields.py @@ -73,8 +73,12 @@ def check_prerequisites(self, env): shutil.copy2('{0}/{1}'.format(env['TSOBSDIR'], env['TOBSFILE']), '{0}_tmp'.format(env['TOBSFILE'])) # call ncks to extract the UAREA variable + za_args = ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ] + if env['netcdf_format'] in ['netcdfLarge']: + za_args = ['ncks','-6','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ] try: - subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ], env=env) +## subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['TOBSFILE']) ], env=env) + subprocess.check_output(za_args, env=env) except subprocess.CalledProcessError as e: print('ERROR: {0} call to ncks failed with error:'.format(self.name())) print(' {0} - {1}'.format(e.cmd, e.output)) @@ -116,8 +120,12 @@ def check_prerequisites(self, env): shutil.copy2('{0}/{1}'.format(env['TSOBSDIR'], env['SOBSFILE']), '{0}_tmp'.format(env['SOBSFILE'])) # call ncks to extract the UAREA variable + za_args = ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['SOBSFILE']) ] + if env['netcdf_format'] in ['netcdfLarge']: + za_args = ['ncks','-6','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['SOBSFILE']) ] try: - subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['SOBSFILE']) ], env=env) +## subprocess.check_output( ['ncks','-A','-v','UAREA',env['TAVGFILE'],'{0}_tmp'.format(env['SOBSFILE']) ], env=env) + subprocess.check_output(za_args, env=env) except subprocess.CalledProcessError as e: print('ERROR: {0} call to ncks failed with error:'.format(self.name())) 
print(' {0} - {1}'.format(e.cmd, e.output)) diff --git a/diagnostics/diagnostics/ocn/ocn_avg_generator.py b/diagnostics/diagnostics/ocn/ocn_avg_generator.py index ee88a1fb..d5761ca8 100644 --- a/diagnostics/diagnostics/ocn/ocn_avg_generator.py +++ b/diagnostics/diagnostics/ocn/ocn_avg_generator.py @@ -159,7 +159,9 @@ def buildOcnAvgList(start_year, stop_year, tavgdir, main_comm, debugMsg): #======================================================================== # callPyAverager - create the climatology files by calling the pyAverager #======================================================================== -def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, diag_obs_root, netcdf_format, main_comm, debugMsg): +def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, + diag_obs_root, netcdf_format, nlev, timeseries_obspath, + main_comm, debugMsg): """setup the pyAverager specifier class with specifications to create the climatology files in parallel. @@ -173,15 +175,19 @@ def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, di tseries (boolean) - TRUE if TIMESERIES plots are specified. 
diag_obs_root (string) - OCNDIAG_DIAGOBSROOT machine dependent path to input data root netcdf_format (string) - OCNDIAG_netcdf_format one of ['netcdf', 'netcdf4', 'netcdf4c', 'netcdfLarge'] + nlev (integer) - Number of ocean vertical levels + timeseries_obspath (string) - timeseries observation files path main_comm (object) - simple MPI communicator object """ # the following are used for timeseries averages and ignored otherwise - mean_diff_rms_obs_dir = '{0}/omwg/timeseries_obs'.format(diag_obs_root) +## mean_diff_rms_obs_dir = '{0}/omwg/timeseries_obs'.format(diag_obs_root) + mean_diff_rms_obs_dir = timeseries_obspath region_nc_var = 'REGION_MASK' regions={1:'Sou',2:'Pac',3:'Ind',6:'Atl',8:'Lab',9:'Gin',10:'Arc',11:'Hud',0:'Glo'} region_wgt_var = 'TAREA' - obs_dir = '{0}/omwg/timeseries_obs'.format(diag_obs_root) +## obs_dir = '{0}/omwg/timeseries_obs'.format(diag_obs_root) + obs_dir = timeseries_obspath obs_file = 'obs.nc' reg_obs_file_suffix = '_hor_mean_obs.nc' @@ -218,6 +224,7 @@ def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, di debugMsg('... obs_dir = {0}'.format(obs_dir), header=True) debugMsg('... obs_file = {0}'.format(obs_file), header=True) debugMsg('... reg_obs_file_suffix = {0}'.format(reg_obs_file_suffix), header=True) + debugMsg('... 
nlev = {0}'.format(nlev), header=True) main_comm.sync(); @@ -242,6 +249,7 @@ def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, di obs_dir = obs_dir, obs_file = obs_file, reg_obs_file_suffix = reg_obs_file_suffix, + vertical_levels = nlev, main_comm = main_comm) except Exception as error: print(str(error)) @@ -265,7 +273,8 @@ def callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, varList, di # createClimFiles - create the climatology files by calling the pyAverager #========================================================================= def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries, inVarList, - tseries_start_year, tseries_stop_year, diag_obs_root, netcdf_format, main_comm, debugMsg): + tseries_start_year, tseries_stop_year, diag_obs_root, netcdf_format, + nlev, timeseries_obspath, main_comm, debugMsg): """setup the pyAverager specifier class with specifications to create the climatology files in parallel. @@ -282,6 +291,8 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries tseries_stop_year (integer) - stop year for timeseries diagnostics diag_obs_root (string) - OCNDIAG_DIAGOBSROOT machine dependent path to input data root netcdf_format (string) - OCNDIAG_netcdf_format one of ['netcdf', 'netcdf4', 'netcdf4c', 'netcdfLarge'] + nlev (integer) - Number of ocean vertical levels + timeseries_obspath (string) - timeseries observation files path main_comm (object) - simple MPI communicator object """ @@ -298,7 +309,8 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries # call the pyAverager with the inVarList if main_comm.is_manager(): debugMsg('Calling callPyAverager with averageList = {0}'.format(averageList), header=True, verbosity=1) - callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, inVarList, diag_obs_root, netcdf_format, main_comm, debugMsg) + callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, 
inVarList, diag_obs_root, + netcdf_format, nlev, timeseries_obspath, main_comm, debugMsg) main_comm.sync() # check if timeseries diagnostics is requested @@ -316,7 +328,8 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries inVarList = ['SALT', 'TEMP'] if main_comm.is_manager(): debugMsg('Calling callPyAverager with averageListMoc = {0}'.format(averageListMoc), header=True, verbosity=1) - callPyAverager(in_dir, htype, tavgdir, case_prefix, averageListMoc, inVarList, diag_obs_root, netcdf_format, main_comm, debugMsg) + callPyAverager(in_dir, htype, tavgdir, case_prefix, averageListMoc, inVarList, diag_obs_root, + netcdf_format, nlev, timeseries_obspath, main_comm, debugMsg) main_comm.sync() # generate the horizontal mean files with just SALT and TEMP @@ -325,7 +338,8 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries inVarList = ['SALT', 'TEMP'] if main_comm.is_manager(): debugMsg('Calling callPyAverager with averageList = {0}'.format(averageList), header=True, verbosity=1) - callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, inVarList, diag_obs_root, netcdf_format, main_comm, debugMsg) + callPyAverager(in_dir, htype, tavgdir, case_prefix, averageList, inVarList, diag_obs_root, + netcdf_format, nlev, timeseries_obspath, main_comm, debugMsg) main_comm.sync() #============================================ @@ -449,7 +463,8 @@ def main(options, main_comm, debugMsg): envDict['htype'], envDict['TAVGDIR'], envDict['CASE'], tseries, envDict['MODEL_VARLIST'], envDict['TSERIES_YEAR0'], envDict['TSERIES_YEAR1'], envDict['DIAGOBSROOT'], - envDict['netcdf_format'], main_comm, debugMsg) + envDict['netcdf_format'], envDict['VERTICAL'], envDict['TIMESERIES_OBSPATH'], + main_comm, debugMsg) except Exception as error: print(str(error)) traceback.print_exc() @@ -494,7 +509,8 @@ def main(options, main_comm, debugMsg): createClimFiles(envDict['CNTRLYEAR0'], envDict['CNTRLYEAR1'], envDict['cntrl_in_dir'], 
envDict['cntrl_htype'], envDict['CNTRLTAVGDIR'], envDict['CNTRLCASE'], False, envDict['CNTRL_VARLIST'], 0, 0, envDict['DIAGOBSROOT'], - envDict['netcdf_format'], main_comm, debugMsg) + envDict['netcdf_format'], envDict['VERTICAL'], envDict['TIMESERIES_OBSPATH'], + main_comm, debugMsg) except Exception as error: print(str(error)) traceback.print_exc() diff --git a/ocn_diag/ncl_lib/compute_rho.ncl b/ocn_diag/ncl_lib/compute_rho.ncl index 3346a42c..178fc008 100644 --- a/ocn_diag/ncl_lib/compute_rho.ncl +++ b/ocn_diag/ncl_lib/compute_rho.ncl @@ -9,9 +9,9 @@ begin file_S = getenv("SEASAVGSALT") file_out = getenv("SEASAVGRHO") -; print("compute_rho.ncl file_T = " + file_T) -; print("compute_rho.ncl file_S = " + file_S) -; print("compute_rho.ncl file_out = " + file_out) + print("compute_rho.ncl file_T = " + file_T) + print("compute_rho.ncl file_S = " + file_S) + print("compute_rho.ncl file_out = " + file_out) fileid = addfile(file_T,"r") TLONG = fileid->TLONG @@ -24,6 +24,7 @@ begin SALT = fileid->SALT size = dimsizes(TEMP) +;; print("size = " + size) nx = size(3) ny = size(2) nz = size(1) @@ -34,7 +35,7 @@ begin do n = 0, nt - 1 do k = 0, nz - 1 - ;print("month = " + (n+1) + " level = " + (k+1)) +;; print("month = " + (n+1) + " level = " + (k+1)) opt = False PD(n,k,:,:) = (eos(TEMP(n,k,:,:),SALT(n,k,:,:),0,opt) - 1.0d) * 1000.0d end do diff --git a/ocn_diag/ncl_lib/dwbc.ncl b/ocn_diag/ncl_lib/dwbc.ncl index 05bc7034..526a93c5 100644 --- a/ocn_diag/ncl_lib/dwbc.ncl +++ b/ocn_diag/ncl_lib/dwbc.ncl @@ -5,6 +5,22 @@ load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" begin +; Code to plot the North Atlantic Current and +; the Deep Western Boundary Current off the Grand Banks +; at about 42.5N. Longitudinal and latitudinal ranges +; based on Schott et al 2004, JPO, Vol 34, pages 817-843. +; Latitudinal range: 42N-43N, +; Longitudinal range: 43W-50W. 
+ +; Plots VVEL and PD (as sigma) + +; Uses: +; Fields: VVEL, PD +; Coordinates needed: KMT, HT, TLAT, TLONG, ULONG, z_t + +; Moved the gridres command to get_environment.ncl +; gridres = getenv("RESOLUTION") + casename = getenv("CASE") file_in = getenv("TAVGFILE") gridres = getenv("RESOLUTION") @@ -15,11 +31,18 @@ begin year1 = stringtointeger(year1str) psout = "DWBC" + if (gridres.eq."tx0.1v2_40lev".or.gridres.eq."tx0.1v2_62lev".or.gridres.eq."tx0.1v2") then + ist = 605 + ien = 675 + jst = 1635 + end if + if (gridres.eq."gx1v5".or.gridres.eq."gx1v6".or.gridres.eq."gx1v3") then - ist = 290 - ien = 319 - jst = 309 + ist = 309 + ien = 317 + jst = 308 end if + if (gridres.eq."gx3v5".or.gridres.eq."gx3v6".or.gridres.eq."gx3v7") then ist = 95 ien = 99 @@ -27,24 +50,36 @@ begin end if ;; data files - f1 = addfile(file_in,"r") - kmt = f1->KMT(jst,ist:ien) - ht = f1->HT(jst,ist:ien) + f1 = addfile(file_in,"r") + kmt = f1->KMT(jst,ist:ien) + ht = f1->HT(jst,ist:ien) vvel = f1->VVEL(0,:,jst,ist:ien) - pd = f1->PD(0,:,jst,ist:ien) + pd = f1->PD(0,:,jst,ist:ien) tlat = f1->TLAT(jst,ist:ien) tlon = f1->TLONG(jst,ist:ien) ulon = f1->ULONG(jst,ist:ien) - z_t = f1->z_t + z_t = f1->z_t + +; Convert tlon & ulon to degrees West (negative) tlon = tlon-360. - ulon = ulon-360. + + if (gridres.eq."tx0.1v2".or.gridres.eq."tx0.1v2_62lev") then + ulon = ulon + end if + if (gridres.eq."gx1v5".or.gridres.eq."gx1v6".or.gridres.eq."gx1v3") then + ulon = ulon-360. + end if + if (gridres.eq."gx3v5".or.gridres.eq."gx3v6".or.gridres.eq."gx3v7") then + ulon = ulon-360. + end if + meanlat = dim_avg(tlat) nx = dimsizes(tlon) z_t = z_t/100. ; convert from cm to m - ht = ht/100. + ht = ht/100. 
z_t@long_name = "Depth (m)" z_t@units = "m" @@ -52,14 +87,14 @@ begin copy_VarMeta(pd,sigma) sigma@units = "kg/m^3" - vvel&z_t = z_t - vvel!1 = "lon_u" ; assign new coord info - vvel&lon_u= ulon + vvel&z_t = z_t + vvel!1 = "lon_u" ; assign new coord info + vvel&lon_u = ulon vvel@long_name = "VVEL" - sigma&z_t = z_t - sigma!1 = "lon_t" ; assign new coord info - sigma&lon_t= tlon + sigma&z_t = z_t + sigma!1 = "lon_t" ; assign new coord info + sigma&lon_t = tlon ht!0 = "lon_t" ht&lon_t = tlon @@ -70,52 +105,55 @@ begin print("plotting DWBC") wks = gsn_open_wks(img_format,psout) ; open a ps file gsn_define_colormap(wks,"amwg") ; choose colormap - i = NhlNewColor(wks,0.8,0.8,0.8) ; add gray to map + i = NhlNewColor(wks,0.8,0.8,0.8) ; add gray to map ; resources for each panel - res = True ; plot mods desired + res = True ; plot mods desired res@gsnDraw = False res@gsnFrame = False - res@cnFillOn = True ; turn on color fill - res@cnFillMode = "RasterFill" ; turn on raster mode + res@cnFillOn = True ; turn on color fill + res@cnFillMode = "RasterFill" ; turn on raster mode res@cnMissingValFillColor = "grey" - res@lbLabelAutoStride = True ; control labelbar labels - res@cnLinesOn = False ; turn off contour lines - res@cnLineLabelsOn = False ; turn the line labels off - res@lbLabelBarOn = True ; Turn off labelbar - res@lbOrientation = "vertical" ; vertical label bar - res@cnInfoLabelOn = False ; Turn off informational label + res@lbLabelAutoStride = True ; control labelbar labels + res@cnLinesOn = False ; turn off contour lines + res@cnLineLabelsOn = False ; turn the line labels off + res@lbLabelBarOn = True ; Turn off labelbar + res@lbOrientation = "vertical" ; vertical label bar + res@cnInfoLabelOn = False ; Turn off informational label res@cnLevelSelectionMode = "ExplicitLevels" - res@cnLevels = (/-12,-8,-6,-4,-2,-1,0,1,2,4,6,8,12/) + res@cnLevels = (/-18,-15,-12,-8,-6,-4,-2,-1,0,1,2,4,6,8,12,15,18/) + if (gridres.eq."tx0.1v2_62lev") then + res@cnLevels = 
(/-35,-25,-15,-12,-9,-6,-3,-1,0,1,3,6,9,12,15,25,35/) + end if res@trYReverse = True ; reverses y-axis - res@trXMinF = -52. - res@trXMaxF = -42. + res@trXMinF = -50. + res@trXMaxF = -43. res@tmXBMode = "Manual" res@tmXBTickStartF = -52. res@tmXBTickSpacingF = 2. res@gsnYAxisIrregular2Linear = True - res@gsnMaximize = True ; enlarge plot - res@gsnPaperOrientation = "Portrait" - res@gsnSpreadColors = True ; use full colormap - res@gsnSpreadColorStart = 2 ; start at color 10 - res@gsnSpreadColorEnd = 17 ; end at color 96 + res@gsnMaximize = True ; enlarge plot + res@gsnPaperOrientation = "Portrait" + res@gsnSpreadColors = True ; use full colormap + res@gsnSpreadColorStart = 2 ; start at color 10 + res@gsnSpreadColorEnd = 17 ; end at color 96 res@tiMainString = casename+", "+year0str+"-"+year1str ; resources for each panel - res2 = True ; plot mods desired + res2 = True ; plot mods desired res2@gsnDraw = False res2@gsnFrame = False - res2@cnFillOn = False ; turn on color fill + res2@cnFillOn = False ; turn on color fill res2@cnLinesOn = True ; turn off contour lines res2@cnMissingValFillColor = "black" - res2@cnInfoLabelOn = False + res2@cnInfoLabelOn = False res2@cnLevelSelectionMode = "ExplicitLevels" - res2@cnLevels = (/27.68,27.74,27.8,27.88/) - res2@cnLevelFlags = "LineAndLabel" + res2@cnLevels = (/27.68,27.74,27.8,27.88/) + res2@cnLevelFlags = "LineAndLabel" res2@trYReverse = True ; reverses y-axis res2@gsnYAxisIrregular2Linear = True - res2@gsnMaximize = True ; enlarge plot - res2@gsnPaperOrientation = "Portrait" + res2@gsnMaximize = True ; enlarge plot + res2@gsnPaperOrientation = "Portrait" sigma@long_name = "" sigma@units = "" @@ -130,3 +168,4 @@ begin gsn_text_ndc(wks, "Mean Latitude = "+meanlat,0.35,0.24,txres) frame(wks) end + diff --git a/ocn_diag/ncl_lib/mld.ncl b/ocn_diag/ncl_lib/mld.ncl index 8e086680..d19b438e 100644 --- a/ocn_diag/ncl_lib/mld.ncl +++ b/ocn_diag/ncl_lib/mld.ncl @@ -9,6 +9,10 @@ begin file_netcdf = getenv("SEASAVGRHO") file_tavg = 
getenv("TAVGFILE") + print("mld file_obs = " + file_obs) + print("mld file_netcdf = " + file_netcdf) + print("mld file_tavg = " + file_tavg) + winter_plot = 1 ; in sigma units @@ -28,7 +32,6 @@ begin tarea = fileid->TAREA ht = fileid->HT - fileid = addfile(file_netcdf,"r") field = fileid->PD @@ -54,12 +57,23 @@ begin ny = size(2) nz = size(1) nt = size(0) - field@_FillValue = missing_value +;; field@_FillValue = missing_value + field@_FillValue = default_fillvalue(typeof(field)) field = where(field .eq. field@missing_value,field@_FillValue,field) - PD = new((/nc,nt,nz,ny,nx/),double) + PD = new((/nz,ny,nx/),double) + + MLD = new((/2,nc,nt,ny,nx/),double) + + do n=0,nt-1 + PD(:,:,:) = field(n,:,:,:) + do off = 0, noff - 1 + MLD(off,0,n,:,:) = mixed_layer_depth(PD,kmt,ht,depth,pd_offset(off)) + end do + MLD@long_name = "mixed layer depth" + end do - PD(0,:,:,:,:) = field delete(field) + delete(PD) fileid = addfile(file_obs,"r") if (isfilevar(fileid,"PD")) then @@ -67,16 +81,22 @@ begin else field = fileid->POTENTIAL_DENSITY end if + field@missing_value = 0. if (.not.isatt(field,"_FillValue")) then - field@_FillValue = default_fillvalue("double") + field@_FillValue = default_fillvalue("float") end if field = where(field .eq. field@missing_value,field@_FillValue,field) - PD(1,:,:,:,:) = (/field/) - delete(field) + PD = new((/nz,ny,nx/),double) - MLD = new((/nc,nt,ny,nx/),double) + do n=0,nt-1 + PD(:,:,:) = (/field(n,:,:,:)/) + do off = 0, noff - 1 + MLD(off,1,n,:,:) = mixed_layer_depth(PD,kmt,ht,depth,pd_offset(off)) + end do + end do - MLD@long_name = "mixed layer depth" + delete(field) + delete(PD) lev_0 = (/ 0., 25., 50., 75., 100., 150., 200., 250., 500., 1000., 2000. /) lev_mod = (/ 0., 25., 50., 75., 100., 150., 200., 250., 500., 1000., 2000. 
/) @@ -111,21 +131,19 @@ begin coltab_2(0) = 0 delete(tmp) - do off = 0, noff - 1 - MLD = mixed_layer_depth(PD,kmt,ht,depth,pd_offset(off)) opt = False - MLD = wgt_area_smooth(MLD,tarea,opt) + MLD(off,:,:,:,:) = wgt_area_smooth(MLD(off,:,:,:,:),tarea,opt) MLD_mean = new((/nc,ny,nx/),double) MLD_nw_mean = new((/nc,ny,nx/),double) MLD_sw_mean = new((/nc,ny,nx/),double) MLD_winter_mean = new((/nc,ny,nx/),double) do m = 0,nc-1 - MLD_mean(m,:,:) = dim_avg_n_Wrap(MLD(m,:,:,:),0) - MLD_nw_mean(m,:,:) = dim_avg_n_Wrap(MLD(m,0:2,:,:),0) - MLD_sw_mean(m,:,:) = dim_avg_n_Wrap(MLD(m,6:8,:,:),0) + MLD_mean(m,:,:) = dim_avg_n_Wrap(MLD(off,m,:,:,:),0) + MLD_nw_mean(m,:,:) = dim_avg_n_Wrap(MLD(off,m,0:2,:,:),0) + MLD_sw_mean(m,:,:) = dim_avg_n_Wrap(MLD(off,m,6:8,:,:),0) end do tlat_c = conform(MLD_winter_mean,tlat,(/1,2/)) MLD_winter_mean = where(tlat_c .ge. 0, MLD_nw_mean,MLD_sw_mean) @@ -205,8 +223,7 @@ begin panel_res@pmLabelBarWidthF = .62 panel_res@lbLabelFontHeightF = 0.012 panel_res@gsnMaximize = True - panel_res@gsnPanelYWhiteSpacePercent = 3 - panel_res@gsnPanelXWhiteSpacePercent = 1 + panel_res@gsnPanelYWhiteSpacePercent = 1 panel_res@gsnPanelScalePlotIndex = 1 panel_res@gsnPanelTop = 1. 
panel_res@gsnPanelBottom = .385 @@ -217,6 +234,7 @@ begin opt@tmYLLabelsOn = True opt@tmXBLabelsOn = True +;; case_info = case_number + "- OBS MEAN" case_info = "OBS MEAN" plot5 = contour_plot(wks, MLD_mean_diff(:,:), tlon, tlat, kmt, region_mask, tarea, case_info, \ missing, units, dlev, lev_1, coltab_2, opt) @@ -237,6 +255,7 @@ begin opt@tmYLLabelsOn = False opt@lbLabelBarOn = False +;; case_info = case_number + "- WINTER OBS MEAN" case_info = "WINTER OBS MEAN" plot6 = contour_plot(wks, MLD_winter_mean_diff(:,:), tlon, tlat, kmt, region_mask, tarea, case_info, \ missing, units, dlev, lev_1, coltab_2, opt) diff --git a/ocn_diag/ncl_lib/mld_diff.ncl b/ocn_diag/ncl_lib/mld_diff.ncl index 8a9e09af..1892e547 100644 --- a/ocn_diag/ncl_lib/mld_diff.ncl +++ b/ocn_diag/ncl_lib/mld_diff.ncl @@ -9,6 +9,10 @@ begin file_netcdf = getenv("SEASAVGRHO") file_tavg = getenv("TAVGFILE") + print("mld_diff file_cntrl = " + file_cntrl) + print("mld_diff file_netcdf = " + file_netcdf) + print("mld_diff file_tavg = " + file_tavg) + winter_plot = 1 ; in sigma units diff --git a/ocn_diag/ncl_lib/sfcflx_za.ncl b/ocn_diag/ncl_lib/sfcflx_za.ncl index 88efa367..47fa760a 100644 --- a/ocn_diag/ncl_lib/sfcflx_za.ncl +++ b/ocn_diag/ncl_lib/sfcflx_za.ncl @@ -4,9 +4,21 @@ load "$NCLPATH/get_environment.ncl" begin -file_netcdf_za = "za_"+file_netcdf +print("") +print("Running sfcflx_za_highres.ncl") + +file_netcdf_za = "za_"+file_netcdf file_flux_obs_za = "za_"+file_flux_obs -file_wind_obs_za = "za_"+file_wind_obs + +fileid = addfile(file_netcdf,"r") +f_za = addfile(file_netcdf_za,"r") + +yesobs = isfilepresent(file_flux_obs_za) +if (yesobs) then + f_obs = addfile(file_flux_obs_za,"r") +else + print("Observational file of zonal average does not exisit.") +end if nlev = 21 missing = 1.0e30 @@ -21,54 +33,75 @@ region_index = (/ global, atlantic, pacific, indian, southern /) n_reg = dimsizes(region_index) -field_name = (/ "SHF_TOTAL", "SHF_QSW", "SFWF_TOTAL", "PREC_F", \ - "EVAP_F", "MELT_F", "ROFF_F", 
"SALT_F", \ - "SENH_F", "LWUP_F", "LWDN_F", "MELTH_F", \ - "QFLUX", "SNOW_F" /) -if (resolution .eq. "gx1v6") then - obsvar_name = (/ "nethflx", "swnet", "netfwflx", "rain", \ - "evap", "meltw", "roff", "", \ - "sen", "lwup", "lwdn", "melth", \ - "", "snow" /) +varsinfile = getfilevarnames(f_za) + + field_name = (/ "SHF_QSW", "MELTH_F", "SENH_F", \ + "LWUP_F", "LWDN_F", "EVAP_F", "PREC_F", \ + "MELT_F", "SALT_F", "SNOW_F" /) +; "MELT_F", "SALT_F", "SNOW_F", "ROFF_F" /) + +if (yesobs) then + obsvar_name = (/ "swnet", "melth", "sen", \ + "lwup", "lwdn", "evap", "rain", \ + "meltw", "", "snow" /) +; "meltw", "", "snow", "roff" /) else - obsvar_name = (/ "", "", "", "", \ - "", "", "", "", \ - "", "", "", "", \ - "", "" /) + obsvar_name = (/ "", "", "", \ + "", "", "", "", \ + "", "", "" /) end if - -fileid = addfile(file_netcdf,"r") -days_in_norm_year = fileid->days_in_norm_year -sflux_factor = fileid->sflux_factor -salinity_factor = fileid->salinity_factor -l_f = fileid->latent_heat_fusion -l_f = l_f / 1e4 -secperday = 86400. -secperyear = days_in_norm_year * secperday -rho_fw = 1.0 -rho_fw = rho_fw * 1000. - -f_za = addfile(file_netcdf_za,"r") -f_obs = addfile(file_flux_obs_za,"r") n_fields = dimsizes(field_name) print( " the number of fields to be processed is " + n_fields) -lat_t = f_za->lat_t -z_t = f_za->z_t -z_t = z_t / 1.0e5 if (cpl .eq. 6) then obs_prefix = "avXc2o_o_" else obs_prefix = "x2oavg_" end if + +;;-- Read details from "fileid" file --;; + +sflux_factor = fileid->sflux_factor +if ( sflux_factor .eq. 0. ) then + sflux_factor = 0.1 + print( "sflux_factor in file is zero. Prescribing new one." ) +end if + +salinity_factor = fileid->salinity_factor +if ( salinity_factor .eq. 0. ) then + ocn_ref_salinity = 34.7 + fwflux_factor = 1.e-4 + salinity_factor = -1*ocn_ref_salinity*fwflux_factor + print( "salinity_factor in file is zero. Computing new one." 
) +end if + +l_f = fileid->latent_heat_fusion +l_f = l_f / 1e4 + +;;-- Read coordinates from "f_za" file --;; + +lat_t = f_za->lat_t + +;;-- Loop over fields --;; + do n=0, n_fields-1 - obsfile = f_obs - + if(.not.any(varsinfile.eq.field_name(n))) then + print(field_name(n) + " is not in file.") + continue + end if + + print(" processing zonal average of " + field_name(n)) + + if ( yesobs ) then + obsfile = f_obs + end if + if (obsvar_name(n) .eq. "") then obsvar = "" + print("No observations of " + field_name(n)) else if (field_name(n) .eq. "ROFF_F") then obsvar = obs_prefix + "Forr_" + obsvar_name(n) else @@ -76,27 +109,32 @@ do n=0, n_fields-1 end if end if - if (obsvar .ne. "") then + ;;-- Read observations from "obsfile" file --;; + + if (obsvar .ne. "" .and. yesobs ) then field_obs = obsfile->$obsvar$ end if - if ( field_name(n) .eq. "PREC_F" ) then + if ( field_name(n) .eq. "PREC_F" .and. yesobs ) then obsvar = obs_prefix + "Foxx_snow" snow = obsfile->$obsvar$ field_obs = where(field_obs .lt. 1.e10 .and. snow .lt. 1.e10, field_obs + snow, field_obs@_FillValue) end if + ;;-- Read model output from "f_za" file --;; + if ( field_name(n) .ne. "SHF_TOTAL" .and. \ field_name(n) .ne. "SFWF_TOTAL" ) then field = f_za->$field_name(n)$ else - field_q = f_za->QFLUX if ( field_name(n) .eq. "SHF_TOTAL" ) then - field = f_za->SHF - field = where(field .lt. 1e10 .and. field_q .lt. 1e10, field + field_q, field@_FillValue) + field_q = f_za->QFLUX + field = f_za->SHF + field = where(field .lt. 1e10 .and. field_q .lt. 1e10, field + field_q, field@_FillValue) end if if ( field_name(n) .eq. "SFWF_TOTAL" ) then - field = f_za->SFWF - field = tofloat(where(field .lt. 1e10 .and. field_q .lt. 1e10, field - field_q/l_f, field@_FillValue)) + field_q = f_za->QFLUX + field = f_za->SFWF + field = tofloat(where(field .lt. 1e10 .and. field_q .lt. 1e10, field - field_q/l_f, field@_FillValue)) end if end if @@ -115,19 +153,18 @@ do n=0, n_fields-1 end if if ( field_name(n) .eq. 
"SALT_F" ) then units = "x10~S~-5~N~ Kg m~S~-2~N~ s~S~-1~N~" - field = tofloat(field * sflux_factor / ( salinity_factor * 1.0e-5 )) - end if + if (abs(salinity_factor).gt.0.) then + field = tofloat(field * sflux_factor / ( salinity_factor * 1.0e-5 )) + end if + end if if ( field_name(n) .eq. "TAUX" .or. field_name(n) .eq. "TAUY" ) then units = "dyn cm~S~-2~N~" field_obs = where(field_obs .lt. 1e10,field_obs * 10, field_obs@_FillValue) - end if - - print( " plotting zonal average of " + field_name(n)) + end if ;wks = gsn_open_wks("x11",field_name(n)) wks = gsn_open_wks(img_format,field_name(n)+ "_GLO_za") - gsn_define_colormap(wks,"table42") - + case_info = field_name(n) +" ZONAL-AVE (GLO) " \ + case_number + " " + time_info subt = "" @@ -150,8 +187,8 @@ do n=0, n_fields-1 res@vpWidthF = .5 * 1.6 res@gsnMaximize = True res@xyLineColors = (/"blue", "red"/) - res@xyMonoDashPattern = True - res@xyDashPattern = 0 + res@xyDashPatterns = (/ 0, 1 /) + res@xyLineThicknessF = 4 res@gsnYRefLine = 0.0 res@gsnPaperOrientation = "portrait" diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index 9e9fe51b..8523056d 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -96,7 +96,7 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d env_timeseries = '{0}/env_timeseries.xml'.format(caseroot) # read tseries log file to see if we've already started converting files, if so, where did we leave off - log = chunking.read_log(caseroot+'/ts_status.log') + log = chunking.read_log('{0}/logs/ts_status.log'.format(caseroot)) # check if the env_timeseries.xml file exists if ( not os.path.isfile(env_timeseries) ): @@ -344,7 +344,8 @@ def main(options, scomm, rank, size): reshpr.convert() if rank == 0: - chunking.write_log(caseroot+'/ts_status.log', log) # Update system log with the dates that were just converted + # Update system log with the 
dates that were just converted + chunking.write_log('{0}/logs/ts_status.log'.format(caseroot), log) scomm.sync() #=================================== From 457dc01b10e040e00723700817fb7de45ef40c54 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Thu, 17 Nov 2016 18:42:36 -0700 Subject: [PATCH 4/6] Update timeseries chunking code; enhance ocn diags and pyAverager for hi-res support and user specified vertical levels; numerous bug fixes. Problems still exists with chunking ocn daily data and ocn sfcflx_za.ncl script. Test suite: all components model vs. obs and ocn timeseries diagnostics and single variable timeseries generation for 9 years of data (6 for ocean because of missing data) using b.e20.B1850.f09_g16.pi_control.all.123 Fixes [Github issue #]: #36, #31 User interface changes: New XML id / value pairs and modified config_timeseries.xml schema. Input data changes: add obs data for ocn hi-res, 0.1 degree, 62 level in /glade/p/cesm/omwg/obs_data directories. THESE DATA SETS NEED TO BE ADDED TO THE SVN INPUTDATA REPO. 
Code review: --- Config/config_postprocess.xml | 8 ++ Config/config_timeseries.xml | 64 ----------- Config/config_timeseries.xsd | 4 - Machines/machine_postprocess.xml | 10 +- Templates/batch_yellowstone.tmpl | 39 +++++++ Templates/postprocess.tmpl | 4 +- averager/pp_tests/control_ocn_series.py | 25 +---- averager/pp_tests/runAvg_ocn_mpi.sh | 6 +- .../pyAverager/pyaverager/climAverager.py | 12 --- cesm_utils/cesm_utils/create_postprocess | 10 -- .../diagnostics/atm/atm_avg_generator.py | 3 +- diagnostics/diagnostics/lnd/lnd_diags_bc.py | 17 +-- diagnostics/diagnostics/lnd/model_vs_model.py | 43 ++++---- .../ocn/Config/config_diags_ocn.xml | 6 +- .../diagnostics/ocn/model_timeseries.py | 4 +- .../diagnostics/ocn/ocn_avg_generator.py | 4 +- lnd_diag/model1-model2/set_2.ncl | 41 ++++++- .../timeseries/cesm_tseries_generator.py | 101 +++++++++++------- timeseries/timeseries/chunking.py | 19 +++- 19 files changed, 214 insertions(+), 206 deletions(-) diff --git a/Config/config_postprocess.xml b/Config/config_postprocess.xml index 7c8532b9..c2df49b0 100644 --- a/Config/config_postprocess.xml +++ b/Config/config_postprocess.xml @@ -128,6 +128,14 @@ desc="If TRUE, create the single variable time series files using the history time slice files. All the time invariant metadata is included in each variable time series file header. Rules for how the time series variable files are created are specified in the env_archive.xml file." 
> + + hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -24,8 +22,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily years 5 @@ -33,8 +29,6 @@ hist TRUE netcdf4c - proc/tseries/hourly6 - hourly6 years 1 @@ -42,8 +36,6 @@ hist TRUE netcdf4c - proc/tseries/hourly3 - hourly3 years 1 @@ -51,8 +43,6 @@ hist TRUE netcdf4c - proc/tseries/hourly1 - hourly1 years 1 @@ -60,8 +50,6 @@ hist TRUE netcdf4c - proc/tseries/min30 - min30 years 1 @@ -69,8 +57,6 @@ hist FALSE netcdf4c - undefined - undefined years 10 @@ -78,8 +64,6 @@ hist FALSE netcdf4c - undefined - undefined years 10 @@ -87,8 +71,6 @@ hist FALSE netcdf4c - undefined - undefined years 10 @@ -120,8 +102,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -129,8 +109,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily years 5 @@ -138,8 +116,6 @@ hist TRUE netcdf4c - proc/tseries/hourly6 - hourly6 years 1 @@ -147,8 +123,6 @@ hist TRUE netcdf4c - proc/tseries/hourly3 - hourly3 years 1 @@ -156,8 +130,6 @@ hist TRUE netcdf4c - proc/tseries/hourly1 - hourly1 years 1 @@ -165,8 +137,6 @@ hist TRUE netcdf4c - proc/tseries/min30 - min30 years 1 @@ -192,8 +162,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -201,8 +169,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily years 5 @@ -210,8 +176,6 @@ hist TRUE netcdf4c - proc/tseries/hourly6 - hourly6 years 1 @@ -219,8 +183,6 @@ hist TRUE netcdf4c - proc/tseries/hourly3 - hourly3 years 1 @@ -246,8 +208,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -255,8 +215,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily years 5 @@ -264,8 +222,6 @@ hist TRUE netcdf4c - proc/tseries/hourly6 - hourly6 years 1 @@ -273,8 +229,6 @@ hist TRUE netcdf4c - proc/tseries/hourly3 - hourly3 years 1 @@ -300,8 +254,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -320,8 +272,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -329,8 +279,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily 
years 5 @@ -338,8 +286,6 @@ hist TRUE netcdf4c - proc/tseries/yearly - yearly years 10 @@ -347,8 +293,6 @@ hist FALSE netcdf4c - undefined - undefined years 10 @@ -356,8 +300,6 @@ hist TRUE netcdf4c - proc/tseries/annual - annual years 10 @@ -365,8 +307,6 @@ hist TRUE netcdf4c - proc/tseries/daily - daily years 5 @@ -385,8 +325,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 @@ -401,8 +339,6 @@ hist TRUE netcdf4c - proc/tseries/monthly - monthly years 10 diff --git a/Config/config_timeseries.xsd b/Config/config_timeseries.xsd index 2649c79b..f5f94103 100644 --- a/Config/config_timeseries.xsd +++ b/Config/config_timeseries.xsd @@ -12,8 +12,6 @@ - - @@ -25,8 +23,6 @@ - - diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml index 46f5eee8..fbb361e2 100644 --- a/Machines/machine_postprocess.xml +++ b/Machines/machine_postprocess.xml @@ -3,7 +3,7 @@ - 128 + 128 mpirun.lsf f2py @@ -34,24 +34,24 @@ - 128 + 128 16 6 /glade/p/cesm/amwg/amwg_data - 128 + 128 4 /glade/p/cesm/pcwg/ice/data - 128 + 128 12 6 /glade/p/cesm/lmwg/diag/lnd_diag_data - 128 + 128 16 /glade/p/cesm diff --git a/Templates/batch_yellowstone.tmpl b/Templates/batch_yellowstone.tmpl index 7fec4f8b..bdd57452 100644 --- a/Templates/batch_yellowstone.tmpl +++ b/Templates/batch_yellowstone.tmpl @@ -1,3 +1,42 @@ +########## +## +## General rules for determining PE counts and distribution across nodes +## --------------------------------------------------------------------- +## +## Averages: +## +## For avearges, set -n equal to the number of variables to be averaged +## plus the number of averages to be computed. The ptile should always +## be set to 15 on yellowstone exclusive nodes. +## +## For ocean hi-resolution or atm data sets with a lot of variables, +## set the netcdf_format XML variable to netcdfLarge, change the queue to +## either geyser (shared) or bigmem (exclusive). For geyser, set -n to 16 +## and ptile to 2 or more. 
Or, set -n < 16 and ptile to 1 which will +## allow for more memory usage. The -W setting may also need to be +## increased for large data sets. +## +########## +## +## Diagnostics: +## +## For diagnostics, the queue should always be set to geyser or caldera +## with the -n not to exceed the number of plot sets to be created. +## The ptile can be adjusted depending on the size of the input climo +## and average files. +## +########## +## +## Variable Time series generation: +## +## On the yellowstone queues, -n should be set to (number of variables)/2 +## and ptile = 15. For geyser or caldera, the maximum -n is 16 and the +## ptile can be adjusted based on what the memory requirements might +## be depending on the variable size and number of history time slices +## to be included in the final single variable output file. +## +########## + #BSUB -n {{ pes }} #BSUB -R "span[ptile={{ ppn }}]" #BSUB -q {{ queue }} diff --git a/Templates/postprocess.tmpl b/Templates/postprocess.tmpl index e2bf4e9b..f184c938 100644 --- a/Templates/postprocess.tmpl +++ b/Templates/postprocess.tmpl @@ -37,9 +37,9 @@ echo "Start {{ processName }} generation $(date)" echo "******************************************" {% if standalone %} -{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ completechunk }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} --standalone >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 +{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} --standalone >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 {% else %} -{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ completechunk }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 +{{ mpirun|replace("{{ pes }}",pes) }} ./{{ postProcessCmd }} {{ debug }} {{ backtrace }} --caseroot {{ caseRoot }} >> {{ caseRoot }}/logs/{{ processName }}.log 2>&1 {% endif %} echo 
"******************************************" diff --git a/averager/pp_tests/control_ocn_series.py b/averager/pp_tests/control_ocn_series.py index ab6191ad..df82b459 100755 --- a/averager/pp_tests/control_ocn_series.py +++ b/averager/pp_tests/control_ocn_series.py @@ -41,42 +41,25 @@ #### User modify #### in_dir='/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tseries/monthly' -<<<<<<< HEAD out_dir= '/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tavg.2041.2050' pref= 'BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001.pop.h' htype= 'series' -average = ['tavg:2041:2050','mavg:2041:2050'] -======= -out_dir= '/glade/scratch/aliceb/BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001/ocn/proc/tavg.2006.2006' -pref= 'BRCP85C5CN_ne120_t12_pop62.c13b17.asdphys.001.pop.h' -htype= 'series' -average = ['hor.meanConcat:2006:2006'] ->>>>>>> tseries_chunking +average = ['hor.meanConcat:2041:2050'] wght= False ncfrmt = 'netcdfLarge' serial=False -<<<<<<< HEAD -##var_list = ['TEMP', 'SALT'] -var_list = ['TEMP','SALT','PD','UVEL','VVEL','WVEL','IAGE','TAUX','TAUY','SSH','HMXL','HBLT','SFWF','PREC_F','MELT_F','MELTH_F','SHF','SHF_QSW','SENH_F','QFLUX','SNOW_F','SALT_F','EVAP_F','ROFF_F','LWUP_F','LWDN_F'] -mean_diff_rms_obs_dir = '/glade/p/cesm/omwg/timeseries_obs/' +#var_list = ['TEMP','SALT','PD','UVEL','VVEL','WVEL','IAGE','TAUX','TAUY','SSH','HMXL','HBLT','SFWF','PREC_F','MELT_F','MELTH_F','SHF','SHF_QSW','SENH_F','QFLUX','SNOW_F','SALT_F','EVAP_F','ROFF_F','LWUP_F','LWDN_F'] region_nc_var = 'REGION_MASK' regions={1:'Sou',2:'Pac',3:'Ind',6:'Atl',8:'Lab',9:'Gin',10:'Arc',11:'Hud',0:'Glo'} region_wgt_var = 'TAREA' -obs_dir = '/glade/p/cesm/' -obs_file = 'obs.nc' -reg_obs_file_suffix = '_hor_mean_obs.nc' -======= var_list = ['TEMP', 'SALT'] mean_diff_rms_obs_dir = '/glade/p/cesm/omwg/timeseries_obs_tx0.1v2_62lev/' region_nc_var = 'REGION_MASK' -regions={1:'Sou',2:'Pac',3:'Ind',6:'Atl',8:'Lab',9:'Gin',10:'Arc',11:'Hud',0:'Glo'} 
-region_wgt_var = 'TAREA' obs_dir = '/glade/p/cesm/omwg/timeseries_obs_tx0.1v2_62lev/' obs_file = 'obs.nc' reg_obs_file_suffix = '_hor_mean_obs.nc' vertical_levels = 62 ->>>>>>> tseries_chunking clobber = False suffix = 'nc' @@ -102,11 +85,7 @@ region_wgt_var=region_wgt_var, obs_dir=obs_dir, obs_file=obs_file, -<<<<<<< HEAD - reg_obs_file_suffix=reg_obs_file_suffix) -======= reg_obs_file_suffix=reg_obs_file_suffix, vertical_levels=vertical_levels) ->>>>>>> tseries_chunking PyAverager.run_pyAverager(pyAveSpecifier) diff --git a/averager/pp_tests/runAvg_ocn_mpi.sh b/averager/pp_tests/runAvg_ocn_mpi.sh index 6ac68682..775ccaf9 100755 --- a/averager/pp_tests/runAvg_ocn_mpi.sh +++ b/averager/pp_tests/runAvg_ocn_mpi.sh @@ -1,13 +1,9 @@ #! /usr/bin/env bash -<<<<<<< HEAD #BSUB -n 6 -======= -#BSUB -n 2 ->>>>>>> tseries_chunking #BSUB -q geyser #BSUB -N -#BSUB -W 06:00 +#BSUB -W 12:00 #BSUB -R "span[ptile=1]" #BSUB -P P93300606 #BSUB -o pyAve.%J.out # output file name in which %J is replaced by the job ID diff --git a/averager/pyAverager/pyaverager/climAverager.py b/averager/pyAverager/pyaverager/climAverager.py index 27b64645..50954eec 100644 --- a/averager/pyAverager/pyaverager/climAverager.py +++ b/averager/pyAverager/pyaverager/climAverager.py @@ -360,13 +360,7 @@ def weighted_hor_avg_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist region_mask = MA.expand_dims(slev_mask, axis=0) weights = MA.expand_dims(slev_weights, axis=0) if var_val.ndim > 2: -<<<<<<< HEAD - for lev in range(1,60): -## for 62 level ocn testing -## for lev in range(1,62): -======= for lev in range(1,nlev): ->>>>>>> tseries_chunking new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) @@ -459,13 +453,7 @@ def weighted_rms_var_from_yr(var,reg_name,reg_num,mask_var,wgt_var,year,hist_dic # Since weights and region mask are only one level, we need to expand them to all levels region_mask = 
MA.expand_dims(slev_mask, axis=0) weights = MA.expand_dims(slev_weights, axis=0) -<<<<<<< HEAD - for lev in range(1,60): -## for 62 level ocn testing -## for lev in range(1,62): -======= for lev in range(1,nlev): ->>>>>>> tseries_chunking new_region_mask = MA.expand_dims(slev_mask, axis=0) region_mask = np.vstack((region_mask,new_region_mask)) new_weights = MA.expand_dims(slev_weights, axis=0) diff --git a/cesm_utils/cesm_utils/create_postprocess b/cesm_utils/cesm_utils/create_postprocess index fe62a63e..786bd9bd 100755 --- a/cesm_utils/cesm_utils/create_postprocess +++ b/cesm_utils/cesm_utils/create_postprocess @@ -131,11 +131,6 @@ def commandline_options(): parser.add_argument('-cesmtag', '--cesmtag', nargs=1, required=False, help='CESM repository tag (optional)') - parser.add_argument('-completechunk', '--completechunk', nargs=1, required=False, type=int, default=1, - help='timeseries control - 1: do not create incomplete chunks, '\ - ' 0: create an incomplete chunk.' \ - ' 1 is default.') - options = parser.parse_args() return options @@ -385,10 +380,6 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile if options.backtrace: backtrace = '--backtrace' - completechunk = '' - if postProcessCmd == 'cesm_tseries_generator.py': - completechunk = '--completechunk {0}'.format(options.completechunk) - # all files look good so start parsing the template file starting with the batchTmpl templateLoader = jinja2.FileSystemLoader( searchpath='{0}/Templates'.format(ppDir) ) templateEnv = jinja2.Environment( loader=templateLoader ) @@ -413,7 +404,6 @@ def create_batch(ppDir, pes, batchTmpl, runTmpl, postProcessCmd, mpiCmd, outFile 'pythonpath': pythonpath, 'processName' : processName, 'postProcessCmd' : postProcessCmd, - 'completechunk' : completechunk, 'caseRoot' : caseRoot, 'virtualEnvDir' : virtualEnvDir, 'debug' : debug, diff --git a/diagnostics/diagnostics/atm/atm_avg_generator.py b/diagnostics/diagnostics/atm/atm_avg_generator.py index 
7b34acee..2af4d828 100644 --- a/diagnostics/diagnostics/atm/atm_avg_generator.py +++ b/diagnostics/diagnostics/atm/atm_avg_generator.py @@ -277,7 +277,8 @@ def callPyAverager(start_year, stop_year, in_dir, htype, key_infile, out_dir, ca main_comm.sync() - if envDict['strip_off_vars']: + varList = [] + if envDict['strip_off_vars'].lower() in ['t','true']: varList = get_variable_list(envDict,in_dir,case_prefix,key_infile,htype,stream) main_comm.sync() diff --git a/diagnostics/diagnostics/lnd/lnd_diags_bc.py b/diagnostics/diagnostics/lnd/lnd_diags_bc.py index a1aadbed..beef66cf 100755 --- a/diagnostics/diagnostics/lnd/lnd_diags_bc.py +++ b/diagnostics/diagnostics/lnd/lnd_diags_bc.py @@ -49,9 +49,9 @@ def setup_workdir(self, env, t, scomm): workdir = '{0}/climo/{1}/{2}'.format(env['PTMPDIR_'+t], env['caseid_'+t], subdir) if (scomm.is_manager()): -## print('DEBUG lnd_diags_bc.setup_workdir t = {0}'.format(t)) -## print('DEBUG lnd_diags_bc.setup_workdir subdir = {0}'.format(subdir)) -## print('DEBUG lnd_diags_bc.setup_workdir first workdir = {0}'.format(workdir)) + print('DEBUG lnd_diags_bc.setup_workdir t = {0}'.format(t)) + print('DEBUG lnd_diags_bc.setup_workdir subdir = {0}'.format(subdir)) + print('DEBUG lnd_diags_bc.setup_workdir first workdir = {0}'.format(workdir)) try: os.makedirs(workdir) @@ -71,15 +71,15 @@ def setup_workdir(self, env, t, scomm): old_workdir = env['PTMPDIR_'+t]+'/climo/'+env['caseid_'+t]+'/'+env['caseid_'+t]+'.'+str(env['clim_first_yr_'+t])+'-'+str(endYr)+'/'+m_dir env['case'+t+'_path_climo'] = workdir -## print('DEBUG lnd_diags_bc.setup_workdir old_workdir = {0}'.format(old_workdir)) -## print('DEBUG lnd_diags_bc.setup_workdir case_t_path_climo = {0}'.format(env['case'+t+'_path_climo'])) + print('DEBUG lnd_diags_bc.setup_workdir old_workdir = {0}'.format(old_workdir)) + print('DEBUG lnd_diags_bc.setup_workdir case_t_path_climo = {0}'.format(env['case'+t+'_path_climo'])) if 'lnd' in model: workdir_mod = workdir else: workdir_mod = workdir + 
'/' + m_dir # Add links to the new wkrdir that use the expected file names (existing climos have dates, the NCL do not like dates) -## print('DEBUG lnd_diags_bc.setup_workdir workdir_mod = {0}'.format(workdir_mod)) + print('DEBUG lnd_diags_bc.setup_workdir workdir_mod = {0}'.format(workdir_mod)) climo_files = glob.glob(old_workdir+'/*.nc') for climo_file in climo_files: @@ -90,7 +90,10 @@ def setup_workdir(self, env, t, scomm): new_fn = workdir_mod + '/' +path_split[-1] # Take file name and add it to new path rc1, err_msg1 = cesmEnvLib.checkFile(new_fn, 'read') if not rc1: - os.symlink(climo_file,new_fn) + try: + os.symlink(climo_file,new_fn) + except: + print('INFO lnd_diags_bc.setup_workdir symlink {0} to {1} already exists.'.format(new_fn, climo_file)) env['DIAG_BASE'] = env['PTMPDIR_1'] env['PTMPDIR_'+t] = '{0}/climo/{1}/{2}'.format(env['PTMPDIR_'+t], env['caseid_'+t], subdir) diff --git a/diagnostics/diagnostics/lnd/model_vs_model.py b/diagnostics/diagnostics/lnd/model_vs_model.py index 590d1380..a2f0c3f5 100644 --- a/diagnostics/diagnostics/lnd/model_vs_model.py +++ b/diagnostics/diagnostics/lnd/model_vs_model.py @@ -180,7 +180,10 @@ def run_diagnostics(self, env, scomm): if len(imgs) > 0: for img in imgs: new_fn = set_dir + '/' + os.path.basename(img) - os.rename(img,new_fn) + print("... debug before rename img = {0}".format(img)) + print("... debug before rename basename = {0}".format(os.path.basename(img))) + print("... 
debug before rename new_fn = {0}".format(new_fn)) +## os.rename(img,new_fn) # Copy the set1Diff and set1Anom plots to set_1 web dir if n == '1': glob_string = web_dir+'/set1Diff'+'_*' @@ -236,27 +239,27 @@ def run_diagnostics(self, env, scomm): # lnd_create_webpage.pl call rc1, err_msg = cesmEnvLib.checkFile(web_script_1,'read') - if rc1: - try: - subprocess.check_call(web_script_1) - except subprocess.CalledProcessError as e: - print('WARNING: {0} error executing command:'.format(web_script_1)) - print(' {0}'.format(e.cmd)) - print(' rc = {0}'.format(e.returncode)) - else: - print('{0}... {1} file not found'.format(err_msg,web_script_1)) +## if rc1: +## try: +## subprocess.check_call(web_script_1) +## except subprocess.CalledProcessError as e: +## print('WARNING: {0} error executing command:'.format(web_script_1)) +## print(' {0}'.format(e.cmd)) +## print(' rc = {0}'.format(e.returncode)) +## else: +## print('{0}... {1} file not found'.format(err_msg,web_script_1)) # lnd_lookupTable.pl call rc2, err_msg = cesmEnvLib.checkFile(web_script_2,'read') - if rc2: - try: - subprocess.check_call(web_script_2) - except subprocess.CalledProcessError as e: - print('WARNING: {0} error executing command:'.format(web_script_2)) - print(' {0}'.format(e.cmd)) - print(' rc = {0}'.format(e.returncode)) - else: - print('{0}... {1} file not found'.format(err_msg,web_script_2)) +## if rc2: +## try: +## subprocess.check_call(web_script_2) +## except subprocess.CalledProcessError as e: +## print('WARNING: {0} error executing command:'.format(web_script_2)) +## print(' {0}'.format(e.cmd)) +## print(' rc = {0}'.format(e.returncode)) +## else: +## print('{0}... 
{1} file not found'.format(err_msg,web_script_2)) # move all the plots to the diag_path with the years appended to the path endYr1 = (int(env['clim_first_yr_1']) + int(env['clim_num_yrs_1'])) - 1 @@ -291,7 +294,7 @@ def run_diagnostics(self, env, scomm): if move_files: try: print('DEBUG: model_vs_model renaming web files') - os.rename(web_dir, diag_path) +## os.rename(web_dir, diag_path) except OSError as e: print ('WARNING: Error renaming %s to %s: %s' % (web_dir, diag_path, e)) diag_path = web_dir diff --git a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml index 93cbb240..e594610c 100644 --- a/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml +++ b/diagnostics/diagnostics/ocn/Config/config_diags_ocn.xml @@ -200,7 +200,7 @@ Applies to both model and control cases." @@ -304,7 +304,7 @@ Applies to both model and control cases." @@ -312,7 +312,7 @@ Applies to both model and control cases." diff --git a/diagnostics/diagnostics/ocn/model_timeseries.py b/diagnostics/diagnostics/ocn/model_timeseries.py index e5b7be81..b348e3e3 100755 --- a/diagnostics/diagnostics/ocn/model_timeseries.py +++ b/diagnostics/diagnostics/ocn/model_timeseries.py @@ -225,14 +225,14 @@ def check_prerequisites(self, env): print(' {0}'.format(e.cmd)) print(' rc = {0}'.format(e.returncode)) else: - print('model timeseries - Ocean logs do not exist. Disabling MTS_PM_YPOPLOG and MTS_PM_ENSOWVLTmodules') + print('model timeseries - Ocean logs do not exist. Disabling MTS_PM_YPOPLOG and MTS_PM_ENSOWVLT modules') env['MTS_PM_YPOPLOG'] = os.environ['PM_YPOPLOG'] = 'FALSE' env['MTS_PM_ENSOWVLT'] = os.environ['PM_ENSOWVLT'] = 'FALSE' # check if dt files exist if len(env['DTFILEPATH']) == 0: # print a message that the dt file path isn't defined and turn off POPLOG plot module - print('model timeseries - DTFILEPATH is undefined. Disabling MTS_PM_YPOPLOG and MTS_PM_ENSOWVLTmodules') + print('model timeseries - DTFILEPATH is undefined. 
Disabling MTS_PM_YPOPLOG and MTS_PM_ENSOWVLT modules') env['MTS_PM_YPOPLOG'] = os.environ['PM_YPOPLOG'] = 'FALSE' env['MTS_PM_ENSOWVLT'] = os.environ['PM_ENSOWVLT'] = 'FALSE' diff --git a/diagnostics/diagnostics/ocn/ocn_avg_generator.py b/diagnostics/diagnostics/ocn/ocn_avg_generator.py index d5761ca8..0b604386 100644 --- a/diagnostics/diagnostics/ocn/ocn_avg_generator.py +++ b/diagnostics/diagnostics/ocn/ocn_avg_generator.py @@ -463,7 +463,7 @@ def main(options, main_comm, debugMsg): envDict['htype'], envDict['TAVGDIR'], envDict['CASE'], tseries, envDict['MODEL_VARLIST'], envDict['TSERIES_YEAR0'], envDict['TSERIES_YEAR1'], envDict['DIAGOBSROOT'], - envDict['netcdf_format'], envDict['VERTICAL'], envDict['TIMESERIES_OBSPATH'], + envDict['netcdf_format'], int(envDict['VERTICAL']), envDict['TIMESERIES_OBSPATH'], main_comm, debugMsg) except Exception as error: print(str(error)) @@ -509,7 +509,7 @@ def main(options, main_comm, debugMsg): createClimFiles(envDict['CNTRLYEAR0'], envDict['CNTRLYEAR1'], envDict['cntrl_in_dir'], envDict['cntrl_htype'], envDict['CNTRLTAVGDIR'], envDict['CNTRLCASE'], False, envDict['CNTRL_VARLIST'], 0, 0, envDict['DIAGOBSROOT'], - envDict['netcdf_format'], envDict['VERTICAL'], envDict['TIMESERIES_OBSPATH'], + envDict['netcdf_format'], int(envDict['VERTICAL']), envDict['TIMESERIES_OBSPATH'], main_comm, debugMsg) except Exception as error: print(str(error)) diff --git a/lnd_diag/model1-model2/set_2.ncl b/lnd_diag/model1-model2/set_2.ncl index 5de6d997..07990c1a 100755 --- a/lnd_diag/model1-model2/set_2.ncl +++ b/lnd_diag/model1-model2/set_2.ncl @@ -27,6 +27,10 @@ begin colormap = getenv("colormap") projection=getenv("projection") +;; print((/"PTMPDIR_1: "+ptmpdir1/)) +;; print((/"PTMPDIR_2: "+ptmpdir2/)) +;; print((/"wkdir: "+wkdir/)) + flandmask = stringtofloat(land_mask) nyrs1 = stringtointeger(getenv("clim_num_yrs_1")) @@ -709,12 +713,36 @@ begin end if if (vars(i) .eq. "SNOWDP") then if (snowFlag .eq. 
1) then - wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY") +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY"/)) + fullname = wkdir+"set2_"+seasons(n)+"_"+vars(i)+"_FOSTERDAVY" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_FOSTERDAVY") + wks = gsn_open_wks(plot_type,fullname) else - wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_CMC") +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_CMC"/)) + fullname = wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_CMC" +;; print((/"Fullname: "+fullname/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_CMC") + wks = gsn_open_wks(plot_type,fullname) end if else - wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)) +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) + fullname = wkdir + "set2_" + seasons(n)+"_"+vars(i) +;; print((/"Fullname: "+fullname/)) +;; print((/"Fullname: "+wkdir + "set2_" + seasons(n)+"_"+vars(i)/)) +;; wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)) + wks = gsn_open_wks(plot_type,fullname) end if if (colormap.eq.0) then cmap = RGBtoCmap("$DIAG_RESOURCES/rgb_files/diag10.rgb") ; read in colormap @@ -777,7 +805,7 @@ begin ; Note: Don't delete res here - save until after obs are posted. res@tiMainString = cases(1) ; set case 2 titles res@gsnCenterString = " (yrs " + yrs_ave2 + ")" - if (vars(i) .eq. "CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT") then + if (vars(i) .eq. 
"CH4PROD" .or. vars(i) .eq. "CH4_SURF_EBUL_SAT" .or. vars(i) .eq. "CH4_SURF_EBUL_UNSAT" .or. vars(i) .eq. "CPOOL") then if (isatt(res,"cnLabelBarEndStyle")) then if (res@cnLabelBarEndStyle.eq."IncludeMinMaxLabels") then res@cnLabelBarEndStyle = "IncludeOuterBoxes" ; temporarily turn off minmax labels. @@ -1194,6 +1222,11 @@ begin if (isvar("wks")) then delete(wks) end if +;; print((/"plot_type: "+plot_type/)) +;; print((/"wkdir: "+wkdir/)) +;; print((/"seasons: "+seasons(n)/)) +;; print((/"vars: "+vars(i)/)) +;; print((/"Fullname: "+wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_"+k/)) wks = gsn_open_wks(plot_type,wkdir + "set2_" + seasons(n)+"_"+vars(i)+"_"+k) if (isvar("cmap")) then delete(cmap) diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index 8523056d..320cdf56 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -31,15 +31,16 @@ import string import sys import traceback +import warnings import xml.etree.ElementTree as ET from cesm_utils import cesmEnvLib import chunking -# import the MPI related module -from asaptools import partition, simplecomm, vprinter +# import the MPI related modules +from asaptools import partition, simplecomm, vprinter, timekeeper -# load the pyreshaper modules +# load the pyReshaper modules from pyreshaper import specification, reshaper #===================================================== @@ -60,9 +61,6 @@ def commandline_options(): parser.add_argument('--caseroot', nargs=1, required=True, help='fully quailfied path to case root directory') - parser.add_argument('--completechunk', nargs=1, type=int, required=False, - help='1: do not create incomplete chunks, 0: create an incomplete chunk') - parser.add_argument('--standalone', action='store_true', help='switch to indicate stand-alone post processing caseroot') @@ -137,27 +135,6 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, 
completechunk, d err_msg = "cesm_tseries_generator.py error: tseries_output_format undefined for data stream {0}.*.{1}".format(comp,file_extension) raise TypeError(err_msg) - - # check if the tseries_output_subdir is specified and create the tseries_output_dir - if file_spec.find("tseries_output_subdir") is not None: - tseries_output_subdir = file_spec.find("tseries_output_subdir").text - tseries_output_dir = '/'.join( [dout_s_root, rootdir,tseries_output_subdir] ) - if not os.path.exists(tseries_output_dir): - os.makedirs(tseries_output_dir) - else: - err_msg = "cesm_tseries_generator.py error: tseries_output_subdir undefined for data stream {0}.*.{1}".format(comp,file_extension) - raise TypeError(err_msg) - - # check if tseries_tper is specified and is valid - if file_spec.find("tseries_tper") is not None: - tseries_tper = file_spec.find("tseries_tper").text - if tseries_tper not in ["annual","yearly","monthly","weekly","daily","hourly6","hourly3","hourly1","min30"]: - err_msg = "cesm_tseries_generator.py error: tseries_tper invalid for data stream {0}.*.{1}".format(comp,file_extension) - raise TypeError(err_msg) - else: - err_msg = "cesm_tseries_generator.py error: tseries_tper undefined for data stream {0}.*.{1}".format(comp,file_extension) - raise TypeError(err_msg) - # load the tseries_time_variant_variables into a list if comp_archive_spec.find("tseries_time_variant_variables") is not None: variable_list = list() @@ -175,7 +152,17 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d comp_name = comp stream = file_extension.split('.[')[0] - stream_dates,file_slices,cal,units = chunking.get_input_dates(in_file_path+'/*'+file_extension+'*.nc') + stream_dates,file_slices,cal,units,time_period_freq = chunking.get_input_dates(in_file_path+'/*'+file_extension+'*.nc') + + # create the tseries_output_dir based on the time_period_freq global attribute + tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries/unknown'] ) + if 
time_period_freq is not None: + tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries', time_period_freq] ) + print ("tseries_output_dir = {0}".format(tseries_output_dir)) + + if not os.path.exists(tseries_output_dir): + os.makedirs(tseries_output_dir) + if comp+stream not in log.keys(): log[comp+stream] = {'slices':[],'index':0} ts_log_dates = log[comp+stream]['slices'] @@ -191,16 +178,32 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d last_time_parts = cf['end'] # create the tseries output prefix needs to end with a "." - tseries_output_prefix = tseries_output_dir+"/"+casename+"."+comp_name+stream+"." +## tseries_output_prefix = tseries_output_dir+"/"+casename+"."+comp_name+stream+"." + tseries_output_prefix = "{0}/{1}.{2}{3}.".format(tseries_output_dir,casename,comp_name,stream) + print ("tseries_output_prefix = {0}".format(tseries_output_prefix)) # format the time series variable output suffix based on the - # tseries_tper setting suffix needs to start with a "." - if tseries_tper == "yearly": + # time_period_freq global attribute - suffix needs to start with a "." 
+ if time_period_freq is None: + time_period_freq = "unknown" + start_time = ''.join(start_time_parts) + last_time = ''.join(last_time_parts) + freq_array = ["week","day","hour","min"] + if 'year' in time_period_freq: tseries_output_suffix = "."+start_time_parts[0]+"-"+last_time_parts[0]+".nc" - elif tseries_tper == "monthly": + print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) + elif 'month' in time_period_freq: tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+".nc" - elif tseries_tper in ["weekly","daily","hourly6","hourly3","hourly1","min30"]: + print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) + elif any(freq_string in time_period_freq for freq_string in freq_array): tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+start_time_parts[3]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+last_time_parts[3]+".nc" + print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) + elif 'unknown' in time_period_freq: + tseries_output_suffix = "."+start_time+"-"+last_time+".nc" + print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) + else: + err_msg = "cesm_tseries_generator.py error: invalid time_period_freq {0}.".format(time_period_freq) + raise TypeError(err_msg) # get a reshaper specification object spec = specification.create_specifier() @@ -295,7 +298,12 @@ def main(options, scomm, rank, size): if rank == 0: dout_s_root = cesmEnv['DOUT_S_ROOT'] case = cesmEnv['CASE'] - specifiers,log = readArchiveXML(caseroot, dout_s_root, case, options.standalone, options.completechunk[0], debug) + completechunk = cesmEnv['TIMESERIES_COMPLETECHUNK'] + if completechunk.upper() in ['T','TRUE']: + completechunk = 1 + else: + completechunk = 0 + specifiers,log = readArchiveXML(caseroot, dout_s_root, case, options.standalone, completechunk, debug) scomm.sync() # specifiers is a list 
of pyreshaper specification objects ready to pass to the reshaper @@ -354,6 +362,10 @@ def main(options, scomm, rank, size): # initialize simplecomm object scomm = simplecomm.create_comm(serial=False) + # setup an overall timer + timer = timekeeper.TimeKeeper() + timer.start("Total Time") + # get commandline options options = commandline_options() @@ -363,9 +375,20 @@ def main(options, scomm, rank, size): if rank == 0: print('cesm_tseries_generator INFO Running on {0} cores'.format(size)) - main(options, scomm, rank, size) - if rank == 0: - print('************************************************************') - print('Successfully completed generating variable time-series files') - print('************************************************************') + try: + main(options, scomm, rank, size) + scomm.sync() + timer.stop("Total Time") + if rank == 0: + print('************************************************************') + print('Successfully completed generating variable time-series files') + print('Total Time: {0} seconds'.format(timer.get_time("Total Time"))) + print('************************************************************') + sys.exit(0) + except Exception as error: + print(str(error)) + if options.backtrace: + traceback.print_exc() + sys.exit(1) + diff --git a/timeseries/timeseries/chunking.py b/timeseries/timeseries/chunking.py index 6528b812..7c156f6d 100755 --- a/timeseries/timeseries/chunking.py +++ b/timeseries/timeseries/chunking.py @@ -20,6 +20,7 @@ def get_input_dates(glob_str): file_slices(dictionary) - keys->filename, values->the number of slices found in the file calendar(string) - the name of the calendar type (ie, noleap, ...) 
units(string) - the calendar unit (possibly in the form 'days since....') + time_period_freq(string) - time_period_freq global attribute from first file ''' stream_files = glob.glob(glob_str) @@ -29,13 +30,16 @@ def get_input_dates(glob_str): print glob_str if len(stream_files) < 1: - return stream_dates, file_slices, None, None + return stream_dates, file_slices, None, None, None + time_period_freq = None + first = True for fn in sorted(stream_files): print 'opening ',fn # open file and get time dimension f = nc.Dataset(fn,"r") all_t = f.variables['time'] + nc_atts = f.ncattrs() # add the file name are how many slices it contains file_slices[fn] = len(all_t) @@ -48,7 +52,16 @@ def get_input_dates(glob_str): for a in all_t.ncattrs(): att[a] = all_t.__getattribute__(a) - return stream_dates,file_slices,att['calendar'],att['units'] + # get the time_period_freq global attribut from the first file + if first: + try: + time_period_freq = f.getncattr('time_period_freq') + print 'time_period_freq = ',time_period_freq + except: + print 'Global attribute time_period_freq not found - set to None' + first = False + + return stream_dates,file_slices,att['calendar'].lower(),att['units'],time_period_freq def get_cesm_date(fn,t=None): @@ -82,7 +95,7 @@ def get_cesm_date(fn,t=None): else: d = f.variables['time'][1] - d1 = cf_units.num2date(d,att['units'],att['calendar']) + d1 = cf_units.num2date(d,att['units'],att['calendar'].lower()) return [str(d1.year).zfill(4),str(d1.month).zfill(2),str(d1.day).zfill(2),str(d1.hour).zfill(2)] From db5799737a9ca56081c58dc4b2651eda4f736c16 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Fri, 18 Nov 2016 17:35:02 -0700 Subject: [PATCH 5/6] updates and bug fixes for timeseries chunking - still not working for cism data --- Config/config_timeseries.xml | 62 +++++++++++++++++++ Config/config_timeseries.xsd | 4 ++ .../timeseries/cesm_tseries_generator.py | 45 +++++++------- timeseries/timeseries/chunking.py | 2 +- 4 files changed, 91 
insertions(+), 22 deletions(-) diff --git a/Config/config_timeseries.xml b/Config/config_timeseries.xml index 27dc3ca6..3485b134 100644 --- a/Config/config_timeseries.xml +++ b/Config/config_timeseries.xml @@ -4,17 +4,41 @@ + + + + + + + + + + + + + + + + + + + + + + atm True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -22,6 +46,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -29,6 +54,7 @@ hist TRUE netcdf4c + hour_6 years 1 @@ -36,6 +62,7 @@ hist TRUE netcdf4c + hour_3 years 1 @@ -43,6 +70,7 @@ hist TRUE netcdf4c + hour_1 years 1 @@ -50,6 +78,7 @@ hist TRUE netcdf4c + min_30 years 1 @@ -57,6 +86,7 @@ hist FALSE netcdf4c + undefined years 10 @@ -64,6 +94,7 @@ hist FALSE netcdf4c + undefined years 10 @@ -71,6 +102,7 @@ hist FALSE netcdf4c + undefined years 10 @@ -97,11 +129,13 @@ lnd True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -109,6 +143,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -116,6 +151,7 @@ hist TRUE netcdf4c + hour_6 years 1 @@ -123,6 +159,7 @@ hist TRUE netcdf4c + hour_3 years 1 @@ -130,6 +167,7 @@ hist TRUE netcdf4c + hour_1 years 1 @@ -137,6 +175,7 @@ hist TRUE netcdf4c + min_30 years 1 @@ -157,11 +196,13 @@ rof True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -169,6 +210,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -176,6 +218,7 @@ hist TRUE netcdf4c + hour_6 years 1 @@ -183,6 +226,7 @@ hist TRUE netcdf4c + hour_3 years 1 @@ -203,11 +247,13 @@ rof True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -215,6 +261,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -222,6 +269,7 @@ hist TRUE netcdf4c + hour_6 years 1 @@ -229,6 +277,7 @@ hist TRUE netcdf4c + hour_3 years 1 @@ -249,11 +298,13 @@ ice True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -267,11 +318,13 @@ ocn True + noleap hist TRUE netcdf4c + month_1 years 10 @@ -279,6 +332,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -286,6 +340,7 @@ hist TRUE netcdf4c + year_1 years 10 @@ -293,6 +348,7 @@ hist FALSE netcdf4c + month_1 years 10 @@ -300,6 +356,7 @@ hist TRUE netcdf4c + year_1 years 
10 @@ -307,6 +364,7 @@ hist TRUE netcdf4c + day_1 years 5 @@ -320,11 +378,13 @@ glc True + noleap hist TRUE netcdf4c + year_1 years 10 @@ -334,11 +394,13 @@ wav True + noleap hist TRUE netcdf4c + month_1 years 10 diff --git a/Config/config_timeseries.xsd b/Config/config_timeseries.xsd index f5f94103..c6f2dbe0 100644 --- a/Config/config_timeseries.xsd +++ b/Config/config_timeseries.xsd @@ -9,9 +9,11 @@ + + @@ -23,6 +25,7 @@ + @@ -51,6 +54,7 @@ + diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py index 320cdf56..d18a5404 100755 --- a/timeseries/timeseries/cesm_tseries_generator.py +++ b/timeseries/timeseries/cesm_tseries_generator.py @@ -109,6 +109,8 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d comp = comp_archive_spec.get("name") rootdir = comp_archive_spec.find("rootdir").text multi_instance = comp_archive_spec.find("multi_instance").text + default_calendar = comp_archive_spec.find("default_calendar").text + print("default_calendar = {0}".format(default_calendar)) # for now, set instance value to empty string implying only 1 instance instance = "" @@ -124,7 +126,7 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d # check if the tseries_create element is set to TRUE if tseries_create.upper() in ["T","TRUE"]: - + # check if tseries_format is an element for this file_spec and if it is valid if file_spec.find("tseries_output_format") is not None: tseries_output_format = file_spec.find("tseries_output_format").text @@ -145,6 +147,9 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d history_files = list() in_file_path = '/'.join( [dout_s_root,rootdir,subdir] ) + # get XML tseries elements for chunking + if file_spec.find("tseries_tper") is not None: + tseries_tper = file_spec.find("tseries_tper").text if file_spec.find("tseries_filecat_tper") is not None: tper = file_spec.find("tseries_filecat_tper").text if 
file_spec.find("tseries_filecat_n") is not None: @@ -153,11 +158,16 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d stream = file_extension.split('.[')[0] stream_dates,file_slices,cal,units,time_period_freq = chunking.get_input_dates(in_file_path+'/*'+file_extension+'*.nc') + # check if the calendar attribute was read or not + print("1 cal = {0}".format(cal)) + if cal is None or cal == "none": + cal = default_calendar + print("2 cal = {0}".format(cal)) - # create the tseries_output_dir based on the time_period_freq global attribute - tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries/unknown'] ) + # the tseries_tper should be set in using the time_period_freq global file attribute if it exists if time_period_freq is not None: - tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries', time_period_freq] ) + tseries_tper = time_period_freq + tseries_output_dir = '/'.join( [dout_s_root, rootdir, 'proc/tseries', tseries_tper] ) print ("tseries_output_dir = {0}".format(tseries_output_dir)) if not os.path.exists(tseries_output_dir): @@ -178,32 +188,22 @@ def readArchiveXML(caseroot, dout_s_root, casename, standalone, completechunk, d last_time_parts = cf['end'] # create the tseries output prefix needs to end with a "." -## tseries_output_prefix = tseries_output_dir+"/"+casename+"."+comp_name+stream+"." tseries_output_prefix = "{0}/{1}.{2}{3}.".format(tseries_output_dir,casename,comp_name,stream) print ("tseries_output_prefix = {0}".format(tseries_output_prefix)) # format the time series variable output suffix based on the - # time_period_freq global attribute - suffix needs to start with a "." - if time_period_freq is None: - time_period_freq = "unknown" - start_time = ''.join(start_time_parts) - last_time = ''.join(last_time_parts) + # tseries_tper setting suffix needs to start with a "." 
freq_array = ["week","day","hour","min"] - if 'year' in time_period_freq: + if "year" in tseries_tper: tseries_output_suffix = "."+start_time_parts[0]+"-"+last_time_parts[0]+".nc" - print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) - elif 'month' in time_period_freq: + elif "month" in tseries_tper: tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+".nc" - print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) - elif any(freq_string in time_period_freq for freq_string in freq_array): + elif any(freq_string in tseries_tper for freq_string in freq_array): tseries_output_suffix = "."+start_time_parts[0]+start_time_parts[1]+start_time_parts[2]+start_time_parts[3]+"-"+last_time_parts[0]+last_time_parts[1]+last_time_parts[2]+last_time_parts[3]+".nc" - print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) - elif 'unknown' in time_period_freq: - tseries_output_suffix = "."+start_time+"-"+last_time+".nc" - print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) else: - err_msg = "cesm_tseries_generator.py error: invalid time_period_freq {0}.".format(time_period_freq) + err_msg = "cesm_tseries_generator.py error: invalid tseries_tper = {0}.".format(tseries_tper) raise TypeError(err_msg) + print ("tseries_output_suffix = {0}".format(tseries_output_suffix)) # get a reshaper specification object spec = specification.create_specifier() @@ -356,6 +356,9 @@ def main(options, scomm, rank, size): chunking.write_log('{0}/logs/ts_status.log'.format(caseroot), log) scomm.sync() + + return 0 + #=================================== if __name__ == "__main__": @@ -376,7 +379,7 @@ def main(options, scomm, rank, size): print('cesm_tseries_generator INFO Running on {0} cores'.format(size)) try: - main(options, scomm, rank, size) + status = main(options, scomm, rank, size) scomm.sync() timer.stop("Total Time") if rank == 0: diff --git 
a/timeseries/timeseries/chunking.py b/timeseries/timeseries/chunking.py index 7c156f6d..fb124e63 100755 --- a/timeseries/timeseries/chunking.py +++ b/timeseries/timeseries/chunking.py @@ -52,7 +52,7 @@ def get_input_dates(glob_str): for a in all_t.ncattrs(): att[a] = all_t.__getattribute__(a) - # get the time_period_freq global attribut from the first file + # get the time_period_freq global attribute from the first file if first: try: time_period_freq = f.getncattr('time_period_freq') From 7dfcda13aefacacc887acecf9ab530f9eeaac814 Mon Sep 17 00:00:00 2001 From: Alice Bertini Date: Sun, 27 Nov 2016 15:00:09 -0700 Subject: [PATCH 6/6] minor update to sfcflx_za.ncl to handle both 1 degree and hi-res data. Merged Ernesto's changes to sfcflx_za.ncl for hi-res ocn data. Completed testing on chunking of timeseries data for all component models except CISM. CISM data fails because time units are in "common years since" which is CF compliant but not considered valid for CMIP6 data. Land-Ice WG is working on changing to hours or seconds. Also found a problem in the ocn raw history files not containing the global attribute TIME_PERIOD_FREQ. Test suite: b.e20.B1850.f09_g16.pi_control.all.123, 9 years of monthly data for atm, ice, lnd, and rof and 6 years of annual, monthly and daily data for ocn. 
Fixes [Github issue #]: Issue #36 and Issue #14 User interface changes: changes to timeseries.xml settings Input data changes: None Code review: --- ocn_diag/ncl_lib/sfcflx_za.ncl | 49 +++++++++++++++++++++++----------- 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/ocn_diag/ncl_lib/sfcflx_za.ncl b/ocn_diag/ncl_lib/sfcflx_za.ncl index 47fa760a..63548540 100644 --- a/ocn_diag/ncl_lib/sfcflx_za.ncl +++ b/ocn_diag/ncl_lib/sfcflx_za.ncl @@ -4,11 +4,9 @@ load "$NCLPATH/get_environment.ncl" begin -print("") -print("Running sfcflx_za_highres.ncl") - file_netcdf_za = "za_"+file_netcdf file_flux_obs_za = "za_"+file_flux_obs +file_wind_obs_za = "za_"+file_wind_obs fileid = addfile(file_netcdf,"r") f_za = addfile(file_netcdf_za,"r") @@ -17,7 +15,7 @@ yesobs = isfilepresent(file_flux_obs_za) if (yesobs) then f_obs = addfile(file_flux_obs_za,"r") else - print("Observational file of zonal average does not exisit.") + print("Observational file of zonal average does not exist.") end if nlev = 21 @@ -35,20 +33,33 @@ n_reg = dimsizes(region_index) varsinfile = getfilevarnames(f_za) - field_name = (/ "SHF_QSW", "MELTH_F", "SENH_F", \ - "LWUP_F", "LWDN_F", "EVAP_F", "PREC_F", \ - "MELT_F", "SALT_F", "SNOW_F" /) -; "MELT_F", "SALT_F", "SNOW_F", "ROFF_F" /) +field_name = (/ "SHF_TOTAL", "SHF_QSW", "SFWF_TOTAL", "PREC_F", \ + "EVAP_F", "MELT_F", "ROFF_F", "SALT_F", \ + "SENH_F", "LWUP_F", "LWDN_F", "MELTH_F", \ + "QFLUX", "SNOW_F" /) -if (yesobs) then +if (resolution .eq. "gx1v6") then + obsvar_name = (/ "nethflx", "swnet", "netfwflx", "rain", \ + "evap", "meltw", "roff", "", \ + "sen", "lwup", "lwdn", "melth", \ + "", "snow" /) +else + obsvar_name = (/ "", "", "", "", \ + "", "", "", "", \ + "", "", "", "", \ + "", "" /) +end if + + +if (resolution .eq. 
"tx0.1v2") then obsvar_name = (/ "swnet", "melth", "sen", \ "lwup", "lwdn", "evap", "rain", \ "meltw", "", "snow" /) -; "meltw", "", "snow", "roff" /) else - obsvar_name = (/ "", "", "", \ - "", "", "", "", \ - "", "", "" /) + obsvar_name = (/ "", "", "", "", \ + "", "", "", "", \ + "", "", "", "", \ + "", "" /) end if n_fields = dimsizes(field_name) @@ -111,13 +122,17 @@ do n=0, n_fields-1 ;;-- Read observations from "obsfile" file --;; - if (obsvar .ne. "" .and. yesobs ) then +;; if (obsvar .ne. "" .and. yesobs ) then + if (obsvar .ne. "") then field_obs = obsfile->$obsvar$ end if - if ( field_name(n) .eq. "PREC_F" .and. yesobs ) then +;; if ( field_name(n) .eq. "PREC_F" .and. yesobs ) then + if ( field_name(n) .eq. "PREC_F" ) then obsvar = obs_prefix + "Foxx_snow" snow = obsfile->$obsvar$ - field_obs = where(field_obs .lt. 1.e10 .and. snow .lt. 1.e10, field_obs + snow, field_obs@_FillValue) +;; ASB - error in logic since field_obs is not defined if PREC_F is not in obsfile +;; field_obs = where(field_obs .lt. 1.e10 .and. snow .lt. 1.e10, field_obs + snow, field_obs@_FillValue) + field_obs = where(snow .lt. 1.e10, snow, snow@_FillValue) end if ;;-- Read model output from "f_za" file --;; @@ -162,6 +177,8 @@ do n=0, n_fields-1 field_obs = where(field_obs .lt. 1e10,field_obs * 10, field_obs@_FillValue) end if + print( " plotting zonal average of " + field_name(n)) + ;wks = gsn_open_wks("x11",field_name(n)) wks = gsn_open_wks(img_format,field_name(n)+ "_GLO_za")