Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added several Mission atlantic developments and a shelve fixer tool #121

Open
wants to merge 19 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ Executable name | What it does | Command
`bgcval2_make_report` | makes the single model HTML report. | bgcval2_make_report jobID
`analysis_compare` | runs comparison of multiple single jobs | analysis_compare
`batch_timeseries` | Submits single job time series analysis to slurm | batch_timeseries
`revert_shelves` | Removes specific years or months from processed shelves | revert_shelves -j jobID -y years -m months


### Checking out development branches
Expand Down Expand Up @@ -607,6 +608,31 @@ The third place that these plots are kept is on the public facing jasmin directo

This is where the report is hosted.



Fixing mistakes
---------------

The `revert_shelves` tool is built to remove erroneous processed data from bgcval2
shelves (where processed data is stored).
This is useful for processing updated simulations, new runs, post hoc fixes,
and for debugging.

The `revert_shelves` tool is set up to remove data for a given jobID, year, and even month.

A typical usage would be:
```
revert_shelves -j jobID1 jobID2 -y 2009 2010 -m 02 11 -k AMOC
```

This would remove all AMOC data from February and November of the
years 2009 and 2010.
The default behaviour for the `-k` argument is to run this over all keys.

Test this command using the dry run argument `-d`.



Point to point analysis
-----------------------

Expand Down
2 changes: 1 addition & 1 deletion bgcval2/analysis_compare.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,7 +415,7 @@ def timeseries_compare(jobs,
data=-999, # in situ data distribution
title=title,
filename=bvt.folder(imageFolder) +
'_'.join([name, region, layer, ts, smoothing + '.png']),
'_'.join([name, region, layer, ts, metric, smoothing + '.png']),
units=units,
plotStyle=ts,
smoothing=smoothing,
Expand Down
10 changes: 5 additions & 5 deletions bgcval2/bgcval.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,13 +38,13 @@
import sys
from multiprocessing import Pool

from .download_from_mass import findLastFinishedYear
from .analysis_timeseries import analysis_timeseries, singleTimeSeries, singleTimeSeriesProfile
from bgcval2.download_from_mass import findLastFinishedYear
from bgcval2.analysis_timeseries import analysis_timeseries, singleTimeSeries, singleTimeSeriesProfile
# these modules are no more and break the tests
# from .analysis_timeseries import level1KeysDict, physKeysDict, bgcKeysDict, keymetricsfirstDict
from .analysis_p2p import analysis_p2p, p2pDict_level2, p2pDict_physics, single_p2p
from .bgcval2_make_report import html5Maker
from .bv2tools import folder
from bgcval2.analysis_p2p import analysis_p2p, p2pDict_level2, p2pDict_physics, single_p2p
from bgcval2.bgcval2_make_report import html5Maker
from bgcval2.bv2tools import folder


def timeseriesParrallelL1(index):
Expand Down
68 changes: 49 additions & 19 deletions bgcval2/bgcval2_make_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ def newImageLocation(fn):
Level1Profiles = True
level2Horizontal = True
level2Physics = False
level2_auto = True
summarySections = False
level3OMZ = False
Level3Salinity = False
Expand Down Expand Up @@ -953,27 +954,51 @@ def newImageLocation(fn):
FileLists=FileLists,
FileOrder=FileOrder)

if level2Physics:
l2Fields = [
'Temperature',
'Salinity',
'MLD',
'ZonalCurrent',
'MeridionalCurrent',
'VerticalCurrent',
]




if level2Physics or level2_auto:
if level2Physics:
l2Fields = [
'Temperature',
'Salinity',
'MLD',
'ZonalCurrent',
'MeridionalCurrent',
'VerticalCurrent',
]
slices = [
'Surface',
'1000m',
'Transect',
]
SectionTitle = 'Level 2 - Physics'
region = 'Global'

if level2_auto:
l2Fields = glob(imagedir + '/' + jobID + '/P2Pplots/*/*')
l2Fields = [os.path.basename(fn) for fn in sorted(l2Fields)]
levels = ['Surface', '4000m', '2000m', '1000m', '750m','500m','200m', '100m', '50m', 'Transect']
outdict = {}
outlevels = {}
for i, fn in enumerate(l2Fields):
for lv in levels:
if fn.find(lv) > -1:
outlevels[lv] = True
fn = fn.replace(lv, '')
outdict[fn] = True

l2Fields = [key for key, v in outdict.items()]
slices = [key for key, v in outlevels.items()]
SectionTitle = 'Level 2'
region = '*'

hrefs = []
Titles = {}
SidebarTitles = {}
Descriptions = {}
FileLists = {}
SectionTitle = 'Level 2 - Physics'
region = 'Global'
slices = [
'Surface',
'1000m',
'Transect',
]
FileOrder = {}

for key in sorted(l2Fields):
Expand Down Expand Up @@ -1003,7 +1028,7 @@ def newImageLocation(fn):
for s in slices:
if s in [
'Surface',
'1000m',
'*',
]:
vfiles.extend(
glob(imagedir + '/' + jobID + '/P2Pplots/*/*' + key +
Expand All @@ -1013,6 +1038,11 @@ def newImageLocation(fn):
glob(imagedir + '/' + jobID + '/P2Pplots/*/*' + key +
'*/*/*' + s + '*' + region + '*' + key + '*' +
year + '*robinquad-cartopy.png'))
vfiles.extend(
glob(imagedir + '/' + jobID + '/P2Pplots/*/*' + key +
'*/*/*' + s + '*' + region + '*' + key + '*' +
year + '*.png'))

if s in [
'Transect',
]:
Expand Down Expand Up @@ -1473,8 +1503,8 @@ def newImageLocation(fn):
'Salinty_Global_Surface',
'FreshwaterFlux_Global',
'TotalHeatFlux',
'MA_SST',
'MA_SSS',
'MA_SST_Global_Surface',
'MA_SSS_Global_Surface',
'MA_Drake',
'MA_AMOC_26N',
'MA_AEU',
Expand Down
2 changes: 1 addition & 1 deletion bgcval2/default-bgcval2-config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ standard-paths:
p2p_ppDir: "/local1/data/scratch/ledm/bgcval2_postProcessed"
imagedir: "images"
#ModelFolder_pref: "gle/dare_baseline" #AMM7
ModelFolder_pref: "ledm/MissionAtlantic/SENEMO"
ModelFolder_pref: "ledm/MissionAtlantic/"
orcaGridfn: "/data/proteus2/scratch/ledm/MissionAtlantic/SENEMO/eORCA025_mesh_mask_mes_v2.nc"
amm7Gridfn: "/users/modellers/ledm/workspace/coast/AMM7_grid/mesh_mask.nc"
ObsFolder: "/data/sthenno1/scratch/ledm/Observations/"
Expand Down
6 changes: 6 additions & 0 deletions bgcval2/functions/TotalIntPP.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
from bgcval2.bgcvaltools.dataset import dataset
from bgcval2.functions.get_kwarg_file import get_kwarg_file
from bgcval2.functions.standard_functions import find_best_var
import os


global loadedArea
global model_area
Expand Down Expand Up @@ -84,6 +86,10 @@ def MA_TotalIntPP(nc, keys, **kwargs):
# This will only work for NEMO models:
thkfn = nc.filename.replace('diag_T', 'grid_T').replace('ptrc_T', 'grid_T')
print('MA_TotalIntPP: opening:', thkfn)
if not os.path.exists(thkfn):
print('MA_TotalIntPP: ERROR: does not exist:', thkfn)
return np.ma.masked
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

np.ma.masked what? That's just a generic Numpy function

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's a masked value, so that the arrays all line up correctly, even if there's no data here.

Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that's a <class 'numpy.ma.core.MaskedConstant'> which is a stuffy class (that, in effect is just a -- with a value of , all you need is the value that gets masked (1e20 or -999 or whatever). Note that the fillvalue of np.ma.masked is not always what one needs be, it's 999999 so that might screw up things https://numpy.org/doc/stable/reference/maskedarray.baseclass.html


thknc = dataset(thkfn, 'r')
thick = thknc.variables['thkcello'][:]
thknc.close()
Expand Down
28 changes: 22 additions & 6 deletions bgcval2/functions/circulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,6 @@ def TwentySixNorth(nc,keys,**kwargs):

assert 0


zv = np.ma.array(nc.variables[keys[0]][..., latslice26Nnm, :]) # m/s
atlmoc = np.array(np.zeros_like(zv[0, :, :, 0]))
print('TwentySixNorth:', e3v_AMOC26N.shape, atlmoc.shape, zv.shape)
Expand Down Expand Up @@ -290,11 +289,28 @@ def twentysixnorth025(nc,keys,**kwargs):
depths = np.ma.masked_where(thkcello.mask + np.abs(depths)<500., depths) # masked above 500m depth.

e1v = e1v_AMOC26N[:,None, :, :]
flux = vo * depths * e1v_AMOC26N[:,None, :, :]/1.E06

moc=np.ma.zeros_like(flux)
np.ma.cumsum(flux[:,::-1], axis=1, out=moc ) # sum floor to surface
return moc.max()
# flux = vo * thkcello * e1v_AMOC26N[:,None, :, :]/1.E06
# moc=np.ma.zeros_like(flux)
# np.ma.cumsum(flux[:,::-1], axis=1, out=moc ) # sum floor to surface
# return moc.max()
atlmoc = np.array(np.zeros_like(vo.squeeze()))
#print(atlmoc.shape, thkcello.shape, depths.shape, vo.shape, e1v_AMOC26N.shape, e3v_AMOC26N.shape)
# (75, 264) (1, 75, 1, 264) (1, 75, 1, 264) (1, 75, 1, 264) (1, 1, 264) (75, 264)

#assert 0

Comment on lines +292 to +301
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
# flux = vo * thkcello * e1v_AMOC26N[:,None, :, :]/1.E06
# moc=np.ma.zeros_like(flux)
# np.ma.cumsum(flux[:,::-1], axis=1, out=moc ) # sum floor to surface
# return moc.max()
atlmoc = np.array(np.zeros_like(vo.squeeze()))
#print(atlmoc.shape, thkcello.shape, depths.shape, vo.shape, e1v_AMOC26N.shape, e3v_AMOC26N.shape)
# (75, 264) (1, 75, 1, 264) (1, 75, 1, 264) (1, 75, 1, 264) (1, 1, 264) (75, 264)
#assert 0

for (t, z, la, lo), vox in np.ndenumerate(vo):
if not vox:
continue
if not depths[t, z, la, lo] or np.ma.is_masked(depths[t, z, la, lo]):
continue
atlmoc[z, la] = atlmoc[z, la] - e1v[t, 0, la, lo] * thkcello[t, z, la, lo] * vo[t, z, la, lo] / 1.E06

for z in range(thkcello.shape[1] -2, 1, -1): # add from the bottom up
atlmoc[z, :] = atlmoc[z+1, :] + atlmoc[z, :]
print('AMOC:', atlmoc.max())
return atlmoc.max()



def AMOC26N(nc, keys, **kwargs):
Expand Down
16 changes: 15 additions & 1 deletion bgcval2/p2p/matchDataAndModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
# local imports
import bgcval2.bgcvaltools.bv2tools as bvt
from bgcval2.bgcvaltools.pftnames import CMIP5models

#####
# These are availalble in the module:
# https://gitlab.ecosystem-modelling.pml.ac.uk/ledm/netcdf_manip
Expand Down Expand Up @@ -268,8 +269,10 @@ def _convertDataTo1D_(self, ):
'100m',
'200m',
'500m',
'750m',
'1000m',
'2000m',
'4000m',
]:
print(
'matchDataAndModel:\tconvertDataTo1D:\tSlicing along depth direction.'
Expand All @@ -279,8 +282,10 @@ def _convertDataTo1D_(self, ):
if self.depthLevel == '100m': z = 100.
if self.depthLevel == '200m': z = 200.
if self.depthLevel == '500m': z = 500.
if self.depthLevel == '750m': z = 750.
if self.depthLevel == '1000m': z = 1000.
if self.depthLevel == '2000m': z = 2000.
if self.depthLevel == '4000m': z = 4000.

if nc.variables[self.datacoords['z']].ndim == 1:
k = bvt.getORCAdepth(
Expand Down Expand Up @@ -921,6 +926,7 @@ def loadMesh(self, modelfile=None):
if self.loncc.ndim == 1 and self.loncc.shape != self.latcc.shape:
self.loncc, self.latcc = np.meshgrid(self.loncc, self.latcc)


# self.depthcc = choose_best_ncvar(ncER, depthNames)[:]
# if 'deptht' in list(ncER.variables.keys()):
# self.depthcc = ncER.variables['deptht'][:].squeeze()
Expand All @@ -930,7 +936,13 @@ def loadMesh(self, modelfile=None):
# self.depthcc = ncER.variables['lev'][:].squeeze()
# else:
# print(self.modelcoords['z'], ncER.variables.keys())
self.depthcc = ncER.variables[self.modelcoords['z']][:]
if self.modelcoords['z'] in ncER.variables.keys():
self.depthcc = ncER.variables[self.modelcoords['z']][:]
else:
print('load mesh:', self.modelcoords['z'], 'is missing', ncER.variables.keys())
if not set(ncER.variables.keys()).intersection(set(depthNames)):
print('load mesh: no depth field available in ', modelfile)
self.depthcc = np.array([0])

# self.depthcc = choose_best_ncvar(ncER, depthNames)[:]
self.datescc = var_to_datetime(ncER.variables[self.modelcoords['t']])
Expand Down Expand Up @@ -1122,6 +1134,8 @@ def var_to_datetime(ncvar):
if units in ['months since 0000-01-01 00:00:00', ]:
units = 'months since 2000-01-01 00:00:00'
return num2date(ncvar[:], 'months since 2000-01-01 00:00:00', calendar='360_day')
elif units.find('months since') > -1:
return num2date(ncvar[:], units, calendar='360_day')

return num2date(ncvar[:], ncvar.units, calendar=calendar)

Expand Down
4 changes: 4 additions & 0 deletions bgcval2/p2p/testsuite_p2p.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@

"""
#Standard Python modules:
import os
from sys import argv, exit
from os.path import exists
from calendar import month_name
Expand Down Expand Up @@ -162,6 +163,9 @@ def testsuite_p2p(
# Match observations and model.
# Does not produce and plots.
#annual = True
if not os.path.exists(av[name].get('dataFiles', '')):
print('Data file missing:', name, av[name].get('dataFiles', ''))
assert 0
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

come on, get rid of all this old crappe - raise an exception or continue

b = matchDataAndModel(av[name]['dataFiles'],
av[name]['modelFiles'],
dataType=name,
Expand Down
Loading
Loading