convert4fuse.py
#!/usr/bin/env python
import xarray as xr
import netCDF4
import numpy as np
import pandas as pd
import pyeto as pt
import glob, os
import aasi as ai
# User-supplied
mapfile = '/glade/p/work/manab/ff/forcinggeneration/mappingislandpark.nc'
forcdir = '/glade/p/work/manab/ff/islandpark/rawinput'
concatdir = '/glade/p/work/manab/ff/islandpark/rawinputconcat/'
obsfile = '/glade/p/work/manab/ff/forcinggeneration/pilot_basin_data/isli_bor_flow_data.txt'
outdir = '/glade/p/work/manab/ff/islandpark/inputnew/'
obsvar = 'unregflow'
starttime = '1970-01-01'
endtime = '2016-12-31'
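
# Notes on the inputs (inferred from their use below, not documented elsewhere): the mapping
# file is expected to provide 'latitude', 'longitude' and 'weight' variables, and the
# observation file is a text table read with pd.read_table (14 header rows skipped) whose
# columns are time/gaugeheight/resinflow/dailydisc/unregflow.
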
def calcWeightedAvg(ncdat, varname):
    '''
    Calculates the weighted average
    of any <varname> in the mapping file
    '''
    wAvg = sum(ncdat[varname]*ncdat['weight'])
    return(wAvg)
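
# Quick sanity check (hypothetical numbers, not from the mapping file): with weights
# [0.25, 0.75] and latitudes [44.0, 44.2], calcWeightedAvg returns
# 0.25*44.0 + 0.75*44.2 = 44.15. Note this assumes the weights already sum to 1.
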
def calcPET(lat, time, tmin, tmax, tmean):
    '''
    Calculates potential evapotranspiration using the
    Hargreaves equation (Hargreaves and Samani, 1985)
    '''
    latrad = pt.deg2rad(lat)                            # Latitude to radians
    dayofyear = pd.Series(time).dt.dayofyear.values     # Day of year (1-366)
    etrad = []
    pet = []
    # Calculate extraterrestrial radiation for each day
    for x in np.nditer(dayofyear):
        soldec = pt.sol_dec(x)                          # Solar declination
        sha = pt.sunset_hour_angle(latrad, soldec)      # Sunset hour angle
        ird = pt.inv_rel_dist_earth_sun(x)              # Inverse relative distance Earth-Sun
        etrad.append(pt.et_rad(latrad, soldec, sha, ird))   # Extraterrestrial radiation
    # Calculate PET using Hargreaves
    for x in range(0, len(etrad)):
        pet.append(pt.hargreaves(tmin[x], tmax[x], tmean[x], etrad[x]))
    pet = np.array(pet)
    return(pet)
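
# Minimal usage sketch for calcPET (synthetic values, not part of the workflow):
#   times = pd.date_range('2010-06-01', periods=3).values
#   tmin = np.array([5.0, 6.0, 4.0]); tmax = np.array([20.0, 22.0, 18.0])
#   pet = calcPET(44.0, times, tmin, tmax, (tmin + tmax) / 2)   # PET in mm/day per timestep
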
def ncextract(ncfile):
    '''
    Opens a forcing netCDF file
    '''
    forcdat = xr.open_dataset(ncfile)
    # Extract the variable values from the forcing file
    prcp = forcdat['prcp'].values
    tmax = forcdat['tmax'].values
    tmin = forcdat['tmin'].values
    time = forcdat['time'].values
    tmean = (tmax + tmin)/2
    return(forcdat, prcp, time, tmin, tmax, tmean)

if __name__ == '__main__':
    mapdat = xr.open_dataset(mapfile)
    lat = calcWeightedAvg(mapdat, 'latitude')
    lon = calcWeightedAvg(mapdat, 'longitude')

    # Process the raw forcing files
    forcfiles = glob.glob(forcdir + '/*.nc')                        # List of all forcing files
    forcfiles.sort()
    srchlist = glob.glob(forcdir + '/*ens_forc.PNW.0625.2010*.nc')  # Search string list
    srchlist.sort()

    # Concatenate each ensemble by time and export to concatdir
    for count, value in enumerate(srchlist):
        # Search string for each ensemble, e.g. '002.nc'
        srchstring = value.split('.')[-2] + '.' + value.split('.')[-1]
        # Collect all forcing files belonging to this ensemble
        enslist = []
        for forcfile in forcfiles:
            if forcfile.endswith(srchstring):
                enslist.append(forcfile)
        # Concatenate along time and write out a netCDF file
        # (newer xarray versions may also require combine='nested' when concat_dim is given)
        ncconcat_time = xr.open_mfdataset(enslist, concat_dim='time')
        ncconcat_time = ncconcat_time.sortby('time')
        ncconcat_time.to_netcdf(concatdir + srchstring)
        print("Creating concatenated FUSE forcing file ", count + 1, " of ", len(srchlist))
    # Process the concatenated forcing files
    concatforcfiles = glob.glob(concatdir + '/*.nc')    # List of all concatenated files
    concatforcfiles.sort()

    # Import flow observations for the period
    obs = pd.read_table(obsfile, skiprows = 14, header = None,
                        names = ['time', 'gaugeheight', 'resinflow', 'dailydisc', 'unregflow'])
    obs['time'] = pd.to_datetime(obs['time'])
    obs = obs.set_index(['time'])                       # Set time as index
    obs = obs.loc[starttime:endtime]
    obs = pd.to_numeric(obs[obsvar], errors = 'coerce') # CFS
    obs[obs < 0] = 0                                    # Set all negative values to zero
    obs = ((obs*0.028316847)/1279729289.0710001)*8.64e+7   # CFS -> CMS (*0.028316847) -> m/s (/1279729289.0710001) -> mm/day (*8.64e+7)
    obs2 = np.reshape(obs, (obs.shape[0], 1, 1))
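
    # Unit-conversion sketch (assuming 1279729289.071 is the basin area in m^2):
    #   1 cfs = 0.028316847 m^3/s; dividing the volumetric flow [m^3/s] by the basin area [m^2]
    #   gives a depth rate in m/s, and m/s * 86400 s/day * 1000 mm/m = m/s * 8.64e7 = mm/day.
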
    # Process all concatenated files and write them to the final input directory
    for count, value in enumerate(concatforcfiles):
        [forcdat, prcp, time, tmin, tmax, tmean] = ncextract(value)
        pet = calcPET(lat, time, tmin, tmax, tmean)
        prcp = np.reshape(prcp, (prcp.shape[0], 1, 1))      # mm/day
        pet = np.reshape(pet, (pet.shape[0], 1, 1))         # mm/day
        tmean = np.reshape(tmean, (tmean.shape[0], 1, 1))   # degree C

        # Create the new netCDF file
        f = xr.Dataset({'pr': (['time', 'latitude', 'longitude'], prcp),
                        'pet': (['time', 'latitude', 'longitude'], pet),
                        'temp': (['time', 'latitude', 'longitude'], tmean),
                        'q_obs': (['time', 'latitude', 'longitude'], obs2)
                        },
                       coords={'time': time,
                               'latitude': [lat],
                               'longitude': [lon]}
                       )
        f['pet'] = f['pet'].where(f['pet'] >= 0, 0)          # Replace negative PET values with zero
        f.to_netcdf(outdir + os.path.basename(value))
        print("Creating final FUSE forcing file ", count + 1, " of ", len(concatforcfiles))