hotfix(sourcedata::setup_array): remove dependency on original model top file in load context #81

Merged
merged 2 commits on May 7, 2024
Changes from all commits
84 changes: 45 additions & 39 deletions mfsetup/sourcedata.py
@@ -1378,43 +1378,52 @@ def setup_array(model, package, var, data=None,
     # (for lakes)
     simulate_high_k_lakes = model.cfg['high_k_lakes']['simulate_high_k_lakes']
     if var == 'botm':
-        bathy = model.lake_bathymetry
+        # only execute this code if building the model (not loading)
+        if not model._load:
+            bathy = model.lake_bathymetry

-        # save a copy of original top elevations
-        # (prior to adjustment for lake bathymetry)
-        # name of the copy:
-        original_top_file = Path(model.external_path,
-                                 f"{model.name}_{model.cfg[package]['top_filename_fmt']}.original")
+            # save a copy of original top elevations
+            # (prior to adjustment for lake bathymetry)
+            # name of the copy:
+            original_top_file = Path(model.external_path,
+                                     f"{model.name}_{model.cfg[package]['top_filename_fmt']}.original")

-        try:
-            top = model.load_array(original_top_file)
-            original_top_load_fail = False
-        except:
-            original_top_load_fail = True
+            try:
+                top = model.load_array(original_top_file)
+                original_top_load_fail = False
+            except:
+                original_top_load_fail = True

-        # if the copy doesn't exist
-        # (or if the existing file is invalid), make it
-        if original_top_load_fail:
-            # if remake_top is False, however,
-            # there may be no preexisting top file to copy
-            # first check for a preexisting top file
-            # get the path and add to intermediate files dict if it's not in there
-            if 'top' not in model.cfg['intermediate_data']:
-                model.setup_external_filepaths('dis', 'top',
-                                               model.cfg['dis']['top_filename_fmt'])
-            existing_model_top_file = Path(model.cfg['intermediate_data']['top'][0])
-            if not existing_model_top_file.exists():
-                raise ValueError((f"Model top text array file {existing_model_top_file} doesn't exist.\n"
-                                  f"If remake_top is False in the dis configuration block, "
-                                  f"{existing_model_top_file} needs to have been made previously."))
-            # copy the preexisting top file
-            shutil.copy(model.cfg['intermediate_data']['top'][0],
-                        original_top_file)
-        top = model.load_array(original_top_file)
-        lake_botm_elevations = top[bathy != 0] - bathy[bathy != 0]
-        if model.version == 'mf6':
-            # reset the model top to the lake bottom
-            top[bathy != 0] -= bathy[bathy != 0]
+            # if the copy doesn't exist
+            # (or if the existing file is invalid), make it
+            if original_top_load_fail:
+                # if remake_top is False, however,
+                # there may be no preexisting top file to copy
+                # first check for a preexisting top file
+                # get the path and add to intermediate files dict if it's not in there
+                if 'top' not in model.cfg['intermediate_data']:
+                    model.setup_external_filepaths('dis', 'top',
+                                                   model.cfg['dis']['top_filename_fmt'])
+                existing_model_top_file = Path(model.cfg['intermediate_data']['top'][0])
+                if not existing_model_top_file.exists():
+                    raise ValueError((f"Model top text array file {existing_model_top_file} doesn't exist.\n"
+                                      f"If remake_top is False in the dis configuration block, "
+                                      f"{existing_model_top_file} needs to have been made previously."))
+                # copy the preexisting top file
+                shutil.copy(model.cfg['intermediate_data']['top'][0],
+                            original_top_file)
+            top = model.load_array(original_top_file)
+            lake_botm_elevations = top[bathy != 0] - bathy[bathy != 0]
+            if model.version == 'mf6':
+                # reset the model top to the lake bottom
+                top[bathy != 0] -= bathy[bathy != 0]
+        # if loading the model; use the model top that was just loaded in
+        else:
+            top_filename = model.cfg['dis']['griddata'].get('top')
+            if top_filename is None:
+                model.setup_external_filepaths('dis', 'top',
+                                               model.cfg['dis']['top_filename_fmt'])
+            top = model.load_array(model.cfg['dis']['griddata']['top'][0]['filename'])

     # fill missing layers if any
     if len(data) < model.nlay:
@@ -1467,7 +1476,6 @@ def setup_array(model, package, var, data=None,
         if not isvalid:
             raise Exception('Model layers less than {} {} thickness'.format(min_thickness,
                                                                             model.length_units))
-
     # fill nan values adjacent to active cells to avoid cell thickness errors
     top, botm = fill_cells_vertically(top, botm)
     # the top may have been modified by fill_cells_vertically
@@ -1512,21 +1520,19 @@ def setup_array(model, package, var, data=None,
     elif var == 'ss' and simulate_high_k_lakes:
         for i, arr in data.items():
             data[i][model.isbc[i] == 2] = model.cfg['high_k_lakes']['ss']
-
     # intermediate data
     # set paths to intermediate files and external files
     filepaths = model.setup_external_filepaths(package, var,
                                                model.cfg[package]['{}_filename_fmt'.format(var)],
                                                file_numbers=list(data.keys()))
-
     # write out array data to intermediate files
     # assign lake recharge values (water balance surplus) for any high-K lakes
     if write_nodata is None:
         write_nodata = model._nodata_value
     for i, arr in data.items():
         save_array(filepaths[i], arr,
-                    nodata=write_nodata,
-                    fmt=write_fmt)
+                   nodata=write_nodata,
+                   fmt=write_fmt)
     # still write intermediate files for MODFLOW-6
     # even though input and output filepaths are same
     if model.version == 'mf6':
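
Taken together, the sourcedata.py hunks boil down to a build-vs-load branch around the "original top" bookkeeping. The following is a minimal, hypothetical sketch of that pattern, not modflow-setup's actual API surface: it assumes a model object exposing the `_load` flag, `cfg`, `external_path`, `name` and `load_array()` seen in the diff, and the helper name `get_top_for_botm_setup` is invented for illustration only.

from pathlib import Path
import shutil

def get_top_for_botm_setup(model, package='dis'):
    """Hypothetical helper illustrating the branch added in this PR.

    When building a model, 'botm' setup works from a saved copy of the
    original (pre-lake-bathymetry) top, creating that copy from the
    intermediate top file if it doesn't exist yet. When loading an
    existing model, that copy may never have been written, so the top
    is read from the DIS griddata that was just loaded instead.
    """
    if not model._load:
        # building: use (or create) the '.original' copy of the model top
        original_top_file = Path(
            model.external_path,
            f"{model.name}_{model.cfg[package]['top_filename_fmt']}.original")
        if not original_top_file.exists():
            shutil.copy(model.cfg['intermediate_data']['top'][0],
                        original_top_file)
        return model.load_array(original_top_file)
    # loading: use the top array the existing model already points to
    return model.load_array(model.cfg['dis']['griddata']['top'][0]['filename'])
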
16 changes: 11 additions & 5 deletions mfsetup/tests/test_mf6_shellmound.py
@@ -552,20 +552,26 @@ def test_rch_setup(shellmound_model_with_dis):
     unit_conversion = convert_length_units('inches', 'meters')

     def get_period_values(start, end):
-        period_data = ds['net_infiltration'].loc[start:end].mean(axis=0)
-        dsi = period_data.interp(x=x, y=y, method='linear',
+        #period_data = ds['net_infiltration'].loc[start:end].mean(axis=0)
+        #dsi = period_data.interp(x=x, y=y, method='linear',
+        #               kwargs={'fill_value': np.nan,
+        #                       'bounds_error': True})
+        dsi = ds.interp(x=x, y=y, method='linear',
                         kwargs={'fill_value': np.nan,
                                 'bounds_error': True})
-        data = dsi.values * unit_conversion
+        period_data = dsi['net_infiltration'].loc[start:end].mean(axis=0)
+        data = period_data.values * unit_conversion
+        #data = dsi.values * unit_conversion
         return np.reshape(data, (m.nrow, m.ncol))

     # test steady-state avg. across all data
     values = get_period_values('2012-01-01', '2017-12-31')

     #assert np.allclose(values, m.rch.recharge.array[0, 0])
     # test period 1 avg. for those times
-    values1 = get_period_values(m.perioddata['start_datetime'].values[1],
-                                m.perioddata['end_datetime'].values[1])
+    start = m.cfg['rch']['source_data']['recharge']['period_stats'][1][0]
+    end = m.cfg['rch']['source_data']['recharge']['period_stats'][1][1]
+    values1 = get_period_values(start, end)
     assert testing.rpd(values1.mean(), m.rch.recharge.array[1, 0].mean()) < 0.01

     # check that nodata are written as 0.
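
For reference, a self-contained sketch of the xarray pattern the revised test uses: interpolate the whole dataset onto the model cell centers once, then slice the interpolated net_infiltration by time and average it for each stress period. Everything below is synthetic; only the variable name `net_infiltration` and the interp/loc/mean call pattern come from the test above, and the grid dimensions and coordinates are made up.

import numpy as np
import pandas as pd
import xarray as xr

# synthetic daily net_infiltration on a (time, y, x) source grid
times = pd.date_range('2012-01-01', '2017-12-31', freq='D')
y_src = np.linspace(0., 100., 20)
x_src = np.linspace(0., 100., 30)
values = np.random.rand(len(times), len(y_src), len(x_src))
ds = xr.Dataset({'net_infiltration': (('time', 'y', 'x'), values)},
                coords={'time': times, 'y': y_src, 'x': x_src})

# hypothetical model-grid cell centers, flattened for pointwise interpolation
nrow, ncol = 5, 8
xc, yc = np.meshgrid(np.linspace(10., 90., ncol), np.linspace(90., 10., nrow))
x = xr.DataArray(xc.ravel(), dims='points')
y = xr.DataArray(yc.ravel(), dims='points')

def get_period_values(start, end):
    # interpolate the whole dataset onto the cell centers first
    # (requires scipy for multidimensional linear interpolation),
    # then average the variable over the requested time window
    dsi = ds.interp(x=x, y=y, method='linear',
                    kwargs={'fill_value': np.nan, 'bounds_error': True})
    period_data = dsi['net_infiltration'].loc[start:end].mean(dim='time')
    return np.reshape(period_data.values, (nrow, ncol))

period1_mean = get_period_values('2012-01-01', '2012-12-31')
print(period1_mean.shape)  # (5, 8)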