[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jul 4, 2023
Parent: 1f8ba7e · Commit: 07505d7
Showing 29 changed files with 3 additions and 47 deletions.
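
Every change below is a mechanical style fix: either a redundant pair of parentheses around a for-loop target is dropped, or a blank line sitting directly after a block-opening colon is deleted. Which hooks produced these edits is determined by the repository's .pre-commit-config.yaml, which is not part of this diff; as a hedged sketch, the same fixes can typically be reproduced locally by running pre-commit over the whole tree (assuming pre-commit is installed and the config file is present):

# Hypothetical local reproduction of the bot's auto-fixes.
# check=False: hooks deliberately exit non-zero when they modify files.
import subprocess

subprocess.run(["pre-commit", "run", "--all-files"], check=False)
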
2 changes: 1 addition & 1 deletion cmip6_downscaling/analysis/analysis.py
@@ -82,7 +82,7 @@ def load_big_cities(
     if plot:
         ax = plt.axes(projection=ccrs.PlateCarree())
         ax.stock_img()
-        for (lat, lon) in big_cities[['lat', 'lng']].values:
+        for lat, lon in big_cities[['lat', 'lng']].values:
             plt.plot(
                 lon,
                 lat,

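The edit above is purely syntactic: parentheses around a for-loop's tuple target are redundant, and unpacking behaves identically without them. A minimal check with made-up coordinates (not values from the repo):

pairs = [(35.7, 139.7), (40.7, -74.0)]  # hypothetical (lat, lon) pairs
with_parens = [(lat, lon) for (lat, lon) in pairs]
without_parens = [(lat, lon) for lat, lon in pairs]
assert with_parens == without_parens  # identical semantics, fewer characters
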
2 changes: 0 additions & 2 deletions cmip6_downscaling/data/cmip.py
@@ -57,7 +57,6 @@ def postprocess(ds: xr.Dataset, to_standard_calendar: bool = True) -> xr.Dataset
     ds = ds.reindex({"lat": ds.lat[::-1]})

     if to_standard_calendar:
-
         # checks calendar
         ds = convert_to_standard_calendar(ds)

@@ -110,7 +109,6 @@ def load_cmip(
     Dataset or zarr group with CMIP data
     """
     with dask.config.set(**{'array.slicing.split_large_chunks': False}):
-
         col = intake.open_esm_datastore(config.get("data_catalog.cmip.json"))

         col_subset = col.search(

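This file shows the other fix pattern, which accounts for nearly all of the 47 deletions in this commit: a blank line immediately after a block-opening colon is removed. The blank line is cosmetic only; a small self-contained sketch confirming the two spellings behave the same:

def with_blank(flag):
    if flag:

        return 1  # the blank line above is what the hook deletes
    return 0

def without_blank(flag):
    if flag:
        return 1
    return 0

assert with_blank(True) == without_blank(True) == 1  # no behavioral change
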
1 change: 0 additions & 1 deletion cmip6_downscaling/disagg/terraclimate.py
@@ -437,7 +437,6 @@ def model(
         soil_prev = awc

     for i, row in df.iterrows():
-
         radiation = row['srad'] * MGM2D_PER_WM2

         # run snow routine

1 change: 0 additions & 1 deletion cmip6_downscaling/disagg/wrapper.py
@@ -91,7 +91,6 @@ def run_terraclimate_model(ds_in: xr.Dataset) -> xr.Dataset:
     )

     with dask.config.set(scheduler='single-threaded'):
-
         for index, mask_val in np.ndenumerate(ds_in['mask'].values):
             if not mask_val:
                 # skip values outside the mask

1 change: 0 additions & 1 deletion cmip6_downscaling/methods/common/containers.py
@@ -43,7 +43,6 @@ def __str__(self) -> str:

 @dataclass
 class CMIP6Experiment:
-
     model: str
     scenario: str
     member: str

1 change: 0 additions & 1 deletion cmip6_downscaling/methods/common/utils.py
@@ -288,7 +288,6 @@ def resample_wrapper(ds, freq='1MS'):


 def set_zarr_encoding(ds: xr.Dataset):
-
     for da in ds.data_vars.values():
         da.encoding = {'compressor': zarr.Blosc(clevel=1)}

1 change: 0 additions & 1 deletion cmip6_downscaling/methods/gard/tasks.py
@@ -70,7 +70,6 @@ def coarsen_and_interpolate(fine_path: UPath, coarse_path: UPath) -> UPath:


 def _fit_and_predict_wrapper(xtrain, ytrain, xpred, scrf, run_parameters, dim='time'):
-
     xpred = xpred.rename({'t2': 'time'})
     scrf = scrf.rename({'t2': 'time'})
     kws = default_none_kwargs(run_parameters.bias_correction_kwargs, copy=True)

1 change: 0 additions & 1 deletion cmip6_downscaling/methods/maca/core.py
@@ -321,7 +321,6 @@ def _make_template(da):
     # train a linear regression model for each day in coarsen GCM dataset, where the features are each coarsened observation
     # analogs, and examples are each pixels within the coarsened domain
     for i in range(len(y)):
-
         # get data from the GCM day being downscaled
         yi = y.isel(ndays_in_gcm=i)
         # get data from the coarsened obs analogs

1 change: 0 additions & 1 deletion cmip6_downscaling/runtimes.py
@@ -87,7 +87,6 @@ def run_config(self) -> RunConfig:

     @cached_property
     def executor(self) -> Executor:
-
         executor = DaskExecutor(
             cluster_kwargs={
                 'resources': {'taskslots': 1},

1 change: 0 additions & 1 deletion cmip6_downscaling/utils.py
@@ -13,7 +13,6 @@ def str_to_hash(s: str) -> str:


 def write(ds: xr.Dataset | datatree.DataTree, target, use_cache: bool = True) -> str:
-
     from .methods.common.utils import zmetadata_exists

     if use_cache and zmetadata_exists(target):

1 change: 0 additions & 1 deletion flows/catalogs/era5.py
@@ -61,7 +61,6 @@ def parse_era5(path):

 @task(log_stdout=True)
 def build_catalog(*, name: str, bucket: str) -> None:
-
     import ecgtools

     print(ecgtools.__version__)

5 changes: 0 additions & 5 deletions flows/catalogs/final_catalog.py
@@ -22,7 +22,6 @@


 def parse_store(store: str):
-
     from ecgtools.builder import INVALID_ASSET, TRACEBACK

     try:
@@ -54,13 +53,11 @@ def parse_store(store: str):
         }

     except Exception:
-
         return {INVALID_ASSET: path, TRACEBACK: traceback.format_exc()}


 @task(log_stdout=True)
 def generate_intake_esm_catalog(*, intake_esm_catalog_bucket: str):
-
     import ecgtools

     builder = ecgtools.Builder(
@@ -102,7 +99,6 @@ def generate_intake_esm_catalog(*, intake_esm_catalog_bucket: str):
 def generate_minified_web_catalog(
     *, parent_catalog: str, web_catalog: str, cdn: str = None
 ) -> None:
-
     with fsspec.open(parent_catalog) as f:
         data = json.load(f)

@@ -146,7 +142,6 @@ def generate_minified_web_catalog(
     storage=runtime.storage,
     run_config=runtime.run_config,
 ) as flow:
-
     parent_catalog = Parameter(
         'parent-catalog',
         default="az://scratch/results/pyramids/combined-cmip6-era5-pyramids-catalog-web.json",

7 changes: 0 additions & 7 deletions flows/catalogs/web_catalog.py
@@ -125,13 +125,11 @@ def get_license(source_id: str, derived_product: bool = False) -> dict:


 def parse_cmip6(store: str, root_path: str) -> dict[str, str]:
-
     from ecgtools.builder import INVALID_ASSET, TRACEBACK

     from cmip6_downscaling.methods.common.utils import zmetadata_exists

     try:
-
         path = UPath(store)
         if not zmetadata_exists(path):
             raise ValueError(f'{path} not a valid zarr store')
@@ -260,7 +258,6 @@ def from_az_to_https(uri: str, root: str) -> str:
 def parse_cmip6_downscaled_pyramid(
     data, cdn: str, root_path: str, derived_product: bool = True
 ) -> list[dict]:
-
     """
     Parse metadata for given CMIP6 downscaled pyramid.
@@ -435,7 +432,6 @@ def parse(path: str):
 def filter_version_results(
     *, minimum_version: str, maximum_version: str, exclude_local_version: bool, results: list[str]
 ) -> list[str]:
-
     """
     Filter the results by version.
@@ -496,7 +492,6 @@ def get_cmip6_downscaled_pyramids(
     maximum_version: str = None,
     exclude_local_version: bool = True,
 ):
-
     """
     Get CMIP6 downscaled pyramids.
@@ -554,7 +549,6 @@ def create_catalog(
     cmip6_downscaled_pyramids: list[dict] = None,
     era5_pyramids: list[dict] = None,
 ):
-
     """
     Create catalog.
@@ -603,7 +597,6 @@
 with Flow(
     'web-catalog', executor=runtime.executor, run_config=runtime.run_config, storage=runtime.storage
 ) as flow:
-
     paths = Parameter('paths', default=['az://flow-outputs/results/cmip6-pyramids-raw'])
     web_catalog_path = Parameter(
         'web-catalog-path',

1 change: 0 additions & 1 deletion flows/cloud_flow_test.py
@@ -36,7 +36,6 @@ def make_grid(shape):
     run_config=runtime.run_config,
     executor=runtime.executor,
 ) as flow:
-
     nums = range(4)
     my_task.map(nums)

1 change: 0 additions & 1 deletion flows/cmip6_raw_pyramids.py
@@ -41,7 +41,6 @@ def get_assets(
     grid_label: list[str] | str = 'gn',
     member_id: list[str] | str = 'r1i1p1f1',
 ) -> list[tuple(str, str)]:
-
     import intake

     cat = intake.open_esm_datastore(cat_url).search(

1 change: 0 additions & 1 deletion flows/era5-resample-rechunk.py
@@ -26,7 +26,6 @@ def get_datasets_keys(*, catalog_path: str, start: int, stop: int):

 @task(log_stdout=True)
 def resample_to_daily(*, catalog_path: str, key: str):
-
     import dask
     import intake
     import xarray as xr

2 changes: 0 additions & 2 deletions flows/gcm_obs_weights.py
@@ -58,7 +58,6 @@ def generate_weights(stores: list[dict[str, str]], method: str = 'bilinear') ->
     print(ds_out)

     for store in stores:
-
         target_prefix = (
             static_dir / store['source_id'] / store['table_id'] / store['grid_label'] / method
         )
@@ -128,7 +127,6 @@ def catalog(results):
     run_config=runtime.run_config,
     executor=runtime.executor,
 ) as flow:
-
     cat_url = Parameter(
         'cat_url', default='https://cmip6downscaling.blob.core.windows.net/cmip6/pangeo-cmip6.json'
     )

1 change: 0 additions & 1 deletion flows/methods/bcsd/flow.py
@@ -45,7 +45,6 @@
 with Flow(
     name="bcsd", storage=runtime.storage, run_config=runtime.run_config, executor=runtime.executor
 ) as flow:
-
     run_parameters = make_run_parameters(
         method=Parameter("method"),
         obs=Parameter("obs"),

2 changes: 0 additions & 2 deletions flows/methods/deepsd/flow.py
@@ -26,7 +26,6 @@
 with Flow(
     name="deepsd", storage=runtime.storage, run_config=runtime.run_config, executor=runtime.executor
 ) as flow:
-
     run_parameters = make_run_parameters(
         method=Parameter("method"),
         obs=Parameter("obs"),
@@ -136,7 +135,6 @@
     )

     if config.get('run_options.generate_pyramids'):
-
         # since pyramids require full space we now rechunk everything into full
         # space before passing into pyramid step. we probably want to add a cleanup
         # to this step in particular since otherwise we will have an exact

2 changes: 0 additions & 2 deletions flows/methods/gard/flow.py
@@ -41,7 +41,6 @@
 with Flow(
     name="gard", storage=runtime.storage, run_config=runtime.run_config, executor=runtime.executor
 ) as flow:
-
     run_parameters = make_run_parameters(
         method=Parameter("method"),
         obs=Parameter("obs"),
@@ -126,7 +125,6 @@
     # analysis_location = run_analyses(model_output_path, run_parameters)

     if config.get('run_options.generate_pyramids'):
-
         # since pyramids require full space we now rechunk everything into full
         # space before passing into pyramid step. we probably want to add a cleanup
         # to this step in particular since otherwise we will have an exact

2 changes: 0 additions & 2 deletions flows/methods/maca/flow.py
@@ -147,7 +147,6 @@
     )

     if config.get('run_options.combine_regions'):
-
         p['combined_analogs_full_time_path'] = combine_regions(
             regions=p['region_numbers'],
             region_paths=p['constructed_analogs_region_paths'],
@@ -184,7 +183,6 @@
     # analysis_location = run_analyses(p['final_bias_corrected_full_time_path'], run_parameters)

     if config.get('run_options.generate_pyramids'):
-
         # make temporal summaries
         p['monthly_summary_full_space_path'] = rechunk(
             p['monthly_summary_path'], pattern='full_space'
         )

4 changes: 2 additions & 2 deletions notebooks/check_model_completeness.ipynb
@@ -70,7 +70,7 @@
 "source": [
 "models = []\n",
 "for scenario in [\"historical\", \"ssp245\", \"ssp370\", \"ssp585\"]:\n",
-"    for (gcm, ensemble_member) in gcms:\n",
+"    for gcm, ensemble_member in gcms:\n",
 "        models.append(\"{}-{}\".format(gcm, scenario))\n",
 "path = get_store(\n",
 "    \"carbonplan-downscaling\",\n",
@@ -89,7 +89,7 @@
 "outputs": [],
 "source": [
 "for scenario in [\"historical\", \"ssp245\", \"ssp370\", \"ssp585\"]:\n",
-"    for (gcm, ensemble_member) in gcms:\n",
+"    for gcm, ensemble_member in gcms:\n",
 "        path = get_store(\n",
 "            \"carbonplan-downscaling\",\n",
 "            zarr_template.format(gcm, scenario, ensemble_member),\n",

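In the notebook diffs the same parenthesis fix appears inside quoted "...\n" fragments because Jupyter stores each cell's source as a JSON list of strings. A short sketch of that structure (path taken from this diff; assumes the file is available locally):

import json

with open("notebooks/check_model_completeness.ipynb") as f:
    nb = json.load(f)

# each cell's code is a list of newline-terminated strings
print(nb["cells"][0]["source"][:3])
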
1 change: 0 additions & 1 deletion notebooks/deepsd_data_prep.ipynb
@@ -849,7 +849,6 @@
 "# for plotting with lat/lon potentially use the following\n",
 "# batch.isel(input_batch=1).tf..plot.pcolormesh(strings)\n",
 "for _ in range(10):\n",
-"\n",
 "    i = random.randint(0, len(batch.input_batch))\n",
 "    patch = batch.isel(input_batch=i)\n",
 "    print(patch.lat.min().values, patch.lat.max().values)\n",

1 change: 0 additions & 1 deletion notebooks/deepsd_model_train.ipynb
@@ -211,7 +211,6 @@
 "\n",
 "\n",
 "def read_and_decode(filename_queue, input_size, input_depth, output_depth):\n",
-"\n",
 "    reader = tf.compat.v1.TFRecordReader()\n",
 "    _, serialized_example = reader.read(filename_queue)\n",
 "    features = tf.compat.v1.parse_single_example(serialized_example, features=feature)\n",

2 changes: 0 additions & 2 deletions notebooks/prefect_cloud_runner.ipynb
@@ -73,7 +73,6 @@
 "\n",
 "\n",
 "def create_run_params_from_json(parameter_fpath: str) -> RunParameters:\n",
-"\n",
 "    df = pd.read_json(parameter_fpath)\n",
 "    run_parameters = RunParameters(\n",
 "        method=df.method.iloc[0],\n",
@@ -96,7 +95,6 @@
 "\n",
 "\n",
 "def run_flow(flow_id: str, param_file_path: str) -> list[str]:\n",
-"\n",
 "    json_path = pathlib.Path(param_file_path).read_text()\n",
 "    flow_hash = str_to_hash(json_path)\n",
 "    param_dict = json.loads(json_path)\n",

2 changes: 0 additions & 2 deletions notebooks/terraclimate_model.ipynb
@@ -117,7 +117,6 @@
 "# run our version of the terraclimate hydrology model for all points\n",
 "v2 = {}\n",
 "for k, df_point in data.items():\n",
-"\n",
 "    df = df_point.copy(deep=True)\n",
 "\n",
 "    awc = df[\"awc\"][0]\n",
@@ -147,7 +146,6 @@
 "outputs": [],
 "source": [
 "def plot_points(v1, v2, var=\"pdsi\", tslice=slice(200, 300)):\n",
-"\n",
 "    fig, axes = plt.subplots(nrows=2, ncols=len(v2), figsize=(24, 4), squeeze=False)\n",
 "\n",
 "    for i, k in enumerate(v2):\n",

1 change: 0 additions & 1 deletion tests/methods/common/test_tasks.py
@@ -135,7 +135,6 @@ def test_get_experiment(run_parameters):


 def test_regrid(tmp_path):
-
     pytest.importorskip('xesmf')

     ds = xr.tutorial.open_dataset('air_temperature').chunk({'time': 10, 'lat': -1, 'lon': -1})

1 change: 0 additions & 1 deletion tests/test_runtimes.py
@@ -11,7 +11,6 @@

 @pytest.mark.parametrize('runtime', [LocalRuntime, CIRuntime, PangeoRuntime])
 def test_runtimes(runtime):
-
     _runtime = runtime()
     assert isinstance(_runtime, BaseRuntime)

1 change: 0 additions & 1 deletion tests/test_utils.py
@@ -46,7 +46,6 @@ def test_lon_to_180(shift):


 def test_to_standard_calendar(da_noleap):
-
     da_std = to_standard_calendar(da_noleap)
     assert da_noleap.sizes['time'] == 365
     assert da_std.sizes['time'] == 366

